diff --git a/llama_hub/tools/library.json b/llama_hub/tools/library.json
index 2fcae72d95..525136dbf7 100644
--- a/llama_hub/tools/library.json
+++ b/llama_hub/tools/library.json
@@ -71,6 +71,11 @@
     "id": "tools/weather",
     "author": "logan-markewich"
   },
+  "PlaygroundsSubgraphConnectorToolSpec": {
+    "id": "tools/playgrounds_subgraph_connector",
+    "author": "tachi",
+    "keywords": ["subgraph", "blockchain", "playgroundsapi", "graphql", "decentralized", "thegraph"]
+  },
   "PythonFileToolSpec": {
     "id": "tools/python_file",
     "author": "ajhofmann"
diff --git a/llama_hub/tools/playgrounds_subgraph_connector/README.md b/llama_hub/tools/playgrounds_subgraph_connector/README.md
new file mode 100644
index 0000000000..2ec9190fb5
--- /dev/null
+++ b/llama_hub/tools/playgrounds_subgraph_connector/README.md
@@ -0,0 +1,60 @@
+# playgrounds_subgraph_connector
+
+The Playgrounds API is a service provided by [Playgrounds Analytics](https://playgrounds.network) to streamline interfacing with decentralized subgraphs (indexed blockchain datasets).
+
+The `PlaygroundsSubgraphConnectorToolSpec` is a tool that lets LLM agents interface with and query subgraphs on The Graph's decentralized network via the Playgrounds API.
+
+This tool is designed to be used alongside [LlamaIndex](https://github.com/jerryjliu/llama_index) or [LangChain](https://python.langchain.com/docs/modules/agents/tools/custom_tools).
+
+- To learn more about the Playgrounds API, visit our website: https://playgrounds.network/
+- Obtain your Playgrounds API key and get started for free here: https://app.playgrounds.network/signup
+- Find any subgraph (dataset) you need here: https://thegraph.com/explorer
+
+## Advantages of this tool:
+
+- **Easy access to decentralized subgraphs (datasets)**: No wallet or GRT management required.
+- **LLM x blockchain data**: Develop AI applications that leverage blockchain data seamlessly.
+
+## Basic Usage:
+
+To use the tool, initialize it with the appropriate `identifier` (Subgraph ID or Deployment ID) and `api_key`. Set `use_deployment_id=True` if the identifier is a Deployment ID.
+
+```python
+import openai
+from llama_index.agent import OpenAIAgent
+from llama_hub.tools.playgrounds_subgraph_connector.base import PlaygroundsSubgraphConnectorToolSpec
+
+def simple_test():
+    """
+    Query financialsDailySnapshots from the Uniswap V3 subgraph using an OpenAIAgent and the Playgrounds API.
+    """
+    # Set the OpenAI API key
+    openai.api_key = 'YOUR_OPENAI_API_KEY'
+
+    # Initialize the tool spec with the subgraph's identifier and the Playgrounds API key
+    connector_spec = PlaygroundsSubgraphConnectorToolSpec(
+        identifier="YOUR_SUBGRAPH_OR_DEPLOYMENT_IDENTIFIER",
+        api_key="YOUR_PLAYGROUNDS_API_KEY",
+        use_deployment_id=False  # Set to True if using a Deployment ID
+    )
+
+    # Set up the agent with the tool
+    agent = OpenAIAgent.from_tools(connector_spec.to_tool_list())
+
+    # Make a query using the agent
+    response = agent.chat(
+        'query the financialsDailySnapshots for id, timestamp, totalValueLockedUSD, and dailyVolumeUSD. only give me the first 2 rows'
+    )
+    print(response)
+
+if __name__ == "__main__":
+    simple_test()
+
+```
+
+For more in-depth examples, see the [examples notebook](https://github.com/Tachikoma000/playgrounds_subgraph_connector/blob/main/examples.ipynb).
+
+This tool is designed to be used as a way to load data into [LlamaIndex](https://github.com/jerryjliu/gpt_index/tree/main/gpt_index)
+and/or subsequently used as a Tool in a [LangChain](https://github.com/hwchase17/langchain) Agent.
+
+
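For anyone who wants to see what the README's agent example does under the hood, below is a minimal sketch that calls the tool spec's `graphql_request` method directly, with no agent involved. The identifier and API key are placeholders, and the entity and field names are borrowed from the Uniswap V3 example above, so they may not exist on other subgraphs.

```python
from llama_hub.tools.playgrounds_subgraph_connector.base import (
    PlaygroundsSubgraphConnectorToolSpec,
)

# Placeholder credentials: substitute a real Subgraph ID (or Deployment ID) and API key.
connector_spec = PlaygroundsSubgraphConnectorToolSpec(
    identifier="YOUR_SUBGRAPH_OR_DEPLOYMENT_IDENTIFIER",
    api_key="YOUR_PLAYGROUNDS_API_KEY",
    use_deployment_id=False,  # True if the identifier is a Deployment ID
)

# Raw GraphQL query; the entity and field names follow the Uniswap V3 example
# in the README and may differ on other subgraphs.
query = """
{
  financialsDailySnapshots(first: 2, orderBy: timestamp, orderDirection: desc) {
    id
    timestamp
    totalValueLockedUSD
    dailyVolumeUSD
  }
}
"""

# Returns the decoded JSON response on success, or an error string on failure.
result = connector_spec.graphql_request(query=query)
print(result)
```

Because `graphql_request` returns either a dict or an error string, callers can branch on `isinstance(result, dict)` before reading `result["data"]`.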
diff --git a/llama_hub/tools/playgrounds_subgraph_connector/__init__.py b/llama_hub/tools/playgrounds_subgraph_connector/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/llama_hub/tools/playgrounds_subgraph_connector/base.py b/llama_hub/tools/playgrounds_subgraph_connector/base.py
new file mode 100644
index 0000000000..24a12e333c
--- /dev/null
+++ b/llama_hub/tools/playgrounds_subgraph_connector/base.py
@@ -0,0 +1,72 @@
+"""PlaygroundsSubgraphConnectorToolSpec."""
+
+from typing import Optional, Union
+import requests
+from llama_hub.tools.graphql.base import GraphQLToolSpec
+
+class PlaygroundsSubgraphConnectorToolSpec(GraphQLToolSpec):
+    """
+    Connects to subgraphs on The Graph's decentralized network via the Playgrounds API.
+
+    Attributes:
+        spec_functions (list): List of functions that specify the tool's capabilities.
+        url (str): The endpoint URL for the GraphQL requests.
+        headers (dict): Headers used for the GraphQL requests.
+    """
+
+    spec_functions = ["graphql_request"]
+
+    def __init__(self, identifier: str, api_key: str, use_deployment_id: bool = False):
+        """
+        Initialize the connector.
+
+        Args:
+            identifier (str): Subgraph identifier or Deployment ID.
+            api_key (str): API key for the Playgrounds API.
+            use_deployment_id (bool): Whether the identifier is a Deployment ID. Defaults to False.
+        """
+        endpoint = "deployments" if use_deployment_id else "subgraphs"
+        self.url = f"https://api.playgrounds.network/v1/proxy/{endpoint}/id/{identifier}"
+        self.headers = {
+            "Content-Type": "application/json",
+            "Playgrounds-Api-Key": api_key
+        }
+
+    def graphql_request(self, query: str, variables: Optional[dict] = None, operation_name: Optional[str] = None) -> Union[dict, str]:
+        """
+        Make a GraphQL query.
+
+        Args:
+            query (str): The GraphQL query string to execute.
+            variables (dict, optional): Variables for the GraphQL query. Defaults to None.
+            operation_name (str, optional): Name of the operation, if multiple operations are present in the query. Defaults to None.
+
+        Returns:
+            dict: The response from the GraphQL server if successful.
+            str: Error message if the request fails.
+        """
+
+        payload = {'query': query.strip()}
+
+        if variables:
+            payload['variables'] = variables
+
+        if operation_name:
+            payload['operationName'] = operation_name
+
+        try:
+            response = requests.post(self.url, headers=self.headers, json=payload)
+
+            # Raise an exception for non-2xx status codes
+            response.raise_for_status()
+
+            # Return the decoded JSON response
+            return response.json()
+
+        except requests.RequestException as e:
+            # Handle connection and HTTP errors
+            return str(e)
+        except ValueError as e:
+            # Handle JSON decoding errors
+            return f"Error decoding JSON: {e}"
+
diff --git a/llama_hub/tools/playgrounds_subgraph_connector/requirements.txt b/llama_hub/tools/playgrounds_subgraph_connector/requirements.txt
new file mode 100644
index 0000000000..077c95d8a4
--- /dev/null
+++ b/llama_hub/tools/playgrounds_subgraph_connector/requirements.txt
@@ -0,0 +1 @@
+requests==2.31.0
\ No newline at end of file
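As a sanity check on the request format, here is a minimal sketch of the raw HTTP call that `graphql_request` builds: a POST to the Playgrounds proxy endpoint with the `Playgrounds-Api-Key` header and a JSON payload carrying `query`, plus `variables` and `operationName` when supplied. The identifier, API key, query, variables, and operation name below are illustrative placeholders, and the sketch assumes a Subgraph ID rather than a Deployment ID.

```python
import requests

# Placeholder values -- swap in a real Subgraph ID and Playgrounds API key.
identifier = "YOUR_SUBGRAPH_OR_DEPLOYMENT_IDENTIFIER"
api_key = "YOUR_PLAYGROUNDS_API_KEY"

# Mirrors the URL and headers assembled in PlaygroundsSubgraphConnectorToolSpec.__init__
# ("subgraphs" for a Subgraph ID, "deployments" for a Deployment ID).
url = f"https://api.playgrounds.network/v1/proxy/subgraphs/id/{identifier}"
headers = {
    "Content-Type": "application/json",
    "Playgrounds-Api-Key": api_key,
}

# Mirrors the payload built in graphql_request(): "query" is always present,
# while "variables" and "operationName" are added only when provided.
payload = {
    "query": (
        "query Snapshots($n: Int!) {"
        "  financialsDailySnapshots(first: $n) { id timestamp }"
        "}"
    ),
    "variables": {"n": 2},
    "operationName": "Snapshots",
}

response = requests.post(url, headers=headers, json=payload)
response.raise_for_status()  # the tool returns errors as strings; here we simply raise
print(response.json())
```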