@@ -3,7 +3,8 @@
 import httpx

 from llama_cloud.client import AsyncLlamaCloud
-from llama_cloud_services import LlamaParse
+from llama_cloud_services import LlamaCloudIndex, LlamaParse
+from llama_cloud_services.parse import ResultType

 # deployed agents may infer their name from the deployment name
 # Note: Make sure that an agent deployment with this name actually exists
@@ -18,7 +19,8 @@
 INDEX_NAME = "document_qa_index"


-def get_custom_client() -> httpx.AsyncClient:
+@functools.cache
+def get_base_cloud_client() -> httpx.AsyncClient:
     return httpx.AsyncClient(
         timeout=60,
         headers={"Project-Id": LLAMA_CLOUD_PROJECT_ID}
@@ -32,7 +34,7 @@ def get_llama_cloud_client() -> AsyncLlamaCloud:
     return AsyncLlamaCloud(
         base_url=LLAMA_CLOUD_BASE_URL,
         token=LLAMA_CLOUD_API_KEY,
-        httpx_client=get_custom_client(),
+        httpx_client=get_base_cloud_client(),
     )


@@ -45,8 +47,20 @@ def get_llama_parse_client() -> LlamaParse:
         adaptive_long_table=True,
         outlined_table_extraction=True,
         output_tables_as_HTML=True,
-        result_type="markdown",
+        result_type=ResultType.MD,
         api_key=LLAMA_CLOUD_API_KEY,
         project_id=LLAMA_CLOUD_PROJECT_ID,
-        custom_client=get_custom_client(),
+        custom_client=get_base_cloud_client(),
+    )
+
+
+@functools.lru_cache(maxsize=None)
+def get_index(index_name: str) -> LlamaCloudIndex:
+    return LlamaCloudIndex.create_index(
+        name=index_name,
+        project_id=LLAMA_CLOUD_PROJECT_ID,
+        api_key=LLAMA_CLOUD_API_KEY,
+        base_url=LLAMA_CLOUD_BASE_URL,
+        show_progress=True,
+        custom_client=get_base_cloud_client(),
     )
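
For illustration, a minimal usage sketch of the cached helpers this diff introduces. It assumes `functools` is imported at the top of the changed module, that the module is importable as `clients` (a hypothetical name), and that the `LLAMA_CLOUD_*` constants resolve to valid credentials; the file path and query string are placeholders. The retrieval calls use the standard LlamaIndex retriever API exposed by `LlamaCloudIndex`.

import asyncio

# "clients" is a hypothetical module name for the file changed above
from clients import INDEX_NAME, get_index, get_llama_parse_client


async def main() -> None:
    # Parse a local file with the shared LlamaParse client (path is a placeholder)
    docs = await get_llama_parse_client().aload_data("data/example.pdf")
    print(f"parsed {len(docs)} document(s)")

    # The cached LlamaCloudIndex handle can be queried via the standard retriever API
    retriever = get_index(INDEX_NAME).as_retriever()
    nodes = await retriever.aretrieve("What does the report conclude?")
    for node in nodes:
        print(node.score, node.get_content()[:80])


if __name__ == "__main__":
    asyncio.run(main())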