-
Notifications
You must be signed in to change notification settings - Fork 781
Closed
Labels
bug — Something isn't working
Description
Hello there. For some reason, OpenAI and other model providers are lagging in my region, so I am trying to use a custom model provider. Here's my code:
from paperqa import Settings, ask
from paperqa.settings import AgentSettings,ParsingSettings
import os
OPENAI_API_KEY = '***'
model = 'openai/qwen-plus'
embedding_model = "openai/text-embedding-v4"
os.environ["OPENAI_API_KEY"] = OPENAI_API_KEY
llm_config={
"model_list": [{
"model_name": model,
"litellm_params": {
"model": model,
"api_base": "https://dashscope.aliyuncs.com/compatible-mode/v1",
"api_key": OPENAI_API_KEY
}
}]
}
embedding_config={
"model_list": [{
"model_name": embedding_model,
"litellm_params": {
"model": embedding_model,
"api_base": "https://dashscope.aliyuncs.com/compatible-mode/v1",
"api_key": OPENAI_API_KEY
}
}]
}
answer_response = ask(
"What is Paper-QA?",
settings=Settings(
llm=model,
llm_config=llm_config,
summary_llm=model,
summary_llm_config=llm_config,
agent=AgentSettings(agent_llm=model,
agent_llm_config=llm_config),
paper_directory="examples",
embedding=embedding_model,
embedding_config=embedding_config,
parsing=ParsingSettings(use_doc_details=False)
),
)
print("Finished!")
Here's how it happens:
[17:10:03] Error parsing PaperQA2.pdf, skipping index for this file.
╭───────────────────────────────── Traceback (most recent call last) ──────────────────────────────────╮
│ /share/CADD/biotools/anaconda3-2023.09/envs/paper-qa/lib/python3.11/site-packages/aiohttp/connector. │
│ py:1181 in _wrap_create_connection │
│ │
│ 1178 │ │ │ async with ceil_timeout( │
│ 1179 │ │ │ │ timeout.sock_connect, ceil_threshold=timeout.ceil_threshold │
│ 1180 │ │ │ ): │
│ ❱ 1181 │ │ │ │ sock = await aiohappyeyeballs.start_connection( │
│ 1182 │ │ │ │ │ addr_infos=addr_infos, │
│ 1183 │ │ │ │ │ local_addr_infos=self._local_addr_infos, │
│ 1184 │ │ │ │ │ happy_eyeballs_delay=self._happy_eyeballs_delay, │
│ │
│ /share/CADD/biotools/anaconda3-2023.09/envs/paper-qa/lib/python3.11/site-packages/aiohappyeyeballs/i │
│ mpl.py:149 in start_connection │
│ │
│ 146 │ │ │ │ # We have a mix of OSError and RuntimeError │
│ 147 │ │ │ │ # so we have to pick which one to raise. │
│ 148 │ │ │ │ # and we raise OSError for compatibility │
│ ❱ 149 │ │ │ │ raise OSError(msg) │
│ 150 │ │ finally: │
│ 151 │ │ │ all_exceptions = None # type: ignore[assignment] │
│ 152 │ │ │ exceptions = None # type: ignore[assignment] │
╰──────────────────────────────────────────────────────────────────────────────────────────────────────╯
OSError: Multiple exceptions: [Errno 110] Connect call failed ('128.242.240.91', 443), [Errno 101]
Network is unreachable
The above exception was the direct cause of the following exception:
╭───────────────────────────────── Traceback (most recent call last) ──────────────────────────────────╮
│ /share/CADD/biotools/anaconda3-2023.09/envs/paper-qa/lib/python3.11/site-packages/litellm/llms/custo │
│ m_httpx/aiohttp_transport.py:55 in map_aiohttp_exceptions │
│ │
│ 52 @contextlib.contextmanager │
│ 53 def map_aiohttp_exceptions() -> typing.Iterator[None]: │
│ 54 │ try: │
│ ❱ 55 │ │ yield │
│ 56 │ except Exception as exc: │
│ 57 │ │ mapped_exc = None │
│ 58 │
│ │
│ /share/CADD/biotools/anaconda3-2023.09/envs/paper-qa/lib/python3.11/site-packages/litellm/llms/custo │
│ m_httpx/aiohttp_transport.py:207 in handle_async_request │
│ │
│ 204 │ │ │ │ data = request.stream # type: ignore │
│ 205 │ │ │ │ request.headers.pop("transfer-encoding", None) # handled by aiohttp │
│ 206 │ │ │ │
│ ❱ 207 │ │ │ response = await client_session.request( │
│ 208 │ │ │ │ method=request.method, │
│ 209 │ │ │ │ url=YarlURL(str(request.url), encoded=True), │
│ 210 │ │ │ │ headers=request.headers, │
│ │
│ /share/CADD/biotools/anaconda3-2023.09/envs/paper-qa/lib/python3.11/site-packages/aiohttp/client.py: │
│ 1473 in __aenter__ │
│ │
│ 1470 │ │ return self.__await__() │
│ 1471 │ │
│ 1472 │ async def __aenter__(self) -> _RetType: │
│ ❱ 1473 │ │ self._resp: _RetType = await self._coro │
│ 1474 │ │ return await self._resp.__aenter__() │
│ 1475 │ │
│ 1476 │ async def __aexit__( │
│ │
│ /share/CADD/biotools/anaconda3-2023.09/envs/paper-qa/lib/python3.11/site-packages/aiohttp/client.py: │
│ 761 in _request │
│ │
│ 758 │ │ │ │ │ │ handler = _connect_and_send_request │
│ 759 │ │ │ │ │ │
│ 760 │ │ │ │ │ try: │
│ ❱ 761 │ │ │ │ │ │ resp = await handler(req) │
│ 762 │ │ │ │ │ # Client connector errors should not be retried │
│ 763 │ │ │ │ │ except ( │
│ 764 │ │ │ │ │ │ ConnectionTimeoutError, │
│ │
│ /share/CADD/biotools/anaconda3-2023.09/envs/paper-qa/lib/python3.11/site-packages/aiohttp/client.py: │
│ 716 in _connect_and_send_request │
│ │
│ 713 │ │ │ │ │ │ # connection timeout │
│ 714 │ │ │ │ │ │ assert self._connector is not None │
│ 715 │ │ │ │ │ │ try: │
│ ❱ 716 │ │ │ │ │ │ │ conn = await self._connector.connect( │
│ 717 │ │ │ │ │ │ │ │ req, traces=traces, timeout=real_timeout │
│ 718 │ │ │ │ │ │ │ ) │
│ 719 │ │ │ │ │ │ except asyncio.TimeoutError as exc: │
│ │
│ /share/CADD/biotools/anaconda3-2023.09/envs/paper-qa/lib/python3.11/site-packages/aiohttp/connector. │
│ py:598 in connect │
│ │
│ 595 │ │ │ │ if traces: │
│ 596 │ │ │ │ │ for trace in traces: │
│ 597 │ │ │ │ │ │ await trace.send_connection_create_start() │
│ ❱ 598 │ │ │ │ proto = await self._create_connection(req, traces, timeout) │
│ 599 │ │ │ │ if traces: │
│ 600 │ │ │ │ │ for trace in traces: │
│ 601 │ │ │ │ │ │ await trace.send_connection_create_end() │
│ │
│ /share/CADD/biotools/anaconda3-2023.09/envs/paper-qa/lib/python3.11/site-packages/aiohttp/connector. │
│ py:1122 in _create_connection │
│ │
│ 1119 │ │ if req.proxy: │
│ 1120 │ │ │ _, proto = await self._create_proxy_connection(req, traces, timeout) │
│ 1121 │ │ else: │
│ ❱ 1122 │ │ │ _, proto = await self._create_direct_connection(req, traces, timeout) │
│ 1123 │ │ │
│ 1124 │ │ return proto │
│ 1125 │
│ │
│ /share/CADD/biotools/anaconda3-2023.09/envs/paper-qa/lib/python3.11/site-packages/aiohttp/connector. │
│ py:1494 in _create_direct_connection │
│ │
│ 1491 │ │ │ return transp, proto │
│ 1492 │ │ else: │
│ 1493 │ │ │ assert last_exc is not None │
│ ❱ 1494 │ │ │ raise last_exc │
│ 1495 │ │
│ 1496 │ async def _create_proxy_connection( │
│ 1497 │ │ self, req: ClientRequest, traces: List["Trace"], timeout: "ClientTimeout" │
│ │
│ /share/CADD/biotools/anaconda3-2023.09/envs/paper-qa/lib/python3.11/site-packages/aiohttp/connector. │
│ py:1463 in _create_direct_connection │
│ │
│ 1460 │ │ │ ) │
│ 1461 │ │ │ │
│ 1462 │ │ │ try: │
│ ❱ 1463 │ │ │ │ transp, proto = await self._wrap_create_connection( │
│ 1464 │ │ │ │ │ self._factory, │
│ 1465 │ │ │ │ │ timeout=timeout, │
│ 1466 │ │ │ │ │ ssl=sslcontext, │
│ │
│ /share/CADD/biotools/anaconda3-2023.09/envs/paper-qa/lib/python3.11/site-packages/aiohttp/connector. │
│ py:1204 in _wrap_create_connection │
│ │
│ 1201 │ │ except OSError as exc: │
│ 1202 │ │ │ if exc.errno is None and isinstance(exc, asyncio.TimeoutError): │
│ 1203 │ │ │ │ raise │
│ ❱ 1204 │ │ │ raise client_error(req.connection_key, exc) from exc │
│ 1205 │ │
│ 1206 │ async def _wrap_existing_connection( │
│ 1207 │ │ self, │
╰──────────────────────────────────────────────────────────────────────────────────────────────────────╯
ClientConnectorError: Cannot connect to host api.openai.com:443 ssl:default [None]
The above exception was the direct cause of the following exception:
╭───────────────────────────────── Traceback (most recent call last) ──────────────────────────────────╮
│ /share/CADD/biotools/anaconda3-2023.09/envs/paper-qa/lib/python3.11/site-packages/openai/_base_clien │
│ t.py:1490 in request │
│ │
│ 1487 │ │ │ │
│ 1488 │ │ │ response = None │
│ 1489 │ │ │ try: │
│ ❱ 1490 │ │ │ │ response = await self._client.send( │
│ 1491 │ │ │ │ │ request, │
│ 1492 │ │ │ │ │ stream=stream or self._should_stream_response_body(request=request), │
│ 1493 │ │ │ │ │ **kwargs, │
│ │
│ /fastone/users/shiny/.local/lib/python3.11/site-packages/httpx/_client.py:1629 in send │
│ │
│ 1626 │ │ │
│ 1627 │ │ auth = self._build_request_auth(request, auth) │
│ 1628 │ │ │
│ ❱ 1629 │ │ response = await self._send_handling_auth( │
│ 1630 │ │ │ request, │
│ 1631 │ │ │ auth=auth, │
│ 1632 │ │ │ follow_redirects=follow_redirects, │
│ │
│ /fastone/users/shiny/.local/lib/python3.11/site-packages/httpx/_client.py:1657 in │
│ _send_handling_auth │
│ │
│ 1654 │ │ │ request = await auth_flow.__anext__() │
│ 1655 │ │ │ │
│ 1656 │ │ │ while True: │
│ ❱ 1657 │ │ │ │ response = await self._send_handling_redirects( │
│ 1658 │ │ │ │ │ request, │
│ 1659 │ │ │ │ │ follow_redirects=follow_redirects, │
│ 1660 │ │ │ │ │ history=history, │
│ │
│ /fastone/users/shiny/.local/lib/python3.11/site-packages/httpx/_client.py:1694 in │
│ _send_handling_redirects │
│ │
│ 1691 │ │ │ for hook in self._event_hooks["request"]: │
│ 1692 │ │ │ │ await hook(request) │
│ 1693 │ │ │ │
│ ❱ 1694 │ │ │ response = await self._send_single_request(request) │
│ 1695 │ │ │ try: │
│ 1696 │ │ │ │ for hook in self._event_hooks["response"]: │
│ 1697 │ │ │ │ │ await hook(response) │
│ │
│ /fastone/users/shiny/.local/lib/python3.11/site-packages/httpx/_client.py:1730 in │
│ _send_single_request │
│ │
│ 1727 │ │ │ ) │
│ 1728 │ │ │
│ 1729 │ │ with request_context(request=request): │
│ ❱ 1730 │ │ │ response = await transport.handle_async_request(request) │
│ 1731 │ │ │
│ 1732 │ │ assert isinstance(response.stream, AsyncByteStream) │
│ 1733 │ │ response.request = request │
│ │
│ /share/CADD/biotools/anaconda3-2023.09/envs/paper-qa/lib/python3.11/site-packages/litellm/llms/custo │
│ m_httpx/aiohttp_transport.py:200 in handle_async_request │
│ │
│ 197 │ │ # Use helper to ensure we have a valid session for the current event loop │
│ 198 │ │ client_session = self._get_valid_client_session() │
│ 199 │ │ │
│ ❱ 200 │ │ with map_aiohttp_exceptions(): │
│ 201 │ │ │ try: │
│ 202 │ │ │ │ data = request.content │
│ 203 │ │ │ except httpx.RequestNotRead: │
│ │
│ /share/CADD/biotools/anaconda3-2023.09/envs/paper-qa/lib/python3.11/contextlib.py:158 in __exit__ │
│ │
│ 155 │ │ │ │ # tell if we get the same exception back │
│ 156 │ │ │ │ value = typ() │
│ 157 │ │ │ try: │
│ ❱ 158 │ │ │ │ self.gen.throw(typ, value, traceback) │
│ 159 │ │ │ except StopIteration as exc: │
│ 160 │ │ │ │ # Suppress StopIteration *unless* it's the same exception that │
│ 161 │ │ │ │ # was passed to throw(). This prevents a StopIteration │
│ │
│ /share/CADD/biotools/anaconda3-2023.09/envs/paper-qa/lib/python3.11/site-packages/litellm/llms/custo │
│ m_httpx/aiohttp_transport.py:69 in map_aiohttp_exceptions │
│ │
│ 66 │ │ │ raise │
│ 67 │ │ │
│ 68 │ │ message = str(exc) │
│ ❱ 69 │ │ raise mapped_exc(message) from exc │
│ 70 │
│ 71 │
│ 72 class AiohttpResponseStream(httpx.AsyncByteStream): │
╰──────────────────────────────────────────────────────────────────────────────────────────────────────╯
ConnectError: Cannot connect to host api.openai.com:443 ssl:default [None]
The above exception was the direct cause of the following exception:
╭───────────────────────────────── Traceback (most recent call last) ──────────────────────────────────╮
│ /share/CADD/biotools/anaconda3-2023.09/envs/paper-qa/lib/python3.11/site-packages/litellm/llms/opena │
│ i/openai.py:1116 in aembedding │
│ │
│ 1113 │ │ │ │ max_retries=max_retries, │
│ 1114 │ │ │ │ client=client, │
│ 1115 │ │ │ ) │
│ ❱ 1116 │ │ │ headers, response = await self.make_openai_embedding_request( │
│ 1117 │ │ │ │ openai_aclient=openai_aclient, │
│ 1118 │ │ │ │ data=data, │
│ 1119 │ │ │ │ timeout=timeout, │
│ │
│ /share/CADD/biotools/anaconda3-2023.09/envs/paper-qa/lib/python3.11/site-packages/litellm/litellm_co │
│ re_utils/logging_utils.py:135 in async_wrapper │
│ │
│ 132 │ │ async def async_wrapper(*args, **kwargs): │
│ 133 │ │ │ start_time = datetime.now() │
│ 134 │ │ │ try: │
│ ❱ 135 │ │ │ │ result = await func(*args, **kwargs) │
│ 136 │ │ │ │ return result │
│ 137 │ │ │ finally: │
│ 138 │ │ │ │ end_time = datetime.now() │
│ │
│ /share/CADD/biotools/anaconda3-2023.09/envs/paper-qa/lib/python3.11/site-packages/litellm/llms/opena │
│ i/openai.py:1069 in make_openai_embedding_request │
│ │
│ 1066 │ │ │ response = raw_response.parse() │
│ 1067 │ │ │ return headers, response │
│ 1068 │ │ except Exception as e: │
│ ❱ 1069 │ │ │ raise e │
│ 1070 │ │
│ 1071 │ @track_llm_api_timing() │
│ 1072 │ def make_sync_openai_embedding_request( │
│ │
│ /share/CADD/biotools/anaconda3-2023.09/envs/paper-qa/lib/python3.11/site-packages/litellm/llms/opena │
│ i/openai.py:1062 in make_openai_embedding_request │
│ │
│ 1059 │ │ - call embeddings.create by default │
│ 1060 │ │ """ │
│ 1061 │ │ try: │
│ ❱ 1062 │ │ │ raw_response = await openai_aclient.embeddings.with_raw_response.create( │
│ 1063 │ │ │ │ **data, timeout=timeout │
│ 1064 │ │ │ ) # type: ignore │
│ 1065 │ │ │ headers = dict(raw_response.headers) │
│ │
│ /share/CADD/biotools/anaconda3-2023.09/envs/paper-qa/lib/python3.11/site-packages/openai/_legacy_res │
│ ponse.py:381 in wrapped │
│ │
│ 378 │ │ │
│ 379 │ │ kwargs["extra_headers"] = extra_headers │
│ 380 │ │ │
│ ❱ 381 │ │ return cast(LegacyAPIResponse[R], await func(*args, **kwargs)) │
│ 382 │ │
│ 383 │ return wrapped │
│ 384 │
│ │
│ /share/CADD/biotools/anaconda3-2023.09/envs/paper-qa/lib/python3.11/site-packages/openai/resources/e │
│ mbeddings.py:245 in create │
│ │
│ 242 │ │ │ │
│ 243 │ │ │ return obj │
│ 244 │ │ │
│ ❱ 245 │ │ return await self._post( │
│ 246 │ │ │ "/embeddings", │
│ 247 │ │ │ body=maybe_transform(params, embedding_create_params.EmbeddingCreateParams), │
│ 248 │ │ │ options=make_request_options( │
│ │
│ /share/CADD/biotools/anaconda3-2023.09/envs/paper-qa/lib/python3.11/site-packages/openai/_base_clien │
│ t.py:1748 in post │
│ │
│ 1745 │ │ opts = FinalRequestOptions.construct( │
│ 1746 │ │ │ method="post", url=path, json_data=body, files=await │
│ async_to_httpx_files(files), **options │
│ 1747 │ │ ) │
│ ❱ 1748 │ │ return await self.request(cast_to, opts, stream=stream, stream_cls=stream_cls) │
│ 1749 │ │
│ 1750 │ async def patch( │
│ 1751 │ │ self, │
│ │
│ /share/CADD/biotools/anaconda3-2023.09/envs/paper-qa/lib/python3.11/site-packages/openai/_base_clien │
│ t.py:1522 in request │
│ │
│ 1519 │ │ │ │ │ continue │
│ 1520 │ │ │ │ │
│ 1521 │ │ │ │ log.debug("Raising connection error") │
│ ❱ 1522 │ │ │ │ raise APIConnectionError(request=request) from err │
│ 1523 │ │ │ │
│ 1524 │ │ │ log.debug( │
│ 1525 │ │ │ │ 'HTTP Response: %s %s "%i %s" %s', │
╰──────────────────────────────────────────────────────────────────────────────────────────────────────╯
APIConnectionError: Connection error.
I'm sure the API is compatible with OpenAI, because this code works:
import os
from litellm import completion
os.environ["OPENAI_API_KEY"] = "***"
messages = [{ "content": "Hello from litellm!","role": "user"}]
response = completion(model="openai/qwen-plus", messages=messages, base_url="https://dashscope.aliyuncs.com/compatible-mode/v1")
print(response.choices[0].message.content)
> Hello! How can I assist you today? If you have any questions or need help with the `litellm` library, feel free to ask!

dosubot
Metadata
Metadata
Assignees
Labels
bug — Something isn't working