Skip to content

Commit

Permalink
Merge pull request #54 from GPTStonks/daedalus/update-llamacpp
Browse files Browse the repository at this point in the history
Bump llama-cpp-python to version 0.2.47
  • Loading branch information
Dedalo314 authored Feb 22, 2024
2 parents b833e76 + 4825bce commit 686440a
Show file tree
Hide file tree
Showing 5 changed files with 2 additions and 7 deletions.
1 change: 0 additions & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -231,7 +231,6 @@ Check `http://localhost:8000/docs` once the API is started to access the endpoints
| SEARCH_TOOL_DESCRIPTION | No | None (Default DDG Search description) | DDG's search tool description for the LLM agent. |
| CUSTOM_GPTSTONKS_PREFIX | No | None (Default LangChain agent prefix) | Prefix to use with LLM agent. |
| CUSTOM_GPTSTONKS_SUFFIX | No | None (Default LangChain agent suffix) | Suffix to use with LLM agent. |
| CUSTOM_GPTSTONKS_QA (DEPRECATED) | No | None (LlamaIndex's Default QA Template) | QA template to use with LLM agent QA tools. |
## Contributing 🤝
Expand Down
1 change: 0 additions & 1 deletion gptstonks_api/constants/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,6 @@
from .env import AUTOLLAMAINDEX_VSI_GDRIVE_URI as AUTOLLAMAINDEX_VSI_GDRIVE_URI
from .env import AUTOLLAMAINDEX_VSI_PATH as AUTOLLAMAINDEX_VSI_PATH
from .env import CUSTOM_GPTSTONKS_PREFIX as CUSTOM_GPTSTONKS_PREFIX
from .env import CUSTOM_GPTSTONKS_QA as CUSTOM_GPTSTONKS_QA
from .env import CUSTOM_GPTSTONKS_SUFFIX as CUSTOM_GPTSTONKS_SUFFIX
from .env import DEBUG_API as DEBUG_API
from .env import LLM_CHAT_MODEL_SYSTEM_MESSAGE as LLM_CHAT_MODEL_SYSTEM_MESSAGE
Expand Down
1 change: 0 additions & 1 deletion gptstonks_api/constants/env.py
Original file line number Diff line number Diff line change
Expand Up @@ -52,4 +52,3 @@
SEARCH_TOOL_DESCRIPTION: str | None = os.getenv("SEARCH_TOOL_DESCRIPTION")
CUSTOM_GPTSTONKS_PREFIX: str | None = os.getenv("CUSTOM_GPTSTONKS_PREFIX")
CUSTOM_GPTSTONKS_SUFFIX: str | None = os.getenv("CUSTOM_GPTSTONKS_SUFFIX")
CUSTOM_GPTSTONKS_QA: str = os.getenv("CUSTOM_GPTSTONKS_QA", DEFAULT_TEXT_QA_PROMPT_TMPL)
2 changes: 0 additions & 2 deletions gptstonks_api/utils/openbb_chat_qa.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,8 +10,6 @@
from openbb_chat.kernels.auto_llama_index import AutoLlamaIndex
from requests.exceptions import ReadTimeout

from ..constants import CUSTOM_GPTSTONKS_QA


async def get_openbb_chat_output(
query_str: str,
Expand Down
4 changes: 2 additions & 2 deletions pdm.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

0 comments on commit 686440a

Please sign in to comment.