I installed the package with `pip install llama2-wrapper` and ran `python -m llama2_wrapper.server --model_path /home/wcc/codellama/CodeLlama-7b`, but it fails with this error:
"/home/wcc/miniconda3/envs/codellama/lib/python3.10/site-packages/pydantic/_internal/fields.py:127: UserWarning: Field "model_path" has conflict with protected namespace "model".
You may be able to resolve this warning by setting model_config['protected_namespaces'] = ('settings_',).
warnings.warn(
Running on backend llama.cpp.
Traceback (most recent call last):
File "/home/wcc/miniconda3/envs/codellama/lib/python3.10/runpy.py", line 196, in _run_module_as_main
return _run_code(code, main_globals, None,
File "/home/wcc/miniconda3/envs/codellama/lib/python3.10/runpy.py", line 86, in _run_code
exec(code, run_globals)
File "/home/wcc/miniconda3/envs/codellama/lib/python3.10/site-packages/llama2_wrapper/server/main.py", line 40, in
app = create_app(settings=settings)
File "/home/wcc/miniconda3/envs/codellama/lib/python3.10/site-packages/llama2_wrapper/server/app.py", line 254, in create_app
llama2 = LLAMA2_WRAPPER(
File "/home/wcc/miniconda3/envs/codellama/lib/python3.10/site-packages/llama2_wrapper/model.py", line 99, in init
self.init_model()
File "/home/wcc/miniconda3/envs/codellama/lib/python3.10/site-packages/llama2_wrapper/model.py", line 103, in init_model
self.model = LLAMA2_WRAPPER.create_llama2_model(
File "/home/wcc/miniconda3/envs/codellama/lib/python3.10/site-packages/llama2_wrapper/model.py", line 125, in create_llama2_model
model = Llama(
File "/home/wcc/miniconda3/envs/codellama/lib/python3.10/site-packages/llama_cpp/llama.py", line 349, in init
self._model = _LlamaModel(
File "/home/wcc/miniconda3/envs/codellama/lib/python3.10/site-packages/llama_cpp/_internals.py", line 57, in init
raise ValueError(f"Failed to load model from file: {path_model}")
ValueError: Failed to load model from file: /home/wcc/codellama/CodeLlama-7b"
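For context, the `Running on backend llama.cpp.` line indicates the wrapper loads the model through llama-cpp-python's `Llama` class, which expects the path of a single converted GGUF model file; if `/home/wcc/codellama/CodeLlama-7b` is the raw CodeLlama checkpoint directory rather than a `.gguf` file, this `ValueError` is the likely result. Below is a minimal sketch of the same load step outside the server, assuming a converted file named `codellama-7b.Q4_K_M.gguf` (a hypothetical filename used only for illustration):

```python
from pathlib import Path

from llama_cpp import Llama  # the library the llama.cpp backend uses, per the traceback

# Hypothetical path: llama.cpp needs one converted GGUF file,
# not the original checkpoint directory.
model_path = Path("/home/wcc/codellama/codellama-7b.Q4_K_M.gguf")

# Quick sanity check: GGUF files begin with the 4-byte magic b"GGUF".
with model_path.open("rb") as f:
    print("looks like GGUF:", f.read(4) == b"GGUF")

# This is the call that fails in the traceback above; it raises
# ValueError("Failed to load model from file: ...") when the path
# does not point at a loadable GGUF model.
llm = Llama(model_path=str(model_path))
print(llm("def fibonacci(n):", max_tokens=32)["choices"][0]["text"])
```

If only the original checkpoint is available, it would first need to be converted to GGUF (llama.cpp provides conversion scripts for that); once `--model_path` points at the resulting `.gguf` file, the server should get past this load step.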