forked from OpenInterpreter/open-interpreter
Commit 7fa1fd9
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
1 parent 21abd56
Showing 8 changed files with 157 additions and 43 deletions.
@@ -0,0 +1,41 @@
import os
import time
import subprocess

# Define the file name to search for
file_name = "llama-2-13b-chat.ggmlv3.q4_0.bin"

# Start the timer and assume the model has not been found yet
start_time = time.time()
model_path = None

# Check for the file in each path, giving up after 5 seconds
for path in [os.path.expanduser("~"), os.getcwd()]:
    if model_path is not None or time.time() - start_time > 5:
        break
    print(f"Searching for Llama-2 in {path} ...")
    for root, _, files in os.walk(path):
        if time.time() - start_time > 5:
            print("Search timed out after 5 seconds.")
            break
        if file_name in files:
            model_path = os.path.join(root, file_name)
            print(f"Found Llama-2 at {model_path}")
            break

if model_path is None:
    # If the file was not found, download it
    download_path = os.path.join(os.path.expanduser("~"), "llama-2", file_name)
    print(f"Llama-2 not found. Downloading it to {download_path} ...")
    os.makedirs(os.path.dirname(download_path), exist_ok=True)
    url = "https://huggingface.co/TheBloke/Llama-2-13B-chat-GGML/resolve/main/llama-2-13b-chat.ggmlv3.q4_0.bin"
    subprocess.run(["curl", "-L", url, "-o", download_path], check=True)
    model_path = download_path

# Import the Llama-2 interface, installing it if it is missing
try:
    from llama_cpp import Llama
except ImportError:
    print("Downloading Llama-2 interface (llama-cpp-python)...")
    subprocess.run(["pip", "install", "llama-cpp-python"], check=True)
    from llama_cpp import Llama

# Initialize Llama-2
llama_2 = Llama(model_path=model_path)
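
For reference, a minimal sketch of how the llama_2 object created above could be used to generate a completion with llama-cpp-python; the prompt and sampling parameters are illustrative examples, not part of this commit:

# Illustrative only: run a short completion with the model loaded above.
# (Prompt, max_tokens, and stop sequences are example values, not from the commit.)
output = llama_2(
    "Q: What is the capital of France? A:",
    max_tokens=32,
    stop=["Q:", "\n"],
)
print(output["choices"][0]["text"])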
Binary file added: numpy-1.25.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (+17.4 MB)