4
4
import time
5
5
import subprocess
6
6
import warnings
7
+ import importlib .resources
7
8
8
9
from llama_index .llms .ollama import Ollama
9
10
from llama_index .core .llms import ChatMessage
@@ -14,9 +15,28 @@ def ollama_version():
14
15
if result .startswith ("ollama version " ):
15
16
return result .replace ("ollama version " , "" )
16
17
except Exception as e :
17
- warnings . warn ( str ( e ))
18
+ pass
18
19
return ""
19
20
21
def run_sh(script_string):
    """Execute *script_string* through an ``sh`` subprocess and return its text.

    Returns the captured stdout when the script exits with status 0, and the
    captured stderr when it exits non-zero.  If the subprocess cannot be
    spawned or communicated with at all, the stringified exception is
    returned instead — callers inspect the returned text rather than
    handling exceptions.
    """
    try:
        # capture_output=True wires up stdout/stderr pipes; text=True keeps
        # everything as str rather than bytes, matching the script input.
        completed = subprocess.run(
            ['sh'],
            input=script_string,
            capture_output=True,
            text=True,
        )
    except Exception as exc:
        return str(exc)

    if completed.returncode == 0:
        return completed.stdout
    return completed.stderr
39
+
20
40
class Assistant (object ):
21
41
def __init__ (self , ** kwargs ):
22
42
self .host = kwargs .get ("host" , "http://localhost" )
@@ -42,18 +62,20 @@ def load_model(self):
42
62
if return_code != 0 :
43
63
raise Exception ("Cannot install lshw." )
44
64
45
- return_code = os .system ("curl -fsSL https://ollama.com/install.sh | sh" )
46
- if return_code != 0 :
47
- raise Exception ("Cannot install ollama." )
48
-
49
- return_code = os .system ("sudo systemctl enable ollama" )
50
- if return_code != 0 :
51
- raise Exception ("Cannot enable ollama." )
65
+ resource_path = 'ollama_install.sh'
66
+ with importlib .resources .open_text ('text2text.utils' , resource_path ) as f :
67
+ install_script = f .read ()
68
+ result = run_sh (install_script )
69
+ if "Install complete." not in result and "will run in CPU-only mode." not in result :
70
+ raise Exception (result )
52
71
53
72
self .ollama_serve_proc = subprocess .Popen (["ollama" , "serve" ])
54
73
time .sleep (1 )
55
74
56
- result = subprocess .check_output (["ollama" , "-v" ], stderr = subprocess .STDOUT ).decode ("utf-8" )
75
+ result = subprocess .check_output (
76
+ ["ollama" , "-v" ],
77
+ stderr = subprocess .STDOUT
78
+ ).decode ("utf-8" )
57
79
if not result .startswith ("ollama version" ):
58
80
raise Exception (result )
59
81
0 commit comments