@@ -5,7 +5,6 @@ def check_proxy(proxies):
     try:
         response = requests.get("https://ipapi.co/json/", proxies=proxies, timeout=4)
         data = response.json()
-        # print(f'查询代理的地理位置,返回的结果是{data}')
         if 'country_name' in data:
             country = data['country_name']
             result = f"代理配置 {proxies_https}, 代理所在地:{country}"
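
The hunk above probes a proxy by asking ipapi.co where the requesting IP is located; when the request is routed through the proxy, the country_name field in the JSON reply identifies the proxy's egress location. A minimal standalone sketch of the same pattern, assuming an illustrative function name (lookup_proxy_location) and fallback return values that are not part of check_proxy.py:

import requests

def lookup_proxy_location(proxies, timeout=4):
    # Ask ipapi.co which IP the request appears to come from and where it is;
    # through a proxy, this reports the proxy's egress country.
    try:
        response = requests.get("https://ipapi.co/json/", proxies=proxies, timeout=timeout)
        data = response.json()
        return data.get('country_name', 'unknown')
    except requests.RequestException:
        return 'unreachable'
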
@@ -47,8 +46,8 @@ def backup_and_download(current_version, remote_version):
     os.makedirs(new_version_dir)
     shutil.copytree('./', backup_dir, ignore=lambda x, y: ['history'])
     proxies = get_conf('proxies')
-    r = requests.get(
-        'https://github.com/binary-husky/chatgpt_academic/archive/refs/heads/master.zip', proxies=proxies, stream=True)
+    try:    r = requests.get('https://github.com/binary-husky/chatgpt_academic/archive/refs/heads/master.zip', proxies=proxies, stream=True)
+    except: r = requests.get('https://public.gpt-academic.top/publish/master.zip', proxies=proxies, stream=True)
     zip_file_path = backup_dir + '/master.zip'
     with open(zip_file_path, 'wb+') as f:
         f.write(r.content)
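
This change turns the single GitHub download into a primary-then-mirror pattern: fetch the archive from GitHub first and, if that request fails, retry against the public.gpt-academic.top mirror. A sketch of the same fallback written as a small helper, assuming a hypothetical name (download_master_zip) and an added raise_for_status check that the one-line try/except in the diff does not perform:

import requests

def download_master_zip(proxies, dest_path):
    urls = [
        'https://github.com/binary-husky/chatgpt_academic/archive/refs/heads/master.zip',
        'https://public.gpt-academic.top/publish/master.zip',  # mirror introduced by this commit
    ]
    last_error = None
    for url in urls:
        try:
            # Stream the archive through the configured proxy and persist it to disk.
            r = requests.get(url, proxies=proxies, stream=True)
            r.raise_for_status()
            with open(dest_path, 'wb+') as f:
                f.write(r.content)
            return dest_path
        except requests.RequestException as err:
            last_error = err  # fall through to the next URL
    raise last_error

The next hunk reuses the same primary/mirror fallback for the plain-text version file.
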
@@ -111,11 +110,10 @@ def auto_update(raise_error=False):
     try:
         from toolbox import get_conf
         import requests
-        import time
         import json
         proxies = get_conf('proxies')
-        response = requests.get(
-            "https://raw.githubusercontent.com/binary-husky/chatgpt_academic/master/version", proxies=proxies, timeout=5)
+        try:    response = requests.get("https://raw.githubusercontent.com/binary-husky/chatgpt_academic/master/version", proxies=proxies, timeout=5)
+        except: response = requests.get("https://public.gpt-academic.top/publish/version", proxies=proxies, timeout=5)
         remote_json_data = json.loads(response.text)
         remote_version = remote_json_data['version']
         if remote_json_data["show_feature"]:
@@ -127,8 +125,7 @@ def auto_update(raise_error=False):
         current_version = json.loads(current_version)['version']
         if (remote_version - current_version) >= 0.01 - 1e-5:
             from colorful import print亮黄
-            print亮黄(
-                f'\n新版本可用。新版本:{remote_version},当前版本:{current_version}。{new_feature}')
+            print亮黄(f'\n新版本可用。新版本:{remote_version},当前版本:{current_version}。{new_feature}')
             print('(1)Github更新地址:\nhttps://github.com/binary-husky/chatgpt_academic\n')
             user_instruction = input('(2)是否一键更新代码(Y+回车=确认,输入其他/无输入+回车=不更新)?')
             if user_instruction in ['Y', 'y']:
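
Both version numbers are parsed as floats from JSON, and the update prompt fires only when the remote version is ahead of the local one by at least 0.01 (one minor step), with a 1e-5 epsilon to absorb floating-point rounding. Restated as a tiny helper with an illustrative name (needs_update) and arbitrary example version numbers:

def needs_update(remote_version: float, current_version: float) -> bool:
    # Offer an update only when the remote version is at least one minor
    # step (0.01) ahead; the 1e-5 epsilon absorbs float rounding error.
    return (remote_version - current_version) >= 0.01 - 1e-5

assert needs_update(3.75, 3.74)       # one step ahead -> offer the update
assert not needs_update(3.74, 3.74)   # same version   -> stay silent
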
@@ -154,7 +151,7 @@ def auto_update(raise_error=False):
         print(msg)

 def warm_up_modules():
-    print('正在执行一些模块的预热...')
+    print('正在执行一些模块的预热 ...')
     from toolbox import ProxyNetworkActivate
     from request_llms.bridge_all import model_info
     with ProxyNetworkActivate("Warmup_Modules"):
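
warm_up_modules runs the pre-warming inside the ProxyNetworkActivate("Warmup_Modules") context manager imported from toolbox; its implementation is not part of this diff. Purely as a hypothetical illustration of the kind of scoped proxy activation such a context manager can provide (not the project's actual code):

import os
from contextlib import contextmanager

@contextmanager
def proxy_network_activate(proxies):
    # Hypothetical stand-in for toolbox.ProxyNetworkActivate: export the proxy
    # settings for the duration of the block, then restore the prior environment.
    saved = {k: os.environ.get(k) for k in ('http_proxy', 'https_proxy')}
    try:
        if proxies:
            os.environ['http_proxy'] = proxies.get('http', '')
            os.environ['https_proxy'] = proxies.get('https', '')
        yield
    finally:
        for key, value in saved.items():
            if value is None:
                os.environ.pop(key, None)
            else:
                os.environ[key] = value
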