
Commit

add: zhipuai method
Lin-jun-xiang committed Jan 2, 2025
1 parent e130139 commit 5be08b9
Showing 3 changed files with 16 additions and 5 deletions.
1 change: 1 addition & 0 deletions .github/workflows/translate-readme.yml
@@ -17,4 +17,5 @@ jobs:
        uses: ./
        with:
          token: ${{ secrets.Action_Bot }} # Your token
          zhipuai: ${{ secrets.zhipuai_api_key }} # Your zhipuai API key, if using zhipuai instead of g4f
          langs: "en,zh-TW,zh-CN,French,Arabic" # You can define any langs
2 changes: 2 additions & 0 deletions action.yml
@@ -24,6 +24,7 @@ runs:
      with:
        fetch-depth: 3
        token: ${{ inputs.token }}
        zhipuai: ${{ inputs.zhipuai }}

    - name: Set Up Python
      uses: actions/setup-python@v3
@@ -35,6 +36,7 @@
      run: |
        pip install PyExecJS
        pip install -U g4f
        pip install -U zhipuai
        pip install --upgrade tenacity
    - name: Translate README
18 changes: 13 additions & 5 deletions translation.py
@@ -5,11 +5,12 @@

import g4f
from tenacity import retry, stop_after_attempt
from zhipuai import ZhipuAI

g4f.debug.logging = True

LAGNS = os.environ.get('LANGS').split(',')

ZHIPUAI_API_KEY = os.environ.get('ZHIPUAI_API_KEY', '')

def run_shell_command(command: str) -> tuple:
    result = subprocess.run(
@@ -24,10 +25,17 @@ def run_shell_command(command: str) -> tuple:

@retry(stop=stop_after_attempt(15))
async def chat_completion(query: str) -> str:
    response = await g4f.ChatCompletion.create_async(
        model="gpt-4o",
        messages=[{"role": "user", "content": query}],
    )
    response = ''
    if not ZHIPUAI_API_KEY:
        response = await g4f.ChatCompletion.create_async(
            model="gpt-4o",
            messages=[{"role": "user", "content": query}],
        )
    else:
        response = ZhipuAI(api_key=ZHIPUAI_API_KEY).chat.asyncCompletions.create(
            model="glm-4-plus",
            messages=[{"role": "user", "content": query}]
        )
    if response == '' or response is None:
        raise Exception
    return response
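For reference, a minimal sketch of pulling the reply text out of the zhipuai SDK using its synchronous chat.completions.create interface; the helper name zhipuai_chat_completion is hypothetical and not part of this commit, and the response shape is an assumption based on the SDK's OpenAI-style client.

import os

from zhipuai import ZhipuAI  # assumes the zhipuai package installed in the step above

ZHIPUAI_API_KEY = os.environ.get('ZHIPUAI_API_KEY', '')


def zhipuai_chat_completion(query: str) -> str:
    # Hypothetical helper, not part of this commit: send one user message to
    # the synchronous chat.completions.create endpoint and return plain text.
    client = ZhipuAI(api_key=ZHIPUAI_API_KEY)
    response = client.chat.completions.create(
        model="glm-4-plus",
        messages=[{"role": "user", "content": query}],
    )
    # OpenAI-style response object: the reply text sits in the first choice's message.
    return response.choices[0].message.content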
