From 06959de34e8010c3553b3a0984d040fc936f5920 Mon Sep 17 00:00:00 2001
From: huyiwen <1020030101@qq.com>
Date: Tue, 16 Apr 2024 22:06:10 +0800
Subject: [PATCH] Update requirements

---
 requirements.txt | 24 ++++++++++++++++--------
 1 file changed, 16 insertions(+), 8 deletions(-)

diff --git a/requirements.txt b/requirements.txt
index aec72aad..685dadd6 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -2,22 +2,30 @@ torch
 transformers
 safetensors
 tokenizers
-openai==0.28.1
-tqdm>=4.58.0
-tiktoken>=0.5.0
 datasets>=2.16.1
 accelerate
 coloredlogs
+tqdm>=4.58.0
+
+# Efficient inference
+packaging
+vllm
+flash-attn # https://github.com/Dao-AILab/flash-attention/issues/453#issuecomment-1692867770
+
+# API Models
+anthropic
+dashscope
+qianfan
+openai==0.28.1
+tiktoken>=0.5.0
+
+# Metrics
 nltk
 sacrebleu
 rouge_score
-vllm
-flash-attn
 langcodes
 language_data
-anthropic
 google-api-python-client
 immutabledict
 langdetect
-dashscope
-qianfan
+
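
Note (not part of the patch): the GitHub issue linked next to flash-attn concerns its source build, which expects `packaging` (and usually `ninja`) to be importable before compilation, so a plain `pip install -r requirements.txt` can fail on a fresh environment. A minimal install sketch, assuming a CUDA toolchain is available; the command order is an assumption illustrating the workaround, not something stated in the patch:

    pip install packaging ninja                    # build prerequisites for flash-attn
    pip install flash-attn --no-build-isolation    # workaround from the linked issue
    pip install -r requirements.txt                # remaining dependencies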