From 4a08f3b9fbc79e8cf92775a95b8361a943e79d24 Mon Sep 17 00:00:00 2001
From: fate-ubw <2474113608@qq.com>
Date: Fri, 23 Aug 2024 02:43:01 +0000
Subject: [PATCH] [docs] update readme

---
 .../{auto_eval_ALCE.py => auto_eval_ALCE.txt} |  0
 .../auto_run-factscore.txt                    | 48 +++++++++----------
 readme.md                                     |  8 ++--
 3 files changed, 28 insertions(+), 28 deletions(-)
 rename auto_gpu_scheduling_scripts/{auto_eval_ALCE.py => auto_eval_ALCE.txt} (100%)

diff --git a/auto_gpu_scheduling_scripts/auto_eval_ALCE.py b/auto_gpu_scheduling_scripts/auto_eval_ALCE.txt
similarity index 100%
rename from auto_gpu_scheduling_scripts/auto_eval_ALCE.py
rename to auto_gpu_scheduling_scripts/auto_eval_ALCE.txt
diff --git a/auto_gpu_scheduling_scripts/auto_run-factscore.txt b/auto_gpu_scheduling_scripts/auto_run-factscore.txt
index b378e6d..53f1f64 100644
--- a/auto_gpu_scheduling_scripts/auto_run-factscore.txt
+++ b/auto_gpu_scheduling_scripts/auto_run-factscore.txt
@@ -1,24 +1,24 @@
-sh run-raglab_result/2-eval_fact-raglab-active_rag-Llama3_70B_baseline-GPT.sh
-sh run-raglab_result/2-eval_fact-raglab-active_rag-Llama3_8B_baseline-GPT.sh
-sh run-raglab_result/2-eval_fact-raglab-active_rag-gpt3.5_baseline-GPT.sh
-sh run-raglab_result/2-eval_fact-raglab-direct_llm-Llama3_70B_baseline-GPT.sh
-sh run-raglab_result/2-eval_fact-raglab-direct_llm-Llama3_8B_baseline-GPT.sh
-sh run-raglab_result/2-eval_fact-raglab-direct_llm-gpt3.5_baseline-GPT.sh
-sh run-raglab_result/2-eval_fact-raglab-iter_retgen-Llama3_70B_baseline-GPT.sh
-sh run-raglab_result/2-eval_fact-raglab-iter_retgen-Llama3_8B_baseline-GPT.sh
-sh run-raglab_result/2-eval_fact-raglab-iter_retgen-gpt3.5_baseline-GPT.sh
-sh run-raglab_result/2-eval_fact-raglab-naive_rag-Llama3_70B_baseline-GPT.sh
-sh run-raglab_result/2-eval_fact-raglab-naive_rag-Llama3_8B_baseline-GPT.sh
-sh run-raglab_result/2-eval_fact-raglab-naive_rag-gpt3.5_baseline-GPT.sh
-sh run-raglab_result/2-eval_fact-raglab-query_rewrite_rag-Llama3_70B_baseline-GPT.sh
-sh run-raglab_result/2-eval_fact-raglab-query_rewrite_rag-Llama3_8B_baseline-GPT.sh
-sh run-raglab_result/2-eval_fact-raglab-query_rewrite_rag-gpt3.5_baseline-GPT.sh
-sh run-raglab_result/2-eval_fact-raglab-self_ask-Llama3_70B_baseline-GPT.sh
-sh run-raglab_result/2-eval_fact-raglab-self_ask-Llama3_8B_baseline-GPT.sh
-sh run-raglab_result/2-eval_fact-raglab-self_ask-gpt3.5_baseline-GPT.sh
-sh run-raglab_result/2-eval_fact-raglab-selfrag-selfrag_70B-adaptive_retrieval-GPT.sh
-sh run-raglab_result/2-eval_fact-raglab-selfrag-selfrag_70B-always_retrieval-GPT.sh
-sh run-raglab_result/2-eval_fact-raglab-selfrag-selfrag_70B-no_retrieval-GPT.sh
-sh run-raglab_result/2-eval_fact-raglab-selfrag-selfrag_8B-adaptive_retrieval-GPT.sh
-sh run-raglab_result/2-eval_fact-raglab-selfrag-selfrag_8B-always_retrieval-GPT.sh
-sh run-raglab_result/2-eval_fact-raglab-selfrag-selfrag_8B-no_retrieval-GPT.sh
\ No newline at end of file
+sh ./run/Factscore/2-eval_fact-raglab-active_rag-Llama3_70B_baseline-GPT.sh
+sh ./run/Factscore/2-eval_fact-raglab-active_rag-Llama3_8B_baseline-GPT.sh
+sh ./run/Factscore/2-eval_fact-raglab-active_rag-gpt3.5_baseline-GPT.sh
+sh ./run/Factscore/2-eval_fact-raglab-direct_llm-Llama3_70B_baseline-GPT.sh
+sh ./run/Factscore/2-eval_fact-raglab-direct_llm-Llama3_8B_baseline-GPT.sh
+sh ./run/Factscore/2-eval_fact-raglab-direct_llm-gpt3.5_baseline-GPT.sh
+sh ./run/Factscore/2-eval_fact-raglab-iter_retgen-Llama3_70B_baseline-GPT.sh
+sh ./run/Factscore/2-eval_fact-raglab-iter_retgen-Llama3_8B_baseline-GPT.sh
+sh ./run/Factscore/2-eval_fact-raglab-iter_retgen-gpt3.5_baseline-GPT.sh
+sh ./run/Factscore/2-eval_fact-raglab-naive_rag-Llama3_70B_baseline-GPT.sh
+sh ./run/Factscore/2-eval_fact-raglab-naive_rag-Llama3_8B_baseline-GPT.sh
+sh ./run/Factscore/2-eval_fact-raglab-naive_rag-gpt3.5_baseline-GPT.sh
+sh ./run/Factscore/2-eval_fact-raglab-query_rewrite_rag-Llama3_70B_baseline-GPT.sh
+sh ./run/Factscore/2-eval_fact-raglab-query_rewrite_rag-Llama3_8B_baseline-GPT.sh
+sh ./run/Factscore/2-eval_fact-raglab-query_rewrite_rag-gpt3.5_baseline-GPT.sh
+sh ./run/Factscore/2-eval_fact-raglab-self_ask-Llama3_70B_baseline-GPT.sh
+sh ./run/Factscore/2-eval_fact-raglab-self_ask-Llama3_8B_baseline-GPT.sh
+sh ./run/Factscore/2-eval_fact-raglab-self_ask-gpt3.5_baseline-GPT.sh
+sh ./run/Factscore/2-eval_fact-raglab-selfrag-selfrag_70B-adaptive_retrieval-GPT.sh
+sh ./run/Factscore/2-eval_fact-raglab-selfrag-selfrag_70B-always_retrieval-GPT.sh
+sh ./run/Factscore/2-eval_fact-raglab-selfrag-selfrag_70B-no_retrieval-GPT.sh
+sh ./run/Factscore/2-eval_fact-raglab-selfrag-selfrag_8B-adaptive_retrieval-GPT.sh
+sh ./run/Factscore/2-eval_fact-raglab-selfrag-selfrag_8B-always_retrieval-GPT.sh
+sh ./run/Factscore/2-eval_fact-raglab-selfrag-selfrag_8B-no_retrieval-GPT.sh
\ No newline at end of file
diff --git a/readme.md b/readme.md
index 898f817..27b90e8 100644
--- a/readme.md
+++ b/readme.md
@@ -107,8 +107,8 @@
 ## Setup colbert server
 - All algorithms integrated in raglab include two modes: `interact` and `evaluation`. The test stage demonstrates in `interact` mode, just for demostration and eduction 🤗.
-> [!NOTE]
-> - Due to colbert's requirement for absolute paths, you need to modify the index_dbPath and text_dbPath in the config file to use absolute paths.
+ > [!NOTE]
+ > - Due to colbert's requirement for absolute paths, you need to modify the index_dbPath and text_dbPath in the config file to use absolute paths.
 - Modify the `index_dbPath` and `text_dbPath` in config file:[colbert_server-10samples.yaml](./config/colbert_server/colbert_server-10samples.yaml)
 ~~~bash
 index_dbPath: /your_root_path/RAGLAB/data/retrieval/colbertv2.0_embedding/wiki2023-10samples
@@ -119,8 +119,8 @@
 cd RAGLAB
 sh run/colbert_server/colbert_server-10samples.sh
 ~~~
-> [!NOTE]
-> - At this point, colbert embedding will prompt that due to path errors, colbert embedding needs to be reprocessed. Please enter `yes` and then raglab will automatically help you process the embedding and start the colbert server.
+ > [!NOTE]
+ > - At this point, colbert embedding will prompt that due to path errors, colbert embedding needs to be reprocessed. Please enter `yes` and then raglab will automatically help you process the embedding and start the colbert server.
 - Now please open another terminal and try to request the colbert server
 ~~~bash
 cd RAGLAB