Skip to content

Commit

Permalink
Fix test_qwen2.py
Browse files Browse the repository at this point in the history
  • Loading branch information
icppWorld committed Sep 28, 2024
1 parent d884295 commit 7ac41f7
Show file tree
Hide file tree
Showing 2 changed files with 7 additions and 7 deletions.
10 changes: 5 additions & 5 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -170,13 +170,9 @@ WARNING: Currently, the canister can only be built on a `mac` !
},
)
########################################################
# NOTE: This is the equivalent llama-cli call, when running llama.cpp locally
./llama-cli -m /models/Qwen/Qwen2.5-0.5B-Instruct-GGUF/qwen2.5-0.5b-instruct-q8_0.gguf -sp -p "<|im_start|>system\nYou are a helpful assistant.<|im_end|>\n<|im_start|>user\ngive me a short introduction to LLMs.<|im_end|>\n<|im_start|>assistant\n" -fa -ngl 80 -n 512 --prompt-cache prompt.cache --prompt-cache-all
```
########################################
# Tip. Add this to the args vec if you #
# want to see how many tokens the #
Expand All @@ -186,6 +182,9 @@ WARNING: Currently, the canister can only be built on a `mac` !
# ;"--print-token-count"; "1" #
########################################
```
- Deployed to mainnet at canister: 6uwoh-vaaaa-aaaag-amema-cai
To be able to upload the model, I had to change the [compute allocation](https://internetcomputer.org/docs/current/developer-docs/smart-contracts/maintain/settings#compute-allocation)
Expand Down Expand Up @@ -270,4 +269,5 @@ WARNING: Currently, the canister can only be built on a `mac` !
# hits the instruction limit #
# #
# ;"--print-token-count"; "1" #
########################################
########################################
```
4 changes: 2 additions & 2 deletions test/test_qwen2.py
Original file line number Diff line number Diff line change
Expand Up @@ -78,7 +78,7 @@ def test__run_update_1(network: str) -> None:
canister_argument='(record { args = vec {"--prompt-cache"; "my_cache/prompt.cache"; "--prompt-cache-all"; "-sp"; "-n"; "512"; "-p"; "<|im_start|>system\nYou are a helpful assistant.<|im_end|>\n<|im_start|>user\nExplain Large Language Models.<|im_end|>\n<|im_start|>assistant\n"} })',
network=network,
)
expected_response = '(variant { Ok = record { output = ""; conversation = "<|im_start|>system\nYou are a helpful assistant.<|im_end|>\n<|im_start|>"; error = ""; status_code = 200 : nat16; prompt_remaining = "user\nExplain Large Language Models.<|im_end|>\n<|im_start|>assistant\n"; generated_eog = false;} })'
expected_response = '(variant { Ok = record { output = ""; conversation = "<|im_start|>system\\nYou are a helpful assistant.<|im_end|>\\n<|im_start|>"; error = ""; status_code = 200 : nat16; prompt_remaining = "user\\nExplain Large Language Models.<|im_end|>\\n<|im_start|>assistant\\n"; generated_eog = false;} })'
assert expected_response == response

def test__run_update_2(network: str) -> None:
Expand All @@ -89,7 +89,7 @@ def test__run_update_2(network: str) -> None:
canister_argument='(record { args = vec {"--prompt-cache"; "my_cache/prompt.cache"; "--prompt-cache-all"; "-sp"; "-n"; "512"; "-p"; "<|im_start|>system\nYou are a helpful assistant.<|im_end|>\n<|im_start|>user\nExplain Large Language Models.<|im_end|>\n<|im_start|>assistant\n"} })',
network=network,
)
expected_response = '(variant { Ok = record { status_code = 200 : nat16; error = ""; output = ""; input = "<|im_start|>system\nYou are a helpful assistant.<|im_end|>\n<|im_start|>user\nExplain Large Language Models.<|im_end|>\n<|im_start|>assistant"; prompt_remaining = "\n";} generated_eog=false : bool})'
expected_response = '(variant { Ok = record { output = ""; conversation = "<|im_start|>system\\nYou are a helpful assistant.<|im_end|>\\n<|im_start|>user\\nExplain Large Language Models.<|im_end|>\\n<|im_start|>assistant"; error = ""; status_code = 200 : nat16; prompt_remaining = "\\n"; generated_eog = false;} })'
assert expected_response == response

def test__run_update_3(network: str) -> None:
Expand Down

0 comments on commit 7ac41f7

Please sign in to comment.