From 52b9b9b56af4db99ef95288e2c2b562057cffdc5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Tom=C3=A1=C5=A1=20Pazdiora?= Date: Sun, 9 Apr 2023 21:20:39 +0200 Subject: [PATCH] main : fix inverted instruct_mode detection and eos-to-newline condition --- examples/main/main.cpp | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/examples/main/main.cpp b/examples/main/main.cpp index 1684bec9234a1b..3c61c7d9beacc5 100644 --- a/examples/main/main.cpp +++ b/examples/main/main.cpp @@ -85,7 +85,7 @@ int main(int argc, char ** argv) { params.prompt = gpt_random_prompt(rng); } - bool instruct_mode = params.instruct_prefix.empty() && params.instruct_suffix.empty(); + bool instruct_mode = !params.instruct_prefix.empty() || !params.instruct_suffix.empty(); // params.prompt = R"(// this function checks if the number n is prime //bool is_prime(int n) {)"; @@ -327,7 +327,7 @@ int main(int argc, char ** argv) { } // replace end of text token with newline token when in interactive mode - if (id == llama_token_eos() && params.interactive && !instruct_mode) { id = llama_token_newline.front(); if (params.antiprompt.size() != 0) { // tokenize and inject first reverse prompt