From 54a2b9c7eb6ca126eaa63016b564e3680fd0c0c6 Mon Sep 17 00:00:00 2001
From: KillianLucas <63927363+KillianLucas@users.noreply.github.com>
Date: Mon, 28 Aug 2023 21:04:31 +0000
Subject: [PATCH] Code Llama

---
 interpreter/llama_2.py | 13 ++++++++-----
 1 file changed, 8 insertions(+), 5 deletions(-)

diff --git a/interpreter/llama_2.py b/interpreter/llama_2.py
index abfc6f42a1..ce54af6389 100644
--- a/interpreter/llama_2.py
+++ b/interpreter/llama_2.py
@@ -176,8 +176,11 @@ def supports_metal():
     return llama_2
 
 def confirm_action(message):
-    # Print message with newlines on either side (aesthetic choice)
-    print('', Markdown(f"{message} (y/n)"), '')
-    response = input().strip().lower()
-    print('') # <- Aesthetic choice
-    return response == 'y'
\ No newline at end of file
+    question = [
+        inquirer.Confirm('confirm',
+                         message=message,
+                         default=True),
+    ]
+
+    answers = inquirer.prompt(question)
+    return answers['confirm']
\ No newline at end of file
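
Note: a minimal standalone sketch of the prompt pattern this patch switches to, assuming the `inquirer` package is installed and imported elsewhere in llama_2.py (the import is not part of this hunk). The function name and demo message below are illustrative only.

    import inquirer

    def confirm_action(message):
        # inquirer.Confirm renders an interactive yes/no prompt in the terminal;
        # inquirer.prompt returns a dict keyed by the question name ('confirm').
        question = [
            inquirer.Confirm('confirm', message=message, default=True),
        ]
        answers = inquirer.prompt(question)
        return answers['confirm']

    if __name__ == '__main__':
        if confirm_action("Use Code Llama?"):
            print("Confirmed.")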