From bf132427d82b460f3ab4049b6da1be0db4dc7feb Mon Sep 17 00:00:00 2001 From: Yingbei Tong Date: Sat, 15 Jun 2024 00:04:14 +0000 Subject: [PATCH] Sync (#6) * update1 * add key parser tool * add utils fix * jsonrepair * update notebook --- .gitignore | 4 + Makefile | 2 +- examples/server/function-call-parser.hpp | 147 +++++ examples/server/function-call.hpp | 325 ++++++++++ examples/server/utils.hpp | 59 +- jsonrepair.ts | 23 + test_llamacpp.ipynb | 743 +++++++++++++++++++++++ 7 files changed, 1290 insertions(+), 13 deletions(-) create mode 100644 examples/server/function-call-parser.hpp create mode 100644 examples/server/function-call.hpp create mode 100644 jsonrepair.ts create mode 100644 test_llamacpp.ipynb diff --git a/.gitignore b/.gitignore index 5296594952c4a..36d06b41d5ab9 100644 --- a/.gitignore +++ b/.gitignore @@ -1,3 +1,7 @@ +*package-lock.json +*package.json +*node_modules +*.ipynb *.o *.a *.so diff --git a/Makefile b/Makefile index 744fe5739e95c..8459f91c0d2a2 100644 --- a/Makefile +++ b/Makefile @@ -897,7 +897,7 @@ llama-save-load-state: examples/save-load-state/save-load-state.cpp ggml.o llama $(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<) $(CXX) $(CXXFLAGS) $(filter-out %.h $<,$^) $(call GET_OBJ_FILE, $<) -o $@ $(LDFLAGS) -llama-server: examples/server/server.cpp examples/server/utils.hpp examples/server/httplib.h common/json.hpp examples/server/colorthemes.css.hpp examples/server/style.css.hpp examples/server/theme-beeninorder.css.hpp examples/server/theme-ketivah.css.hpp examples/server/theme-mangotango.css.hpp examples/server/theme-playground.css.hpp examples/server/theme-polarnight.css.hpp examples/server/theme-snowstorm.css.hpp examples/server/index.html.hpp examples/server/index-new.html.hpp examples/server/index.js.hpp examples/server/completion.js.hpp examples/server/system-prompts.js.hpp examples/server/prompt-formats.js.hpp examples/server/json-schema-to-grammar.mjs.hpp common/stb_image.h ggml.o llama.o $(COMMON_DEPS) grammar-parser.o $(OBJS) +llama-server: examples/server/server.cpp examples/server/utils.hpp examples/server/function-call-parser.hpp examples/server/function-call.hpp examples/server/httplib.h common/json.hpp examples/server/colorthemes.css.hpp examples/server/style.css.hpp examples/server/theme-beeninorder.css.hpp examples/server/theme-ketivah.css.hpp examples/server/theme-mangotango.css.hpp examples/server/theme-playground.css.hpp examples/server/theme-polarnight.css.hpp examples/server/theme-snowstorm.css.hpp examples/server/index.html.hpp examples/server/index-new.html.hpp examples/server/index.js.hpp examples/server/completion.js.hpp examples/server/system-prompts.js.hpp examples/server/prompt-formats.js.hpp examples/server/json-schema-to-grammar.mjs.hpp common/stb_image.h ggml.o llama.o $(COMMON_DEPS) grammar-parser.o $(OBJS) $(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<) $(CXX) $(CXXFLAGS) $(filter-out %.h %.hpp $<,$^) -Iexamples/server $(call GET_OBJ_FILE, $<) -o $@ $(LDFLAGS) $(LWINSOCK2) diff --git a/examples/server/function-call-parser.hpp b/examples/server/function-call-parser.hpp new file mode 100644 index 0000000000000..a9e8cc297910c --- /dev/null +++ b/examples/server/function-call-parser.hpp @@ -0,0 +1,147 @@ +#include +#include +#include "json.hpp" +#include +#include + +using json = nlohmann::ordered_json; + + +std::string generate_uuid() { + static std::random_device rd; + static std::mt19937 generator(rd()); + static std::uniform_int_distribution distribution(0, 15); + + const char *v = "0123456789abcdef"; + 
std::stringstream uuid; + + for (int i = 0; i < 8; ++i) { + uuid << v[distribution(generator)]; + } + return uuid.str(); +} + + +std::string jsonrepair(const std::string value) { + std::array buffer; + std::string result; + // Ensure the command passed to popen() is null-terminated + std::string tmpfile_name = "." + generate_uuid() + ".json"; + std::ofstream outfile(tmpfile_name); + outfile << value; // Assuming jsonStr contains your JSON string + outfile.close(); + std::string command = "node jsonrepair.ts " + tmpfile_name; + std::unique_ptr pipe(popen(command.c_str(), "r"), pclose); + if (!pipe) { + throw std::runtime_error("popen() failed!"); + } + while (fgets(buffer.data(), buffer.size(), pipe.get()) != nullptr) { + result += buffer.data(); + } + return result; +} + + +json parse_if_json(const std::string& value) { + try { + // json repair here + return json::parse(jsonrepair(value)); + } catch (const json::parse_error&) { + return value; // Return the original string if parsing fails + } +} + + +std::string clean_command_string(const std::string& command_str) { + std::string cleaned_command = std::regex_replace(command_str, std::regex(R"(\\(?!["\\/bfnrt]|u[a-fA-F0-9]{4}))"), ""); + cleaned_command = std::regex_replace(cleaned_command, std::regex(R"(\\")"), "\""); + + if (cleaned_command.front() == '"' && cleaned_command.back() == '"') { + cleaned_command = cleaned_command.substr(1, cleaned_command.size() - 2); + } + return cleaned_command; +} + + +json clean_json_strings(const std::string& input_str) { + try { + // json repair here + std::string fixed_str = jsonrepair(input_str); + json data = json::parse(fixed_str); + + for (auto& [key, value] : data.items()) { + if (value.is_string()) { + std::string val = value.get(); + if (val.front() == '{' || val.front() == '[') { + data[key] = parse_if_json(val); + } else { + data[key] = clean_command_string(val); + } + } else if (value.is_object()) { + for (auto& [k, v] : value.items()) { + if (v.is_string()) { + v = clean_command_string(v.get()); + } + } + } + } + return data; + } catch (const json::parse_error& e) { + std::cout << "Error decoding JSON: " << e.what() << std::endl; + return nullptr; + } +} + + + + +std::vector rubra_fc_json_tool_extractor(const std::string& output_str) { + std::vector result; + printf("OUTPUT STR TO BE PARSED : %s", output_str.c_str()); + if (output_str.find("endtoolcall") == std::string::npos) { + return result; + } + + std::vector listOfStrToParse; + size_t start = 0, end = 0; + + // Iterate until all instances of "endtoolcall" are processed + while ((end = output_str.find("endtoolcall", start)) != std::string::npos) { + std::string segment = output_str.substr(start, end - start); + size_t pos = segment.find("starttoolcall"); + if (pos != std::string::npos) { + // Extract substring after "toolcall" + listOfStrToParse.push_back(segment.substr(pos + std::string("starttoolcall").length())); + } + start = end + std::string("endtoolcall").length(); // Move past the "endtoolcall" + } + + std::vector function_call_json; + + try { + for (const auto & line : listOfStrToParse) { + // json fc = json::parse(line); + json fc = clean_json_strings(line); + if (fc["arguments"].is_string()) { + fc["arguments"] = json::parse(fc["arguments"].get()); + } + if (!fc.is_null()) { + function_call_json.push_back(fc); + } + + } + } catch (const std::exception& e) { + std::cerr << "Error: " << e.what() << std::endl; + } + + for (const auto& fc : function_call_json) { + json func_call; + func_call["id"] = generate_uuid(); + 
func_call["name"] = fc["name"]; + func_call["kwargs"] = fc["arguments"]; + func_call["type"] = "function"; + result.push_back(func_call); + } + + return result; +} \ No newline at end of file diff --git a/examples/server/function-call.hpp b/examples/server/function-call.hpp new file mode 100644 index 0000000000000..ec08af9322512 --- /dev/null +++ b/examples/server/function-call.hpp @@ -0,0 +1,325 @@ +#include +#include "json.hpp" +#include +#include + +using json = nlohmann::ordered_json; + + +static std::string join(const std::vector& vec, const std::string& delimiter) { + std::string result; + for (size_t i = 0; i < vec.size(); i++) { + result += vec[i]; + if (i < vec.size() - 1) { + result += delimiter; + } + } + return result; +} + +static std::string capitalize(const std::string& str) { + std::string capitalized = str; + if (!capitalized.empty()) { + capitalized[0] = toupper(capitalized[0]); + for (size_t i = 1; i < capitalized.length(); i++) { + capitalized[i] = tolower(capitalized[i]); + } + } + return capitalized; +} + +static std::string json_schema_to_typescript_type(const json& schema, const std::string& param_name, std::string& enum_comment, std::string& integer_comment, std::string& description_comment); + +static std::pair generate_typescript_interface(const json& schema, const std::string& interface_name); + +static std::string generate_typescript_function(const json& function_schema); + +// Main functions +static std::string json_schema_to_typescript_type(const json& schema, const std::string& param_name, std::string& enum_comment, std::string& integer_comment, std::string& description_comment) { + std::string ts_type = "any"; // Default type + enum_comment = ""; + integer_comment = ""; + description_comment = ""; + + if (schema.contains("type")) { + std::string json_type = schema["type"]; + if (json_type == "array") { + std::string item_type = "any"; + if (schema.contains("items")) { + item_type = json_schema_to_typescript_type(schema["items"], param_name, enum_comment, integer_comment, description_comment); + } + ts_type = item_type + "[]"; + } else if (json_type == "number") { + ts_type = "number"; + } else if (json_type == "integer") { + ts_type = "number"; + integer_comment = " * @param " + param_name + " - Integer"; + } else if (json_type == "object") { + auto [interface_type, _] = generate_typescript_interface(schema, param_name); + ts_type = interface_type; + } else if (json_type == "boolean") { + ts_type = "boolean"; + } else if (json_type == "null") { + ts_type = "null"; + } else if (json_type == "string") { + ts_type = "string"; + } + } + + if (schema.contains("enum")) { + std::vector enum_values; + for (const auto& val : schema["enum"]) { + enum_values.push_back("\"" + val.get() + "\""); + } + enum_comment = " * @enum " + param_name + " - Possible values: " + join(enum_values, ", "); + ts_type = "string"; + } + if (schema.contains("description")) { + description_comment = " * @param " + param_name + " - " + schema["description"].get(); + } + + return ts_type; +} + +static std::pair generate_typescript_interface(const json& schema, const std::string& interface_name) { + json properties = schema.contains("properties") && !schema["properties"].is_null() + ? 
schema["properties"] + : json::object(); + std::vector required = schema.value("required", std::vector()); + + std::vector interface_body; + std::vector descriptions; + for (auto& [prop_name, prop_schema] : properties.items()) { + std::string enum_comment, integer_comment, description_comment; + std::string prop_type = json_schema_to_typescript_type(prop_schema, prop_name, enum_comment, integer_comment, description_comment); + bool is_optional = find(required.begin(), required.end(), prop_name) == required.end(); + interface_body.push_back(" " + prop_name + (is_optional ? "?" : "") + ": " + prop_type + ";"); + if (!description_comment.empty()) { + descriptions.push_back(description_comment); + } + if (!enum_comment.empty()) { + descriptions.push_back(enum_comment); + } + if (!integer_comment.empty()) { + descriptions.push_back(integer_comment); + } + } + + std::string comments = join(descriptions, "\n"); + std::string interface_definition = "interface " + interface_name + " {\n" + join(interface_body, "\n") + "\n}"; + return {interface_definition, comments}; +} + + +bool starts_with(const std::string& fullString, const std::string& prefix) { + return fullString.find(prefix) == 0; +} + +static std::string generate_typescript_function(const json& function_schema) { + std::string func_name = function_schema["name"]; + std::string description = function_schema.value("description", ""); + json parameters_schema = function_schema.contains("parameters") && !function_schema["parameters"].is_null() + ? function_schema["parameters"] + : json::object(); + std::vector required_params = parameters_schema.value("required", std::vector()); + + std::vector args_list; + std::vector comments_list; + std::vector interfaces; + + if (parameters_schema.contains("properties") && parameters_schema["properties"].is_object()){ + for (auto& [param_name, param_schema] : parameters_schema["properties"].items()) { + std::string enum_comment, integer_comment, description_comment; + std::string ts_type = json_schema_to_typescript_type(param_schema, param_name, enum_comment, integer_comment, description_comment); + if (starts_with(ts_type, "interface")) { + auto [interface_definition, nested_comments] = generate_typescript_interface(param_schema, func_name + "_" + capitalize(param_name) + "Params"); + interfaces.push_back(interface_definition); + comments_list.push_back(nested_comments); + ts_type = func_name + "_" + capitalize(param_name) + "Params"; + } else { + if (!description_comment.empty()) { + comments_list.push_back(description_comment); + } + if (!enum_comment.empty()) { + comments_list.push_back(enum_comment); + } + if (!integer_comment.empty()) { + comments_list.push_back(integer_comment); + } + } + bool is_optional = find(required_params.begin(), required_params.end(), param_name) == required_params.end(); + args_list.push_back(param_name + (is_optional ? "?" : "") + ": " + ts_type); + } + } + + + std::string args_str = join(args_list, ", "); + std::string comments_str = join(comments_list, "\n"); + std::string interfaces_str = join(interfaces, "\n\n"); + + std::string description_comment = (!description.empty()) ? " * " + description + "\n" : ""; + std::string typescript_func_declaration = + "/**\n" + + description_comment + + (comments_str.empty() ? "" : (comments_str + "\n")) + + " */\n" + + (interfaces_str.empty() ? 
"" : (interfaces_str + "\n\n")) + + "function " + func_name + "(" + args_str + "): any {};"; + + return typescript_func_declaration; +} + + + +std::string rubra_format_typescript_function_call_str(const std::vector &functions, json &tool_name_map) { + std::string final_str = "You have access to the following tools:\n"; + std::vector function_definitions; + for (auto& function : functions) { + // If function is directly the object or nested under "function" + json spec = function.contains("function") ? function["function"] : function; + + // Making a modifiable copy of spec + json spec_copy = spec; + + std::string func_name = spec_copy.value("name", ""); + + if (func_name.find('-') != std::string::npos) { + const std::string origin_func_name = func_name; + std::replace(func_name.begin(), func_name.end(), '-', '_'); // replace "-" with "_" because - is invalid in typescript function name + tool_name_map[func_name] = origin_func_name; + spec_copy["name"] = func_name; // Modify the name in the copied json object + } + + std::string res_string = generate_typescript_function(spec_copy); // generate TypeScript function + function_definitions.push_back(res_string); + } + + + for (const auto& def : function_definitions) { + final_str += def + "\n\n"; + } + final_str += "You can choose to respond with one or more tool calls at once, or with a chat message back to the user. Ensure you have all necessary details before making tool calls. If additional information is needed, ask the user appropriately. Any tool call you make must correspond to the functions listed above. If you decide to call a tool, format it like this: starttoolcall{\"name\": \"\", \"arguments\": {\"\": \"\", \"\": \"\", ...}}endtoolcall where the JSON wrapped between starttoolcall and endtoolcall represents the function call.\n"; + return final_str; + +} + + +std::string construct_json_tool_call_str(const json& tool_calls, nlohmann::ordered_map & func_observation_map) { + std::string tool_call_str; + bool first = true; + for (const auto& tool_call : tool_calls) { + std::string tool_call_id = tool_call["id"]; + func_observation_map[tool_call_id] = ""; // Initialize with empty value, updated later from the message with tool role + + if (!first) { + tool_call_str += "\n"; + } + json tc = tool_call["function"]; + if (tc["arguments"].is_string()) { + tc["arguments"] = json::parse(tc["arguments"].get()); + } + tool_call_str += std::string("starttoolcall") + tc.dump() + std::string("endtoolcall"); + first = false; + } + + return tool_call_str; +} + + +const std::vector expand_messages(const json & body, json &tool_name_map) { + std::string function_str = ""; + if (body.contains("tools") && !body["tools"].empty()) { + function_str = rubra_format_typescript_function_call_str(body["tools"], tool_name_map); + } + // If 'tool' is not set or empty, check 'functions' + else if (body.contains("functions") && !body["functions"].empty()) { + function_str = rubra_format_typescript_function_call_str(body["functions"], tool_name_map); + } + + if (function_str != "") { + printf("\n=============Formatting function call Input from OPENAI format...============\n"); + const std::vector expanded_messages = [&]() { + std::vector temp_vec; + nlohmann::ordered_map func_observation_map; + for (size_t i = 0; i < body["messages"].size(); ++i) { + if (body["messages"][i]["role"] != "tool" and func_observation_map.size() > 0) { + // insert the observation from the tool call before the next message + std::string observation_str = ""; + std::vector 
func_observation_array; + for (const auto& [key, value] : func_observation_map) { + func_observation_array.push_back(value); + } + json func_json_array = func_observation_array; + observation_str = std::string("start observation ") + func_json_array.dump() + std::string(" end observation"); + json observation_call; + observation_call["role"] = "user"; + observation_call["content"] = observation_str; + temp_vec.push_back(observation_call); + func_observation_map.clear(); + } + + if (i == 0){ + if (body["messages"][0]["role"] == "system") { + std::string old_content = body["messages"][0]["content"]; + json function_call; + function_call["role"] = "system"; + function_call["content"] = old_content + "\n" + function_str; + temp_vec.push_back(function_call); + } + else { // insert a system message of tool definition before the first message + json function_call; + function_call["role"] = "system"; + function_call["content"] = "You are a helpful assistant.\n" + function_str; + temp_vec.push_back(function_call); + temp_vec.push_back(body["messages"][0]); + } + } + // else if (body["messages"][i]["role"] == "assistant" and (body["messages"][i]["content"].is_null() or body["messages"][i]["content"]=="") and !body["messages"][i]["tool_calls"].is_null() and !body["messages"][i]["tool_calls"].empty()){ + else if (body["messages"][i]["role"] == "assistant" and body["messages"][i].contains("tool_calls")){ + // convert OpenAI function call format to Rubra format + // string tool_call_str = construct_python_tool_call_str(body["messages"][i]["tool_calls"], func_observation_map); + std::string tool_call_str = construct_json_tool_call_str(body["messages"][i]["tool_calls"], func_observation_map); + json function_call; + function_call["role"] = "assistant"; + function_call["content"] = tool_call_str; + temp_vec.push_back(function_call); + } + else if (body["messages"][i]["role"] == "tool") { + std::string tool_call_id = body["messages"][i]["tool_call_id"].get(); + if (func_observation_map.find(tool_call_id) != func_observation_map.end()) { + func_observation_map[tool_call_id] = body["messages"][i]["content"].get(); + } else { + printf("Tool call id not found in the map : %s", tool_call_id.c_str()); + // TODO: the input is not valid in this case, should return an error + } + + } + else { + temp_vec.push_back(body["messages"][i]); + } + + } + if (func_observation_map.size() > 0) { + // insert the observation from the tool call before the next message + std::string observation_str = ""; + std::vector func_observation_array; + for (const auto& [key, value] : func_observation_map) { + func_observation_array.push_back(value); + } + json func_json_array = func_observation_array; + observation_str = std::string("start observation ") + func_json_array.dump() + std::string(" end observation"); + json observation_call; + observation_call["role"] = "user"; + observation_call["content"] = observation_str; + temp_vec.push_back(observation_call); + func_observation_map.clear(); + } + return temp_vec; + }(); + return expanded_messages; + } + else { + return body["messages"]; + } + +} \ No newline at end of file diff --git a/examples/server/utils.hpp b/examples/server/utils.hpp index 63fde9c9faabe..4e91eb9e04ecd 100644 --- a/examples/server/utils.hpp +++ b/examples/server/utils.hpp @@ -6,11 +6,14 @@ // Change JSON_ASSERT from assert() to GGML_ASSERT: #define JSON_ASSERT GGML_ASSERT #include "json.hpp" +#include "function-call.hpp" +#include "function-call-parser.hpp" #include #include #include #include +#include #define 
DEFAULT_OAICOMPAT_MODEL "gpt-3.5-turbo-0613" @@ -147,6 +150,7 @@ inline std::string format_chat(const struct llama_model * model, const std::stri const std::string formatted_chat(buf.data(), res); LOG_VERBOSE("formatted_chat", {{"text", formatted_chat.c_str()}}); + printf("formatted_chat: %s\n", formatted_chat.c_str()); return formatted_chat; } @@ -355,6 +359,10 @@ static json oaicompat_completion_params_parse( json llama_params; llama_params["__oaicompat"] = true; + json tool_name_map; + const std::vector expanded_messages = expand_messages(body, tool_name_map); + llama_params["prompt"] = format_chat(model, chat_template, expanded_messages); + llama_params["tool_name_map"] = tool_name_map; // Map OpenAI parameters to llama.cpp parameters // @@ -375,7 +383,7 @@ static json oaicompat_completion_params_parse( llama_params["top_p"] = json_value(body, "top_p", 1.0); // Apply chat template to the list of messages - llama_params["prompt"] = format_chat(model, chat_template, body.at("messages")); + // llama_params["prompt"] = format_chat(model, chat_template, body.at("messages")); // Handle "stop" field if (body.contains("stop") && body.at("stop").is_string()) { @@ -410,12 +418,12 @@ static json oaicompat_completion_params_parse( } // Params supported by OAI but unsupported by llama.cpp - static const std::vector unsupported_params { "tools", "tool_choice" }; - for (auto & param : unsupported_params) { - if (body.contains(param)) { - throw std::runtime_error("Unsupported param: " + param); - } - } + // static const std::vector unsupported_params { "tools", "tool_choice" }; + // for (auto & param : unsupported_params) { + // if (body.contains(param)) { + // throw std::runtime_error("Unsupported param: " + param); + // } + // } // Copy remaining properties to llama_params // This allows user to use llama.cpp-specific params like "mirostat", "tfs_z",... via OAI endpoint. @@ -437,19 +445,45 @@ static json format_final_response_oaicompat(const json & request, json result, c int num_prompt_tokens = json_value(result, "tokens_evaluated", 0); std::string content = json_value(result, "content", std::string("")); + std::vector parsed_content = rubra_fc_json_tool_extractor(content); std::string finish_reason = "length"; if (stopped_word || stopped_eos) { finish_reason = "stop"; } - json choices = - streaming ? 
json::array({json{{"finish_reason", finish_reason}, - {"index", 0}, - {"delta", json::object()}}}) - : json::array({json{{"finish_reason", finish_reason}, + json choices; + + if (streaming) { + choices = json::array({json{{"finish_reason", finish_reason}, + {"index", 0}, + {"delta", json::object()}}}); + } else { + if (parsed_content.empty()) { + choices = json::array({json{{"finish_reason", finish_reason}, {"index", 0}, {"message", json{{"content", content}, {"role", "assistant"}}}}}); + } else { + std::vector oai_format_tool_calls; + for (size_t i = 0; i < parsed_content.size(); ++i) { + const auto &pc = parsed_content[i]; + // Use 'pc' and 'i' as needed + json tool_call; + tool_call["id"] = pc["id"]; + tool_call["type"] = "function"; + + tool_call["function"] = json{ + {"name" , pc["name"]}, + {"arguments" , pc["kwargs"].dump()}, + }; + oai_format_tool_calls.push_back(tool_call); + } + choices = json::array({json{{"finish_reason", "tool_calls"}, + {"index", 0}, + {"message", json{{"tool_calls", oai_format_tool_calls}, + {"role", "assistant"}}}}}); + } + } std::time_t t = std::time(0); @@ -466,6 +500,7 @@ static json format_final_response_oaicompat(const json & request, json result, c }}, {"id", completion_id} }; + printf("==============formatted_final_response_oaicompat================\n %s\n\n", res.dump().c_str()); if (server_verbose) { res["__verbose"] = result; diff --git a/jsonrepair.ts b/jsonrepair.ts new file mode 100644 index 0000000000000..a9c2899de8adc --- /dev/null +++ b/jsonrepair.ts @@ -0,0 +1,23 @@ +const fs = require('fs'); +const { jsonrepair } = require('jsonrepair'); + +// This script processes command-line arguments +const filename = process.argv[2]; // Skip the first two elements +// Simple processing: join arguments into a stri + +function processFile(filePath) { + try { + const data = fs.readFileSync(filePath, { encoding: 'utf8' }); + const repairData = jsonrepair(data); + console.log(repairData); + fs.unlinkSync(filePath); + return repairData; + } catch (error) { + console.error('Error reading file:', error); + return ''; + } +} + + +processFile(filename); +// Output the result diff --git a/test_llamacpp.ipynb b/test_llamacpp.ipynb new file mode 100644 index 0000000000000..665a73f7cbc83 --- /dev/null +++ b/test_llamacpp.ipynb @@ -0,0 +1,743 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Function Definitions" + ] + }, + { + "cell_type": "code", + "execution_count": 109, + "metadata": {}, + "outputs": [], + "source": [ + "import json\n", + "import uuid\n", + "from functools import partial\n", + "\n", + "\n", + "def add(args: str):\n", + " args = json.loads(args)\n", + " return str(float(args[\"a\"]) + float(args[\"b\"]))\n", + "\n", + "\n", + "def sub(args: str):\n", + " args = json.loads(args)\n", + " return str(float(args[\"a\"]) - float(args[\"b\"]))\n", + "\n", + "\n", + "def mult(args: str):\n", + " args = json.loads(args)\n", + " return str(float(args[\"a\"]) * float(args[\"b\"]))\n", + "\n", + "\n", + "def div(args: str):\n", + " args = json.loads(args)\n", + " return str(float(args[\"a\"]) / float(args[\"b\"]))\n", + "\n", + "\n", + "def get_oai_response(model, functions, msgs, api_key, base_url):\n", + " import openai\n", + " openai.api_key = api_key ## Add your API key here\n", + " openai.base_url = base_url\n", + " print(f\"Pointing to URL: {base_url}\")\n", + " \n", + " try:\n", + " completion = openai.chat.completions.create(\n", + " model=model,\n", + " temperature=0.1,\n", + " messages=msgs,\n", + " 
tools=functions,\n", + " tool_choice=\"auto\",\n", + " # functions=functions,\n", + " # function_call=\"auto\",\n", + " stream=False,\n", + " )\n", + " return completion.choices[0]\n", + " except Exception as e:\n", + " print(e)\n", + "\n", + "\n", + "def insert_tool_response(res, msgs):\n", + " # for tool_call in res.message.tool_calls:\n", + " # print(f\"Tool Call: {tool_call.id}, {tool_call.function}\")\n", + " assistant_message = res.message\n", + " tool_calls = []\n", + " for tool_call in assistant_message.tool_calls:\n", + " tool_calls.append( {\n", + " \"id\": tool_call.id,\n", + " \"function\": {\"name\": tool_call.function.name,\n", + " \"arguments\": tool_call.function.arguments},\n", + " \"type\": \"function\",\n", + " })\n", + " msgs.append({\"role\": \"assistant\", \"tool_calls\": tool_calls})\n", + " \n", + " for i, tool_call in enumerate(assistant_message.tool_calls):\n", + " if tool_call.function.name == \"getCurrentWeather\":\n", + " print()\n", + " l = len((json.loads(assistant_message.tool_calls[i].function.arguments))[\"location\"])\n", + " msgs.append({\"role\": \"tool\", \"tool_call_id\": str(assistant_message.tool_calls[i].id), \"name\": assistant_message.tool_calls[i].function.name, \"content\": f\"temprature is {(i+1) * 50 + l } degree\"})\n", + " elif tool_call.function.name == \"calculate_distance\":\n", + " msgs.append({\"role\": \"tool\", \"tool_call_id\": str(assistant_message.tool_calls[i].id), \"name\": assistant_message.tool_calls[i].function.name, \"content\": f\"Distance is {(i+1) * 50} miles.\"})\n", + " elif tool_call.function.name == \"generate_password\":\n", + " msgs.append({\"role\": \"tool\", \"tool_call_id\": str(assistant_message.tool_calls[i].id), \"name\": assistant_message.tool_calls[i].function.name, \"content\": f\"Password generated: {uuid.uuid4().hex[:8]}\"})\n", + " elif tool_call.function.name == \"orderUmbrella\":\n", + " msgs.append({\"role\": \"tool\", \"tool_call_id\": str(assistant_message.tool_calls[i].id), \"name\": assistant_message.tool_calls[i].function.name, \"content\": f\"Order placed. 
the price is {(i+1) * 10} dollars.\"})\n", + " elif tool_call.function.name == \"list_files\":\n", + " msgs.append({\"role\": \"tool\", \"tool_call_id\": str(assistant_message.tool_calls[i].id), \"name\": assistant_message.tool_calls[i].function.name, \"content\": f\"File list:\\nreport.docx\\ntask.txt\\nnotes.txt\"})\n", + " elif tool_call.function.name == \"get_file_size\":\n", + " msgs.append({\"role\": \"tool\", \"tool_call_id\": str(assistant_message.tool_calls[i].id), \"name\": assistant_message.tool_calls[i].function.name, \"content\": f\"the size is {(i+1) * 100} bytes.\"})\n", + " elif tool_call.function.name == \"addition\":\n", + " msgs.append({\n", + " \"role\": \"tool\",\n", + " \"name\": \"addition\",\n", + " \"content\": add(tool_call.function.arguments),\n", + " \"tool_call_id\": tool_call.id\n", + " })\n", + " elif tool_call.function.name == \"subtraction\":\n", + " msgs.append({\n", + " \"role\": \"tool\",\n", + " \"name\": \"subtraction\",\n", + " \"content\": sub(tool_call.function.arguments),\n", + " \"tool_call_id\": tool_call.id\n", + " })\n", + " elif tool_call.function.name == \"multiplication\":\n", + " msgs.append({\n", + " \"role\": \"tool\",\n", + " \"name\": \"multiplication\",\n", + " \"content\": mult(tool_call.function.arguments),\n", + " \"tool_call_id\": tool_call.id\n", + " })\n", + " elif tool_call.function.name == \"division\":\n", + " msgs.append({\n", + " \"role\": \"tool\",\n", + " \"name\": \"division\",\n", + " \"content\": div(tool_call.function.arguments),\n", + " \"tool_call_id\": tool_call.id\n", + " })\n", + " print(f\"Observation: {msgs[-1]}\")\n", + " \n", + " return msgs\n", + "\n", + "def run_completion(chat_method, user_query, msgs=[]):\n", + " system_prompt = \"You are a helpful assistant.\"\n", + " functions = [\n", + " # {\"type\": \"function\",\"function\":{\"name\":\"calculate_distance\",\"description\":\"Calculate the distance between two locations\",\"parameters\":{\"type\":\"object\",\"properties\":{\"origin\":{\"type\":\"string\",\"description\":\"The starting location\"},\"destination\":{\"type\":\"string\",\"description\":\"The destination location\"},\"mode\":{\"type\":\"string\",\"description\":\"The mode of transportation\"}},\"required\":[\"origin\",\"destination\",\"mode\"]}}},{\"type\": \"function\",\"function\":{\"name\":\"generate_password\",\"description\":\"Generate a random password\",\"parameters\":{\"type\":\"object\",\"properties\":{\"length\":{\"type\":\"integer\",\"description\":\"The length of the password\"}},\"required\":[\"length\"]}}},\n", + " {\n", + " \"type\": \"function\",\n", + " \"function\": {\n", + " \"name\": \"dummy\",\n", + " \"description\": \"just to say hi\",\n", + " \"parameters\": None,\n", + " }\n", + " },\n", + " {\n", + " \"type\": \"function\",\n", + " \"function\": {\n", + " \"name\": \"list_files\",\n", + " \"description\": \"List all files in a directory\",\n", + " \"parameters\": {\n", + " \"type\": \"object\",\n", + " \"properties\": {\n", + " \"directory\": {\n", + " \"type\": \"string\",\n", + " \"description\": \"the directory to list files from\"\n", + " }\n", + " },\n", + " \"required\": [\n", + " \"directory\"\n", + " ]\n", + " }\n", + " }\n", + " },\n", + " {\n", + " \"type\": \"function\",\n", + " \"function\": {\n", + " \"description\": \"Create a 3D model of an object with specified dimensions\",\n", + " \"name\": \"create_3d_model\",\n", + " \"parameters\": {\n", + " \"properties\": {\n", + " \"object_name\": {\n", + " \"description\": \"Name of the object to be 
modeled\",\n", + " \"type\": \"string\"\n", + " },\n", + " \"dimensions\": {\n", + " \"description\": \"Dimensions of the 3D object (length, width, height)\",\n", + " \"type\": \"object\",\n", + " \"properties\": {\n", + " \"length\": {\n", + " \"type\": \"number\"\n", + " },\n", + " \"width\": {\n", + " \"type\": \"number\"\n", + " },\n", + " \"height\": {\n", + " \"type\": \"number\"\n", + " }\n", + " },\n", + " \"required\": [\n", + " \"length\",\n", + " \"width\",\n", + " \"height\"\n", + " ]\n", + " }\n", + " },\n", + " \"required\": [\n", + " \"object_name\",\n", + " \"dimensions\"\n", + " ],\n", + " \"type\": \"object\"\n", + " }\n", + " }\n", + " },\n", + " {\n", + " \"type\": \"function\",\n", + " \"function\": {\n", + " \"description\": \"Get the latest insurance premium from a list of premiums.\",\n", + " \"name\": \"latest_insurance_premium\",\n", + " \"parameters\": {\n", + " \"properties\": {\n", + " \"premiums\": {\n", + " \"description\": \"List of insurance premiums\",\n", + " \"type\": \"array\",\n", + " \"items\": {\n", + " \"type\": \"number\"\n", + " }\n", + " }\n", + " },\n", + " \"required\": [\n", + " \"premiums\"\n", + " ],\n", + " \"type\": \"object\"\n", + " }\n", + " }\n", + " },\n", + " {\n", + " \"type\": \"function\",\n", + " \"function\": {\n", + " \"description\": \"Calculate insurance premium based on age and coverage\",\n", + " \"name\": \"calculate_insurance_premium\",\n", + " \"parameters\": {\n", + " \"properties\": {\n", + " \"age\": {\n", + " \"description\": \"Age of the person applying for insurance\",\n", + " \"type\": \"integer\"\n", + " },\n", + " \"coverage_type\": {\n", + " \"description\": \"Type of insurance coverage\",\n", + " \"type\": \"string\",\n", + " \"enum\": [\n", + " \"basic\",\n", + " \"standard\",\n", + " \"premium\"\n", + " ]\n", + " }\n", + " },\n", + " \"required\": [\n", + " \"age\",\n", + " \"coverage_type\"\n", + " ],\n", + " \"type\": \"object\"\n", + " }\n", + " }\n", + " },\n", + " {\n", + " \"type\": \"function\",\n", + " \"function\": {\n", + " \"name\": \"get_file_size\",\n", + " \"description\": \"Get the size of a file in bytes\",\n", + " \"parameters\": {\n", + " \"type\": \"object\",\n", + " \"properties\": {\n", + " \"filename\": {\n", + " \"type\": \"string\",\n", + " \"description\": \"the name of the file to get its size\"\n", + " }\n", + " },\n", + " \"required\": [\n", + " \"filename\"\n", + " ]\n", + " }\n", + " }\n", + " },\n", + " {\n", + " 'type': 'function',\n", + " 'function': {\n", + " 'name': 'addition',\n", + " 'description': \"Adds two numbers together\",\n", + " 'parameters': {\n", + " 'type': 'object',\n", + " 'properties': {\n", + " 'a': {\n", + " 'description': 'First number to add',\n", + " 'type': 'string'\n", + " },\n", + " 'b': {\n", + " 'description': 'Second number to add',\n", + " 'type': 'string'\n", + " }\n", + " },\n", + " 'required': []\n", + " }\n", + " }\n", + " },\n", + " {\n", + " 'type': 'function',\n", + " 'function': {\n", + " 'name': 'subtraction',\n", + " 'description': \"Subtracts two numbers\",\n", + " 'parameters': {\n", + " 'type': 'object',\n", + " 'properties': {\n", + " 'a': {\n", + " 'description': 'First number to be subtracted from',\n", + " 'type': 'string'\n", + " },\n", + " 'b': {\n", + " 'description': 'Number to subtract',\n", + " 'type': 'string'\n", + " }\n", + " },\n", + " 'required': []\n", + " }\n", + " }\n", + " },\n", + " {\n", + " 'type': 'function',\n", + " 'function': {\n", + " 'name': 'multiplication',\n", + " 'description': \"Multiply two numbers 
together\",\n", + " 'parameters': {\n", + " 'type': 'object',\n", + " 'properties': {\n", + " 'a': {\n", + " 'description': 'First number to multiply',\n", + " 'type': 'string'\n", + " },\n", + " 'b': {\n", + " 'description': 'Second number to multiply',\n", + " 'type': 'string'\n", + " }\n", + " },\n", + " 'required': []\n", + " }\n", + " }\n", + " },\n", + " {\n", + " 'type': 'function',\n", + " 'function': {\n", + " 'name': 'division',\n", + " 'description': \"Divide two numbers\",\n", + " 'parameters': {\n", + " 'type': 'object',\n", + " 'properties': {\n", + " 'a': {\n", + " 'description': 'First number to use as the dividend',\n", + " 'type': 'string'\n", + " },\n", + " 'b': {\n", + " 'description': 'Second number to use as the divisor',\n", + " 'type': 'string'\n", + " }\n", + " },\n", + " 'required': []\n", + " }\n", + " }\n", + " },\n", + " {\n", + " \"type\": \"function\",\n", + " \"function\": {\n", + " \"name\": \"getCurrentWeather\",\n", + " \"description\": \"Get the weather in location\",\n", + " \"parameters\": {\n", + " \"type\": \"object\",\n", + " \"properties\": {\n", + " \"location\": {\"type\": \"string\", \"description\": \"The city and state e.g. San Francisco, CA\"},\n", + " \"unit\": {\"type\": \"string\", \"enum\": [\"c\", \"f\"]}\n", + " },\n", + " \"required\": [\"location\"]\n", + " }\n", + " }\n", + " },\n", + " { \"type\": \"function\",\n", + " \"function\":\n", + " {\n", + " \"name\": \"orderUmbrella\",\n", + " \"description\": \"Do this to help user to order an umbrella online\", \n", + " \"parameters\": {\n", + " \"type\": \"object\",\n", + " \"properties\": {\n", + " \"number_to_buy\": {\n", + " \"type\": \"integer\",\n", + " \"description\": \"the amount of umbrellas to buy\"\n", + " }\n", + " },\n", + " \"required\": [\n", + " \"number_to_buy\"\n", + " ]\n", + " }\n", + " }},\n", + " {\"type\": \"function\",\"function\":{\"name\":\"calculate_distance\",\"description\":\"Calculate the distance between two locations\",\"parameters\":{\"type\":\"object\",\"properties\":{\"origin\":{\"type\":\"string\",\"description\":\"The starting location\"},\"destination\":{\"type\":\"string\",\"description\":\"The destination location\"},\"mode\":{\"type\":\"string\",\"description\":\"The mode of transportation\"}},\"required\":[\"origin\",\"destination\",\"mode\"]}}},{\"type\": \"function\",\"function\":{\"name\":\"generate_password\",\"description\":\"Generate a random password\",\"parameters\":{\"type\":\"object\",\"properties\":{\"length\":{\"type\":\"integer\",\"description\":\"The length of the password\"}},\"required\":[\"length\"]}}}\n", + " ]\n", + " # functions = [{\"type\": \"function\",\"function\":{\"name\":\"calculate_distance\",\"description\":\"Calculate the distance between two locations\",\"parameters\":{\"type\":\"object\",\"properties\":{\"origin\":{\"type\":\"string\",\"description\":\"The starting location\"},\"destination\":{\"type\":\"string\",\"description\":\"The destination location\"},\"mode\":{\"type\":\"string\",\"description\":\"The mode of transportation\"}},\"required\":[\"origin\",\"destination\",\"mode\"]}}},{\"type\": \"function\",\"function\":{\"name\":\"generate_password\",\"description\":\"Generate a random password\",\"parameters\":{\"type\":\"object\",\"properties\":{\"length\":{\"type\":\"integer\",\"description\":\"The length of the password\"}},\"required\":[\"length\"]}}}]\n", + "\n", + " if not msgs or len(msgs) == 0:\n", + " msgs = [{\"role\": \"system\", \"content\":system_prompt} ,{\"role\": \"user\", \"content\": 
user_query}]\n", + " else:\n", + " msgs.append({\"role\": \"user\", \"content\": user_query})\n", + "\n", + " res = chat_method(model=\"gpt-4-0125-preview\", functions=functions, msgs=msgs)\n", + " res_next = res\n", + " if res_next.message.content and len(res_next.message.content) > 0:\n", + " print(\"\\n[AI response]:\\n\", res_next.message.content)\n", + " else:\n", + " print(\"\\n[AI calling functions]:\")\n", + " for tool_call in res_next.message.tool_calls:\n", + " print(f\"Tool Call: {tool_call.function}\")\n", + " l = 0\n", + " while res_next.message.tool_calls and len(res_next.message.tool_calls) > 0:\n", + " msgs = insert_tool_response(res_next, msgs)\n", + " print(msgs)\n", + " res_next = chat_method(model=\"gpt-4-0125-preview\", functions=functions, msgs=msgs)\n", + " # for m in msgs:\n", + " # print(m)\n", + " print(f\"Loop {l}\")\n", + " if res_next.message.content and len(res_next.message.content) > 0:\n", + " print(\"\\n[AI response]:\\n\", res_next.message.content)\n", + " else:\n", + " print(\"\\n[AI calling functions]:\")\n", + " for tool_call in res_next.message.tool_calls:\n", + " print(f\"Tool Call: {tool_call.function}\")\n", + " l += 1\n", + " " + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Multi + Parallel Function Call" + ] + }, + { + "cell_type": "code", + "execution_count": 110, + "metadata": {}, + "outputs": [], + "source": [ + "import openai\n", + "local_api_key = \"sk-\"\n", + "local_base_url = \"http://localhost:1234/v1/\"" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Pointing to URL: http://localhost:8019/v1/\n", + "\n", + "[AI calling functions]:\n", + "Tool Call: Function(arguments='{\"origin\":\"San Francisco\",\"destination\":\"Cupertino\",\"mode\":\"driving\"}', name='calculate_distance')\n", + "Observation: {'role': 'tool', 'tool_call_id': '8eefe744', 'name': 'calculate_distance', 'content': 'Distance is 50 miles.'}\n", + "[{'role': 'system', 'content': 'You are a helpful assistant.'}, {'role': 'user', 'content': 'What is the distance between San Francisco and Cupertino by driving and by air from both directions?'}, {'role': 'assistant', 'tool_calls': [{'id': '8eefe744', 'function': {'name': 'calculate_distance', 'arguments': '{\"origin\":\"San Francisco\",\"destination\":\"Cupertino\",\"mode\":\"driving\"}'}, 'type': 'function'}]}, {'role': 'tool', 'tool_call_id': '8eefe744', 'name': 'calculate_distance', 'content': 'Distance is 50 miles.'}]\n", + "Pointing to URL: http://localhost:8019/v1/\n", + "Loop 0\n", + "\n", + "[AI calling functions]:\n", + "Tool Call: Function(arguments='{\"origin\":\"San Francisco\",\"destination\":\"Cupertino\",\"mode\":\"air\"}', name='calculate_distance')\n", + "Observation: {'role': 'tool', 'tool_call_id': '8a948772', 'name': 'calculate_distance', 'content': 'Distance is 50 miles.'}\n", + "[{'role': 'system', 'content': 'You are a helpful assistant.'}, {'role': 'user', 'content': 'What is the distance between San Francisco and Cupertino by driving and by air from both directions?'}, {'role': 'assistant', 'tool_calls': [{'id': '8eefe744', 'function': {'name': 'calculate_distance', 'arguments': '{\"origin\":\"San Francisco\",\"destination\":\"Cupertino\",\"mode\":\"driving\"}'}, 'type': 'function'}]}, {'role': 'tool', 'tool_call_id': '8eefe744', 'name': 'calculate_distance', 'content': 'Distance is 50 miles.'}, {'role': 'assistant', 'tool_calls': [{'id': '8a948772', 'function': 
{'name': 'calculate_distance', 'arguments': '{\"origin\":\"San Francisco\",\"destination\":\"Cupertino\",\"mode\":\"air\"}'}, 'type': 'function'}]}, {'role': 'tool', 'tool_call_id': '8a948772', 'name': 'calculate_distance', 'content': 'Distance is 50 miles.'}]\n", + "Pointing to URL: http://localhost:8019/v1/\n", + "Loop 1\n", + "\n", + "[AI response]:\n", + " The driving distance between San Francisco and Cupertino is 50 miles. The air distance from San Francisco to Cupertino is also 50 miles.\n" + ] + } + ], + "source": [ + "\n", + "get_mistral_rubra_response = partial(get_oai_response, api_key=local_api_key, base_url=local_base_url)\n", + "\n", + "user_query = \"What is the distance between San Francisco and Cupertino by driving and by air from both directions?\"\n", + "# user_query = \"What is four plus six? What is the result of that plus 2? Take the result and multiply by 5 and then divide by two\"\n", + "# user_query = \"what's the distance between SF and NYC? Use the result value to multiply by 8, and then divide by 2, and then minus 30\"\n", + "msgs = run_completion(get_mistral_rubra_response, user_query)\n" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Pointing to URL: http://localhost:8019/v1/\n", + "\n", + "[AI calling functions]:\n", + "Tool Call: Function(arguments='{\"object_name\":\"rectangle\",\"dimensions\":{\"length\":8,\"width\":5,\"height\":4}}', name='create_3d_model')\n", + "Observation: {'role': 'assistant', 'tool_calls': [{'id': '0ee56562', 'function': {'name': 'create_3d_model', 'arguments': '{\"object_name\":\"rectangle\",\"dimensions\":{\"length\":8,\"width\":5,\"height\":4}}'}, 'type': 'function'}]}\n", + "[{'role': 'system', 'content': 'You are a helpful assistant.'}, {'role': 'user', 'content': 'create a 3d model with length 8, width 5, and height 4'}, {'role': 'assistant', 'tool_calls': [{'id': '0ee56562', 'function': {'name': 'create_3d_model', 'arguments': '{\"object_name\":\"rectangle\",\"dimensions\":{\"length\":8,\"width\":5,\"height\":4}}'}, 'type': 'function'}]}]\n", + "Pointing to URL: http://localhost:8019/v1/\n", + "Loop 0\n", + "\n", + "[AI response]:\n", + " A 3D model of a rectangle with dimensions 8x5x4 has been successfully created.\n" + ] + } + ], + "source": [ + "user_query0 = \"create a 3d model with length 8, width 5, and height 4\"\n", + "# user_query0 = \"what's the latest insurance premium of 1, 3 ,5\"\n", + "msgs = run_completion(get_mistral_rubra_response, user_query0)" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Pointing to URL: http://localhost:8019/v1/\n", + "\n", + "[AI calling functions]:\n", + "Tool Call: Function(arguments='{\"number_to_buy\":3}', name='orderUmbrella')\n", + "Observation: {'role': 'tool', 'tool_call_id': '19e16a34', 'name': 'orderUmbrella', 'content': 'Order placed. the price is 10 dollars.'}\n", + "[{'role': 'system', 'content': 'You are a helpful assistant.'}, {'role': 'user', 'content': 'now order 3 umbrellas for me and generate a password of length 8'}, {'role': 'assistant', 'tool_calls': [{'id': '19e16a34', 'function': {'name': 'orderUmbrella', 'arguments': '{\"number_to_buy\":3}'}, 'type': 'function'}]}, {'role': 'tool', 'tool_call_id': '19e16a34', 'name': 'orderUmbrella', 'content': 'Order placed. 
the price is 10 dollars.'}]\n", + "Pointing to URL: http://localhost:8019/v1/\n", + "Loop 0\n", + "\n", + "[AI calling functions]:\n", + "Tool Call: Function(arguments='{\"length\":8}', name='generate_password')\n", + "Observation: {'role': 'tool', 'tool_call_id': 'c14327a0', 'name': 'generate_password', 'content': 'Password generated: 85f87d3d'}\n", + "[{'role': 'system', 'content': 'You are a helpful assistant.'}, {'role': 'user', 'content': 'now order 3 umbrellas for me and generate a password of length 8'}, {'role': 'assistant', 'tool_calls': [{'id': '19e16a34', 'function': {'name': 'orderUmbrella', 'arguments': '{\"number_to_buy\":3}'}, 'type': 'function'}]}, {'role': 'tool', 'tool_call_id': '19e16a34', 'name': 'orderUmbrella', 'content': 'Order placed. the price is 10 dollars.'}, {'role': 'assistant', 'tool_calls': [{'id': 'c14327a0', 'function': {'name': 'generate_password', 'arguments': '{\"length\":8}'}, 'type': 'function'}]}, {'role': 'tool', 'tool_call_id': 'c14327a0', 'name': 'generate_password', 'content': 'Password generated: 85f87d3d'}]\n", + "Pointing to URL: http://localhost:8019/v1/\n", + "Loop 1\n", + "\n", + "[AI response]:\n", + " Your order for 3 umbrellas has been placed at 10 dollars each. A secure password '85f87d3d' has also been generated for you.\n" + ] + } + ], + "source": [ + "user_query2 = \"now order 3 umbrellas for me and generate a password of length 8\"\n", + "msgs = run_completion(get_mistral_rubra_response, user_query2, msgs)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Simple Math Chaining" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Pointing to URL: http://localhost:8019/v1/\n", + "\n", + "[AI calling functions]:\n", + "Tool Call: Function(arguments='{\"a\":\"4\",\"b\":\"6\"}', name='addition')\n", + "Observation: {'role': 'tool', 'name': 'addition', 'content': '10.0', 'tool_call_id': '89cdcf84'}\n", + "[{'role': 'system', 'content': 'You are a helpful assistant.'}, {'role': 'user', 'content': 'User tool to help me : What is four plus six? What is the result of that plus 2? Take the result and multiply by 5 and then divide by two'}, {'role': 'assistant', 'tool_calls': [{'id': '89cdcf84', 'function': {'name': 'addition', 'arguments': '{\"a\":\"4\",\"b\":\"6\"}'}, 'type': 'function'}]}, {'role': 'tool', 'name': 'addition', 'content': '10.0', 'tool_call_id': '89cdcf84'}]\n", + "Pointing to URL: http://localhost:8019/v1/\n", + "Loop 0\n", + "\n", + "[AI calling functions]:\n", + "Tool Call: Function(arguments='{\"a\":\"10.0\",\"b\":\"5\"}', name='multiplication')\n", + "Observation: {'role': 'tool', 'name': 'multiplication', 'content': '50.0', 'tool_call_id': '890a70de'}\n", + "[{'role': 'system', 'content': 'You are a helpful assistant.'}, {'role': 'user', 'content': 'User tool to help me : What is four plus six? What is the result of that plus 2? 
Take the result and multiply by 5 and then divide by two'}, {'role': 'assistant', 'tool_calls': [{'id': '89cdcf84', 'function': {'name': 'addition', 'arguments': '{\"a\":\"4\",\"b\":\"6\"}'}, 'type': 'function'}]}, {'role': 'tool', 'name': 'addition', 'content': '10.0', 'tool_call_id': '89cdcf84'}, {'role': 'assistant', 'tool_calls': [{'id': '890a70de', 'function': {'name': 'multiplication', 'arguments': '{\"a\":\"10.0\",\"b\":\"5\"}'}, 'type': 'function'}]}, {'role': 'tool', 'name': 'multiplication', 'content': '50.0', 'tool_call_id': '890a70de'}]\n", + "Pointing to URL: http://localhost:8019/v1/\n", + "Loop 1\n", + "\n", + "[AI calling functions]:\n", + "Tool Call: Function(arguments='{\"a\":\"50.0\",\"b\":\"2\"}', name='division')\n", + "Observation: {'role': 'tool', 'name': 'division', 'content': '25.0', 'tool_call_id': '6519faf3'}\n", + "[{'role': 'system', 'content': 'You are a helpful assistant.'}, {'role': 'user', 'content': 'User tool to help me : What is four plus six? What is the result of that plus 2? Take the result and multiply by 5 and then divide by two'}, {'role': 'assistant', 'tool_calls': [{'id': '89cdcf84', 'function': {'name': 'addition', 'arguments': '{\"a\":\"4\",\"b\":\"6\"}'}, 'type': 'function'}]}, {'role': 'tool', 'name': 'addition', 'content': '10.0', 'tool_call_id': '89cdcf84'}, {'role': 'assistant', 'tool_calls': [{'id': '890a70de', 'function': {'name': 'multiplication', 'arguments': '{\"a\":\"10.0\",\"b\":\"5\"}'}, 'type': 'function'}]}, {'role': 'tool', 'name': 'multiplication', 'content': '50.0', 'tool_call_id': '890a70de'}, {'role': 'assistant', 'tool_calls': [{'id': '6519faf3', 'function': {'name': 'division', 'arguments': '{\"a\":\"50.0\",\"b\":\"2\"}'}, 'type': 'function'}]}, {'role': 'tool', 'name': 'division', 'content': '25.0', 'tool_call_id': '6519faf3'}]\n", + "Pointing to URL: http://localhost:8019/v1/\n", + "Loop 2\n", + "\n", + "[AI response]:\n", + " The result of adding 4 and 6 is 10. After multiplying this by 5, the result is 50. Finally, dividing this by 2 gives 25.\n" + ] + } + ], + "source": [ + "user_query3 = \"User tool to help me : What is four plus six? What is the result of that plus 2? 
Take the result and multiply by 5 and then divide by two\"\n", + "\n", + "\n", + "msgs = run_completion(get_mistral_rubra_response, user_query3)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Condition" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Pointing to URL: http://localhost:8019/v1/\n", + "\n", + "[AI calling functions]:\n", + "Tool Call: Function(arguments='{\"location\":\"Boston\",\"unit\":\"f\"}', name='getCurrentWeather')\n", + "\n", + "Observation: {'role': 'tool', 'tool_call_id': 'de8f0d7b', 'name': 'getCurrentWeather', 'content': 'temprature is 56 degree'}\n", + "[{'role': 'system', 'content': 'You are a helpful assistant.'}, {'role': 'user', 'content': \"check the weather in boston, calculate the distance from boston to NYC for me only if it's less than 100 degrees Fahrenheit\"}, {'role': 'assistant', 'tool_calls': [{'id': 'de8f0d7b', 'function': {'name': 'getCurrentWeather', 'arguments': '{\"location\":\"Boston\",\"unit\":\"f\"}'}, 'type': 'function'}]}, {'role': 'tool', 'tool_call_id': 'de8f0d7b', 'name': 'getCurrentWeather', 'content': 'temprature is 56 degree'}]\n", + "Pointing to URL: http://localhost:8019/v1/\n", + "Loop 0\n", + "\n", + "[AI calling functions]:\n", + "Tool Call: Function(arguments='{\"origin\":\"Boston\",\"destination\":\"New York City\",\"mode\":\"car\"}', name='calculate_distance')\n", + "Observation: {'role': 'tool', 'tool_call_id': '361acf3f', 'name': 'calculate_distance', 'content': 'Distance is 50 miles.'}\n", + "[{'role': 'system', 'content': 'You are a helpful assistant.'}, {'role': 'user', 'content': \"check the weather in boston, calculate the distance from boston to NYC for me only if it's less than 100 degrees Fahrenheit\"}, {'role': 'assistant', 'tool_calls': [{'id': 'de8f0d7b', 'function': {'name': 'getCurrentWeather', 'arguments': '{\"location\":\"Boston\",\"unit\":\"f\"}'}, 'type': 'function'}]}, {'role': 'tool', 'tool_call_id': 'de8f0d7b', 'name': 'getCurrentWeather', 'content': 'temprature is 56 degree'}, {'role': 'assistant', 'tool_calls': [{'id': '361acf3f', 'function': {'name': 'calculate_distance', 'arguments': '{\"origin\":\"Boston\",\"destination\":\"New York City\",\"mode\":\"car\"}'}, 'type': 'function'}]}, {'role': 'tool', 'tool_call_id': '361acf3f', 'name': 'calculate_distance', 'content': 'Distance is 50 miles.'}]\n", + "Pointing to URL: http://localhost:8019/v1/\n", + "Loop 1\n", + "\n", + "[AI calling functions]:\n", + "Tool Call: Function(arguments='{\"a\":\"56\",\"b\":\"100\"}', name='subtraction')\n", + "Observation: {'role': 'tool', 'name': 'subtraction', 'content': '-44.0', 'tool_call_id': '7a78eb34'}\n", + "[{'role': 'system', 'content': 'You are a helpful assistant.'}, {'role': 'user', 'content': \"check the weather in boston, calculate the distance from boston to NYC for me only if it's less than 100 degrees Fahrenheit\"}, {'role': 'assistant', 'tool_calls': [{'id': 'de8f0d7b', 'function': {'name': 'getCurrentWeather', 'arguments': '{\"location\":\"Boston\",\"unit\":\"f\"}'}, 'type': 'function'}]}, {'role': 'tool', 'tool_call_id': 'de8f0d7b', 'name': 'getCurrentWeather', 'content': 'temprature is 56 degree'}, {'role': 'assistant', 'tool_calls': [{'id': '361acf3f', 'function': {'name': 'calculate_distance', 'arguments': '{\"origin\":\"Boston\",\"destination\":\"New York City\",\"mode\":\"car\"}'}, 'type': 'function'}]}, {'role': 'tool', 'tool_call_id': '361acf3f', 'name': 
'calculate_distance', 'content': 'Distance is 50 miles.'}, {'role': 'assistant', 'tool_calls': [{'id': '7a78eb34', 'function': {'name': 'subtraction', 'arguments': '{\"a\":\"56\",\"b\":\"100\"}'}, 'type': 'function'}]}, {'role': 'tool', 'name': 'subtraction', 'content': '-44.0', 'tool_call_id': '7a78eb34'}]\n", + "Pointing to URL: http://localhost:8019/v1/\n", + "Loop 2\n", + "\n", + "[AI response]:\n", + " The weather in Boston is 56 degrees Fahrenheit. The distance from Boston to New York City is 50 miles. The difference between 56 and 100 is -44.0 degrees Fahrenheit.\n" + ] + } + ], + "source": [ + "user_query4 = \"check the weather in boston, calculate the distance from boston to NYC for me only if it's less than 100 degrees Fahrenheit\"\n", + "msgs = run_completion(get_mistral_rubra_response, user_query4)" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Pointing to URL: http://localhost:8019/v1/\n", + "\n", + "[AI calling functions]:\n", + "Tool Call: Function(arguments='{\"location\":\"Boston\",\"unit\":\"f\"}', name='getCurrentWeather')\n", + "\n", + "Observation: {'role': 'tool', 'tool_call_id': 'defe6bc9', 'name': 'getCurrentWeather', 'content': 'temprature is 56 degree'}\n", + "[{'role': 'system', 'content': 'You are a helpful assistant.'}, {'role': 'user', 'content': \"check the weather in boston, calculate the distance from boston to NYC for me only if it's greater than 100 degrees Fahrenheit\"}, {'role': 'assistant', 'tool_calls': [{'id': 'defe6bc9', 'function': {'name': 'getCurrentWeather', 'arguments': '{\"location\":\"Boston\",\"unit\":\"f\"}'}, 'type': 'function'}]}, {'role': 'tool', 'tool_call_id': 'defe6bc9', 'name': 'getCurrentWeather', 'content': 'temprature is 56 degree'}]\n", + "Pointing to URL: http://localhost:8019/v1/\n", + "Loop 0\n", + "\n", + "[AI calling functions]:\n", + "Tool Call: Function(arguments='{\"origin\":\"Boston\",\"destination\":\"New York City\",\"mode\":\"car\"}', name='calculate_distance')\n", + "Observation: {'role': 'tool', 'tool_call_id': '6b7dec34', 'name': 'calculate_distance', 'content': 'Distance is 50 miles.'}\n", + "[{'role': 'system', 'content': 'You are a helpful assistant.'}, {'role': 'user', 'content': \"check the weather in boston, calculate the distance from boston to NYC for me only if it's greater than 100 degrees Fahrenheit\"}, {'role': 'assistant', 'tool_calls': [{'id': 'defe6bc9', 'function': {'name': 'getCurrentWeather', 'arguments': '{\"location\":\"Boston\",\"unit\":\"f\"}'}, 'type': 'function'}]}, {'role': 'tool', 'tool_call_id': 'defe6bc9', 'name': 'getCurrentWeather', 'content': 'temprature is 56 degree'}, {'role': 'assistant', 'tool_calls': [{'id': '6b7dec34', 'function': {'name': 'calculate_distance', 'arguments': '{\"origin\":\"Boston\",\"destination\":\"New York City\",\"mode\":\"car\"}'}, 'type': 'function'}]}, {'role': 'tool', 'tool_call_id': '6b7dec34', 'name': 'calculate_distance', 'content': 'Distance is 50 miles.'}]\n", + "Pointing to URL: http://localhost:8019/v1/\n", + "Loop 1\n", + "\n", + "[AI calling functions]:\n", + "Tool Call: Function(arguments='{\"a\":\"56\",\"b\":\"100\"}', name='subtraction')\n", + "Observation: {'role': 'tool', 'name': 'subtraction', 'content': '-44.0', 'tool_call_id': 'a53ba368'}\n", + "[{'role': 'system', 'content': 'You are a helpful assistant.'}, {'role': 'user', 'content': \"check the weather in boston, calculate the distance from boston to NYC for me only if it's 
greater than 100 degrees Fahrenheit\"}, {'role': 'assistant', 'tool_calls': [{'id': 'defe6bc9', 'function': {'name': 'getCurrentWeather', 'arguments': '{\"location\":\"Boston\",\"unit\":\"f\"}'}, 'type': 'function'}]}, {'role': 'tool', 'tool_call_id': 'defe6bc9', 'name': 'getCurrentWeather', 'content': 'temprature is 56 degree'}, {'role': 'assistant', 'tool_calls': [{'id': '6b7dec34', 'function': {'name': 'calculate_distance', 'arguments': '{\"origin\":\"Boston\",\"destination\":\"New York City\",\"mode\":\"car\"}'}, 'type': 'function'}]}, {'role': 'tool', 'tool_call_id': '6b7dec34', 'name': 'calculate_distance', 'content': 'Distance is 50 miles.'}, {'role': 'assistant', 'tool_calls': [{'id': 'a53ba368', 'function': {'name': 'subtraction', 'arguments': '{\"a\":\"56\",\"b\":\"100\"}'}, 'type': 'function'}]}, {'role': 'tool', 'name': 'subtraction', 'content': '-44.0', 'tool_call_id': 'a53ba368'}]\n", + "Pointing to URL: http://localhost:8019/v1/\n", + "Loop 2\n", + "\n", + "[AI response]:\n", + " The weather in Boston is 56 degrees Fahrenheit. The distance from Boston to New York City is 50 miles. The difference between 56 and 100 is -44.0 degrees Fahrenheit.\n" + ] + } + ], + "source": [ + "user_query5 = \"check the weather in boston, calculate the distance from boston to NYC for me only if it's greater than 100 degrees Fahrenheit\"\n", + "msgs = run_completion(get_mistral_rubra_response, user_query5)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# dependency" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Pointing to URL: http://localhost:8019/v1/\n", + "\n", + "[AI calling functions]:\n", + "Tool Call: Function(arguments='{\"directory\":\"documents\"}', name='list_files')\n", + "Observation: {'role': 'tool', 'tool_call_id': 'e8b5f43a', 'name': 'list_files', 'content': 'File list:\\nreport.docx\\ntask.txt\\nnotes.txt'}\n", + "[{'role': 'system', 'content': 'You are a helpful assistant.'}, {'role': 'user', 'content': \"check the size of all files in the 'documents' directory.\"}, {'role': 'assistant', 'tool_calls': [{'id': 'e8b5f43a', 'function': {'name': 'list_files', 'arguments': '{\"directory\":\"documents\"}'}, 'type': 'function'}]}, {'role': 'tool', 'tool_call_id': 'e8b5f43a', 'name': 'list_files', 'content': 'File list:\\nreport.docx\\ntask.txt\\nnotes.txt'}]\n", + "Pointing to URL: http://localhost:8019/v1/\n", + "Loop 0\n", + "\n", + "[AI response]:\n", + " The files in the 'documents' directory are:\n", + "- report.docx\n", + "- task.txt\n", + "- notes.txt\n" + ] + } + ], + "source": [ + "user_query6 = \"check the size of all files in the 'documents' directory.\"\n", + "msgs = run_completion(get_mistral_rubra_response, user_query6)" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "py310", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.10.12" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +}
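
For reference, the tool-call wire format this patch introduces — starttoolcall{...}endtoolcall spans in the raw completion — can also be parsed client-side in a few lines. The Python sketch below mirrors the extraction logic of rubra_fc_json_tool_extractor in examples/server/function-call-parser.hpp; it is an illustrative sketch, not part of the patch. It assumes the JSON between the markers is well formed (the server-side code additionally shells out to jsonrepair.ts to repair malformed JSON), and the 8-character hex id merely imitates generate_uuid().

    # Minimal client-side sketch of the starttoolcall/endtoolcall extraction
    # performed server-side by rubra_fc_json_tool_extractor.
    import json
    import uuid

    def extract_tool_calls(output: str) -> list[dict]:
        calls = []
        start = 0
        while True:
            end = output.find("endtoolcall", start)
            if end == -1:
                break
            segment = output[start:end]
            pos = segment.find("starttoolcall")
            if pos != -1:
                payload = segment[pos + len("starttoolcall"):]
                try:
                    fc = json.loads(payload)  # server side falls back to jsonrepair here
                except json.JSONDecodeError:
                    fc = None
                if fc is not None:
                    args = fc.get("arguments", {})
                    if isinstance(args, str):
                        args = json.loads(args)
                    # Shape matches the OpenAI-style tool_calls emitted by
                    # format_final_response_oaicompat in examples/server/utils.hpp.
                    calls.append({
                        "id": uuid.uuid4().hex[:8],
                        "type": "function",
                        "function": {"name": fc.get("name"),
                                     "arguments": json.dumps(args)},
                    })
            start = end + len("endtoolcall")
        return calls

    # Example:
    # extract_tool_calls('starttoolcall{"name": "getCurrentWeather", '
    #                    '"arguments": {"location": "Boston, MA"}}endtoolcall')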