Skip to content

Commit

Permalink
fix(groq): Feature/groq response format improvements (#6754)
Browse files Browse the repository at this point in the history
Co-authored-by: jacoblee93 <jacoblee93@gmail.com>
  • Loading branch information
allohamora and jacoblee93 authored Sep 18, 2024
1 parent 3bb7bce commit ccd0b4b
Show file tree
Hide file tree
Showing 2 changed files with 52 additions and 7 deletions.
56 changes: 50 additions & 6 deletions docs/core_docs/docs/integrations/chat/groq.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -145,11 +145,11 @@
],
"source": [
"const aiMsg = await llm.invoke([\n",
" [\n",
" \"system\",\n",
" \"You are a helpful assistant that translates English to French. Translate the user sentence.\",\n",
" ],\n",
" [\"human\", \"I love programming.\"],\n",
" {\n",
" role: \"system\",\n",
" content: \"You are a helpful assistant that translates English to French. Translate the user sentence.\",\n",
" },\n",
" { role: \"user\", content: \"I love programming.\" },\n",
"])\n",
"aiMsg"
]
Expand All @@ -174,6 +174,50 @@
"console.log(aiMsg.content)"
]
},
{
"cell_type": "markdown",
"id": "ce0414fe",
"metadata": {},
"source": [
"## JSON invocation"
]
},
{
"cell_type": "code",
"execution_count": 4,
"id": "3f0a7a2a",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"{\n",
" aiInvokeMsgContent: '{\\n\"result\": 6\\n}',\n",
" aiBindMsg: '{\\n\"result\": 6\\n}'\n",
"}\n"
]
}
],
"source": [
"const messages = [\n",
" {\n",
" role: \"system\",\n",
"    content: \"You are a math tutor that handles math exercises and produces output as JSON in the format { result: number }.\",\n",
" },\n",
" { role: \"user\", content: \"2 + 2 * 2\" },\n",
"];\n",
"\n",
"const aiInvokeMsg = await llm.invoke(messages, { response_format: { type: \"json_object\" } });\n",
"\n",
"// If you don't want to pass response_format in every invoke call, you can bind it to the model instance\n",
"const llmWithResponseFormat = llm.bind({ response_format: { type: \"json_object\" } });\n",
"const aiBindMsg = await llmWithResponseFormat.invoke(messages);\n",
"\n",
"// The two responses are identical\n",
"console.log({ aiInvokeMsgContent: aiInvokeMsg.content, aiBindMsg: aiBindMsg.content });"
]
},
{
"cell_type": "markdown",
"id": "18e2bfc0-7e78-4528-a73f-499ac150dca8",
Expand All @@ -186,7 +230,7 @@
},
{
"cell_type": "code",
"execution_count": 4,
"execution_count": 5,
"id": "e197d1d7-a070-4c96-9f8a-a0e86d046e0b",
"metadata": {},
"outputs": [
Expand Down
3 changes: 2 additions & 1 deletion libs/langchain-groq/src/chat_models.ts
Original file line number Diff line number Diff line change
Expand Up @@ -42,6 +42,7 @@ import {
ChatCompletionCreateParams,
ChatCompletionCreateParamsNonStreaming,
ChatCompletionCreateParamsStreaming,
CompletionCreateParams,
} from "groq-sdk/resources/chat/completions";
import {
Runnable,
Expand Down Expand Up @@ -73,7 +74,7 @@ export interface ChatGroqCallOptions extends BaseChatModelCallOptions {
headers?: Record<string, string>;
tools?: ChatGroqToolType[];
tool_choice?: OpenAIClient.ChatCompletionToolChoiceOption | "any" | string;
response_format?: { type: "json_object" };
response_format?: CompletionCreateParams.ResponseFormat;
}

export interface ChatGroqInput extends BaseChatModelParams {
Expand Down

0 comments on commit ccd0b4b

Please sign in to comment.