Commit a85f662
Add payload formatting utils to Anthropic and OpenAI
1 parent: 584ed7e
Showing 7 changed files with 148 additions and 2 deletions.
@@ -1,2 +1,2 @@
export * from "./chat_models.js";
export { _convertMessagesToAnthropicPayload } from "./utils/message_inputs.js";
export { convertPromptToAnthropic } from "./utils/prompts.js";
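With these entrypoint re-exports in place, the converter can be imported without reaching into package internals. A minimal sketch of the intended call pattern, assuming this index ships as the "@langchain/anthropic" package named in the docstrings below and reusing the hub prompt from the tests in this commit:

import { pull } from "langchain/hub";
import { convertPromptToAnthropic } from "@langchain/anthropic";

// Pull a prompt from the LangChain Hub and fill in its input variables.
const prompt = await pull("jacob/joke-generator");
const formattedPrompt = await prompt.invoke({ topic: "cats" });

// Split the formatted prompt into the system string and message list
// that Anthropic's Messages API expects.
const { system, messages } = convertPromptToAnthropic(formattedPrompt);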
@@ -0,0 +1,25 @@
import Anthropic from "@anthropic-ai/sdk";
import { pull } from "langchain/hub";

import { convertPromptToAnthropic } from "../utils/prompts.js";

test("basic traceable implementation", async () => {
  const prompt = await pull("jacob/joke-generator");
  const formattedPrompt = await prompt.invoke({
    topic: "cats",
  });

  const { system, messages } = convertPromptToAnthropic(formattedPrompt);

  const anthropicClient = new Anthropic();

  const anthropicResponse = await anthropicClient.messages.create({
    model: "claude-3-haiku-20240307",
    system,
    messages: messages,
    max_tokens: 1024,
    stream: false,
  });

  expect(anthropicResponse.content).toBeDefined();
});
@@ -0,0 +1,51 @@
import type { BasePromptValue } from "@langchain/core/prompt_values";
import Anthropic from "@anthropic-ai/sdk";

import { _convertMessagesToAnthropicPayload } from "./message_inputs.js";

/**
 * Convert a formatted LangChain prompt (e.g. pulled from the hub) into
 * a format expected by Anthropic's JS SDK.
 *
 * Requires the "@langchain/anthropic" package to be installed in addition
 * to the Anthropic SDK.
 *
 * @example
 * ```ts
 * import { convertPromptToAnthropic } from "@langchain/anthropic";
 * import { pull } from "langchain/hub";
 *
 * import Anthropic from '@anthropic-ai/sdk';
 *
 * const prompt = await pull("jacob/joke-generator");
 * const formattedPrompt = await prompt.invoke({
 *   topic: "cats",
 * });
 *
 * const { system, messages } = convertPromptToAnthropic(formattedPrompt);
 *
 * const anthropicClient = new Anthropic({
 *   apiKey: 'your_api_key',
 * });
 *
 * const anthropicResponse = await anthropicClient.messages.create({
 *   model: "claude-3-5-sonnet-20240620",
 *   max_tokens: 1024,
 *   stream: false,
 *   system,
 *   messages,
 * });
 * ```
 * @param formattedPrompt
 * @returns A partial Anthropic payload.
 */
export function convertPromptToAnthropic(
  formattedPrompt: BasePromptValue
): Anthropic.Messages.MessageCreateParams {
  const messages = formattedPrompt.toChatMessages();
  const anthropicBody = _convertMessagesToAnthropicPayload(messages);
  if (anthropicBody.messages === undefined) {
    anthropicBody.messages = [];
  }
  return anthropicBody;
}
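The docstring example issues a non-streaming request, but the same converted payload also works for streaming, since only request-level fields change. A hedged sketch, again assuming the helper is imported from "@langchain/anthropic" and reusing the hub prompt from the test above:

import Anthropic from "@anthropic-ai/sdk";
import { pull } from "langchain/hub";
import { convertPromptToAnthropic } from "@langchain/anthropic";

const prompt = await pull("jacob/joke-generator");
const formattedPrompt = await prompt.invoke({ topic: "cats" });
const { system, messages } = convertPromptToAnthropic(formattedPrompt);

const anthropicClient = new Anthropic();

// Same converted payload, but with stream: true so the SDK returns an
// async iterable of message events instead of a single Message.
const stream = await anthropicClient.messages.create({
  model: "claude-3-haiku-20240307",
  max_tokens: 1024,
  system,
  messages,
  stream: true,
});

for await (const event of stream) {
  if (event.type === "content_block_delta" && event.delta.type === "text_delta") {
    process.stdout.write(event.delta.text);
  }
}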
@@ -0,0 +1,22 @@
import OpenAI from "openai";
import { pull } from "langchain/hub";

import { convertPromptToOpenAI } from "../utils/prompts.js";

test("basic traceable implementation", async () => {
  const prompt = await pull("jacob/joke-generator");
  const formattedPrompt = await prompt.invoke({
    topic: "cats",
  });

  const { messages } = convertPromptToOpenAI(formattedPrompt);

  const openAIClient = new OpenAI();

  const openAIResponse = await openAIClient.chat.completions.create({
    model: "gpt-4o-mini",
    messages,
  });

  expect(openAIResponse.choices.length).toBeGreaterThan(0);
});
@@ -0,0 +1,47 @@
/* eslint-disable import/no-extraneous-dependencies */
import type { BasePromptValue } from "@langchain/core/prompt_values";
import type { OpenAI } from "openai";

import { _convertMessagesToOpenAIParams } from "../chat_models.js";

/**
 * Convert a formatted LangChain prompt (e.g. pulled from the hub) into
 * a format expected by OpenAI's JS SDK.
 *
 * Requires the "@langchain/openai" package to be installed in addition
 * to the OpenAI SDK.
 *
 * @example
 * ```ts
 * import { convertPromptToOpenAI } from "@langchain/openai";
 * import { pull } from "langchain/hub";
 *
 * import OpenAI from 'openai';
 *
 * const prompt = await pull("jacob/joke-generator");
 * const formattedPrompt = await prompt.invoke({
 *   topic: "cats",
 * });
 *
 * const { messages } = convertPromptToOpenAI(formattedPrompt);
 *
 * const openAIClient = new OpenAI();
 *
 * const openaiResponse = await openAIClient.chat.completions.create({
 *   model: "gpt-4o",
 *   messages,
 * });
 * ```
 * @param formattedPrompt
 * @returns A partial OpenAI payload.
 */
export function convertPromptToOpenAI(formattedPrompt: BasePromptValue): {
  messages: OpenAI.Chat.ChatCompletionMessageParam[];
} {
  const messages = formattedPrompt.toChatMessages();
  return {
    messages: _convertMessagesToOpenAIParams(
      messages
    ) as OpenAI.Chat.ChatCompletionMessageParam[],
  };
}
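As with the Anthropic helper, the converted messages are plain ChatCompletionMessageParam objects, so they can be reused for a streaming completion. A hedged sketch, assuming convertPromptToOpenAI is re-exported from "@langchain/openai" the same way the Anthropic helper is exported from its package entrypoint (that index change is not shown in the visible hunks):

import OpenAI from "openai";
import { pull } from "langchain/hub";
import { convertPromptToOpenAI } from "@langchain/openai";

const prompt = await pull("jacob/joke-generator");
const formattedPrompt = await prompt.invoke({ topic: "cats" });
const { messages } = convertPromptToOpenAI(formattedPrompt);

const openAIClient = new OpenAI();

// stream: true makes the SDK return an async iterable of chunks whose
// deltas carry the incremental response content.
const stream = await openAIClient.chat.completions.create({
  model: "gpt-4o-mini",
  messages,
  stream: true,
});

for await (const chunk of stream) {
  process.stdout.write(chunk.choices[0]?.delta?.content ?? "");
}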