Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account-related emails.

Already on GitHub? Sign in to your account

chore(openai,anthropic): Expose OpenAI and Anthropic payload formatters #6872

Merged
merged 2 commits into from
Sep 24, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 3 additions & 3 deletions libs/langchain-anthropic/src/chat_models.ts
Original file line number Diff line number Diff line change
Expand Up @@ -33,7 +33,7 @@ import type {
import { isLangChainTool } from "@langchain/core/utils/function_calling";
import { AnthropicToolsOutputParser } from "./output_parsers.js";
import { extractToolCallChunk, handleToolChoice } from "./utils/tools.js";
import { _formatMessagesForAnthropic } from "./utils/message_inputs.js";
import { _convertMessagesToAnthropicPayload } from "./utils/message_inputs.js";
import {
_makeMessageChunkFromAnthropicEvent,
anthropicResponseToChatMessages,
Expand Down Expand Up @@ -782,7 +782,7 @@ export class ChatAnthropicMessages<
runManager?: CallbackManagerForLLMRun
): AsyncGenerator<ChatGenerationChunk> {
const params = this.invocationParams(options);
const formattedMessages = _formatMessagesForAnthropic(messages);
const formattedMessages = _convertMessagesToAnthropicPayload(messages);
const coerceContentToString = !_toolsInParams({
...params,
...formattedMessages,
Expand Down Expand Up @@ -852,7 +852,7 @@ export class ChatAnthropicMessages<
{
...params,
stream: false,
..._formatMessagesForAnthropic(messages),
..._convertMessagesToAnthropicPayload(messages),
},
requestOptions
);
Expand Down
1 change: 1 addition & 0 deletions libs/langchain-anthropic/src/index.ts
Original file line number Diff line number Diff line change
@@ -1 +1,2 @@
export * from "./chat_models.js";
export { _convertMessagesToAnthropicPayload } from "./utils/message_inputs.js";
4 changes: 2 additions & 2 deletions libs/langchain-anthropic/src/tests/chat_models.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@ import { AIMessage, HumanMessage, ToolMessage } from "@langchain/core/messages";
import { z } from "zod";
import { OutputParserException } from "@langchain/core/output_parsers";
import { ChatAnthropic } from "../chat_models.js";
import { _formatMessagesForAnthropic } from "../utils/message_inputs.js";
import { _convertMessagesToAnthropicPayload } from "../index.js";

test("withStructuredOutput with output validation", async () => {
const model = new ChatAnthropic({
Expand Down Expand Up @@ -143,7 +143,7 @@ test("Can properly format anthropic messages when given two tool results", async
}),
];

const formattedMessages = _formatMessagesForAnthropic(messageHistory);
const formattedMessages = _convertMessagesToAnthropicPayload(messageHistory);

expect(formattedMessages).toEqual({
messages: [
Expand Down
3 changes: 2 additions & 1 deletion libs/langchain-anthropic/src/utils/message_inputs.ts
Original file line number Diff line number Diff line change
Expand Up @@ -185,10 +185,11 @@ function _formatContent(content: MessageContent) {

/**
* Formats messages as a prompt for the model.
 * Used in LangSmith; keeping this function exported is important.
* @param messages The base messages to format as a prompt.
* @returns The formatted prompt.
*/
export function _formatMessagesForAnthropic(
export function _convertMessagesToAnthropicPayload(
messages: BaseMessage[]
): AnthropicMessageCreateParams {
const mergedMessages = _mergeMessages(messages);
Expand Down
7 changes: 4 additions & 3 deletions libs/langchain-openai/src/chat_models.ts
Original file line number Diff line number Diff line change
Expand Up @@ -250,7 +250,8 @@ function _convertDeltaToMessageChunk(
}
}

function convertMessagesToOpenAIParams(messages: BaseMessage[]) {
// Used in LangSmith; keeping this function exported is important
export function _convertMessagesToOpenAIParams(messages: BaseMessage[]) {
// TODO: Function messages do not support array content, fix cast
return messages.map((message) => {
// eslint-disable-next-line @typescript-eslint/no-explicit-any
Expand Down Expand Up @@ -1200,7 +1201,7 @@ export class ChatOpenAI<
return;
}
const messagesMapped: OpenAICompletionParam[] =
convertMessagesToOpenAIParams(messages);
_convertMessagesToOpenAIParams(messages);
const params = {
...this.invocationParams(options, {
streaming: true,
Expand Down Expand Up @@ -1329,7 +1330,7 @@ export class ChatOpenAI<
const tokenUsage: TokenUsage = {};
const params = this.invocationParams(options);
const messagesMapped: OpenAICompletionParam[] =
convertMessagesToOpenAIParams(messages);
_convertMessagesToOpenAIParams(messages);

if (params.stream) {
const stream = this._streamResponseChunks(messages, options, runManager);
Expand Down
Loading