Skip to content

Commit

Permalink
add streaming support for anthropic messages (#61)
Browse files Browse the repository at this point in the history
  • Loading branch information
abubakarsohail authored May 2, 2024
1 parent 6d04352 commit 5d4b189
Show file tree
Hide file tree
Showing 3 changed files with 64 additions and 11 deletions.
13 changes: 6 additions & 7 deletions package-lock.json

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

4 changes: 2 additions & 2 deletions package.json
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
{
"name": "promptlayer",
"license": "MIT",
"version": "0.0.17",
"version": "0.0.18",
"main": "dist/index.js",
"module": "dist/index.esm.js",
"types": "dist/index.d.ts",
Expand All @@ -16,7 +16,7 @@
"release": "npm run build && npm publish"
},
"devDependencies": {
"@anthropic-ai/sdk": "^0.14.1",
"@anthropic-ai/sdk": "^0.20.8",
"@types/node": "^20.8.0",
"openai": "^4.28.4",
"tsup": "^7.2.0",
Expand Down
58 changes: 56 additions & 2 deletions src/utils.ts
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,7 @@ import {
TrackRequest,
TrackScore,
} from "@/types";
import { Message, MessageStreamEvent } from "@anthropic-ai/sdk/resources";
import { ChatCompletion, ChatCompletionChunk } from "openai/resources";

const URL_API_PROMPTLAYER = "https://api.promptlayer.com";
Expand Down Expand Up @@ -486,7 +487,57 @@ const mapChatCompletionChunk = (
return response;
};

const cleaned_result = (results: any[]) => {
/**
 * Collapses a sequence of Anthropic streaming events into a single
 * non-streaming `Message`, mirroring the shape of a regular
 * `anthropic.messages.create` response.
 *
 * @param results - Stream events collected in arrival order.
 * @returns The reconstructed message; when no events were received, a
 *   placeholder message with no content blocks is returned.
 */
const mapMessageStreamEvent = (results: MessageStreamEvent[]): Message => {
  let response: Message = {
    id: "",
    model: "",
    content: [],
    role: "assistant",
    type: "message",
    stop_reason: "stop_sequence",
    stop_sequence: null,
    usage: {
      input_tokens: 0,
      output_tokens: 0,
    },
  };
  // Empty stream: return the placeholder without appending a content block.
  if (results.length === 0) return response;
  let content = "";
  for (const result of results) {
    switch (result.type) {
      case "message_start": {
        // Seed the response with the initial message metadata
        // (id, model, input token usage, …).
        response = {
          ...result.message,
        };
        break;
      }
      case "content_block_delta": {
        // Accumulate streamed text fragments.
        if (result.delta.type === "text_delta")
          content = `${content}${result.delta.text}`;
        // BUG FIX: this case previously fell through into "message_delta".
        break;
      }
      case "message_delta": {
        // Final output-token usage and stop metadata arrive on this event.
        if ("usage" in result)
          response.usage.output_tokens = result.usage.output_tokens;
        if ("stop_reason" in result.delta)
          response.stop_reason = result.delta.stop_reason;
        break;
      }
      default: {
        break;
      }
    }
  }
  response.content.push({
    type: "text",
    text: content,
  });
  return response;
};

const cleaned_result = (
results: any[],
function_name = "openai.chat.completions.create"
) => {
if ("completion" in results[0]) {
return results.reduce(
(prev, current) => ({
Expand All @@ -497,6 +548,9 @@ const cleaned_result = (results: any[]) => {
);
}

if (function_name === "anthropic.messages.create")
return mapMessageStreamEvent(results);

if ("text" in results[0].choices[0]) {
let response = "";
for (const result of results) {
Expand Down Expand Up @@ -528,7 +582,7 @@ async function* proxyGenerator<Item>(
yield value;
results.push(value);
}
const request_response = cleaned_result(results);
const request_response = cleaned_result(results, body.function_name);
const response = await promptLayerApiRequest({
...body,
request_response,
Expand Down

0 comments on commit 5d4b189

Please sign in to comment.