Skip to content

Commit 46bd515

Browse files
author
Stainless Bot
committed
feat: complete OpenAI tracer implementation, handling streaming and tool calls
1 parent e0140f4 commit 46bd515

File tree

2 files changed

+90
-18
lines changed

2 files changed

+90
-18
lines changed

src/lib/integrations/openAiTracer.ts

+84-12
Original file line numberDiff line numberDiff line change
@@ -9,27 +9,94 @@ export function traceOpenAI(openai: OpenAI): OpenAI {
99
this: typeof openai.chat.completions,
1010
...args: Parameters<typeof createFunction>
1111
): Promise<Stream<OpenAI.Chat.Completions.ChatCompletionChunk> | OpenAI.Chat.Completions.ChatCompletion> {
12-
const [params, options = { stream: false }] = args;
12+
const [params, options] = args;
13+
const stream = params?.stream ?? false;
14+
1315
try {
1416
const startTime = performance.now();
15-
if (options.stream) {
16-
console.log('streaming not implemented yet');
17-
return createFunction.apply(this, args) as unknown as Promise<
18-
Stream<OpenAI.Chat.Completions.ChatCompletionChunk>
19-
>;
17+
18+
// Call the original `create` function
19+
let response = await createFunction.apply(this, args);
20+
21+
if (stream) {
22+
// Handle streaming responses
23+
const chunks: OpenAI.Chat.Completions.ChatCompletionChunk[] = [];
24+
let collectedOutputData: any[] = [];
25+
let firstTokenTime: number | undefined;
26+
let completionTokens: number = 0;
27+
if (isAsyncIterable(response)) {
28+
async function* tracedOutputGenerator(): AsyncGenerator<
29+
OpenAI.Chat.Completions.ChatCompletionChunk,
30+
void,
31+
unknown
32+
> {
33+
for await (const rawChunk of response as AsyncIterable<OpenAI.Chat.Completions.ChatCompletionChunk>) {
34+
if (chunks.length === 0) {
35+
firstTokenTime = performance.now();
36+
}
37+
chunks.push(rawChunk);
38+
const delta = rawChunk.choices[0]?.delta;
39+
if (delta?.content) {
40+
collectedOutputData.push(delta?.content);
41+
} else if (delta?.tool_calls) {
42+
const tool_call = delta.tool_calls[0];
43+
if (tool_call?.function?.name) {
44+
const functionName: string =
45+
'{\n "name": ' + '"' + tool_call.function.name + '"' + '\n "arguments": ';
46+
collectedOutputData.push(functionName);
47+
} else if (tool_call?.function?.arguments) {
48+
collectedOutputData.push(tool_call.function.arguments);
49+
}
50+
}
51+
52+
if (rawChunk.choices[0]?.finish_reason === 'tool_calls') {
53+
collectedOutputData.push('\n}');
54+
}
55+
completionTokens += 1;
56+
yield rawChunk;
57+
}
58+
const endTime = performance.now();
59+
const traceData = {
60+
name: 'OpenAI Chat Completion',
61+
inputs: { prompt: params.messages },
62+
output: collectedOutputData.join(''),
63+
latency: endTime - startTime,
64+
model: chunks[0]?.model as string,
65+
modelParameters: getModelParameters(args),
66+
rawOutput: chunks.map((chunk) => JSON.stringify(chunk, null, 2)).join('\n'),
67+
metadata: { timeToFistToken: firstTokenTime ? firstTokenTime - startTime : null },
68+
provider: 'OpenAI',
69+
completionTokens: completionTokens,
70+
promptTokens: 0,
71+
tokens: completionTokens,
72+
};
73+
addChatCompletionStepToTrace(traceData);
74+
}
75+
return tracedOutputGenerator() as unknown as Stream<OpenAI.Chat.Completions.ChatCompletionChunk>;
76+
}
2077
} else {
21-
const response = (await createFunction.apply(this, args)) as OpenAI.Chat.Completions.ChatCompletion;
78+
// Handle non-streaming responses
79+
response = response as OpenAI.Chat.Completions.ChatCompletion;
2280
const completion = response.choices[0];
2381
const endTime = performance.now();
82+
83+
let output: string = '';
84+
if (completion?.message?.content) {
85+
output = completion.message.content;
86+
} else if (completion?.message.tool_calls) {
87+
const tool_call = completion.message.tool_calls[0];
88+
output = JSON.stringify(tool_call?.function, null, 2);
89+
}
90+
2491
const traceData = {
2592
name: 'OpenAI Chat Completion',
2693
inputs: { prompt: params.messages },
27-
output: completion?.message.content,
94+
output: output,
2895
latency: endTime - startTime,
29-
tokens: response?.usage?.total_tokens ?? null,
30-
promptTokens: response?.usage?.prompt_tokens ?? null,
31-
completionTokens: response?.usage?.completion_tokens ?? null,
32-
model: response?.model,
96+
tokens: response.usage?.total_tokens ?? null,
97+
promptTokens: response.usage?.prompt_tokens ?? null,
98+
completionTokens: response.usage?.completion_tokens ?? null,
99+
model: response.model,
33100
modelParameters: getModelParameters(args),
34101
rawOutput: JSON.stringify(response, null, 2),
35102
metadata: {},
@@ -42,6 +109,8 @@ export function traceOpenAI(openai: OpenAI): OpenAI {
42109
console.error('Failed to trace the create chat completion request with Openlayer', error);
43110
throw error;
44111
}
112+
// Ensure a return statement is present
113+
return undefined as any;
45114
} as typeof createFunction;
46115

47116
return openai;
@@ -63,3 +132,6 @@ function getModelParameters(args: any): Record<string, any> {
63132
top_p: params?.topP ?? 1,
64133
};
65134
}
135+
136+
const isAsyncIterable = (x: any) =>
137+
x != null && typeof x === 'object' && typeof x[Symbol.asyncIterator] === 'function';

src/lib/tracing/tracer.ts

+6-6
Original file line numberDiff line numberDiff line change
@@ -10,7 +10,7 @@ let currentTrace: Trace | null = null;
1010
const publish = process.env['OPENLAYER_DISABLE_PUBLISH'] != 'true';
1111
let client: Openlayer | null = null;
1212
if (publish) {
13-
console.log('Publishing is enabled');
13+
console.debug('Publishing is enabled');
1414
client = new Openlayer();
1515
}
1616

@@ -45,13 +45,13 @@ function createStep(
4545
const isRootStep = parentStep === null;
4646

4747
if (isRootStep) {
48-
console.log('Starting a new trace...');
49-
console.log(`Adding step ${name} as the root step`);
48+
console.debug('Starting a new trace...');
49+
console.debug(`Adding step ${name} as the root step`);
5050
const currentTrace = new Trace();
5151
setCurrentTrace(currentTrace);
5252
currentTrace.addStep(newStep);
5353
} else {
54-
console.log(`Adding step ${name} as a nested step to ${parentStep!.name}`);
54+
console.debug(`Adding step ${name} as a nested step to ${parentStep!.name}`);
5555
currentTrace = getCurrentTrace()!;
5656
parentStep!.addNestedStep(newStep);
5757
}
@@ -65,7 +65,7 @@ function createStep(
6565
stepStack.pop(); // Remove the current step from the stack
6666

6767
if (isRootStep) {
68-
console.log('Ending the trace...');
68+
console.debug('Ending the trace...');
6969
const traceData = getCurrentTrace();
7070
// Post process trace and get the input variable names
7171
const { traceData: processedTraceData, inputVariableNames } = postProcessTrace(traceData!);
@@ -90,7 +90,7 @@ function createStep(
9090
setCurrentTrace(null);
9191
stepStack.length = 0; // Clear the step stack
9292
} else {
93-
console.log(`Ending step ${name}`);
93+
console.debug(`Ending step ${name}`);
9494
}
9595
};
9696

0 commit comments

Comments
 (0)