Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
47 commits
Select commit Hold shift + click to select a range
e7bd8d9
Initial APM-only openai-java instrumentation with a unit test.
ygree Oct 24, 2025
6feb61e
Start llmobs system in tests and create llmobs span.
ygree Oct 25, 2025
d541adb
streamed request completion test
ygree Oct 29, 2025
c209bd0
README
ygree Oct 29, 2025
a82856d
Instrument sync streamed completion
ygree Nov 5, 2025
3d3def9
Mock Streamed Completion in Tests
ygree Nov 6, 2025
3542b9e
Add failing test for Async Completion
ygree Nov 6, 2025
c12cd0a
Async Single Completion. Simplify tests. Remove println. Cleanup.
ygree Nov 6, 2025
2af4e5f
Extract DDHttpResponseFor to intercept when the response is parsed in…
ygree Nov 6, 2025
cec33af
Instrument and test Async Stream Completion
ygree Nov 6, 2025
b2b4141
More tests "streamed request completion test with withRawResponse" an…
ygree Nov 6, 2025
b1b6db2
Fix muzzle check
ygree Nov 6, 2025
baa687d
Add openai APM tags, assert tags, todo tags
ygree Nov 7, 2025
b9ad963
Wrap HttpResponseFor instead of forcing parsing. Add TODOs
ygree Nov 8, 2025
5bf16f4
Set response model tag
ygree Nov 10, 2025
be20fa1
Set "openai.organization.name"
ygree Nov 10, 2025
dd230b7
Set ratelimit metrics
ygree Nov 10, 2025
c3f94b0
api_base
ygree Nov 10, 2025
ddf0e09
"openai.request.method" & "openai.request.endpoint"
ygree Nov 11, 2025
fb9fd04
createChatCompletion instrumentation
ygree Nov 11, 2025
a4358a8
Reorder tests single then stream
ygree Nov 11, 2025
99dedfe
Async Single Chat Completion
ygree Nov 11, 2025
d66a5ee
Async Streamed Chat Completion
ygree Nov 11, 2025
f8af3ae
Rename assertChatCompletionTrace
ygree Nov 11, 2025
846e945
Instrument Embeddings
ygree Nov 11, 2025
b14bc4b
ResponseService WIP
ygree Nov 12, 2025
e8daf08
ResponseService synch
ygree Nov 12, 2025
c8a9e6e
ResponseServiceAsyncInstrumentation
ygree Nov 12, 2025
9754cd8
Setup httpClient for tests WIP
ygree Nov 12, 2025
aeaae2a
Intercept Http req/resp with TestOpenAiHttpClient
ygree Nov 12, 2025
7e50d1d
Implement req/resp recorder and mock backend for tests
ygree Nov 13, 2025
8e85447
Add lockfile
ygree Nov 13, 2025
ea2f465
Minor changes to the records format
ygree Nov 13, 2025
1f3aa6c
OpenAiHttpClient for tests. Record only if the record doesn't exist. …
ygree Nov 13, 2025
0c28997
Rename TestOpenAiHttpClient
ygree Nov 13, 2025
6a41720
Do not dump a record if already exists
ygree Nov 13, 2025
6050515
Fix format
ygree Nov 14, 2025
c8a1e94
Fix linter errors
ygree Nov 14, 2025
20c88d7
Fix unused imports
ygree Nov 14, 2025
001ce0e
Fix format
ygree Nov 14, 2025
96f4353
Fix format
ygree Nov 14, 2025
46c2d3a
Fix format
ygree Nov 14, 2025
29ae26c
Merge branch 'master' into ygree/openai-java
ygree Nov 14, 2025
8de5108
Fix format
ygree Nov 14, 2025
fb74ed2
Remove nonexistent helper class that failed the test
ygree Nov 14, 2025
9a2bc61
Extract response model.
ygree Nov 15, 2025
637f8e5
Extract response model.
ygree Nov 15, 2025
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
@@ -0,0 +1,8 @@
// Build script for the llmobs test-fixtures module: shared test infrastructure
// for LLM Observability instrumentation tests.
apply from: "$rootDir/gradle/java.gradle"
apply from: "$rootDir/gradle/version.gradle"

dependencies {
// Declared as 'api' so modules consuming these fixtures also get the
// agent-llmobs and instrumentation-testing classes on their compile classpath.
api project(':dd-java-agent:agent-llmobs')
api project(':dd-java-agent:instrumentation-testing')
}

Original file line number Diff line number Diff line change
@@ -0,0 +1,26 @@
package datadog.trace.llmobs

import datadog.communication.ddagent.SharedCommunicationObjects
import datadog.trace.agent.test.InstrumentationSpecification
import datadog.trace.api.Config
import datadog.trace.api.config.LlmObsConfig

/**
 * Base Spock specification for LLM Observability instrumentation tests.
 * Boots the LLMObs system once per specification and forces LLMObs on via
 * injected system configuration before the agent is installed.
 */
class LlmObsSpecification extends InstrumentationSpecification {

void setupSpec() {
// Build the shared communication objects required by LLMObsSystem.start;
// createRemaining fills in whatever was not constructed eagerly.
def sco = new SharedCommunicationObjects()
def config = Config.get()
sco.createRemaining(config)
// assert sco.configurationPoller(config) == null
// assert sco.monitoring instanceof Monitoring.DisabledMonitoring

// NOTE(review): first argument (Instrumentation) is null — assumes
// LLMObsSystem.start tolerates a null instrumentation in tests; confirm.
LLMObsSystem.start(null, sco)
}

@Override
void configurePreAgent() {
super.configurePreAgent()

// Must run pre-agent so LLMObs is enabled when the agent initializes.
injectSysConfig(LlmObsConfig.LLMOBS_ENABLED, "true") // TODO maybe extract to an override method similar to DSM/DBM (see the super impl)
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -107,7 +107,14 @@ public static ReferenceMatcher loadStaticMuzzleReferences(
}
}

  /**
   * Returns class names of helpers to inject into the user's classloader.
   *
   * <p><b>NOTE:</b> The order matters. If the muzzle check fails with a {@code
   * NoClassDefFoundError} (as seen in {@code build/reports/muzzle-*.txt}), it may be because some
   * helper classes depend on each other; in that case the order must be adjusted so that
   * dependencies are injected before the classes that use them.
   *
   * @return class names of helpers to inject into the user's classloader
   */
  public String[] helperClassNames() {
    return NO_HELPERS;
  }
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,25 @@
// Build configuration for the openai-java SDK (>= 1.0.0) instrumentation module.
apply from: "$rootDir/gradle/java.gradle"
apply plugin: 'idea'

muzzle {
  pass {
    group = "com.openai"
    module = "openai-java"
    versions = "[1.0.0,)"
    // assertInverse = true // TODO exclude <1.0.0, now it passes the muzzle check for >=0.34.0 but the tests will fail
  }
}

addTestSuiteForDir('latestDepTest', 'test')

dependencies {
  // Compile against the lowest supported SDK release; run the regular test
  // suite against that floor version and latestDepTest against the newest.
  compileOnly 'com.openai:openai-java:1.0.0'

  testImplementation 'com.openai:openai-java:1.0.0'
  latestDepTestImplementation 'com.openai:openai-java:+'

  testImplementation project(':dd-java-agent:instrumentation:okhttp:okhttp-3.0')

  testImplementation project(':dd-java-agent:agent-llmobs:llmobs-test-fixtures')
}

Large diffs are not rendered by default.

Original file line number Diff line number Diff line change
@@ -0,0 +1,122 @@
package datadog.trace.instrumentation.openai_java;

import static datadog.trace.agent.tooling.bytebuddy.matcher.NameMatchers.named;
import static datadog.trace.bootstrap.instrumentation.api.AgentTracer.activateSpan;
import static datadog.trace.bootstrap.instrumentation.api.AgentTracer.startSpan;
import static datadog.trace.instrumentation.openai_java.OpenAiDecorator.DECORATE;
import static net.bytebuddy.matcher.ElementMatchers.isMethod;
import static net.bytebuddy.matcher.ElementMatchers.returns;
import static net.bytebuddy.matcher.ElementMatchers.takesArgument;

import com.openai.core.ClientOptions;
import com.openai.core.http.HttpResponseFor;
import com.openai.core.http.StreamResponse;
import com.openai.models.chat.completions.ChatCompletion;
import com.openai.models.chat.completions.ChatCompletionChunk;
import com.openai.models.chat.completions.ChatCompletionCreateParams;
import datadog.trace.agent.tooling.Instrumenter;
import datadog.trace.bootstrap.instrumentation.api.AgentScope;
import datadog.trace.bootstrap.instrumentation.api.AgentSpan;
import java.util.concurrent.CompletableFuture;
import net.bytebuddy.asm.Advice;

/**
 * Instruments the async OpenAI chat-completions raw-response service
 * ({@code ChatCompletionServiceAsyncImpl$WithRawResponseImpl}), tracing both single
 * ({@code create}) and streamed ({@code createStreaming}) completions. Spans start at the
 * service call; when a future is returned, span decoration and finish are deferred to the
 * response wrappers so the span covers the full async round trip.
 */
public class ChatCompletionServiceAsyncInstrumentation
    implements Instrumenter.ForSingleType, Instrumenter.HasMethodAdvice {
  @Override
  public String instrumentedType() {
    return "com.openai.services.async.chat.ChatCompletionServiceAsyncImpl$WithRawResponseImpl";
  }

  @Override
  public void methodAdvice(MethodTransformer transformer) {
    transformer.applyAdvice(
        isMethod()
            .and(named("create"))
            .and(
                takesArgument(
                    0, named("com.openai.models.chat.completions.ChatCompletionCreateParams")))
            .and(returns(named(CompletableFuture.class.getName()))),
        getClass().getName() + "$CreateAdvice");

    transformer.applyAdvice(
        isMethod()
            .and(named("createStreaming"))
            .and(
                takesArgument(
                    0, named("com.openai.models.chat.completions.ChatCompletionCreateParams")))
            .and(returns(named(CompletableFuture.class.getName()))),
        getClass().getName() + "$CreateStreamingAdvice");
  }

  /** Advice for async single (non-streaming) chat completions. */
  public static class CreateAdvice {
    @Advice.OnMethodEnter(suppress = Throwable.class)
    public static AgentScope enter(
        @Advice.Argument(0) final ChatCompletionCreateParams params,
        @Advice.FieldValue("clientOptions") ClientOptions clientOptions) {
      AgentSpan span = startSpan(OpenAiDecorator.INSTRUMENTATION_NAME, OpenAiDecorator.SPAN_NAME);
      DECORATE.afterStart(span);
      DECORATE.decorateWithClientOptions(span, clientOptions);
      DECORATE.decorateChatCompletion(span, params);
      return activateSpan(span);
    }

    @Advice.OnMethodExit(onThrowable = Throwable.class, suppress = Throwable.class)
    public static void exit(
        @Advice.Enter final AgentScope scope,
        @Advice.Return(readOnly = false) CompletableFuture<HttpResponseFor<ChatCompletion>> future,
        @Advice.Thrown final Throwable err) {
      final AgentSpan span = scope.span();
      try {
        if (err != null) {
          DECORATE.onError(span, err);
        }
        if (future != null) {
          // Decoration and span.finish() happen when the future completes.
          future =
              ResponseWrappers.wrapFutureResponse(
                  future, span, DECORATE::decorateWithChatCompletion);
        } else {
          span.finish();
        }
        // Previously missing here; every sibling advice (streaming/sync) calls
        // beforeFinish on exit — keep them consistent.
        DECORATE.beforeFinish(span);
      } finally {
        scope.close();
      }
    }
  }

  /** Advice for async streamed chat completions (chunked responses). */
  public static class CreateStreamingAdvice {
    @Advice.OnMethodEnter(suppress = Throwable.class)
    public static AgentScope enter(
        @Advice.Argument(0) final ChatCompletionCreateParams params,
        @Advice.FieldValue("clientOptions") ClientOptions clientOptions) {
      AgentSpan span = startSpan(OpenAiDecorator.INSTRUMENTATION_NAME, OpenAiDecorator.SPAN_NAME);
      DECORATE.afterStart(span);
      DECORATE.decorateWithClientOptions(span, clientOptions);
      DECORATE.decorateChatCompletion(span, params);
      return activateSpan(span);
    }

    @Advice.OnMethodExit(onThrowable = Throwable.class, suppress = Throwable.class)
    public static void exit(
        @Advice.Enter final AgentScope scope,
        @Advice.Return(readOnly = false)
            CompletableFuture<HttpResponseFor<StreamResponse<ChatCompletionChunk>>> future,
        @Advice.Thrown final Throwable err) {
      final AgentSpan span = scope.span();
      try {
        if (err != null) {
          DECORATE.onError(span, err);
        }
        if (future != null) {
          // The stream wrapper decorates with the collected chunks and
          // finishes the span when the stream is exhausted.
          future =
              ResponseWrappers.wrapFutureStreamResponse(
                  future, span, DECORATE::decorateWithChatCompletionChunks);
        } else {
          span.finish();
        }
        DECORATE.beforeFinish(span);
      } finally {
        scope.close();
      }
    }
  }
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,122 @@
package datadog.trace.instrumentation.openai_java;

import static datadog.trace.agent.tooling.bytebuddy.matcher.NameMatchers.named;
import static datadog.trace.bootstrap.instrumentation.api.AgentTracer.activateSpan;
import static datadog.trace.bootstrap.instrumentation.api.AgentTracer.startSpan;
import static datadog.trace.instrumentation.openai_java.OpenAiDecorator.DECORATE;
import static net.bytebuddy.matcher.ElementMatchers.isMethod;
import static net.bytebuddy.matcher.ElementMatchers.returns;
import static net.bytebuddy.matcher.ElementMatchers.takesArgument;

import com.openai.core.ClientOptions;
import com.openai.core.http.HttpResponseFor;
import com.openai.core.http.StreamResponse;
import com.openai.models.chat.completions.ChatCompletion;
import com.openai.models.chat.completions.ChatCompletionChunk;
import com.openai.models.chat.completions.ChatCompletionCreateParams;
import datadog.trace.agent.tooling.Instrumenter;
import datadog.trace.bootstrap.instrumentation.api.AgentScope;
import datadog.trace.bootstrap.instrumentation.api.AgentSpan;
import net.bytebuddy.asm.Advice;

/**
 * Instruments the blocking OpenAI chat-completions raw-response service
 * ({@code ChatCompletionServiceImpl$WithRawResponseImpl}), tracing both single
 * ({@code create}) and streamed ({@code createStreaming}) completions. When a response is
 * returned it is wrapped so decoration (and span finish) happen when the body is actually
 * parsed / the stream is consumed.
 */
public class ChatCompletionServiceInstrumentation
    implements Instrumenter.ForSingleType, Instrumenter.HasMethodAdvice {
  @Override
  public String instrumentedType() {
    return "com.openai.services.blocking.chat.ChatCompletionServiceImpl$WithRawResponseImpl";
  }

  @Override
  public void methodAdvice(MethodTransformer transformer) {
    transformer.applyAdvice(
        isMethod()
            .and(named("create"))
            .and(
                takesArgument(
                    0, named("com.openai.models.chat.completions.ChatCompletionCreateParams")))
            .and(returns(named("com.openai.core.http.HttpResponseFor"))),
        getClass().getName() + "$CreateAdvice");

    transformer.applyAdvice(
        isMethod()
            .and(named("createStreaming"))
            .and(
                takesArgument(
                    0, named("com.openai.models.chat.completions.ChatCompletionCreateParams")))
            .and(returns(named("com.openai.core.http.HttpResponseFor"))),
        getClass().getName() + "$CreateStreamingAdvice");
  }

  /** Advice for blocking single (non-streaming) chat completions. */
  public static class CreateAdvice {
    @Advice.OnMethodEnter(suppress = Throwable.class)
    public static AgentScope enter(
        @Advice.Argument(0) final ChatCompletionCreateParams params,
        @Advice.FieldValue("clientOptions") ClientOptions clientOptions) {
      AgentSpan span = startSpan(OpenAiDecorator.INSTRUMENTATION_NAME, OpenAiDecorator.SPAN_NAME);
      DECORATE.afterStart(span);
      DECORATE.decorateWithClientOptions(span, clientOptions);
      DECORATE.decorateChatCompletion(span, params);
      return activateSpan(span);
    }

    @Advice.OnMethodExit(onThrowable = Throwable.class, suppress = Throwable.class)
    public static void exit(
        @Advice.Enter final AgentScope scope,
        @Advice.Return(readOnly = false) HttpResponseFor<ChatCompletion> response,
        @Advice.Thrown final Throwable err) {
      final AgentSpan span = scope.span();
      try {
        if (err != null) {
          DECORATE.onError(span, err);
        }
        if (response != null) {
          // The wrapper decorates and finishes the span once the body is
          // parsed. Previously the span was ALSO finished unconditionally in
          // the finally block, which double-finished it and lost any tags the
          // wrapper set after parsing; finish only on the null path, matching
          // the streaming and async advices.
          response =
              ResponseWrappers.wrapResponse(
                  response, span, DECORATE::decorateWithChatCompletion);
        } else {
          span.finish();
        }
        DECORATE.beforeFinish(span);
      } finally {
        scope.close();
      }
    }
  }

  /** Advice for blocking streamed chat completions (chunked responses). */
  public static class CreateStreamingAdvice {

    @Advice.OnMethodEnter(suppress = Throwable.class)
    public static AgentScope enter(
        @Advice.Argument(0) final ChatCompletionCreateParams params,
        @Advice.FieldValue("clientOptions") ClientOptions clientOptions) {
      AgentSpan span = startSpan(OpenAiDecorator.INSTRUMENTATION_NAME, OpenAiDecorator.SPAN_NAME);
      DECORATE.afterStart(span);
      DECORATE.decorateWithClientOptions(span, clientOptions);
      DECORATE.decorateChatCompletion(span, params);
      return activateSpan(span);
    }

    @Advice.OnMethodExit(onThrowable = Throwable.class, suppress = Throwable.class)
    public static void exit(
        @Advice.Enter final AgentScope scope,
        @Advice.Return(readOnly = false)
            HttpResponseFor<StreamResponse<ChatCompletionChunk>> response,
        @Advice.Thrown final Throwable err) {
      final AgentSpan span = scope.span();
      try {
        if (err != null) {
          DECORATE.onError(span, err);
        }
        if (response != null) {
          // The stream wrapper decorates with the collected chunks and
          // finishes the span when the stream is exhausted.
          response =
              ResponseWrappers.wrapStreamResponse(
                  response, span, DECORATE::decorateWithChatCompletionChunks);
        } else {
          span.finish();
        }
        DECORATE.beforeFinish(span);
      } finally {
        scope.close();
      }
    }
  }
}
Loading