Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 5 additions & 0 deletions packages/mcp-server/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -52,7 +52,12 @@ SENTRY_ACCESS_TOKEN=your-token
SENTRY_HOST=sentry.example.com
MCP_SCOPES=org:read,event:read # Override default scopes (replaces defaults)
MCP_ADD_SCOPES=event:write # Add to default scopes (keeps defaults)

# OpenAI configuration for AI-powered search tools
OPENAI_API_KEY=your-openai-key # Required for AI-powered search tools (search_events, search_issues)
OPENAI_MODEL=gpt-5 # OpenAI model to use (default: "gpt-5")
OPENAI_REASONING_EFFORT=low # Reasoning effort for reasoning models: "low", "medium", "high", or "" to disable (default: "low")

# No environment variable exists for the OpenAI base URL override; use --openai-base-url instead.
# This restriction prevents unexpected environment overrides that could silently reroute requests to a
# malicious proxy capable of harvesting the OpenAI API key provided at runtime.
Expand Down
3 changes: 2 additions & 1 deletion packages/mcp-server/scripts/generate-otel-namespaces.ts
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,7 @@ import { resolve, dirname } from "node:path";
import { fileURLToPath } from "node:url";
import { parse as parseYaml } from "yaml";
import { z } from "zod";
import { USER_AGENT } from "../src/version.js";

const __filename = fileURLToPath(import.meta.url);
const __dirname = dirname(__filename);
Expand Down Expand Up @@ -198,7 +199,7 @@ async function fetchYamlContent(namespace: string): Promise<string | null> {
`${GITHUB_BASE_URL}/${namespace}/registry.yaml`,
{
headers: {
"User-Agent": "Sentry MCP Server",
"User-Agent": USER_AGENT,
},
},
);
Expand Down
3 changes: 2 additions & 1 deletion packages/mcp-server/src/api-client/client.ts
Original file line number Diff line number Diff line change
Expand Up @@ -31,6 +31,7 @@ import {
} from "./schema";
import { ConfigurationError } from "../errors";
import { createApiError, ApiNotFoundError, ApiValidationError } from "./errors";
import { USER_AGENT } from "../version";
import type {
AutofixRun,
AutofixRunState,
Expand Down Expand Up @@ -212,7 +213,7 @@ export class SentryApiService {

const headers: Record<string, string> = {
"Content-Type": "application/json",
"User-Agent": "Sentry MCP Server",
"User-Agent": USER_AGENT,
};
if (this.accessToken) {
headers.Authorization = `Bearer ${this.accessToken}`;
Expand Down
4 changes: 4 additions & 0 deletions packages/mcp-server/src/cli/parse.ts
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@ export function parseArgv(argv: string[]): CliArgs {
"mcp-url": { type: "string" as const },
"sentry-dsn": { type: "string" as const },
"openai-base-url": { type: "string" as const },
"openai-model": { type: "string" as const },
"organization-slug": { type: "string" as const },
"project-slug": { type: "string" as const },
scopes: { type: "string" as const },
Expand Down Expand Up @@ -53,6 +54,7 @@ export function parseArgv(argv: string[]): CliArgs {
mcpUrl: values["mcp-url"] as string | undefined,
sentryDsn: values["sentry-dsn"] as string | undefined,
openaiBaseUrl: values["openai-base-url"] as string | undefined,
openaiModel: values["openai-model"] as string | undefined,
organizationSlug: values["organization-slug"] as string | undefined,
projectSlug: values["project-slug"] as string | undefined,
scopes: values.scopes as string | undefined,
Expand All @@ -74,6 +76,7 @@ export function parseEnv(env: NodeJS.ProcessEnv): EnvArgs {
if (env.MCP_URL) fromEnv.mcpUrl = env.MCP_URL;
if (env.SENTRY_DSN || env.DEFAULT_SENTRY_DSN)
fromEnv.sentryDsn = env.SENTRY_DSN || env.DEFAULT_SENTRY_DSN;
if (env.OPENAI_MODEL) fromEnv.openaiModel = env.OPENAI_MODEL;
if (env.MCP_SCOPES) fromEnv.scopes = env.MCP_SCOPES;
if (env.MCP_ADD_SCOPES) fromEnv.addScopes = env.MCP_ADD_SCOPES;
return fromEnv;
Expand All @@ -89,6 +92,7 @@ export function merge(cli: CliArgs, env: EnvArgs): MergedArgs {
mcpUrl: cli.mcpUrl ?? env.mcpUrl,
sentryDsn: cli.sentryDsn ?? env.sentryDsn,
openaiBaseUrl: cli.openaiBaseUrl,
openaiModel: cli.openaiModel ?? env.openaiModel,
// Scopes precedence: CLI scopes/add-scopes override their env counterparts
scopes: cli.scopes ?? env.scopes,
addScopes: cli.addScopes ?? env.addScopes,
Expand Down
1 change: 1 addition & 0 deletions packages/mcp-server/src/cli/resolve.ts
Original file line number Diff line number Diff line change
Expand Up @@ -78,6 +78,7 @@ export function finalize(input: MergedArgs): ResolvedConfig {
mcpUrl: input.mcpUrl,
sentryDsn: input.sentryDsn,
openaiBaseUrl: resolvedOpenAiBaseUrl,
openaiModel: input.openaiModel,
finalScopes,
organizationSlug: input.organizationSlug,
projectSlug: input.projectSlug,
Expand Down
4 changes: 4 additions & 0 deletions packages/mcp-server/src/cli/types.ts
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@ export type CliArgs = {
mcpUrl?: string;
sentryDsn?: string;
openaiBaseUrl?: string;
openaiModel?: string;
scopes?: string;
addScopes?: string;
allScopes?: boolean;
Expand All @@ -24,6 +25,7 @@ export type EnvArgs = {
url?: string; // raw URL if provided (SENTRY_URL)
mcpUrl?: string;
sentryDsn?: string;
openaiModel?: string;
scopes?: string;
addScopes?: string;
};
Expand All @@ -35,6 +37,7 @@ export type MergedArgs = {
mcpUrl?: string;
sentryDsn?: string;
openaiBaseUrl?: string;
openaiModel?: string;
scopes?: string;
addScopes?: string;
allScopes?: boolean;
Expand All @@ -52,6 +55,7 @@ export type ResolvedConfig = {
mcpUrl?: string;
sentryDsn?: string;
openaiBaseUrl?: string;
openaiModel?: string;
finalScopes?: Set<Scope>;
organizationSlug?: string;
projectSlug?: string;
Expand Down
2 changes: 2 additions & 0 deletions packages/mcp-server/src/cli/usage.ts
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,7 @@ Common optional flags:
--host <host> Change Sentry host (self-hosted)
--sentry-dsn <dsn> Override DSN used for telemetry reporting
--openai-base-url <url> Override OpenAI API base URL for embedded agents
--openai-model <model> Override OpenAI model (default: gpt-5, reasoning effort: low)
--agent Agent mode: only expose use_sentry tool (for AI agents)

Session constraints:
Expand All @@ -31,5 +32,6 @@ All scopes: ${all.join(", ")}
Examples:
${packageName} --access-token=TOKEN
${packageName} --access-token=TOKEN --host=sentry.example.com
${packageName} --access-token=TOKEN --openai-model=o1-mini
${packageName} --access-token=TOKEN --openai-base-url=https://proxy.example.com/v1`;
}
9 changes: 7 additions & 2 deletions packages/mcp-server/src/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,7 @@ import { finalize } from "./cli/resolve";
import { sentryBeforeSend } from "./telem/sentry";
import { ALL_SCOPES } from "./permissions";
import { DEFAULT_SCOPES } from "./constants";
import { configureOpenAIProvider } from "./internal/agents/openai-provider";
import { setOpenAIBaseUrl } from "./internal/agents/openai-provider";
import agentTools from "./tools/agent-tools";

const packageName = "@sentry/mcp-server";
Expand Down Expand Up @@ -71,7 +71,12 @@ if (!process.env.OPENAI_API_KEY) {
console.warn("");
}

configureOpenAIProvider({ baseUrl: cfg.openaiBaseUrl });
// Configure OpenAI settings from CLI flags
// Note: baseUrl can only be set via CLI flag, not env var (security: prevents credential theft)
setOpenAIBaseUrl(cfg.openaiBaseUrl);
if (cfg.openaiModel) {
process.env.OPENAI_MODEL = cfg.openaiModel;
}

Sentry.init({
dsn: cfg.sentryDsn,
Expand Down
103 changes: 103 additions & 0 deletions packages/mcp-server/src/internal/agents/openai-provider.test.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,103 @@
import { describe, it, expect, beforeEach, afterEach } from "vitest";
import { getOpenAIModel, setOpenAIBaseUrl } from "./openai-provider.js";

describe("openai-provider", () => {
  // Snapshot every env var the provider reads so the suite is hermetic even
  // when the host environment (e.g. CI) already has them set.
  const originalReasoningEffort = process.env.OPENAI_REASONING_EFFORT;
  const originalModel = process.env.OPENAI_MODEL;

  beforeEach(() => {
    setOpenAIBaseUrl(undefined);
    // Start each test from a clean slate; individual tests set what they need.
    // biome-ignore lint/performance/noDelete: Required to properly unset environment variable
    delete process.env.OPENAI_REASONING_EFFORT;
    // biome-ignore lint/performance/noDelete: Required to properly unset environment variable
    delete process.env.OPENAI_MODEL;
  });

  afterEach(() => {
    // Restore here (not inside test bodies) so cleanup still runs when an
    // assertion throws mid-test.
    if (originalReasoningEffort === undefined) {
      // biome-ignore lint/performance/noDelete: Required to properly unset environment variable
      delete process.env.OPENAI_REASONING_EFFORT;
    } else {
      process.env.OPENAI_REASONING_EFFORT = originalReasoningEffort;
    }
    if (originalModel === undefined) {
      // biome-ignore lint/performance/noDelete: Required to properly unset environment variable
      delete process.env.OPENAI_MODEL;
    } else {
      process.env.OPENAI_MODEL = originalModel;
    }
  });

  describe("reasoning effort configuration", () => {
    it("uses default reasoning effort when env var is not set", () => {
      // beforeEach already unset OPENAI_REASONING_EFFORT.
      const model = getOpenAIModel();

      // The model object should be created with default reasoning effort
      expect(model).toBeDefined();
      expect(model.modelId).toBe("gpt-5");
    });

    it("disables reasoning effort when env var is empty string", () => {
      process.env.OPENAI_REASONING_EFFORT = "";

      const model = getOpenAIModel();

      // The model object should be created without reasoning effort
      expect(model).toBeDefined();
      expect(model.modelId).toBe("gpt-5");
    });

    it("uses specified reasoning effort when env var is set", () => {
      process.env.OPENAI_REASONING_EFFORT = "high";

      const model = getOpenAIModel();

      // The model object should be created with high reasoning effort
      expect(model).toBeDefined();
      expect(model.modelId).toBe("gpt-5");
    });

    it("throws error for invalid reasoning effort value", () => {
      process.env.OPENAI_REASONING_EFFORT = "invalid";

      expect(() => getOpenAIModel()).toThrow(
        'Invalid OPENAI_REASONING_EFFORT value: "invalid". Must be one of: "low", "medium", "high", or "" (empty string to disable). Default is "low".',
      );
    });
  });

  describe("base URL configuration", () => {
    it("uses default base URL when not configured", () => {
      const model = getOpenAIModel();

      expect(model).toBeDefined();
      expect(model.modelId).toBe("gpt-5");
    });

    it("uses configured base URL", () => {
      setOpenAIBaseUrl("https://custom-openai.example.com");

      const model = getOpenAIModel();

      expect(model).toBeDefined();
      expect(model.modelId).toBe("gpt-5");
    });
  });

  describe("model override", () => {
    it("uses default model when not specified", () => {
      const model = getOpenAIModel();

      expect(model.modelId).toBe("gpt-5");
    });

    it("uses specified model when provided", () => {
      const model = getOpenAIModel("gpt-4");

      expect(model.modelId).toBe("gpt-4");
    });

    it("uses OPENAI_MODEL env var when set", () => {
      process.env.OPENAI_MODEL = "gpt-4o";

      const model = getOpenAIModel();

      // Restoration happens in afterEach, so a failing expect cannot leak
      // OPENAI_MODEL into subsequent tests.
      expect(model.modelId).toBe("gpt-4o");
    });
  });
});
96 changes: 55 additions & 41 deletions packages/mcp-server/src/internal/agents/openai-provider.ts
Original file line number Diff line number Diff line change
@@ -1,54 +1,68 @@
import { createOpenAI } from "@ai-sdk/openai";
import type { LanguageModelV1 } from "ai";
import { USER_AGENT } from "../../version";

// Create a default factory with User-Agent header
const defaultFactory = createOpenAI({
headers: {
"User-Agent": "Sentry MCP Server",
},
});
// Default configuration constants
const DEFAULT_OPENAI_MODEL = "gpt-5";
const VALID_REASONING_EFFORTS = ["low", "medium", "high"] as const;
const DEFAULT_REASONING_EFFORT: (typeof VALID_REASONING_EFFORTS)[number] =
"low";

let customFactory: ReturnType<typeof createOpenAI> | null = null;
let defaultModel = "gpt-5";
type ReasoningEffort = (typeof VALID_REASONING_EFFORTS)[number];

// Module-level state for baseURL (set only via explicit configuration, not env vars)
let configuredBaseUrl: string | undefined;

/**
* Configure the OpenAI provider factory.
*
* When a base URL is provided, the factory will use that endpoint for all
* subsequent model requests. Passing undefined resets to the default
* configuration bundled with the SDK.
*
* When a default model is provided, it will be used as the default for all
* subsequent getOpenAIModel() calls. Passing undefined resets to "gpt-5".
* Configure the OpenAI base URL (CLI flag only, not environment variable).
* This must be called explicitly - it cannot be set via environment variables for security.
*/
export function configureOpenAIProvider({
baseUrl,
defaultModel: model,
}: {
baseUrl?: string;
defaultModel?: string;
}): void {
if (baseUrl) {
customFactory = createOpenAI({
baseURL: baseUrl,
headers: {
"User-Agent": "Sentry MCP Server",
},
});
} else {
customFactory = null;
}

if (model !== undefined) {
defaultModel = model;
}
export function setOpenAIBaseUrl(baseUrl: string | undefined): void {
  // Module-level override consumed by getOpenAIModel(); passing undefined
  // resets to the SDK's default endpoint.
  configuredBaseUrl = baseUrl;
}

/**
 * Retrieve an OpenAI language model configured from environment variables and explicit config.
 *
 * Configuration:
 * - OPENAI_MODEL: Model to use (default: "gpt-5") - env var OK
 * - OPENAI_REASONING_EFFORT: Reasoning effort: "low", "medium", "high", or "" to disable (default: "low") - env var OK
 * - Base URL: Must be set via setOpenAIBaseUrl() - NOT from env vars (security risk)
 *
 * @param model - Optional explicit model id; takes precedence over OPENAI_MODEL and the built-in default.
 * @throws Error when OPENAI_REASONING_EFFORT holds an unrecognized value.
 */
export function getOpenAIModel(model?: string): LanguageModelV1 {
  const defaultModel = process.env.OPENAI_MODEL || DEFAULT_OPENAI_MODEL;

  // Handle reasoning effort: empty string explicitly disables it, undefined uses default
  const envReasoningEffort = process.env.OPENAI_REASONING_EFFORT;
  let reasoningEffort: ReasoningEffort | undefined;

  if (envReasoningEffort === "") {
    // Empty string explicitly disables reasoning effort
    reasoningEffort = undefined;
  } else if (envReasoningEffort === undefined) {
    // Not set - use default
    reasoningEffort = DEFAULT_REASONING_EFFORT;
  } else if (
    VALID_REASONING_EFFORTS.includes(envReasoningEffort as ReasoningEffort)
  ) {
    // Valid value
    reasoningEffort = envReasoningEffort as ReasoningEffort;
  } else {
    // Invalid value - provide helpful error with all valid options
    const validValues = VALID_REASONING_EFFORTS.map((v) => `"${v}"`).join(", ");
    throw new Error(
      `Invalid OPENAI_REASONING_EFFORT value: "${envReasoningEffort}". Must be one of: ${validValues}, or "" (empty string to disable). Default is "${DEFAULT_REASONING_EFFORT}".`,
    );
  }

  // Build the provider per call so the current configuredBaseUrl is honored;
  // baseURL is only spread in when a CLI override was supplied.
  const factory = createOpenAI({
    ...(configuredBaseUrl && { baseURL: configuredBaseUrl }),
    headers: {
      "User-Agent": USER_AGENT,
    },
  });

  return factory(model ?? defaultModel, {
    ...(reasoningEffort && { reasoningEffort }),
  });
}
Loading