Skip to content

Commit c945265

Browse files
Add configurable default for sampling MaxTokens (#923)
* Initial plan

* Add DefaultSamplingMaxTokens property to McpServerOptions

Co-authored-by: MackinnonBuck <10456961+MackinnonBuck@users.noreply.github.com>

* Add test to verify DefaultSamplingMaxTokens is respected

Co-authored-by: MackinnonBuck <10456961+MackinnonBuck@users.noreply.github.com>

* Merge test into existing SampleAsync_Messages_Forwards_To_McpServer_SendRequestAsync

Co-authored-by: MackinnonBuck <10456961+MackinnonBuck@users.noreply.github.com>

* Update src/ModelContextProtocol.Core/Server/McpServerOptions.cs

---------

Co-authored-by: copilot-swe-agent[bot] <198982749+Copilot@users.noreply.github.com>
Co-authored-by: MackinnonBuck <10456961+MackinnonBuck@users.noreply.github.com>
Co-authored-by: Mackinnon Buck <mackinnon.buck@gmail.com>
1 parent f4cbc98 commit c945265

File tree

3 files changed

+31
-1
lines changed

3 files changed

+31
-1
lines changed

src/ModelContextProtocol.Core/Server/McpServer.Methods.cs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -153,7 +153,7 @@ public async Task<ChatResponse> SampleAsync(
153153
var result = await SampleAsync(new()
154154
{
155155
Messages = samplingMessages,
156-
MaxTokens = options?.MaxOutputTokens ?? int.MaxValue,
156+
MaxTokens = options?.MaxOutputTokens ?? ServerOptions.DefaultSamplingMaxTokens,
157157
StopSequences = options?.StopSequences?.ToArray(),
158158
SystemPrompt = systemPrompt?.ToString(),
159159
Temperature = options?.Temperature,

src/ModelContextProtocol.Core/Server/McpServerOptions.cs

Lines changed: 13 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -152,4 +152,17 @@ public McpServerHandlers Handlers
152152
/// </para>
153153
/// </remarks>
154154
public McpServerPrimitiveCollection<McpServerPrompt>? PromptCollection { get; set; }
155+
156+
/// <summary>
157+
/// Gets or sets the default maximum number of tokens to use for sampling requests when not explicitly specified.
158+
/// </summary>
159+
/// <remarks>
160+
/// <para>
161+
/// This value is used when <see cref="Microsoft.Extensions.AI.ChatOptions.MaxOutputTokens"/> is not set in the request options.
162+
/// </para>
163+
/// <para>
164+
/// The default value is 1000 tokens.
165+
/// </para>
166+
/// </remarks>
167+
public int DefaultSamplingMaxTokens { get; set; } = 1000;
155168
}

tests/ModelContextProtocol.Tests/Server/McpServerExtensionsTests.cs

Lines changed: 17 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -125,12 +125,25 @@ public async Task SampleAsync_Messages_Forwards_To_McpServer_SendRequestAsync()
125125
StopReason = "endTurn",
126126
};
127127

128+
const int customDefaultMaxTokens = 500;
129+
128130
mockServer
129131
.Setup(s => s.ClientCapabilities)
130132
.Returns(new ClientCapabilities() { Sampling = new() });
131133

134+
mockServer
135+
.Setup(s => s.ServerOptions)
136+
.Returns(new McpServerOptions { DefaultSamplingMaxTokens = customDefaultMaxTokens });
137+
138+
CreateMessageRequestParams? capturedRequest = null;
132139
mockServer
133140
.Setup(s => s.SendRequestAsync(It.IsAny<JsonRpcRequest>(), It.IsAny<CancellationToken>()))
141+
.Callback<JsonRpcRequest, CancellationToken>((request, _) =>
142+
{
143+
capturedRequest = JsonSerializer.Deserialize<CreateMessageRequestParams>(
144+
request.Params ?? throw new InvalidOperationException(),
145+
McpJsonUtilities.DefaultOptions);
146+
})
134147
.ReturnsAsync(new JsonRpcResponse
135148
{
136149
Id = default,
@@ -146,6 +159,10 @@ public async Task SampleAsync_Messages_Forwards_To_McpServer_SendRequestAsync()
146159
Assert.Equal(ChatRole.Assistant, last.Role);
147160
Assert.Equal("resp", last.Text);
148161
mockServer.Verify(s => s.SendRequestAsync(It.IsAny<JsonRpcRequest>(), It.IsAny<CancellationToken>()), Times.Once);
162+
163+
// Verify that the default value was used
164+
Assert.NotNull(capturedRequest);
165+
Assert.Equal(customDefaultMaxTokens, capturedRequest.MaxTokens);
149166
}
150167

151168
[Fact]

0 commit comments

Comments (0)