Skip to content

Commit 0f47974

Browse files
author
Neil Gilbert
committed
list models
1 parent ec3acd7 commit 0f47974

File tree

14 files changed

+252
-71
lines changed

14 files changed

+252
-71
lines changed

README.md

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -59,8 +59,8 @@ dotnet run --project src/CodePunk.Console
5959
CodePunk works with multiple AI providers through a unified interface:
6060

6161
### Supported Providers
62-
- **OpenAI**: GPT-4o, GPT-4o-mini, GPT-3.5-turbo
63-
- **Anthropic**: Claude 3.5 Sonnet, Claude 3.5 Haiku, Claude 3 Opus, Claude 3 Sonnet, Claude 3 Haiku
62+
- **OpenAI**: GPT-4.1, GPT-4.1-mini, GPT-4o, GPT-4o-mini, GPT-3.5-turbo (legacy)
63+
- **Anthropic**: Claude Opus 4.1, Claude Opus 4, Claude Sonnet 4, Claude Sonnet 3.7, Claude Haiku 3.5
6464
- **Local Models**: Ollama, LM Studio integration *(coming soon)*
6565
- **Azure OpenAI**: Enterprise deployments *(coming soon)*
6666

Lines changed: 71 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,71 @@
1+
using CodePunk.Core.Abstractions;
using CodePunk.Core.Services;
using Spectre.Console;
using CodePunk.Console.Themes;

namespace CodePunk.Console.Commands;

/// <summary>
/// Chat command that lists available LLM models (usage: /models [provider]).
/// Attempts each provider's live model listing first and falls back to its
/// static catalog when the live fetch fails or returns nothing.
/// </summary>
public class ModelsChatCommand : ChatCommand
{
    private readonly ILLMService _llm;

    public ModelsChatCommand(ILLMService llm) { _llm = llm; }

    public override string Name => "models";
    public override string Description => "List available models (optionally filter by provider: /models Anthropic)";
    public override string[] Aliases => Array.Empty<string>();

    /// <summary>
    /// Renders a table of models for all providers, or one provider when filtered.
    /// </summary>
    /// <param name="args">Optional first element: provider name filter (case-insensitive).</param>
    /// <param name="cancellationToken">Flows into each provider's live model fetch.</param>
    public override async Task<CommandResult> ExecuteAsync(string[] args, CancellationToken cancellationToken = default)
    {
        var providers = _llm.GetProviders() ?? Array.Empty<ILLMProvider>();
        if (providers.Count == 0)
        {
            AnsiConsole.MarkupLine("[yellow]No providers available. Authenticate first.[/]");
            return CommandResult.Ok("No providers available.");
        }

        string? filter = args.FirstOrDefault();
        if (!string.IsNullOrWhiteSpace(filter))
        {
            providers = providers.Where(p => string.Equals(p.Name, filter, StringComparison.OrdinalIgnoreCase)).ToList();
            if (providers.Count == 0)
            {
                // Fix: the filter is user input and MarkupLine parses Spectre markup,
                // so an argument containing '[' or ']' would throw or render wrongly.
                AnsiConsole.MarkupLine($"[yellow]Provider not found: {Markup.Escape(filter)}[/]");
                return CommandResult.Ok($"Provider not found: {filter}");
            }
        }

        var rows = new List<(string Provider, string Id, string Name, int Ctx, int Max, bool Tools, bool Stream)>();
        foreach (var p in providers.OrderBy(p => p.Name, StringComparer.OrdinalIgnoreCase))
        {
            IReadOnlyList<LLMModel> remote = Array.Empty<LLMModel>();
            // Live listing is best-effort; any provider/network failure falls back
            // to the static catalog rather than failing the whole command.
            try { remote = await p.FetchModelsAsync(cancellationToken); }
            catch (Exception) { /* deliberate: fall back to p.Models below */ }
            var models = (remote != null && remote.Count > 0) ? remote : p.Models;
            foreach (var m in models.OrderBy(m => m.Id, StringComparer.OrdinalIgnoreCase))
                rows.Add((p.Name, m.Id, m.Name, m.ContextWindow, m.MaxTokens, m.SupportsTools, m.SupportsStreaming));
        }

        if (rows.Count == 0)
        {
            AnsiConsole.MarkupLine("[yellow]No models found.[/]");
            return CommandResult.Ok("No models found.");
        }

        // Escape the filter in the title too — assumes ConsoleStyles.PanelTitle does
        // not already escape its argument (NOTE(review): confirm against Themes code).
        var table = new Table().RoundedBorder().Title(ConsoleStyles.PanelTitle(filter == null ? "Models" : $"Models ({Markup.Escape(filter)})"));
        table.AddColumn("Provider");
        table.AddColumn("Model Id");
        table.AddColumn("Name");
        table.AddColumn(new TableColumn("Ctx").Centered());
        table.AddColumn(new TableColumn("Max").Centered());
        table.AddColumn(new TableColumn("Tools").Centered());
        table.AddColumn(new TableColumn("Stream").Centered());
        foreach (var r in rows)
        {
            // Fix: Table.AddRow(string) parses markup, and model ids/names come from
            // a remote service — escape them so bracket characters cannot break rendering.
            table.AddRow(
                ConsoleStyles.Accent(r.Provider),
                Markup.Escape(r.Id),
                Markup.Escape(r.Name),
                r.Ctx.ToString(),
                r.Max.ToString(),
                r.Tools ? "[green]✓[/]" : "[grey]-[/]",
                r.Stream ? "[green]✓[/]" : "[grey]-[/]");
        }
        AnsiConsole.Write(table);
        AnsiConsole.WriteLine();
        return CommandResult.Ok(filter == null ? $"Models listed: {rows.Count}" : $"Models listed for {filter}: {rows.Count}");
    }
}

src/CodePunk.Console/Commands/RootCommandFactory.cs

Lines changed: 11 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -249,8 +249,18 @@ private static Command BuildModels(IServiceProvider services)
249249
var providers = llm.GetProviders() ?? Array.Empty<ILLMProvider>();
250250
var rows = new List<(string Provider,string Id,string Name,int Context,int MaxTokens,bool Tools,bool Streaming)>();
251251
foreach (var p in providers.OrderBy(p => p.Name, StringComparer.OrdinalIgnoreCase))
252-
foreach (var m in p.Models.OrderBy(m => m.Id, StringComparer.OrdinalIgnoreCase))
252+
{
253+
IReadOnlyList<CodePunk.Core.Abstractions.LLMModel> remote = Array.Empty<CodePunk.Core.Abstractions.LLMModel>();
254+
try
255+
{
256+
remote = await (p.FetchModelsAsync());
257+
}
258+
catch { }
259+
260+
var models = (remote != null && remote.Count > 0) ? remote : p.Models;
261+
foreach (var m in models.OrderBy(m => m.Id, StringComparer.OrdinalIgnoreCase))
253262
rows.Add((p.Name, m.Id, m.Name, m.ContextWindow, m.MaxTokens, m.SupportsTools, m.SupportsStreaming));
263+
}
254264
var writer = ctx.Console.Out;
255265
if (json)
256266
{

src/CodePunk.Console/Program.cs

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -63,6 +63,7 @@
6363
builder.Services.AddTransient<ChatCommand, LoadCommand>();
6464
builder.Services.AddTransient<ChatCommand, UseCommand>();
6565
builder.Services.AddTransient<ChatCommand, UsageCommand>();
66+
builder.Services.AddTransient<ChatCommand, ModelsChatCommand>();
6667
builder.Services.AddSingleton<CommandProcessor>();
6768

6869
var host = builder.Build();

src/CodePunk.Core/Abstractions/ILLMProvider.cs

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -18,6 +18,11 @@ public interface ILLMProvider
1818
/// </summary>
1919
IReadOnlyList<LLMModel> Models { get; }
2020

21+
/// <summary>
22+
/// Optionally fetch live model list from the provider. Implementations may return an empty list or throw on error.
23+
/// </summary>
24+
Task<IReadOnlyList<LLMModel>> FetchModelsAsync(CancellationToken cancellationToken = default);
25+
2126
/// <summary>
2227
/// Send a non-streaming request to the LLM
2328
/// </summary>

src/CodePunk.Core/Providers/Anthropic/AnthropicConfiguration.cs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -6,7 +6,7 @@ public class AnthropicConfiguration
66
{
77
public string ApiKey { get; set; } = string.Empty;
88
public string BaseUrl { get; set; } = "https://api.anthropic.com/v1";
9-
public string DefaultModel { get; set; } = AnthropicModels.Claude35Sonnet;
9+
public string DefaultModel { get; set; } = AnthropicModels.ClaudeOpus41;
1010
public int MaxTokens { get; set; } = 4096;
1111
public double Temperature { get; set; } = 0.7;
1212
public TimeSpan Timeout { get; set; } = TimeSpan.FromMinutes(2);

src/CodePunk.Core/Providers/Anthropic/AnthropicModels.cs

Lines changed: 11 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -4,19 +4,20 @@ namespace CodePunk.Core.Providers.Anthropic;
44

55
/// <summary>
/// Canonical Anthropic model ids and their capability table.
/// Ids are dated snapshots, so behavior is pinned to a specific model version.
/// </summary>
public static class AnthropicModels
{
    // Latest 5 primary Anthropic model snapshots (Sept 2025)
    public const string ClaudeOpus41 = "claude-opus-4-1-20250805";
    public const string ClaudeOpus4 = "claude-opus-4-20250514";
    public const string ClaudeSonnet4 = "claude-sonnet-4-20250514";
    public const string Claude37Sonnet = "claude-3-7-sonnet-20250219";
    public const string Claude35Haiku = "claude-3-5-haiku-20241022";

    // Capability lookup keyed by model id.
    // NOTE(review): 200000 looks like the *context window* size rather than the
    // maximum output tokens, despite the parameter being named MaxTokens —
    // confirm the intended semantics of ModelCapabilities.MaxTokens at its other
    // usages before relying on it for output-limit enforcement.
    public static readonly Dictionary<string, ModelCapabilities> Capabilities = new()
    {
        [ClaudeOpus41] = new(MaxTokens: 200000, SupportsStreaming: true, SupportsTools: true),
        [ClaudeOpus4] = new(MaxTokens: 200000, SupportsStreaming: true, SupportsTools: true),
        [ClaudeSonnet4] = new(MaxTokens: 200000, SupportsStreaming: true, SupportsTools: true), // 1M beta context available via header
        [Claude37Sonnet] = new(MaxTokens: 200000, SupportsStreaming: true, SupportsTools: true),
        [Claude35Haiku] = new(MaxTokens: 200000, SupportsStreaming: true, SupportsTools: true)
    };
}
2223

src/CodePunk.Core/Providers/Anthropic/AnthropicProvider.cs

Lines changed: 24 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -35,6 +35,15 @@ public AnthropicProvider(HttpClient httpClient, AnthropicConfiguration config, I
3535
Models = CreateModels();
3636
}
3737

38+
/// <summary>
/// Returns the model catalog for Anthropic. No live model-listing call is made
/// here yet, so this resolves immediately with the static <c>Models</c> list.
/// </summary>
public Task<IReadOnlyList<LLMModel>> FetchModelsAsync(CancellationToken cancellationToken = default)
{
    // Defensive null-coalesce: Models is assigned in the constructor, but an
    // empty list is a safe fallback either way.
    IReadOnlyList<LLMModel> catalog = Models ?? Array.Empty<LLMModel>();
    return Task.FromResult(catalog);
}
46+
3847
public async Task<LLMResponse> SendAsync(LLMRequest request, CancellationToken cancellationToken = default)
3948
{
4049
try
/// <summary>
/// Maps an Anthropic model id to its human-readable display name.
/// Unknown ids pass through unchanged.
/// </summary>
private string GetModelDisplayName(string modelId)
{
    switch (modelId)
    {
        case AnthropicModels.ClaudeOpus41: return "Claude Opus 4.1";
        case AnthropicModels.ClaudeOpus4: return "Claude Opus 4";
        case AnthropicModels.ClaudeSonnet4: return "Claude Sonnet 4";
        case AnthropicModels.Claude37Sonnet: return "Claude Sonnet 3.7";
        case AnthropicModels.Claude35Haiku: return "Claude Haiku 3.5";
        default: return modelId;
    }
}
/// <summary>
/// Input (prompt) token cost in USD per 1M tokens for the given model id;
/// returns 0 for unknown ids.
/// </summary>
private decimal GetModelInputCost(string modelId)
{
    // Prices for the Sept 2025 Anthropic model snapshots.
    // NOTE(review): verify against the current Anthropic pricing page before
    // using these figures for billing or cost reporting.
    return modelId switch
    {
        AnthropicModels.ClaudeOpus41 => 15.00m,
        AnthropicModels.ClaudeOpus4 => 15.00m,
        AnthropicModels.ClaudeSonnet4 => 3.00m,
        AnthropicModels.Claude37Sonnet => 3.00m,
        AnthropicModels.Claude35Haiku => 0.80m,
        _ => 0m
    };
}
/// <summary>
/// Output (completion) token cost in USD per 1M tokens for the given model id;
/// returns 0 for unknown ids.
/// </summary>
private decimal GetModelOutputCost(string modelId)
{
    // Prices for the Sept 2025 Anthropic model snapshots.
    // Fix: the previous values (18.75 / 3.75 / 1.00) were Anthropic's prompt-cache
    // WRITE rates, not output-token rates, which understated output cost ~4-5x.
    // Published output pricing: Opus $75, Sonnet $15, Haiku 3.5 $4 per MTok.
    return modelId switch
    {
        AnthropicModels.ClaudeOpus41 => 75.00m,
        AnthropicModels.ClaudeOpus4 => 75.00m,
        AnthropicModels.ClaudeSonnet4 => 15.00m,
        AnthropicModels.Claude37Sonnet => 15.00m,
        AnthropicModels.Claude35Haiku => 4.00m,
        _ => 0m
    };
}

src/CodePunk.Core/Providers/OpenAIProvider.cs

Lines changed: 70 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -19,11 +19,35 @@ public class OpenAIProvider : ILLMProvider
1919

2020
public IReadOnlyList<LLMModel> Models { get; } = new[]
2121
{
22+
new LLMModel
23+
{
24+
Id = "gpt-4.1",
25+
Name = "GPT-4.1",
26+
Description = "Latest flagship GPT-4 generation model",
27+
MaxTokens = 4096,
28+
ContextWindow = 128000,
29+
CostPerInputToken = 0m,
30+
CostPerOutputToken = 0m,
31+
SupportsTools = true,
32+
SupportsStreaming = true
33+
},
34+
new LLMModel
35+
{
36+
Id = "gpt-4.1-mini",
37+
Name = "GPT-4.1 Mini",
38+
Description = "Smaller, efficient 4.1 family model",
39+
MaxTokens = 4096,
40+
ContextWindow = 128000,
41+
CostPerInputToken = 0m,
42+
CostPerOutputToken = 0m,
43+
SupportsTools = true,
44+
SupportsStreaming = true
45+
},
2246
new LLMModel
2347
{
2448
Id = "gpt-4o",
2549
Name = "GPT-4o",
26-
Description = "Most capable GPT-4 model",
50+
Description = "Multimodal GPT-4o model",
2751
MaxTokens = 4096,
2852
ContextWindow = 128000,
2953
CostPerInputToken = 0.005m / 1000,
@@ -35,7 +59,7 @@ public class OpenAIProvider : ILLMProvider
3559
{
3660
Id = "gpt-4o-mini",
3761
Name = "GPT-4o Mini",
38-
Description = "Faster, cheaper GPT-4 model",
62+
Description = "Fast, low-cost GPT-4o variant",
3963
MaxTokens = 4096,
4064
ContextWindow = 128000,
4165
CostPerInputToken = 0.00015m / 1000,
@@ -46,8 +70,8 @@ public class OpenAIProvider : ILLMProvider
4670
new LLMModel
4771
{
4872
Id = "gpt-3.5-turbo",
49-
Name = "GPT-3.5 Turbo",
50-
Description = "Legacy GPT-3.5 model",
73+
Name = "GPT-3.5 Turbo (Legacy)",
74+
Description = "Legacy model (kept for backwards compatibility)",
5175
MaxTokens = 4096,
5276
ContextWindow = 16385,
5377
CostPerInputToken = 0.0015m / 1000,
@@ -86,6 +110,37 @@ public OpenAIProvider(HttpClient httpClient, LLMProviderConfig config)
86110
};
87111
}
88112

113+
/// <summary>
/// Fetches the live model list from OpenAI's GET /models endpoint.
/// Never throws: any transport or deserialization failure yields an empty list,
/// letting callers fall back to the static <c>Models</c> catalog.
/// </summary>
public async Task<IReadOnlyList<LLMModel>> FetchModelsAsync(CancellationToken cancellationToken = default)
{
    try
    {
        var resp = await _httpClient.GetFromJsonAsync<OpenAIModelList>("models", _jsonOptions, cancellationToken);
        if (resp?.Data == null) return Array.Empty<LLMModel>();

        // The /models endpoint returns only id/ownership metadata, so every
        // capability and limit below is a placeholder, not real data:
        //  - NOTE(review): "Purpose" is not a documented field of this endpoint
        //    (documented fields: id, object, created, owned_by), so it is likely
        //    always null here — confirm before relying on Description.
        //  - NOTE(review): ContextWindow/MaxTokens = 4096 and
        //    SupportsTools/SupportsStreaming = true are guesses; many listed
        //    models (embeddings, audio, etc.) will not match them.
        var list = resp.Data.Select(d => new LLMModel
        {
            Id = d.Id,
            Name = d.Id,
            Description = d.Purpose ?? string.Empty,
            MaxTokens = 4096,
            ContextWindow = 4096,
            CostPerInputToken = 0m,
            CostPerOutputToken = 0m,
            SupportsTools = true,
            SupportsStreaming = true
        }).ToList();

        return list;
    }
    catch
    {
        // Deliberate catch-all: model listing is best-effort; any failure
        // degrades to the static catalog rather than surfacing an error.
        return Array.Empty<LLMModel>();
    }
}
143+
89144
public async Task<LLMResponse> SendAsync(LLMRequest request, CancellationToken cancellationToken = default)
90145
{
91146
var openAIRequest = ConvertToOpenAIRequest(request, stream: false);
@@ -325,6 +380,17 @@ private record OpenAIChatStreamResponse
325380
public OpenAIChatChoice[]? Choices { get; init; }
326381
}
327382

383+
// Minimal wire types for deserializing OpenAI's GET /models response.
private record OpenAIModelList
{
    // The "data" array of model entries.
    public OpenAIModel[]? Data { get; init; }
}

private record OpenAIModel
{
    // Model identifier, e.g. "gpt-4o".
    public string Id { get; init; } = string.Empty;
    // NOTE(review): "purpose" is not among the documented /models response fields
    // (id, object, created, owned_by); expect this to deserialize as null — confirm
    // against a live response before using it.
    public string? Purpose { get; init; }
}
393+
328394
private record OpenAIChatChoice
329395
{
330396
public int Index { get; init; }

src/CodePunk.Infrastructure/Configuration/ServiceCollectionExtensions.cs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -113,7 +113,7 @@ public static IServiceCollection AddLLMProviders(
113113
{
114114
ApiKey = anthropicApiKey,
115115
BaseUrl = configuration["AI:Providers:Anthropic:BaseUrl"] ?? "https://api.anthropic.com/v1",
116-
DefaultModel = configuration["AI:Providers:Anthropic:DefaultModel"] ?? AnthropicModels.Claude35Sonnet,
116+
DefaultModel = configuration["AI:Providers:Anthropic:DefaultModel"] ?? AnthropicModels.ClaudeOpus41,
117117
MaxTokens = configuration.GetValue("AI:Providers:Anthropic:MaxTokens", 4096),
118118
Temperature = configuration.GetValue("AI:Providers:Anthropic:Temperature", 0.7),
119119
Version = configuration["AI:Providers:Anthropic:Version"] ?? "2023-06-01"

0 commit comments

Comments
 (0)