Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion packages/app/control/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@
"version": "0.1.0",
"private": true,
"scripts": {
"predev": "docker-compose -f docker-local-db.yml up -d && sleep 3 && prisma generate && prisma migrate deploy",
"predev": "docker compose -f docker-local-db.yml up -d && sleep 3 && prisma generate && prisma migrate deploy",
"dev": "next dev --turbopack",
"prebuild": "prisma generate",
"build": "next build",
Expand Down
80 changes: 80 additions & 0 deletions packages/app/server/src/providers/GroqProvider.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,80 @@
import { LlmTransactionMetadata, Transaction } from '../types';
import { getCostPerToken } from '../services/AccountingService';
import { BaseProvider } from './BaseProvider';
import { ProviderType } from './ProviderType';
import { CompletionStateBody, parseSSEGPTFormat } from './GPTProvider';
import logger from '../logger';

export class GroqProvider extends BaseProvider {
private readonly GROQ_BASE_URL = 'https://api.groq.com/openai/v1';

getType(): ProviderType {
return ProviderType.GROQ;
}

getBaseUrl(): string {
return this.GROQ_BASE_URL;
}

getApiKey(): string | undefined {
return process.env.GROQ_API_KEY;
}

override supportsStream(): boolean {
return true;
}

async handleBody(data: string): Promise<Transaction> {
try {
let prompt_tokens = 0;
let completion_tokens = 0;
let total_tokens = 0;
let providerId = 'null';

if (this.getIsStream()) {
const chunks = parseSSEGPTFormat(data);

for (const chunk of chunks) {
if (chunk.usage !== null) {
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Suggested change
if (chunk.usage !== null) {
if (chunk.usage && chunk.usage !== null) {

The code checks if (chunk.usage !== null) but chunk.usage could be undefined, which would cause a TypeError at runtime.

View Details

Analysis

TypeError in GroqProvider.handleBody() when accessing undefined usage properties

What fails: GroqProvider.handleBody() checks if (chunk.usage !== null) but crashes when chunk.usage is undefined, attempting to access chunk.usage.prompt_tokens

How to reproduce:

// When streaming chunk has missing usage property (undefined)
const chunk = { id: "test", choices: [] }; // usage property omitted
if (chunk.usage !== null) { // undefined !== null is true 
  chunk.usage.prompt_tokens; // TypeError: Cannot read properties of undefined
}

Result: TypeError: Cannot read properties of undefined (reading 'prompt_tokens')

Expected: Should safely skip undefined usage like OpenRouterProvider.ts does with if (chunk.usage && chunk.usage !== null)

prompt_tokens += chunk.usage.prompt_tokens;
completion_tokens += chunk.usage.completion_tokens;
total_tokens += chunk.usage.total_tokens;
}
providerId = chunk.id || 'null';
}
} else {
const parsed = JSON.parse(data) as CompletionStateBody;
prompt_tokens += parsed.usage.prompt_tokens;
completion_tokens += parsed.usage.completion_tokens;
total_tokens += parsed.usage.total_tokens;
providerId = parsed.id || 'null';
}

const cost = getCostPerToken(
this.getModel(),
prompt_tokens,
completion_tokens
);

const metadata: LlmTransactionMetadata = {
providerId: providerId,
provider: this.getType(),
model: this.getModel(),
inputTokens: prompt_tokens,
outputTokens: completion_tokens,
totalTokens: total_tokens,
};

const transaction: Transaction = {
rawTransactionCost: cost,
metadata: metadata,
status: 'success',
};

return transaction;
} catch (error) {
logger.error(`Error processing data: ${error}`);
throw error;
}
}
}
6 changes: 6 additions & 0 deletions packages/app/server/src/providers/ProviderFactory.ts
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,7 @@ import type { BaseProvider } from './BaseProvider';
import { GeminiGPTProvider } from './GeminiGPTProvider';
import { GeminiProvider } from './GeminiProvider';
import { OpenAIVideoProvider } from './OpenAIVideoProvider';
import { GroqProvider } from './GroqProvider';
import {
GeminiVeoProvider,
PROXY_PASSTHROUGH_ONLY_MODEL as GeminiVeoProxyPassthroughOnlyModel,
Expand Down Expand Up @@ -48,6 +49,9 @@ const createChatModelToProviderMapping = (): Record<string, ProviderType> => {
case 'OpenRouter':
mapping[modelConfig.model_id] = ProviderType.OPENROUTER;
break;
case 'Groq':
mapping[modelConfig.model_id] = ProviderType.GROQ;
break;
// Add other providers as needed
default:
// Skip models with unsupported providers
Expand Down Expand Up @@ -178,6 +182,8 @@ export const getProvider = (
return new VertexAIProvider(stream, model);
case ProviderType.OPENAI_VIDEOS:
return new OpenAIVideoProvider(stream, model);
case ProviderType.GROQ:
return new GroqProvider(stream, model);
default:
throw new Error(`Unknown provider type: ${type}`);
}
Expand Down
1 change: 1 addition & 0 deletions packages/app/server/src/providers/ProviderType.ts
Original file line number Diff line number Diff line change
Expand Up @@ -10,4 +10,5 @@ export enum ProviderType {
OPENROUTER = 'OPENROUTER',
OPENAI_IMAGES = 'OPENAI_IMAGES',
OPENAI_VIDEOS = 'OPENAI_VIDEOS',
GROQ = 'GROQ',
}
2 changes: 2 additions & 0 deletions packages/app/server/src/services/AccountingService.ts
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@ import {
AnthropicModels,
GeminiModels,
OpenRouterModels,
GroqModels,
OpenAIImageModels,
SupportedOpenAIResponseToolPricing,
SupportedModel,
Expand All @@ -26,6 +27,7 @@ export const ALL_SUPPORTED_MODELS: SupportedModel[] = [
...AnthropicModels,
...GeminiModels,
...OpenRouterModels,
...GroqModels,
];

// Handle image models separately since they have different pricing structure
Expand Down
10 changes: 10 additions & 0 deletions packages/sdk/next/src/ai-providers/groq.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,10 @@
import { getEchoToken } from '../auth/token-manager';
import {
createEchoGroq as createEchoGroqBase,
EchoConfig,
GroqProvider,
} from '@merit-systems/echo-typescript-sdk';

export function createEchoGroq(config: EchoConfig): GroqProvider {
return createEchoGroqBase(config, async () => getEchoToken(config));
}
1 change: 1 addition & 0 deletions packages/sdk/next/src/ai-providers/index.ts
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
export * from './anthropic';
export * from './google';
export * from './groq';
export * from './openai';
2 changes: 2 additions & 0 deletions packages/sdk/next/src/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,7 @@ import {
handleSignOut,
handleSession,
} from './auth/oauth-handlers';
import { createEchoGroq } from 'ai-providers/groq';

/**
* Echo SDK for Next.js
Expand Down Expand Up @@ -112,5 +113,6 @@ export default function Echo(config: EchoConfig): EchoResult {
openai: createEchoOpenAI(config),
anthropic: createEchoAnthropic(config),
google: createEchoGoogle(config),
groq: createEchoGroq(config),
};
}
1 change: 1 addition & 0 deletions packages/sdk/ts/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -57,6 +57,7 @@
"dependencies": {
"@ai-sdk/anthropic": "2.0.17",
"@ai-sdk/google": "2.0.14",
"@ai-sdk/groq": "2.0.17",
"@ai-sdk/openai": "2.0.32",
"@openrouter/ai-sdk-provider": "1.2.0",
"ai": "5.0.47"
Expand Down
2 changes: 2 additions & 0 deletions packages/sdk/ts/src/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -44,6 +44,8 @@ export { GeminiModels } from './supported-models/chat/gemini';
export type { GeminiModel } from './supported-models/chat/gemini';
export { OpenRouterModels } from './supported-models/chat/openrouter';
export type { OpenRouterModel } from './supported-models/chat/openrouter';
export { GroqModels } from './supported-models/chat/groq';
export type { GroqModel } from './supported-models/chat/groq';
export { OpenAIImageModels } from './supported-models/image/openai';
export type { OpenAIImageModel } from './supported-models/image/openai';
export { GeminiVideoModels } from './supported-models/video/gemini';
Expand Down
23 changes: 23 additions & 0 deletions packages/sdk/ts/src/providers/groq.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,23 @@
import { createGroq as createGroqBase, GroqProvider } from '@ai-sdk/groq';
import { ROUTER_BASE_URL } from 'config';
import { EchoConfig } from '../types';
import { validateAppId } from '../utils/validation';
import { echoFetch } from './index';

/**
 * Build a Groq provider instance routed through the Echo proxy.
 *
 * The static `apiKey` is a placeholder only: `echoFetch` swaps in the real
 * bearer token (fetched per-app via `getTokenFn`) on every request.
 *
 * @param config - Echo config; `baseRouterUrl` defaults to the router URL.
 * @param getTokenFn - Resolves the auth token for the given app id.
 * @param onInsufficientFunds - Optional callback invoked on balance errors.
 */
export function createEchoGroq(
  { appId, baseRouterUrl = ROUTER_BASE_URL }: EchoConfig,
  getTokenFn: (appId: string) => Promise<string | null>,
  onInsufficientFunds?: () => void
): GroqProvider {
  validateAppId(appId, 'createEchoGroq');

  const resolveToken = () => getTokenFn(appId);

  return createGroqBase({
    baseURL: baseRouterUrl,
    apiKey: 'placeholder_replaced_by_echoFetch',
    fetch: echoFetch(fetch, resolveToken, onInsufficientFunds),
  });
}
2 changes: 2 additions & 0 deletions packages/sdk/ts/src/providers/index.ts
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
export * from './anthropic';
export * from './google';
export * from './groq';
export * from './openai';
export * from './openrouter';

Expand Down Expand Up @@ -57,5 +58,6 @@ export function echoFetch(
// re-export the underlying types so that next doesn't need to depend on provider specific types
export { type AnthropicProvider } from '@ai-sdk/anthropic';
export { type GoogleGenerativeAIProvider } from '@ai-sdk/google';
export { type GroqProvider } from '@ai-sdk/groq';
export { type OpenAIProvider } from '@ai-sdk/openai';
export { type OpenRouterProvider } from '@openrouter/ai-sdk-provider';
98 changes: 98 additions & 0 deletions packages/sdk/ts/src/supported-models/chat/groq.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,98 @@
import { SupportedModel } from '../types';

// Groq model IDs
// NOTE(review): several IDs here differ from Groq's published catalog, which
// namespaces many models (e.g. `meta-llama/llama-4-scout-...`,
// `openai/gpt-oss-20b`, `qwen/qwen3-32b`, `moonshotai/kimi-k2-...`), and some
// older models (`llama3-*`, `mixtral-8x7b-32768`) have been deprecated by
// Groq — verify each ID against https://console.groq.com/docs/models before
// relying on this list for routing.
export type GroqModel =
  | 'llama3-8b-8192'
  | 'llama3-70b-8192'
  | 'mixtral-8x7b-32768'
  | 'gemma2-9b-it'
  | 'llama-3.1-8b-instant'
  | 'llama-3.3-70b-versatile'
  | 'llama-4-scout'
  | 'llama-4-maverick'
  | 'llama-guard-4-12b'
  | 'qwen3-32b'
  | 'gpt-oss-20b'
  | 'gpt-oss-120b'
  | 'kimi-k2-0905-1t';

// Pricing table consumed by AccountingService (merged into
// ALL_SUPPORTED_MODELS). Costs are USD per single token, i.e. the advertised
// per-million-token price divided by 1e6 (e.g. 0.00000005 == $0.05 / 1M).
// NOTE(review): prices are hard-coded snapshots — confirm against Groq's
// current pricing page when models are added or repriced.
export const GroqModels: SupportedModel[] = [
  {
    model_id: 'llama3-8b-8192',
    input_cost_per_token: 0.00000005,
    output_cost_per_token: 0.00000008,
    provider: 'Groq',
  },
  {
    model_id: 'llama3-70b-8192',
    input_cost_per_token: 0.00000027,
    output_cost_per_token: 0.00000027,
    provider: 'Groq',
  },
  {
    model_id: 'mixtral-8x7b-32768',
    input_cost_per_token: 0.00000027,
    output_cost_per_token: 0.00000027,
    provider: 'Groq',
  },
  {
    model_id: 'gemma2-9b-it',
    input_cost_per_token: 0.00000007,
    output_cost_per_token: 0.00000007,
    provider: 'Groq',
  },
  {
    model_id: 'llama-3.1-8b-instant',
    input_cost_per_token: 0.00000005,
    output_cost_per_token: 0.00000008,
    provider: 'Groq',
  },
  {
    model_id: 'llama-3.3-70b-versatile',
    input_cost_per_token: 0.00000059,
    output_cost_per_token: 0.00000079,
    provider: 'Groq',
  },
  {
    model_id: 'llama-4-scout',
    input_cost_per_token: 0.00000011,
    output_cost_per_token: 0.00000034,
    provider: 'Groq',
  },
  {
    model_id: 'llama-4-maverick',
    input_cost_per_token: 0.0000002,
    output_cost_per_token: 0.0000006,
    provider: 'Groq',
  },
  {
    model_id: 'llama-guard-4-12b',
    input_cost_per_token: 0.0000002,
    output_cost_per_token: 0.0000002,
    provider: 'Groq',
  },
  {
    model_id: 'qwen3-32b',
    input_cost_per_token: 0.00000029,
    output_cost_per_token: 0.00000059,
    provider: 'Groq',
  },
  {
    model_id: 'gpt-oss-20b',
    input_cost_per_token: 0.000000075,
    output_cost_per_token: 0.0000003,
    provider: 'Groq',
  },
  {
    model_id: 'gpt-oss-120b',
    input_cost_per_token: 0.00000015,
    output_cost_per_token: 0.0000006,
    provider: 'Groq',
  },
  {
    model_id: 'kimi-k2-0905-1t',
    input_cost_per_token: 0.000001,
    output_cost_per_token: 0.000003,
    provider: 'Groq',
  },
];