1 change: 0 additions & 1 deletion apps/sim/blocks/blocks/agent.ts
@@ -14,7 +14,6 @@ import {
supportsTemperature,
} from '@/providers/utils'

// Get current Ollama models dynamically
const getCurrentOllamaModels = () => {
return useProvidersStore.getState().providers.ollama.models
}
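Review note: every block touched by this PR reads the local Ollama model list straight from the providers store via `getState()`. A minimal sketch of that pattern, using a simplified stand-in for the real store in '@/stores/providers/store' (the actual store shape may differ):

```ts
// Simplified stand-in for '@/stores/providers/store' (illustration only;
// the real store tracks more provider state than shown here).
import { create } from 'zustand'

interface ProvidersState {
  providers: {
    ollama: { models: string[] }
    openrouter: { models: string[] }
  }
}

const useProvidersStore = create<ProvidersState>(() => ({
  providers: {
    ollama: { models: [] },
    openrouter: { models: [] },
  },
}))

// getState() returns a plain snapshot, so this works outside React components
// (e.g. inside block config factories) but is not reactive: the list reflects
// whatever models were loaded at call time.
const getCurrentOllamaModels = (): string[] =>
  useProvidersStore.getState().providers.ollama.models
```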
78 changes: 69 additions & 9 deletions apps/sim/blocks/blocks/evaluator.ts
@@ -3,12 +3,22 @@ import { isHosted } from '@/lib/environment'
import { createLogger } from '@/lib/logs/console/logger'
import type { BlockConfig, ParamType } from '@/blocks/types'
import type { ProviderId } from '@/providers/types'
import { getAllModelProviders, getBaseModelProviders, getHostedModels } from '@/providers/utils'
import {
getAllModelProviders,
getBaseModelProviders,
getHostedModels,
getProviderIcon,
providers,
} from '@/providers/utils'
import { useProvidersStore } from '@/stores/providers/store'
import type { ToolResponse } from '@/tools/types'

const logger = createLogger('EvaluatorBlock')

const getCurrentOllamaModels = () => {
return useProvidersStore.getState().providers.ollama.models
}

interface Metric {
name: string
description: string
@@ -173,16 +183,21 @@ export const EvaluatorBlock: BlockConfig<EvaluatorResponse> = {
{
id: 'model',
title: 'Model',
type: 'dropdown',
type: 'combobox',
layout: 'half',
placeholder: 'Type or select a model...',
required: true,
options: () => {
const ollamaModels = useProvidersStore.getState().providers.ollama.models
const providersState = useProvidersStore.getState()
const ollamaModels = providersState.providers.ollama.models
const openrouterModels = providersState.providers.openrouter.models
const baseModels = Object.keys(getBaseModelProviders())
return [...baseModels, ...ollamaModels].map((model) => ({
label: model,
id: model,
}))
const allModels = Array.from(new Set([...baseModels, ...ollamaModels, ...openrouterModels]))

return allModels.map((model) => {
const icon = getProviderIcon(model)
return { label: model, id: model, ...(icon && { icon }) }
})
},
},
{
@@ -198,9 +213,48 @@ export const EvaluatorBlock: BlockConfig<EvaluatorResponse> = {
? {
field: 'model',
value: getHostedModels(),
not: true,
not: true, // Show for all models EXCEPT those listed
}
: undefined,
: () => ({
field: 'model',
value: getCurrentOllamaModels(),
not: true, // Show for all models EXCEPT Ollama models
}),
},
{
id: 'azureEndpoint',
title: 'Azure OpenAI Endpoint',
type: 'short-input',
layout: 'full',
password: true,
placeholder: 'https://your-resource.openai.azure.com',
connectionDroppable: false,
condition: {
field: 'model',
value: providers['azure-openai'].models,
},
},
{
id: 'azureApiVersion',
title: 'Azure API Version',
type: 'short-input',
layout: 'full',
placeholder: '2024-07-01-preview',
connectionDroppable: false,
condition: {
field: 'model',
value: providers['azure-openai'].models,
},
},
{
id: 'temperature',
title: 'Temperature',
type: 'slider',
layout: 'half',
min: 0,
max: 2,
value: () => '0.1',
hidden: true,
},
{
id: 'systemPrompt',
@@ -310,6 +364,12 @@ export const EvaluatorBlock: BlockConfig<EvaluatorResponse> = {
},
model: { type: 'string' as ParamType, description: 'AI model to use' },
apiKey: { type: 'string' as ParamType, description: 'Provider API key' },
azureEndpoint: { type: 'string' as ParamType, description: 'Azure OpenAI endpoint URL' },
azureApiVersion: { type: 'string' as ParamType, description: 'Azure API version' },
temperature: {
type: 'number' as ParamType,
description: 'Response randomness level (low for consistent evaluation)',
},
content: { type: 'string' as ParamType, description: 'Content to evaluate' },
},
outputs: {
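Review note: the combobox options builder added above is duplicated verbatim in evaluator.ts, router.ts, and translate.ts. A possible follow-up (not part of this PR) would be a shared helper; a minimal sketch assuming only the imports already used in the diff, with `getModelOptions` and `ModelOption` being hypothetical names:

```ts
// Hypothetical shared helper (not in this PR); getModelOptions and ModelOption
// are illustrative names. Imports match those already used in the diff.
import { getBaseModelProviders, getProviderIcon } from '@/providers/utils'
import { useProvidersStore } from '@/stores/providers/store'

interface ModelOption {
  label: string
  id: string
  icon?: unknown // icon component type is assumed here
}

export const getModelOptions = (): ModelOption[] => {
  const providersState = useProvidersStore.getState()
  const ollamaModels = providersState.providers.ollama.models
  const openrouterModels = providersState.providers.openrouter.models
  const baseModels = Object.keys(getBaseModelProviders())

  // Set keeps first-seen order and drops duplicates across the three sources.
  const allModels = Array.from(
    new Set([...baseModels, ...ollamaModels, ...openrouterModels])
  )

  return allModels.map((model) => {
    const icon = getProviderIcon(model)
    // Only attach `icon` when the provider actually has one.
    return { label: model, id: model, ...(icon && { icon }) }
  })
}
```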
80 changes: 70 additions & 10 deletions apps/sim/blocks/blocks/router.ts
@@ -2,10 +2,20 @@ import { ConnectIcon } from '@/components/icons'
import { isHosted } from '@/lib/environment'
import type { BlockConfig } from '@/blocks/types'
import type { ProviderId } from '@/providers/types'
import { getAllModelProviders, getBaseModelProviders, getHostedModels } from '@/providers/utils'
import {
getAllModelProviders,
getBaseModelProviders,
getHostedModels,
getProviderIcon,
providers,
} from '@/providers/utils'
import { useProvidersStore } from '@/stores/providers/store'
import type { ToolResponse } from '@/tools/types'

const getCurrentOllamaModels = () => {
return useProvidersStore.getState().providers.ollama.models
}

interface RouterResponse extends ToolResponse {
output: {
content: string
@@ -116,17 +126,22 @@ export const RouterBlock: BlockConfig<RouterResponse> = {
{
id: 'model',
title: 'Model',
type: 'dropdown',
type: 'combobox',
layout: 'half',
placeholder: 'Type or select a model...',
required: true,
options: () => {
const ollamaModels = useProvidersStore.getState().providers.ollama.models
const providersState = useProvidersStore.getState()
const ollamaModels = providersState.providers.ollama.models
const openrouterModels = providersState.providers.openrouter.models
const baseModels = Object.keys(getBaseModelProviders())
return [...baseModels, ...ollamaModels].map((model) => ({
label: model,
id: model,
}))
const allModels = Array.from(new Set([...baseModels, ...ollamaModels, ...openrouterModels]))

return allModels.map((model) => {
const icon = getProviderIcon(model)
return { label: model, id: model, ...(icon && { icon }) }
})
},
required: true,
},
{
id: 'apiKey',
@@ -137,14 +152,53 @@ export const RouterBlock: BlockConfig<RouterResponse> = {
password: true,
connectionDroppable: false,
required: true,
// Hide API key for all hosted models when running on hosted version
// Hide API key for hosted models and Ollama models
condition: isHosted
? {
field: 'model',
value: getHostedModels(),
not: true, // Show for all models EXCEPT those listed
}
: undefined, // Show for all models in non-hosted environments
: () => ({
field: 'model',
value: getCurrentOllamaModels(),
not: true, // Show for all models EXCEPT Ollama models
}),
},
{
id: 'azureEndpoint',
title: 'Azure OpenAI Endpoint',
type: 'short-input',
layout: 'full',
password: true,
placeholder: 'https://your-resource.openai.azure.com',
connectionDroppable: false,
condition: {
field: 'model',
value: providers['azure-openai'].models,
},
},
{
id: 'azureApiVersion',
title: 'Azure API Version',
type: 'short-input',
layout: 'full',
placeholder: '2024-07-01-preview',
connectionDroppable: false,
condition: {
field: 'model',
value: providers['azure-openai'].models,
},
},
{
id: 'temperature',
title: 'Temperature',
type: 'slider',
layout: 'half',
hidden: true,
min: 0,
max: 2,
value: () => '0.1',
},
{
id: 'systemPrompt',
@@ -184,6 +238,12 @@ export const RouterBlock: BlockConfig<RouterResponse> = {
prompt: { type: 'string', description: 'Routing prompt content' },
model: { type: 'string', description: 'AI model to use' },
apiKey: { type: 'string', description: 'Provider API key' },
azureEndpoint: { type: 'string', description: 'Azure OpenAI endpoint URL' },
azureApiVersion: { type: 'string', description: 'Azure API version' },
temperature: {
type: 'number',
description: 'Response randomness level (low for consistent routing)',
},
},
outputs: {
content: { type: 'string', description: 'Routing response content' },
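Review note: after this change the apiKey `condition` has two shapes: a static object on hosted deployments and a thunk that re-reads the Ollama model list on each evaluation otherwise. A sketch of how a renderer could evaluate both shapes; `Condition` and `shouldShowField` are illustrative names, not Sim's actual sub-block API:

```ts
// Illustration only: Condition and shouldShowField are made-up names, not
// Sim's actual sub-block API.
interface Condition {
  field: string
  value: string | string[]
  not?: boolean
}

type ConditionConfig = Condition | (() => Condition) | undefined

function shouldShowField(
  condition: ConditionConfig,
  values: Record<string, unknown>
): boolean {
  if (!condition) return true
  // The thunk form defers reading the store until evaluation time, so newly
  // discovered Ollama models are picked up without rebuilding the block config.
  const resolved = typeof condition === 'function' ? condition() : condition
  const current = values[resolved.field]
  const list = Array.isArray(resolved.value) ? resolved.value : [resolved.value]
  const matches = list.includes(current as string)
  // `not: true` inverts the match: show the field for everything EXCEPT the listed values.
  return resolved.not ? !matches : matches
}

// e.g. shouldShowField(apiKeyCondition, { model: 'llama3' }) would hide the
// API key field when 'llama3' is in the local Ollama model list.
```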
85 changes: 75 additions & 10 deletions apps/sim/blocks/blocks/translate.ts
@@ -1,7 +1,18 @@
import { TranslateIcon } from '@/components/icons'
import { isHosted } from '@/lib/environment'
import type { BlockConfig } from '@/blocks/types'
import type { ProviderId } from '@/providers/types'
import { getBaseModelProviders } from '@/providers/utils'
import {
getAllModelProviders,
getBaseModelProviders,
getHostedModels,
getProviderIcon,
providers,
} from '@/providers/utils'
import { useProvidersStore } from '@/stores/providers/store'

const getCurrentOllamaModels = () => {
return useProvidersStore.getState().providers.ollama.models
}

const getTranslationPrompt = (
targetLanguage: string
@@ -44,10 +55,22 @@ export const TranslateBlock: BlockConfig = {
{
id: 'model',
title: 'Model',
type: 'dropdown',
type: 'combobox',
layout: 'half',
options: Object.keys(getBaseModelProviders()).map((key) => ({ label: key, id: key })),
placeholder: 'Type or select a model...',
required: true,
options: () => {
const providersState = useProvidersStore.getState()
const ollamaModels = providersState.providers.ollama.models
const openrouterModels = providersState.providers.openrouter.models
const baseModels = Object.keys(getBaseModelProviders())
const allModels = Array.from(new Set([...baseModels, ...ollamaModels, ...openrouterModels]))

return allModels.map((model) => {
const icon = getProviderIcon(model)
return { label: model, id: model, ...(icon && { icon }) }
})
},
},
{
id: 'apiKey',
@@ -58,6 +81,43 @@ export const TranslateBlock: BlockConfig = {
password: true,
connectionDroppable: false,
required: true,
// Hide API key for hosted models and Ollama models
condition: isHosted
? {
field: 'model',
value: getHostedModels(),
not: true, // Show for all models EXCEPT those listed
}
: () => ({
field: 'model',
value: getCurrentOllamaModels(),
not: true, // Show for all models EXCEPT Ollama models
}),
},
{
id: 'azureEndpoint',
title: 'Azure OpenAI Endpoint',
type: 'short-input',
layout: 'full',
password: true,
placeholder: 'https://your-resource.openai.azure.com',
connectionDroppable: false,
condition: {
field: 'model',
value: providers['azure-openai'].models,
},
},
{
id: 'azureApiVersion',
title: 'Azure API Version',
type: 'short-input',
layout: 'full',
placeholder: '2024-07-01-preview',
connectionDroppable: false,
condition: {
field: 'model',
value: providers['azure-openai'].models,
},
},
{
id: 'systemPrompt',
Expand All @@ -71,21 +131,24 @@ export const TranslateBlock: BlockConfig = {
},
],
tools: {
access: ['openai_chat', 'anthropic_chat', 'google_chat'],
access: [
'openai_chat',
'anthropic_chat',
'google_chat',
'xai_chat',
'deepseek_chat',
'deepseek_reasoner',
],
config: {
tool: (params: Record<string, any>) => {
const model = params.model || 'gpt-4o'

if (!model) {
throw new Error('No model selected')
}

const tool = getBaseModelProviders()[model as ProviderId]

const tool = getAllModelProviders()[model]
if (!tool) {
throw new Error(`Invalid model selected: ${model}`)
}

return tool
},
},
Expand All @@ -94,6 +157,8 @@ export const TranslateBlock: BlockConfig = {
context: { type: 'string', description: 'Text to translate' },
targetLanguage: { type: 'string', description: 'Target language' },
apiKey: { type: 'string', description: 'Provider API key' },
azureEndpoint: { type: 'string', description: 'Azure OpenAI endpoint URL' },
azureApiVersion: { type: 'string', description: 'Azure API version' },
systemPrompt: { type: 'string', description: 'Translation instructions' },
},
outputs: {
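Review note: the new `azureEndpoint` and `azureApiVersion` inputs only surface the values; the provider call itself sits outside this diff. A sketch of how they would typically be consumed downstream, assuming the official `openai` package's AzureOpenAI client (an assumption, since the provider layer isn't shown here):

```ts
// Illustration only: assumes the provider layer uses the official `openai`
// package's AzureOpenAI client, which is not shown in this diff.
import { AzureOpenAI } from 'openai'

async function translateWithAzure(params: {
  apiKey: string
  azureEndpoint: string
  azureApiVersion: string
  model: string // for Azure this is the deployment name
  systemPrompt: string
  context: string
}): Promise<string> {
  const client = new AzureOpenAI({
    apiKey: params.apiKey,
    endpoint: params.azureEndpoint,
    apiVersion: params.azureApiVersion,
  })

  const response = await client.chat.completions.create({
    model: params.model,
    messages: [
      { role: 'system', content: params.systemPrompt },
      { role: 'user', content: params.context },
    ],
  })

  return response.choices[0]?.message?.content ?? ''
}
```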