Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 4 additions & 0 deletions packages/insomnia/src/entry.preload.ts
Original file line number Diff line number Diff line change
Expand Up @@ -121,6 +121,10 @@ const llm: LLMConfigServiceAPI = {
ipcRenderer.invoke('llm.updateBackendConfig', backend, config),
getAllConfigurations: () => ipcRenderer.invoke('llm.getAllConfigurations'),
getCurrentConfig: () => ipcRenderer.invoke('llm.getCurrentConfig'),
getAIFeatureEnabled: (feature: 'aiMockServers' | 'aiCommitMessages') =>
ipcRenderer.invoke('llm.getAIFeatureEnabled', feature),
setAIFeatureEnabled: (feature: 'aiMockServers' | 'aiCommitMessages', enabled: boolean) =>
ipcRenderer.invoke('llm.setAIFeatureEnabled', feature, enabled),
};

const main: Window['main'] = {
Expand Down
2 changes: 2 additions & 0 deletions packages/insomnia/src/main/analytics.ts
Original file line number Diff line number Diff line change
Expand Up @@ -59,6 +59,8 @@ export enum SegmentEvent {
vcsSyncComplete = 'VCS Sync Completed',
vcsAction = 'VCS Action Executed',
buttonClick = 'Button Clicked',
// Fired when a user flips an AI feature toggle in Preferences → AI Settings.
// Single quotes to match the rest of the SegmentEvent members in this enum.
aiFeatureEnabled = 'AI Feature Enabled',
aiFeatureDisabled = 'AI Feature Disabled',
}

function hashString(input: string) {
Expand Down
2 changes: 2 additions & 0 deletions packages/insomnia/src/main/ipc/electron.ts
Original file line number Diff line number Diff line change
Expand Up @@ -83,6 +83,8 @@ export type HandleChannels =
| 'llm.updateBackendConfig'
| 'llm.getAllConfigurations'
| 'llm.getCurrentConfig'
| 'llm.getAIFeatureEnabled'
| 'llm.setAIFeatureEnabled'
| 'onDefaultBrowserOAuthRedirect'
| 'open-channel-to-hidden-browser-window'
| 'openPath'
Expand Down
22 changes: 22 additions & 0 deletions packages/insomnia/src/main/llm-config-service.ts
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@ import path from 'node:path';

import { app } from 'electron';

import { SegmentEvent, trackSegmentEvent } from '~/main/analytics';
import { ipcMainHandle } from '~/main/ipc/electron';

import * as models from '../models';
Expand Down Expand Up @@ -110,6 +111,23 @@ export const getCurrentConfig = async (): Promise<LLMConfig | null> => {
return { ...config, backend: activeBackend } as LLMConfig;
};

/**
 * Read whether the given per-user AI feature toggle is switched on.
 *
 * Toggles are persisted as plugin data under the LLM plugin's namespace
 * using the key `feature.<feature>`; a missing record — or any stored
 * value other than the string 'true' — is treated as disabled.
 */
export const getAIFeatureEnabled = async (feature: 'aiMockServers' | 'aiCommitMessages'): Promise<boolean> => {
  const key = `feature.${feature}`;
  const record = await models.pluginData.getByKey(LLM_PLUGIN_NAME, key);
  if (!record) {
    return false;
  }
  return record.value === 'true';
};

/**
 * Persist the given per-user AI feature toggle and report the change to
 * analytics.
 *
 * The value is stored as plugin data under the LLM plugin's namespace with
 * the key `feature.<feature>`, stringified ('true'/'false') to match what
 * getAIFeatureEnabled reads back.
 */
export const setAIFeatureEnabled = async (feature: 'aiMockServers' | 'aiCommitMessages', enabled: boolean): Promise<void> => {
  await models.pluginData.upsertByKey(LLM_PLUGIN_NAME, `feature.${feature}`, String(enabled));

  // Track enable/disable as distinct events; set_for distinguishes a
  // user-level toggle from any future org-level enforcement.
  trackSegmentEvent(
    enabled ? SegmentEvent.aiFeatureEnabled : SegmentEvent.aiFeatureDisabled,
    {
      feature,
      set_for: 'user',
    },
  );
};

export interface LLMConfigServiceAPI {
getActiveBackend: typeof getActiveBackend;
setActiveBackend: typeof setActiveBackend;
Expand All @@ -118,6 +136,8 @@ export interface LLMConfigServiceAPI {
updateBackendConfig: typeof updateBackendConfig;
getAllConfigurations: typeof getAllConfigurations;
getCurrentConfig: typeof getCurrentConfig;
getAIFeatureEnabled: typeof getAIFeatureEnabled;
setAIFeatureEnabled: typeof setAIFeatureEnabled;
}

export const registerLLMConfigServiceAPI = () => {
Expand All @@ -130,4 +150,6 @@ export const registerLLMConfigServiceAPI = () => {
);
ipcMainHandle('llm.getAllConfigurations', async () => getAllConfigurations());
ipcMainHandle('llm.getCurrentConfig', async () => getCurrentConfig());
ipcMainHandle('llm.getAIFeatureEnabled', async (_, feature: 'aiMockServers' | 'aiCommitMessages') => getAIFeatureEnabled(feature));
ipcMainHandle('llm.setAIFeatureEnabled', async (_, feature: 'aiMockServers' | 'aiCommitMessages', enabled: boolean) => setAIFeatureEnabled(feature, enabled));
};
11 changes: 10 additions & 1 deletion packages/insomnia/src/routes/ai.generate-commit-messages.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,15 @@ export async function clientAction(args: Route.ClientActionArgs) {
const { projectId } = (await args.request.json()) as { projectId: string };

try {
const isFeatureEnabled = await window.main.llm.getAIFeatureEnabled('aiCommitMessages');
const hasActiveLLM = (await window.main.llm.getCurrentConfig()) !== null;

if (!isFeatureEnabled || !hasActiveLLM) {
return {
error: 'Enable generating commit messages with AI in Insomnia Preferences → AI Settings to use this feature.',
};
}

const { changes } = await window.main.git.gitChangesLoader({ projectId });
if (changes.staged.length > 0) {
return {
Expand Down Expand Up @@ -40,7 +49,7 @@ export async function clientAction(args: Route.ClientActionArgs) {
}

return {
commits: commits.map(commit => ({
commits: commits.map((commit: any) => ({
id: crypto.randomUUID(),
...commit,
})),
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,8 @@ export const fallbackFeatures = Object.freeze<FeatureList>({
bulkImport: { enabled: false, reason: 'Insomnia API unreachable' },
gitSync: { enabled: false, reason: 'Insomnia API unreachable' },
orgBasicRbac: { enabled: false, reason: 'Insomnia API unreachable' },
aiMockServers: { enabled: false, reason: 'Insomnia API unreachable' },
aiCommitMessages: { enabled: false, reason: 'Insomnia API unreachable' },
});

// If network unreachable assume user has paid for the current period
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -57,6 +57,7 @@ import { formatMethodName } from '~/ui/components/tags/method-tag';
import { INSOMNIA_TAB_HEIGHT } from '~/ui/constant';
import { useInsomniaTab } from '~/ui/hooks/use-insomnia-tab';
import { useLoaderDeferData } from '~/ui/hooks/use-loader-defer-data';
import { useAIFeatureStatus } from '~/ui/hooks/use-organization-features';
import { useGitVCSVersion } from '~/ui/hooks/use-vcs-version';
import { DEFAULT_STORAGE_RULES } from '~/ui/organization-utils';
import { invariant } from '~/utils/invariant';
Expand Down Expand Up @@ -171,6 +172,8 @@ const Component = ({ params }: Route.ComponentProps) => {
const { storagePromise } = storageRuleFetcher.data || {};
const [storageRules = DEFAULT_STORAGE_RULES] = useLoaderDeferData(storagePromise, organizationId);

const { isGenerateMockServersWithAIEnabled } = useAIFeatureStatus();

const { apiSpec, rulesetPath, parsedSpec } = useLoaderData<typeof clientLoader>();

const [lintMessages, setLintMessages] = useState<LintMessage[]>([]);
Expand Down Expand Up @@ -481,14 +484,16 @@ const Component = ({ params }: Route.ComponentProps) => {
<div className="flex flex-shrink-0 items-center gap-2 p-[--padding-sm]">
<Heading className="uppercase text-[--hl]">Spec</Heading>
<span className="flex-1" />
<Button
onPress={() => setNewMockServerModalOpen(true)}
isDisabled={!apiSpec.contents}
className="flex max-w-full flex-1 items-center justify-center gap-2 truncate rounded-sm px-4 py-1 text-sm text-[--color-font] ring-1 ring-transparent transition-all hover:bg-[--hl-xs] focus:ring-inset focus:ring-[--hl-md] aria-pressed:bg-[--hl-sm] disabled:opacity-50 disabled:cursor-not-allowed"
>
<Icon icon="server" className="w-5 flex-shrink-0" />
<span className="truncate">Generate Mock</span>
</Button>
{isGenerateMockServersWithAIEnabled && (
<Button
onPress={() => setNewMockServerModalOpen(true)}
isDisabled={!apiSpec.contents}
className="flex max-w-full flex-1 items-center justify-center gap-2 truncate rounded-sm px-4 py-1 text-sm text-[--color-font] ring-1 ring-transparent transition-all hover:bg-[--hl-xs] focus:ring-inset focus:ring-[--hl-md] aria-pressed:bg-[--hl-sm] disabled:opacity-50 disabled:cursor-not-allowed"
>
<Icon icon="server" className="w-5 flex-shrink-0" />
<span className="truncate">Generate Mock</span>
</Button>
)}
<ToggleButton
aria-label="Toggle preview"
isSelected={isSpecPaneOpen}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -66,15 +66,16 @@ export async function clientAction({ request, params }: Route.ClientActionArgs)

const modelConfig = await window.main.llm.getCurrentConfig();
if (workspaceData.mockServerCreationType === 'ai') {
invariant(modelConfig, 'You must setup LLM configuration in your Preferences before using AI features.');
const isFeatureEnabled = await window.main.llm.getAIFeatureEnabled('aiMockServers');
invariant(isFeatureEnabled, 'Enable generating mock servers with AI in Insomnia Preferences → AI Settings to use this feature.');

const validationError = validateMockServerSpec(workspaceData);
if (validationError) {
return validationError;
}

if (workspaceData.mockServerSpecSource === 'url' || workspaceData.mockServerSpecSource === 'text') {
invariant(modelConfig.backend !== 'gguf', 'The URL and Text options are not supported with GGUF models.');
invariant(modelConfig && modelConfig.backend !== 'gguf', 'The URL and Text options are not supported with GGUF models.');
}
}

Expand Down
2 changes: 2 additions & 0 deletions packages/insomnia/src/routes/organization.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -80,6 +80,8 @@ export interface FeatureList {
bulkImport: FeatureStatus;
gitSync: FeatureStatus;
orgBasicRbac: FeatureStatus;
aiMockServers: FeatureStatus;
aiCommitMessages: FeatureStatus;
}

export interface Billing {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -26,6 +26,7 @@ import { useGitProjectDiffLoaderFetcher } from '~/routes/git.diff';
import { useGitProjectDiscardActionFetcher } from '~/routes/git.discard';
import { useGitProjectStageActionFetcher } from '~/routes/git.stage';
import { useGitProjectUnstageActionFetcher } from '~/routes/git.unstage';
import { useAIFeatureStatus } from '~/ui/hooks/use-organization-features';

import { GitFileType, GitVCSOperationErrors } from '../../../sync/git/git-vcs';
import { DiffEditor } from '../diff-view-editor';
Expand Down Expand Up @@ -811,6 +812,8 @@ export const GitProjectStagingModal: FC<{
const undoUnstagedChangesFetcher = useGitProjectDiscardActionFetcher();
const diffChangesFetcher = useGitProjectDiffLoaderFetcher();

const { isGenerateCommitMessagesWithAIEnabled } = useAIFeatureStatus();

function diffChanges({ path, staged }: { path: string; staged: boolean }) {
diffChangesFetcher.load({
projectId,
Expand Down Expand Up @@ -906,6 +909,7 @@ export const GitProjectStagingModal: FC<{
<div className="grid h-full gap-2 divide-x divide-solid divide-[--hl-md] overflow-hidden [grid-template-columns:300px_1fr]">
<div className="flex flex-1 flex-col gap-4 overflow-hidden">
<Button
isDisabled={!isGenerateCommitMessagesWithAIEnabled}
onPress={() => {
if (generateCommitsFetcher.data && !('error' in generateCommitsFetcher.data)) {
setCommitGenerationKey(commitGenerationKey + 1);
Expand All @@ -916,7 +920,7 @@ export const GitProjectStagingModal: FC<{
projectId,
});
}}
className="hover:bg-[rgba(var(--color-surprise-rgb),0.8] flex h-8 flex-shrink-0 items-center justify-center gap-2 rounded-sm bg-[--color-surprise] px-4 text-[--color-font-surprise] ring-1 ring-transparent transition-all focus:ring-inset focus:ring-[--hl-md] aria-pressed:bg-[--hl-sm]"
className="hover:bg-[rgba(var(--color-surprise-rgb),0.8] flex h-8 flex-shrink-0 items-center justify-center gap-2 rounded-sm bg-[--color-surprise] px-4 text-[--color-font-surprise] ring-1 ring-transparent transition-all focus:ring-inset focus:ring-[--hl-md] aria-pressed:bg-[--hl-sm] data-[disabled]:cursor-not-allowed data-[disabled]:opacity-50"
>
<Icon
icon={
Expand All @@ -936,7 +940,13 @@ export const GitProjectStagingModal: FC<{
: 'Recommend commits and comments'}
</span>
</Button>
{generateCommitsFetcher.state === 'idle' &&
{!isGenerateCommitMessagesWithAIEnabled && (
<p className="text-xs text-[--hl]">
Enable generating commit messages with AI in Insomnia Preferences → AI Settings to use this feature.
</p>
)}
{isGenerateCommitMessagesWithAIEnabled &&
generateCommitsFetcher.state === 'idle' &&
generateCommitsFetcher.data &&
'error' in generateCommitsFetcher.data && (
<p className="flex items-center gap-2 rounded-sm bg-[rgba(var(--color-danger-rgb),var(--tw-bg-opacity))] bg-opacity-20 p-2 text-sm text-[--color-font-danger]">
Expand Down
30 changes: 19 additions & 11 deletions packages/insomnia/src/ui/components/modals/new-workspace-modal.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,7 @@ import { useParams } from 'react-router';
import type { StorageRules } from '~/models/organization';
import { useGitProjectRepositoryTreeLoaderFetcher } from '~/routes/git.repository-tree';
import { useWorkspaceNewActionFetcher } from '~/routes/organization.$organizationId.project.$projectId.workspace.new';
import { useAIFeatureStatus } from '~/ui/hooks/use-organization-features';

import { type ApiSpec } from '../../../models/api-spec';
import { isGitProject, type Project } from '../../../models/project';
Expand Down Expand Up @@ -71,6 +72,8 @@ export const NewWorkspaceModal = ({

const canOnlyCreateSelfHosted = isLocalProject && isEnterprise;

const { isGenerateMockServersWithAIEnabled } = useAIFeatureStatus();

const [workspaceData, setWorkspaceData] = useState<{
name: string;
scope: WorkspaceScope;
Expand All @@ -92,7 +95,7 @@ export const NewWorkspaceModal = ({
fileName: safeToUseInsomniaFileName(defaultNameByScope[scope]),
mockServerType: canOnlyCreateSelfHosted ? 'self-hosted' : 'cloud',
mockServerUrl: '',
mockServerCreationType: 'ai',
mockServerCreationType: sourceApiSpec?.contents ? 'ai' : 'manual',
mockServerSpecSource: 'file',
mockServerSpecText: '',
mockServerAdditionalFiles: [],
Expand Down Expand Up @@ -343,16 +346,6 @@ export const NewWorkspaceModal = ({
>
<Label className="text-sm text-[--hl]">How do you want to create your mock server?</Label>
<div className="flex gap-2">
<Radio
value="ai"
className="flex-1 rounded border border-solid border-[--hl-md] p-4 transition-colors hover:bg-[--hl-xs] focus:bg-[--hl-sm] focus:outline-none data-[selected]:border-[--color-surprise] data-[disabled]:opacity-25 data-[selected]:ring-2 data-[selected]:ring-[--color-surprise]"
>
<div className="flex items-center gap-2">
<Icon icon="robot" />
<Heading className="text-lg font-bold">Auto Generate</Heading>
</div>
<p className="pt-2">Automatically generate a mock server from an OpenAPI spec.</p>
</Radio>
<Radio
value="manual"
isDisabled={!!sourceApiSpec?.contents}
Expand All @@ -368,6 +361,21 @@ export const NewWorkspaceModal = ({
: 'Create an empty mock server.'}
</p>
</Radio>
<Radio
value="ai"
isDisabled={!isGenerateMockServersWithAIEnabled}
className="flex-1 rounded border border-solid border-[--hl-md] p-4 transition-colors hover:bg-[--hl-xs] focus:bg-[--hl-sm] focus:outline-none data-[selected]:border-[--color-surprise] data-[disabled]:opacity-25 data-[selected]:ring-2 data-[selected]:ring-[--color-surprise]"
>
<div className="flex items-center gap-2">
<Icon icon="robot" />
<Heading className="text-lg font-bold">Auto Generate</Heading>
</div>
<p className="pt-2">
{!isGenerateMockServersWithAIEnabled
? 'Enable generating mock servers with AI in Insomnia Preferences → AI Settings to use this feature.'
: 'Automatically generate a mock server from an OpenAPI spec.'}
</p>
</Radio>
</div>
</RadioGroup>

Expand Down
18 changes: 9 additions & 9 deletions packages/insomnia/src/ui/components/modals/settings-modal.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@ import { Tab, TabList, TabPanel, Tabs } from 'react-aria-components';
import { AI_PLUGIN_NAME } from '~/common/constants';
import { getBundlePlugins } from '~/plugins';
import { useRootLoaderData } from '~/root';
import { LLMs } from '~/ui/components/settings/llms';
import { AISettings } from '~/ui/components/settings/ai-settings';

import { getAppVersion, getProductName } from '../../../common/constants';
import { Modal, type ModalHandle, type ModalProps } from '../base/modal';
Expand Down Expand Up @@ -39,13 +39,13 @@ export const SettingsModal = forwardRef<SettingsModalHandle, ModalProps>((props,
const modalRef = useRef<ModalHandle>(null);
const [keyboardClosable, setKeyboardClosable] = useState(true);

const [shouldShowAiModelsTab, setShouldShowAiModelsTab] = useState(false);
const [shouldShowAiSettingsTab, setShouldShowAiSettingsTab] = useState(false);

useEffect(() => {
const checkAiPlugin = async () => {
const plugins = await getBundlePlugins();
const aiPlugin = plugins.find(p => p.name === AI_PLUGIN_NAME);
setShouldShowAiModelsTab(!!aiPlugin);
setShouldShowAiSettingsTab(!!aiPlugin);
};
checkAiPlugin();
}, []);
Expand Down Expand Up @@ -128,12 +128,12 @@ export const SettingsModal = forwardRef<SettingsModalHandle, ModalProps>((props,
>
Cloud Credentials
</Tab>
{shouldShowAiModelsTab && (
{shouldShowAiSettingsTab && (
<Tab
className="flex h-full flex-shrink-0 cursor-pointer select-none items-center justify-between gap-2 px-3 py-1 text-[--hl] outline-none transition-colors duration-300 hover:bg-[--hl-sm] hover:text-[--color-font] focus:bg-[--hl-sm] aria-selected:bg-[--hl-xs] aria-selected:text-[--color-font] aria-selected:hover:bg-[--hl-sm] aria-selected:focus:bg-[--hl-sm]"
id="ai-models"
id="aiSettings"
>
AI Models
AI Settings
</Tab>
)}
</TabList>
Expand Down Expand Up @@ -191,9 +191,9 @@ export const SettingsModal = forwardRef<SettingsModalHandle, ModalProps>((props,
<TabPanel className="h-full w-full overflow-y-auto p-4" id="cloudCred">
<CloudServiceCredentialList />
</TabPanel>
{shouldShowAiModelsTab && (
<TabPanel className="relative h-full w-full overflow-y-auto p-4" id="ai-models">
<LLMs />
{shouldShowAiSettingsTab && (
<TabPanel className="relative h-full w-full overflow-y-auto p-4" id="aiSettings">
<AISettings />
</TabPanel>
)}
</Tabs>
Expand Down
Loading
Loading