Skip to content

Disable automatic function calling, and show structured output #241

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 9 commits into from
Jun 12, 2025
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions .github/workflows/template-sync.yml
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@ name: Template Sync Verification
on:
workflow_dispatch:
pull_request:
types: [opened, synchronize, reopened, edited]
paths:
- "packages/cli/templates/typescript/**"
- "tests/*/**"
Expand Down
6 changes: 6 additions & 0 deletions packages/ai/src/models/chat.ts
Original file line number Diff line number Diff line change
Expand Up @@ -30,6 +30,12 @@ export type ChatSendOptions<TOptions = Record<string, any>> = {
* stream chunk
*/
readonly onChunk?: TextChunkHandler;

/**
* enable/disable automatic function calling
* @default true
*/
readonly autoFunctionCalling?: boolean;
};

/**
Expand Down
166 changes: 166 additions & 0 deletions packages/ai/src/prompts/chat-types.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,166 @@
import { ILogger } from '@microsoft/teams.common';

import { Function, FunctionHandler } from '../function';
import { IMemory } from '../memory';
import { ContentPart, Message, ModelMessage } from '../message';
import { IChatModel, TextChunkHandler } from '../models';
import { Schema } from '../schema';
import { ITemplate } from '../template';
import { PromiseOrValue } from '../utils/types';

import { IAiPlugin } from './plugin';

/**
 * Options used to construct a chat prompt.
 *
 * @typeParam TOptions - the shape of the request options forwarded
 * to the underlying {@link IChatModel}.
 */
export type ChatPromptOptions<TOptions extends Record<string, any> = Record<string, any>> = {
  /**
   * the name of the prompt
   */
  readonly name?: string;

  /**
   * the description of the prompt
   */
  readonly description?: string;

  /**
   * the model to send messages to
   */
  readonly model: IChatModel<TOptions>;

  /**
   * the defining characteristics/objective
   * of the prompt. This is commonly used to provide a system prompt.
   * If you supply the system prompt as part of the messages,
   * you do not need to supply this option.
   */
  readonly instructions?: string | string[] | ITemplate;

  /**
   * the `role` of the initial message
   */
  readonly role?: 'system' | 'user';

  /**
   * the conversation history
   */
  readonly messages?: Message[] | IMemory;

  /**
   * Logger instance to use for logging
   * If not provided, a ConsoleLogger will be used
   */
  logger?: ILogger;
};

/**
 * Per-call options accepted by {@link IChatPrompt.send}.
 *
 * @typeParam TOptions - the shape of the request options forwarded
 * to the underlying model for this single send.
 */
export type ChatPromptSendOptions<TOptions extends Record<string, any> = Record<string, any>> = {
  /**
   * the conversation history
   */
  readonly messages?: Message[] | IMemory;

  /**
   * the models request options
   */
  readonly request?: TOptions;

  /**
   * the callback to be called for each
   * stream chunk
   */
  readonly onChunk?: TextChunkHandler;

  /**
   * enable/disable automatic function calling
   * @default true
   */
  readonly autoFunctionCalling?: boolean;
};

/**
* a prompt that can interface with a
* chat model that provides utility like
* streaming and function calling
*/
export interface IChatPrompt<
  TOptions extends Record<string, any> = Record<string, any>,
  TChatPromptPlugins extends readonly ChatPromptPlugin<string, any>[] = []
> {
  /**
   * the prompt name
   */
  readonly name: string;

  /**
   * the prompt description
   */
  readonly description: string;

  /**
   * the chat history
   */
  readonly messages: IMemory;

  /**
   * the registered functions
   */
  readonly functions: Array<Function>;

  /**
   * the registered plugins
   */
  plugins: TChatPromptPlugins;

  /**
   * add another chat prompt.
   * NOTE(review): the original comment was truncated ("add another chat
   * prompt as a") — presumably the child prompt is exposed to the model
   * as a callable function; confirm against the implementation.
   */
  use(prompt: IChatPrompt): this;
  use(name: string, prompt: IChatPrompt): this;

  /**
   * add a function that can be called
   * by the model
   */
  function(name: string, description: string, handler: FunctionHandler): this;
  function(name: string, description: string, parameters: Schema, handler: FunctionHandler): this;

  /**
   * invoke a registered plugin by name.
   * `args` is inferred from the plugin's `onUsePlugin` parameter type,
   * so each plugin receives the argument shape it declared.
   */
  usePlugin<TPluginName extends TChatPromptPlugins[number]['name']>(
    name: TPluginName,
    args: Extract<TChatPromptPlugins[number], { name: TPluginName }>['onUsePlugin'] extends
      | ((args: infer U) => void)
      | undefined
      ? U
      : never
  ): this;

  /**
   * call a function
   */
  call<A extends Record<string, any>, R = any>(name: string, args?: A): Promise<R>;

  /**
   * send a message to the model and get a response
   */
  send(
    input: string | ContentPart[],
    options?: ChatPromptSendOptions<TOptions>
  ): Promise<ModelMessage>;
}

/**
 * A plugin for a chat prompt: the base {@link IAiPlugin} contract
 * (keyed by name, parameterized by the prompt's `send` input/output types)
 * extended with optional hooks that run while the prompt builds a request.
 *
 * @typeParam TPluginName - the unique literal name of the plugin.
 * @typeParam TPluginUseArgs - the argument shape passed to `usePlugin`.
 */
export type ChatPromptPlugin<TPluginName extends string, TPluginUseArgs extends {}> = IAiPlugin<
  TPluginName,
  TPluginUseArgs,
  Parameters<IChatPrompt['send']>[0],
  ReturnType<IChatPrompt['send']>
> & {
  /**
   * Optionally passed in to modify the functions array that
   * is passed to the model
   * @param functions
   * @returns Functions
   */
  onBuildFunctions?: (functions: Function[]) => PromiseOrValue<Function[]>;
  /**
   * Optionally passed in to modify the system prompt before it is sent to the model.
   * @param systemPrompt The system prompt string (or undefined)
   * @returns The modified system prompt string (or undefined)
   */
  onBuildPrompt?: (systemPrompt: string | undefined) => PromiseOrValue<string | undefined>;
};
3 changes: 2 additions & 1 deletion packages/ai/src/prompts/chat.spec.ts
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,8 @@ import { ContentPart, Message } from '../message';
import { IChatModel } from '../models';
import { Schema } from '../schema';

import { ChatPrompt, ChatPromptPlugin } from './chat';
import { ChatPrompt } from './chat';
import { ChatPromptPlugin } from './chat-types';

// Mock implementations
const mockChatModel: IChatModel<any> = {
Expand Down
158 changes: 5 additions & 153 deletions packages/ai/src/prompts/chat.ts
Original file line number Diff line number Diff line change
Expand Up @@ -3,163 +3,14 @@ import { ConsoleLogger, ILogger } from '@microsoft/teams.common';
import { Function, FunctionHandler } from '../function';
import { LocalMemory } from '../local-memory';
import { IMemory } from '../memory';
import { ContentPart, Message, ModelMessage, SystemMessage, UserMessage } from '../message';
import { IChatModel, TextChunkHandler } from '../models';
import { ContentPart, SystemMessage, UserMessage } from '../message';
import { IChatModel } from '../models';
import { Schema } from '../schema';
import { ITemplate } from '../template';
import { StringTemplate } from '../templates';
import { PromiseOrValue, WithRequired } from '../utils/types';

import { IAiPlugin } from './plugin';

export type ChatPromptOptions<TOptions extends Record<string, any> = Record<string, any>> = {
/**
* the name of the prompt
*/
readonly name?: string;

/**
* the description of the prompt
*/
readonly description?: string;

/**
* the model to send messages to
*/
readonly model: IChatModel<TOptions>;

/**
* the defining characteristics/objective
* of the prompt. This is commonly used to provide a system prompt.
* If you supply the system prompt as part of the messages,
* you do not need to supply this option.
*/
readonly instructions?: string | string[] | ITemplate;

/**
* the `role` of the initial message
*/
readonly role?: 'system' | 'user';

/**
* the conversation history
*/
readonly messages?: Message[] | IMemory;

/**
* Logger instance to use for logging
* If not provided, a ConsoleLogger will be used
*/
logger?: ILogger;
};

export type ChatPromptSendOptions<TOptions extends Record<string, any> = Record<string, any>> = {
/**
* the conversation history
*/
readonly messages?: Message[] | IMemory;

/**
* the models request options
*/
readonly request?: TOptions;

/**
* the callback to be called for each
* stream chunk
*/
readonly onChunk?: TextChunkHandler;
};
import { WithRequired } from '../utils/types';

/**
* a prompt that can interface with a
* chat model that provides utility like
* streaming and function calling
*/
export interface IChatPrompt<
TOptions extends Record<string, any> = Record<string, any>,
TChatPromptPlugins extends readonly ChatPromptPlugin<string, any>[] = []
> {
/**
* the prompt name
*/
readonly name: string;

/**
* the prompt description
*/
readonly description: string;

/**
* the chat history
*/
readonly messages: IMemory;

/**
* the registered functions
*/
readonly functions: Array<Function>;

/**
* the chat model
*/
plugins: TChatPromptPlugins;
/**
* add another chat prompt as a
*/
use(prompt: IChatPrompt): this;
use(name: string, prompt: IChatPrompt): this;

/**
* add a function that can be called
* by the model
*/
function(name: string, description: string, handler: FunctionHandler): this;
function(name: string, description: string, parameters: Schema, handler: FunctionHandler): this;

usePlugin<TPluginName extends TChatPromptPlugins[number]['name']>(
name: TPluginName,
args: Extract<TChatPromptPlugins[number], { name: TPluginName }>['onUsePlugin'] extends
| ((args: infer U) => void)
| undefined
? U
: never
): this;

/**
* call a function
*/
call<A extends Record<string, any>, R = any>(name: string, args?: A): Promise<R>;

/**
* send a message to the model and get a response
*/
send(
input: string | ContentPart[],
options?: ChatPromptSendOptions<TOptions>
): Promise<Pick<ModelMessage, 'content'> & Omit<ModelMessage, 'content'>>;
}

export type ChatPromptPlugin<TPluginName extends string, TPluginUseArgs extends {}> = IAiPlugin<
TPluginName,
TPluginUseArgs,
Parameters<IChatPrompt['send']>[0],
ReturnType<IChatPrompt['send']>
> & {
/**
* Optionally passed in to modify the functions array that
* is passed to the model
* @param functions
* @returns Functions
*/
onBuildFunctions?: (functions: Function[]) => PromiseOrValue<Function[]>;
/**
* Optionally passed in to modify the system prompt before it is sent to the model.
* @param systemPrompt The system prompt string (or undefined)
* @returns The modified system prompt string (or undefined)
*/
onBuildPrompt?: (systemPrompt: string | undefined) => PromiseOrValue<string | undefined>;
};
import { ChatPromptOptions, ChatPromptPlugin, ChatPromptSendOptions, IChatPrompt } from './chat-types';

/**
* a prompt that can interface with a
Expand Down Expand Up @@ -403,6 +254,7 @@ export class ChatPrompt<
return;
}
},
autoFunctionCalling: options.autoFunctionCalling,
}
);

Expand Down
6 changes: 4 additions & 2 deletions packages/ai/src/prompts/index.ts
Original file line number Diff line number Diff line change
@@ -1,7 +1,9 @@
import { IAudioPrompt } from './audio';
import { IChatPrompt } from './chat';
import { IChatPrompt } from './chat-types';

export type Prompt = IChatPrompt | IAudioPrompt;

export * from './chat';
export * from './audio';
export * from './chat';
export * from './chat-types';

Loading