Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
64 changes: 64 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -26,6 +26,8 @@ USAGE
# Commands
<!-- commands -->
* [`heroku ai:docs`](#heroku-aidocs)
* [`heroku ai:mcp`](#heroku-aimcp)
* [`heroku ai:mcp:start [ADDON]`](#heroku-aimcpstart-addon)
* [`heroku ai:models`](#heroku-aimodels)
* [`heroku ai:models:attach MODEL_RESOURCE`](#heroku-aimodelsattach-model_resource)
* [`heroku ai:models:call MODEL_RESOURCE`](#heroku-aimodelscall-model_resource)
Expand All @@ -34,6 +36,7 @@ USAGE
* [`heroku ai:models:detach MODEL_RESOURCE`](#heroku-aimodelsdetach-model_resource)
* [`heroku ai:models:info [MODEL_RESOURCE]`](#heroku-aimodelsinfo-model_resource)
* [`heroku ai:models:list`](#heroku-aimodelslist)
* [`heroku ai:tools:list [ADDON]`](#heroku-aitoolslist-addon)

## `heroku ai:docs`

Expand All @@ -52,6 +55,45 @@ DESCRIPTION

_See code: [src/commands/ai/docs.ts](https://github.com/heroku/heroku-cli-plugin-ai/blob/v0.0.11/src/commands/ai/docs.ts)_

## `heroku ai:mcp`

display the MCP server URL for an app

```
USAGE
$ heroku ai:mcp -a <value> [--json]

FLAGS
  -a, --app=<value>  (required) app to display the MCP server URL for
      --json         output in JSON format

DESCRIPTION
  display the MCP server URL for an app
```

_See code: [src/commands/ai/mcp/index.ts](https://github.com/heroku/heroku-cli-plugin-ai/blob/v0.0.11/src/commands/ai/mcp/index.ts)_

## `heroku ai:mcp:start [ADDON]`

Start the MCP proxy

```
USAGE
$ heroku ai:mcp:start [ADDON] -a <value> [--json]

ARGUMENTS
  ADDON  [default: heroku-inference] unique identifier or globally unique name of the add-on. "heroku-inference" will be
         used if omitted

FLAGS
  -a, --app=<value>  (required) app to start the MCP proxy for
--json output in JSON format

DESCRIPTION
Start the MCP proxy
```

_See code: [src/commands/ai/mcp/start.ts](https://github.com/heroku/heroku-cli-plugin-ai/blob/v0.0.11/src/commands/ai/mcp/start.ts)_

## `heroku ai:models`

list available AI models to provision access to
Expand Down Expand Up @@ -256,4 +298,26 @@ EXAMPLES
```

_See code: [src/commands/ai/models/list.ts](https://github.com/heroku/heroku-cli-plugin-ai/blob/v0.0.11/src/commands/ai/models/list.ts)_

## `heroku ai:tools:list [ADDON]`

list all available AI tools

```
USAGE
$ heroku ai:tools:list [ADDON] -a <value> [--json]

ARGUMENTS
ADDON [default: heroku-inference] unique identifier or globally unique name of the add-on. "heroku-inference" will be
used if omitted

FLAGS
-a, --app=<value> (required) app to list tools for
--json output in JSON format

DESCRIPTION
list all available AI tools
```

_See code: [src/commands/ai/tools/list.ts](https://github.com/heroku/heroku-cli-plugin-ai/blob/v0.0.11/src/commands/ai/tools/list.ts)_
<!-- commandsstop -->
2 changes: 2 additions & 0 deletions package.json
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,7 @@
"@heroku-cli/color": "^2",
"@heroku-cli/command": "^11.5.0",
"@heroku-cli/schema": "^1.0.25",
"@modelcontextprotocol/sdk": "^1.11.2",
"@oclif/core": "^2.16.0",
"@oclif/plugin-help": "^5",
"open": "^8.4.2",
Expand Down Expand Up @@ -67,6 +68,7 @@
"repository": "heroku/heroku-cli-plugin-ai",
"scripts": {
"build": "rm -rf dist && tsc -b && oclif manifest && oclif readme && mv oclif.manifest.json ./dist/oclif.manifest.json && cp README.md ./dist/README.md",
"build:dev": "rm -rf dist && tsc -b --sourcemap && oclif manifest && oclif readme && mv oclif.manifest.json ./dist/oclif.manifest.json && cp README.md ./dist/README.md",
"lint": "eslint . --ext .ts --config .eslintrc.json",
"lint:fix": "eslint . --ext .ts --config .eslintrc.json --fix",
"prepare": "yarn build",
Expand Down
28 changes: 28 additions & 0 deletions src/commands/ai/mcp/index.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,28 @@
import {flags} from '@heroku-cli/command'
import Command from '../../../lib/base'

/**
 * Displays the MCP server URL configured for an app.
 *
 * Resolution order: INFERENCE_MCP_URL if set, otherwise INFERENCE_URL with
 * '/mcp' appended, otherwise a "not found" notice.
 */
export default class MCP extends Command {
  // NOTE(review): the original description said 'list all available AI tools',
  // copy-pasted from ai:tools:list — this command prints the MCP server URL.
  public static description = 'display the MCP server URL for an app'
  public static flags = {
    // TODO(review): --json is declared but never read in run(); confirm intent.
    json: flags.boolean({
      description: 'output in JSON format',
    }),
    app: flags.app({
      description: 'app to display the MCP server URL for',
      required: true,
    }),
  }

  public async run() {
    const {flags} = await this.parse(MCP)
    // Read the app's config vars; the inference add-on sets the INFERENCE_* keys.
    const {body: config} = await this.heroku.get<Record<string, string>>(`/apps/${flags.app}/config-vars`)

    if (config.INFERENCE_MCP_URL) {
      this.log(config.INFERENCE_MCP_URL)
    } else if (config.INFERENCE_URL) {
      this.log(config.INFERENCE_URL + '/mcp')
    } else {
      this.log(`No MCP server URL found for ${flags.app}`)
    }
  }
}
43 changes: 43 additions & 0 deletions src/commands/ai/mcp/start.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,43 @@
import Command from '../../../lib/base'
import {MCPStdioToSSEProxy} from '../../../lib/proxy'
import {flags} from '@heroku-cli/command'
import {Args} from '@oclif/core'

export default class StartCommand extends Command {
public static description = 'Start the MCP proxy'

public static flags = {
json: flags.boolean({
description: 'output in JSON format',
}),
app: flags.app({
description: 'app to list tools for',
required: true,
}),
}

public static args = {
addon: Args.string({
required: false,
default: 'heroku-inference',
description: 'unique identifier or globally unique name of the add-on. If omitted',
}),
};

public async run() {
const proxy = new MCPStdioToSSEProxy();

(async () => {
const {flags} = await this.parse(StartCommand)
const {body: config} = await this.heroku.get<Record<string, string>>(`/apps/${flags.app}/config-vars`)
const {INFERENCE_MCP_URL, INFERENCE_URL, INFERENCE_KEY} = config
if (INFERENCE_MCP_URL || INFERENCE_URL) {
const mcpUrl = INFERENCE_MCP_URL || INFERENCE_URL + '/mcp'
proxy.setRemoteUrl(new URL(mcpUrl))
proxy.setToken(INFERENCE_KEY)
}
})()

await proxy.run()
}
}
2 changes: 1 addition & 1 deletion src/commands/ai/models/info.ts
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,7 @@ export default class Info extends Command {
let listOfProvisionedModels: Array<ModelResource> = []

const modelInfo = async () => {
const modelInfoResponse = await this.herokuAI.get<ModelResource>(`/models/${this.apiModelId}`, {
const modelInfoResponse = await this.herokuAI.get<ModelResource>(`/models/${this.addon.id}`, {
headers: {authorization: `Bearer ${this.apiKey}`},
})
.catch(error => {
Expand Down
49 changes: 49 additions & 0 deletions src/commands/ai/tools/list.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,49 @@
import {flags} from '@heroku-cli/command'
import Command from '../../../lib/base'
import {MCPServerList, MCPServerTool} from '../../../lib/ai/types'
import {Args, ux} from '@oclif/core'

/**
 * Lists every AI tool exposed by the app's MCP servers.
 */
export default class List extends Command {
  public static description = 'list all available AI tools'
  public static flags = {
    json: flags.boolean({
      description: 'output in JSON format',
    }),
    app: flags.app({
      description: 'app to list tools for',
      required: true,
    }),
  }

  static args = {
    addon: Args.string({
      required: false,
      default: 'heroku-inference',
      description: 'unique identifier or globally unique name of the add-on. "heroku-inference" will be used if omitted',
    }),
  };

  public async run() {
    const {flags: parsedFlags, args: parsedArgs} = await this.parse(List)
    const availableTools = await this.getTools(parsedFlags.app, parsedArgs.addon)

    // JSON output takes precedence over the human-readable table.
    if (parsedFlags.json) {
      ux.styledJSON(availableTools)
      return
    }

    if (availableTools.length === 0) {
      ux.info('No AI tools are currently available for this app')
      return
    }

    ux.table(availableTools, {
      namespaced_name: {header: 'Tool', get: ({namespaced_name}) => namespaced_name},
      description: {header: 'Description', get: ({description}) => description},
    })
  }

  // Resolves the add-on client, then flattens tools across all MCP servers.
  private async getTools(app: string, addon: string): Promise<MCPServerTool[]> {
    await this.configureHerokuAIClient(addon, app)

    const {body: serverList} = await this.herokuAI.get<MCPServerList>('/v1/mcp/servers')

    const collected: MCPServerTool[] = []
    for (const server of serverList) {
      collected.push(...server.tools)
    }

    return collected
  }
}
25 changes: 25 additions & 0 deletions src/lib/ai/types.ts
Original file line number Diff line number Diff line change
Expand Up @@ -316,3 +316,28 @@ export type EmbeddingResponse = {
readonly total_tokens: number
}
}

// MCP Server API response types

/** A single tool exposed by an MCP server, as returned by the MCP servers API. */
export type MCPServerTool = {
  name: string;
  // Tool name prefixed with its server's namespace — presumably unique across
  // servers; used as the display name in ai:tools:list. TODO confirm.
  namespaced_name: string;
  description: string;
  // assumes this is a JSON Schema object for the tool's input — TODO confirm
  input_schema: Record<string, unknown>;
  annotations: Record<string, unknown>;
};

/** An MCP server process registered for an app (response item of `/v1/mcp/servers`). */
export type MCPServer = {
  id: string;
  app_id: string;
  process_type: string;
  process_command: string;
  created_at: string;
  updated_at: string;
  // Tools this server exposes; ai:tools:list flattens these across servers.
  tools: MCPServerTool[];
  server_status: 'registered' | 'disconnected';
  primitives_status: 'syncing' | 'synced' | 'error';
  namespace: string;
};

/** Response body shape of the `/v1/mcp/servers` endpoint. */
export type MCPServerList = MCPServer[];
1 change: 1 addition & 0 deletions src/lib/base.ts
Original file line number Diff line number Diff line change
Expand Up @@ -94,6 +94,7 @@ export default abstract class extends Command {

this._herokuAI = new APIClient(this.config)
this._herokuAI.http.defaults = defaults
this._herokuAI.auth = this._apiKey
}

/*
Expand Down
116 changes: 116 additions & 0 deletions src/lib/proxy.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,116 @@
/// <reference types="node" />
// Model Context Protocol spec 2025-03-26: stdio <-> Streamable HTTP/SSE proxy
// Requires Node.js 20+ for native fetch and Headers
// If using TypeScript, run: npm install --save-dev @types/node
import {createInterface} from 'readline/promises'
import process from 'process'
// NOTE: If you see a rootDir error for the SDK import below, adjust your tsconfig.json rootDir or use the SDK as a package.
// import { StreamableHTTPClientTransport } from '@modelcontextprotocol/sdk';
import {StreamableHTTPClientTransport} from '@modelcontextprotocol/sdk/client/streamableHttp.js'

// Render unknown catch values safely (catch variables are `unknown` under strict TS).
const errorMessage = (error: unknown): string =>
  error instanceof Error ? error.message : String(error)

/**
 * Proxies MCP JSON-RPC messages between stdio and a remote Streamable
 * HTTP/SSE endpoint.
 *
 * Lines read from stdin are queued until both the remote URL and the auth
 * token have been provided and the transport has started, then forwarded;
 * messages from the transport are written to stdout as JSON lines.
 */
export class MCPStdioToSSEProxy {
  private remoteUrl: URL | null = null;
  private token: string | null = null;
  private rl = createInterface({input: process.stdin, crlfDelay: Number.POSITIVE_INFINITY});
  private transport: StreamableHTTPClientTransport | null = null;
  private readyToProcess = false;
  private messageQueue: string[] = [];

  /**
   * Set the remote URL at runtime.
   * @param url - The remote URL to set.
   * @returns void
   */
  public setRemoteUrl(url: URL) {
    this.remoteUrl = url
    // Fire-and-forget is safe here: checkReady reports its own failures.
    void this.checkReady()
  }

  /**
   * Set the bearer token at runtime.
   * @param token - The token to set.
   * @returns void
   */
  public setToken(token: string) {
    this.token = token
    void this.checkReady()
  }

  /**
   * Once both remoteUrl and token are set, create and start the transport,
   * then flush queued stdin messages in arrival order.
   *
   * Guarded by `this.transport` (assigned synchronously) so overlapping calls
   * cannot create a second transport. `readyToProcess` flips only after
   * start() succeeds, so lines arriving mid-startup keep queueing instead of
   * being sent on an unstarted transport (a race in the original code, which
   * also let a start() rejection float as an unhandled promise rejection).
   * @returns void
   */
  private async checkReady(): Promise<void> {
    if (!this.remoteUrl || !this.token || this.transport) return

    const transport = new StreamableHTTPClientTransport(this.remoteUrl, {
      requestInit: {
        headers: {Authorization: `Bearer ${this.token}`},
      },
    })

    // eslint-disable-next-line unicorn/prefer-add-event-listener
    transport.onmessage = msg => {
      process.stdout.write(JSON.stringify(msg) + '\n')
    }

    // eslint-disable-next-line unicorn/prefer-add-event-listener
    transport.onerror = (err: Error) => {
      process.stderr.write('Transport error: ' + err.message + '\n')
    }

    this.transport = transport

    try {
      await transport.start()
    } catch (error) {
      // Report and reset so a later setRemoteUrl/setToken call can retry.
      this.transport = null
      process.stderr.write('Transport start error: ' + errorMessage(error) + '\n')
      return
    }

    this.readyToProcess = true
    while (this.messageQueue.length > 0) {
      const msg = this.messageQueue.shift()
      if (msg) {
        await this.handleLine(msg)
      }
    }
  }

  // Parse one stdin line and forward it, or queue it until the transport is up.
  private async handleLine(line: string) {
    const trimmed = line.trim()
    if (!trimmed) return
    if (!this.readyToProcess || !this.transport) {
      this.messageQueue.push(trimmed)
      return
    }

    let parsed
    try {
      parsed = JSON.parse(trimmed)
    } catch (error) {
      process.stderr.write('Invalid JSON: ' + errorMessage(error) + '\n')
      return
    }

    try {
      await this.transport.send(parsed)
    } catch (error) {
      process.stderr.write('Send error: ' + errorMessage(error) + '\n')
    }
  }

  /** Pump stdin line-by-line until EOF. */
  public async run(): Promise<void> {
    for await (const line of this.rl) {
      await this.handleLine(line)
    }
  }
}

// Example usage (not CLI):
// const proxy = new MCPStdioToSSEProxy();
// proxy.setRemoteUrl(new URL('https://your-mcp-server/mcp'));
// proxy.setToken('YOUR_TOKEN');
// proxy.run();
Loading