Skip to content
Merged
Show file tree
Hide file tree
Changes from 25 commits
Commits
Show all changes
59 commits
Select commit Hold shift + click to select a range
f55dac8
feat: fix #272 add memory feature
seratch Sep 2, 2025
91736b1
pnpm i
seratch Oct 29, 2025
808156b
align with python
seratch Oct 29, 2025
366cc0a
add more tests and prisma example
seratch Oct 29, 2025
0fd03a5
build error
seratch Oct 29, 2025
edf0782
Add changeset - minor release
seratch Oct 29, 2025
b2958fd
fix
seratch Oct 29, 2025
a44370a
fix review comment
seratch Oct 29, 2025
276eb29
HITL support
seratch Oct 29, 2025
a4fed4f
fix review comment
seratch Oct 29, 2025
23d2727
oai store limit issue
seratch Oct 29, 2025
60f7598
fix review comment
seratch Oct 29, 2025
8bfeeda
fix review comment
seratch Oct 29, 2025
ce85e41
fix local codex review
seratch Oct 29, 2025
7892cdc
fix local codex review
seratch Oct 29, 2025
9052aff
fix local codex review comment
seratch Oct 29, 2025
479ee58
fix local codex review comment
seratch Oct 29, 2025
252c919
fix local codex review comment
seratch Oct 29, 2025
c767837
fix local codex review comment
seratch Oct 29, 2025
e594dfe
fix local codex review comment
seratch Oct 29, 2025
12fe46c
fix local codex review comment
seratch Oct 29, 2025
e0fc814
refactor
seratch Oct 29, 2025
176f784
refactor
seratch Oct 29, 2025
fbedc61
refactor
seratch Oct 29, 2025
fe0e53d
fix local codex review comment
seratch Oct 29, 2025
ff827be
refactor
seratch Oct 29, 2025
78e1c59
fix review comment
seratch Oct 29, 2025
6eaa6e4
improve comments
seratch Oct 29, 2025
a58c9e8
fix local codex review comment
seratch Oct 29, 2025
6c584b4
fix local codex review comment
seratch Oct 29, 2025
4466847
refactor
seratch Oct 29, 2025
4d7cdbe
fix local codex review comment
seratch Oct 29, 2025
23ea5bd
fix local codex review comment
seratch Oct 29, 2025
fbad0d2
fix local codex review comment
seratch Oct 29, 2025
c2a4e1d
fix local codex review comment
seratch Oct 29, 2025
0c5ba85
fix local codex review comment
seratch Oct 29, 2025
6c57d2a
fix local codex review comment
seratch Oct 29, 2025
98d56c0
fix local codex review comment
seratch Oct 29, 2025
da895f4
fix local codex review comment
seratch Oct 29, 2025
c14e0a0
fix local codex review comment
seratch Oct 29, 2025
d687d37
fix local codex review comment:
seratch Oct 29, 2025
c49cd7f
fix local codex review comment:
seratch Oct 29, 2025
7a489f4
fix local codex review comment:
seratch Oct 29, 2025
c95f197
fix local codex review comment:
seratch Oct 29, 2025
60e2ab7
fix local codex review comment:
seratch Oct 29, 2025
683abfd
refactor
seratch Oct 29, 2025
edaad7c
fix local codex review comment:
seratch Oct 29, 2025
6eb1a57
fix local codex review comment:
seratch Oct 29, 2025
6346f3b
fix local codex review comment:
seratch Oct 29, 2025
9fb9868
revert unsupported use case changes
seratch Oct 30, 2025
42b23c6
fix oai store bugs
seratch Oct 30, 2025
911717e
tool output support and bug fixes
seratch Oct 30, 2025
72ac1dc
remove unnecessary input validation; add more comments, refactor varia…
seratch Oct 30, 2025
46989c1
fix local codex review comment:
seratch Oct 30, 2025
9ceeb97
fix local codex review comment:
seratch Oct 30, 2025
e8c0bde
fix local codex review comment:
seratch Oct 30, 2025
c8c67f1
add memory store
seratch Oct 30, 2025
6fa4a4f
fix fc id bug
seratch Oct 30, 2025
27e74c4
Run all examples
seratch Oct 30, 2025
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 6 additions & 0 deletions .changeset/easy-taxis-stop.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
---
'@openai/agents-openai': minor
'@openai/agents-core': minor
---

feat: fix #272 add memory feature
2 changes: 2 additions & 0 deletions examples/memory/.gitignore
Original file line number Diff line number Diff line change
@@ -0,0 +1,2 @@
tmp/
*.db
152 changes: 152 additions & 0 deletions examples/memory/file-hitl.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,152 @@
import { z } from 'zod';
import readline from 'node:readline/promises';
import { stdin as input, stdout as output } from 'node:process';
import {
Agent,
RunResult,
RunToolApprovalItem,
run,
tool,
} from '@openai/agents';

import type { Interface as ReadlineInterface } from 'node:readline/promises';
import { FileSession } from './sessions';

// Tool that resolves an internal customer id to the stored profile record.
// needsApproval always returns true, so every call goes through the HITL flow.
const lookupCustomerProfile = tool({
  name: 'lookup_customer_profile',
  description:
    'Look up stored profile details for a customer by their internal id.',
  parameters: z.object({
    id: z
      .string()
      .describe('The internal identifier for the customer to retrieve.'),
  }),
  // Every invocation must be approved by the operator before it executes.
  needsApproval: async () => true,
  execute: async ({ id }) => {
    const record = customerDirectory[id];
    return record
      ? `Customer ${record.name} (tier ${record.tier}) can be reached at ${record.phone}. Notes: ${record.notes}`
      : `No customer found for id ${id}.`;
  },
});

// In-memory stand-in for a customer database, keyed by internal customer id.
// Read by the lookup_customer_profile tool's execute handler.
const customerDirectory: Record<
  string,
  { name: string; phone: string; tier: string; notes: string }
> = {
  '101': {
    name: 'Amina K.',
    phone: '+1-415-555-1010',
    tier: 'gold',
    notes: 'Prefers SMS follow ups and values concise summaries.',
  },
  '104': {
    name: 'Diego L.',
    phone: '+1-415-555-2040',
    tier: 'platinum',
    notes:
      'Recently reported sync issues. Flagged for a proactive onboarding call.',
  },
  '205': {
    name: 'Morgan S.',
    phone: '+1-415-555-3205',
    tier: 'standard',
    notes: 'Interested in automation tutorials sent last week.',
  },
};

// Renders the pending tool call's arguments as a display string for the
// approval prompt. Returns '' when no arguments are present.
function formatToolArguments(interruption: RunToolApprovalItem): string {
  const args = interruption.rawItem.arguments;
  if (!args) {
    return '';
  }
  if (typeof args === 'string') {
    return args;
  }
  // Non-string arguments: serialize, falling back to String() when the
  // payload cannot be JSON-encoded (e.g. circular references).
  try {
    return JSON.stringify(args);
  } catch {
    return String(args);
  }
}

// Asks a yes/no question on the given readline interface.
// Accepts 'y' or 'yes' (any casing, surrounding whitespace ignored) as yes;
// every other answer counts as no.
async function promptYesNo(
  rl: ReadlineInterface,
  question: string,
): Promise<boolean> {
  const reply = (await rl.question(`${question} (y/n): `))
    .trim()
    .toLowerCase();
  return ['y', 'yes'].includes(reply);
}

// Surfaces each pending tool-approval interruption to the operator, records
// the approve/reject decision on the run state, and resumes the run. Loops
// until the run completes with no further interruptions.
async function resolveInterruptions<TContext, TAgent extends Agent<any, any>>(
  rl: ReadlineInterface,
  agent: TAgent,
  initialResult: RunResult<TContext, TAgent>,
  session: FileSession,
): Promise<RunResult<TContext, TAgent>> {
  let current = initialResult;
  while (current.interruptions?.length) {
    for (const interruption of current.interruptions) {
      const argText = formatToolArguments(interruption);
      const approved = await promptYesNo(
        rl,
        `Agent ${interruption.agent.name} wants to call ${interruption.rawItem.name} with ${argText || 'no arguments'}`,
      );
      if (approved) {
        current.state.approve(interruption);
        console.log('Approved tool call.');
      } else {
        current.state.reject(interruption);
        console.log('Rejected tool call.');
      }
    }

    // Resume from the saved state; the session persists the transcript.
    current = await run(agent, current.state, { session });
  }

  return current;
}

// Interactive REPL: each user turn runs the agent against a shared
// file-backed session, resolving HITL approvals before printing the reply.
// An empty input line ends the loop.
async function main() {
  const agent = new Agent({
    name: 'File HITL assistant',
    instructions:
      'You assist support agents. Always consult the lookup_customer_profile tool before answering customer questions so your replies include stored notes. Keep responses under three sentences.',
    modelSettings: { toolChoice: 'required' },
    tools: [lookupCustomerProfile],
  });

  const session = new FileSession({ dir: './tmp' });
  const sessionId = await session.getSessionId();
  const rl = readline.createInterface({ input, output });

  console.log(`Session id: ${sessionId}`);
  console.log(
    'Enter a message to chat with the agent. Submit an empty line to exit.',
  );

  for (;;) {
    const message = await rl.question('You: ');
    if (message.trim() === '') {
      break;
    }

    const initial = await run(agent, message, { session });
    const result = await resolveInterruptions(rl, agent, initial, session);

    const reply = result.finalOutput ?? '[No final output produced]';
    console.log(`Assistant: ${reply}`);
    console.log();
  }

  rl.close();
}

// Entry point: report fatal errors and exit nonzero so callers can detect failure.
main().catch((err) => {
  console.error(err);
  process.exit(1);
});
103 changes: 103 additions & 0 deletions examples/memory/file.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,103 @@
import { Agent, run, tool } from '@openai/agents';
import { FileSession } from './sessions';
import { z } from 'zod';

// Demo tool: maps an internal customer id to a canned profile string.
const lookupCustomerProfile = tool({
  name: 'lookup_customer_profile',
  description:
    'Look up stored profile details for a customer by their internal id.',
  parameters: z.object({
    id: z
      .string()
      .describe('The internal identifier for the customer to retrieve.'),
  }),
  execute: async ({ id }) => {
    const records: Record<string, string> = {
      '1': 'Customer 1 (tier gold). Notes: Prefers concise replies.',
      '2': 'Customer 2 (tier standard). Notes: Interested in tutorials.',
    };
    return records[id] ?? `No customer found for id ${id}.`;
  },
});

// Non-streaming demo: two sequential turns share one FileSession so the
// second question can refer back to the first answer.
async function main() {
  const agent = new Agent({
    name: 'Assistant',
    instructions: 'You are a helpful assistant.',
    modelSettings: { toolChoice: 'required' },
    tools: [lookupCustomerProfile],
  });

  const session = new FileSession({ dir: './tmp/' });

  const first = await run(
    agent,
    'What is the largest country in South America?',
    { session },
  );
  console.log(first.finalOutput); // e.g., Brazil

  const second = await run(agent, 'What is the capital of that country?', {
    session,
  });
  console.log(second.finalOutput); // e.g., Brasilia
}

// Streaming demo: the same two-turn conversation, printing tokens as they
// arrive — first by filtering raw model events, then via toTextStream().
async function mainStream() {
  const agent = new Agent({
    name: 'Assistant',
    instructions: 'You are a helpful assistant.',
    modelSettings: { toolChoice: 'required' },
    tools: [lookupCustomerProfile],
  });

  const session = new FileSession({ dir: './tmp/' });

  const first = await run(
    agent,
    'What is the largest country in South America?',
    {
      stream: true,
      session,
    },
  );

  // Manually pick the text deltas out of the raw model event stream.
  for await (const event of first) {
    if (
      event.type === 'raw_model_stream_event' &&
      event.data.type === 'output_text_delta'
    ) {
      process.stdout.write(event.data.delta);
    }
  }
  console.log();

  const second = await run(agent, 'What is the capital of that country?', {
    stream: true,
    session,
  });

  // toTextStream() automatically returns a readable stream of strings intended to be displayed
  // to the user
  for await (const chunk of second.toTextStream()) {
    process.stdout.write(chunk);
  }
  console.log();
}

async function promptAndRun() {
const readline = await import('node:readline/promises');
const rl = readline.createInterface({
input: process.stdin,
output: process.stdout,
});
const isStream = await rl.question('Run in stream mode? (y/n): ');
rl.close();
if (isStream.trim().toLowerCase() === 'y') {
await mainStream();
} else {
await main();
}
}

// Entry point: report fatal errors and exit nonzero so callers can detect failure.
promptAndRun().catch((err) => {
  console.error(err);
  process.exit(1);
});
Loading