A transport adapter that bridges LangGraph API streaming events to the Vercel AI SDK's `useChat` hook format.

**What it does:** transforms LangGraph's SSE message format into the chunk protocol that `useChat` understands, enabling seamless integration between LangGraph agents and React chat interfaces.
## 📦 From npm (Recommended)

```bash
npm install @keboola/langgraph-chat-transport ai@beta
```

## 🐙 From GitHub Packages

```bash
# Add to .npmrc
echo "@keboola:registry=https://npm.pkg.github.com" >> .npmrc

# Install (requires GitHub authentication)
npm install @keboola/langgraph-chat-transport ai@beta
```

> **Note:** Requires the Vercel AI SDK v5 Beta.
## ⚡ Basic Usage
```tsx
import { useState } from 'react';
import { useChat } from '@ai-sdk/react';
import { LangGraphChatTransport } from '@keboola/langgraph-chat-transport';

// Create the transport once, outside the component, so it is not
// re-instantiated on every render.
const transport = new LangGraphChatTransport({
  api: '/api/langgraph/stream',
});

export function ChatComponent() {
  const [input, setInput] = useState('');
  const { messages, sendMessage } = useChat({ transport });

  return (
    <div>
      {messages.map((message) => (
        <div key={message.id}>
          <strong>{message.role}:</strong>{' '}
          {/* In AI SDK v5, message content is delivered as typed parts */}
          {message.parts.map((part, index) =>
            part.type === 'text' ? <span key={index}>{part.text}</span> : null
          )}
        </div>
      ))}
      <form
        onSubmit={(event) => {
          event.preventDefault();
          sendMessage({ text: input });
          setInput('');
        }}
      >
        <input value={input} onChange={(event) => setInput(event.target.value)} />
        <button type="submit">Send</button>
      </form>
    </div>
  );
}
```
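If the transport's options depend on props or state, you can instead build it inside the component; a minimal sketch (the `sessionId` prop is illustrative, not part of the package's API) that memoizes the instance across renders:

```tsx
import { useMemo } from 'react';
import { useChat } from '@ai-sdk/react';
import { LangGraphChatTransport } from '@keboola/langgraph-chat-transport';

export function SessionChat({ sessionId }: { sessionId: string }) {
  // Recreate the transport only when the session changes.
  const transport = useMemo(
    () =>
      new LangGraphChatTransport({
        api: '/api/langgraph/stream',
        body: { sessionId },
      }),
    [sessionId]
  );
  const { messages } = useChat({ transport });

  return <div>{messages.length} messages</div>;
}
```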
## 🔧 Server Setup (Next.js)

```typescript
// app/api/langgraph/stream/route.ts
export async function POST(request: Request) {
  const { messages } = await request.json();

  // Forward to your LangGraph API
  const response = await fetch('https://your-langgraph-api.com/stream', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ messages }),
  });

  // Return the SSE stream directly
  return new Response(response.body, {
    headers: {
      'Content-Type': 'text/event-stream',
      'Cache-Control': 'no-cache',
    },
  });
}
```

## ✨ Features

- ✅ Message Types: `ai`, `human`, `system`, `tool`
- ✅ Text Streaming: Incremental text updates with proper delta handling
- ✅ Tool Calls: Full tool invocation and result streaming
- ✅ Error Handling: Graceful error propagation to the UI
- ✅ TypeScript: Full type safety with generics
- ✅ SSE Parsing: Robust Server-Sent Events parsing
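The Next.js route above forwards requests verbatim. If your LangGraph backend requires authentication, inject the credentials server-side so they never reach the browser; a sketch of the same route, assuming a hypothetical `LANGGRAPH_API_KEY` environment variable:

```typescript
// app/api/langgraph/stream/route.ts (authenticated variant, sketch)
export async function POST(request: Request) {
  const { messages } = await request.json();

  const response = await fetch('https://your-langgraph-api.com/stream', {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
      // Hypothetical env var: keep the key on the server only.
      Authorization: `Bearer ${process.env.LANGGRAPH_API_KEY}`,
    },
    body: JSON.stringify({ messages }),
  });

  return new Response(response.body, {
    headers: {
      'Content-Type': 'text/event-stream',
      'Cache-Control': 'no-cache',
    },
  });
}
```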
## 🛠️ With Tool Calling
```tsx
import { useState } from 'react';
import { useChat } from '@ai-sdk/react';
import { isToolUIPart, getToolName } from 'ai';
import { LangGraphChatTransport } from '@keboola/langgraph-chat-transport';

const transport = new LangGraphChatTransport({
  api: '/api/langgraph/stream',
  headers: {
    Authorization: 'Bearer YOUR_API_KEY',
  },
});

export function AdvancedChatComponent() {
  const [input, setInput] = useState('');
  const { messages, sendMessage, status } = useChat({
    transport,
    onToolCall: ({ toolCall }) => {
      console.log('Tool called:', toolCall.toolName, toolCall.input);
    },
  });
  // AI SDK v5 exposes a status instead of the old isLoading flag.
  const isLoading = status === 'submitted' || status === 'streaming';

  return (
    <div>
      {messages.map((message) => (
        <div key={message.id}>
          <strong>{message.role}:</strong>
          {message.parts.map((part, index) => {
            if (part.type === 'text') {
              return <span key={index}>{part.text}</span>;
            }
            // Display tool invocations (parts of type `tool-<toolName>`)
            if (isToolUIPart(part)) {
              return (
                <div key={index} className="tool-call">
                  <strong>🔧 {getToolName(part)}</strong>
                  <pre>{JSON.stringify(part.input, null, 2)}</pre>
                  {part.state === 'output-available' && (
                    <div className="tool-result">
                      Result: {JSON.stringify(part.output, null, 2)}
                    </div>
                  )}
                </div>
              );
            }
            return null;
          })}
        </div>
      ))}
      <form
        onSubmit={(event) => {
          event.preventDefault();
          sendMessage({ text: input });
          setInput('');
        }}
      >
        <input
          value={input}
          onChange={(event) => setInput(event.target.value)}
          disabled={isLoading}
          placeholder="Ask me anything..."
        />
        <button type="submit" disabled={isLoading}>
          {isLoading ? 'Sending...' : 'Send'}
        </button>
      </form>
    </div>
  );
}
```

## 🎯 TypeScript Configuration
```tsx
import { useChat } from '@ai-sdk/react';
import { LangGraphChatTransport } from '@keboola/langgraph-chat-transport';
import type { UIMessage } from 'ai';
// Custom message type with typed metadata (UIMessage's first generic)
type CustomMessage = UIMessage<{
  model?: string;
  tokens?: number;
}>;
const transport = new LangGraphChatTransport<CustomMessage>({
  api: '/api/chat',
  body: {
    model: 'gpt-4',
    sessionId: 'user-123',
  },
});
export function TypedChat() {
  const { messages } = useChat<CustomMessage>({ transport });

  return (
    <div>
      {messages.map((message) => (
        <div key={message.id}>
          <div>
            {message.parts.map((part, index) =>
              part.type === 'text' ? <span key={index}>{part.text}</span> : null
            )}
          </div>
          {message.metadata?.tokens && (
            <span>Tokens: {message.metadata.tokens}</span>
          )}
        </div>
      ))}
    </div>
  );
}
```

## ⚙️ Configuration Options

```typescript
const transport = new LangGraphChatTransport({
  api: 'https://api.example.com/langgraph/stream',
  headers: {
    'Authorization': 'Bearer ' + process.env.API_KEY,
    'X-User-ID': 'user-123',
  },
  body: {
    model: 'gpt-4',
    temperature: 0.7,
    sessionId: crypto.randomUUID(),
  },
});
```

All `HttpChatTransportInitOptions` are supported:

- `api` - API endpoint (default: `'/api/chat'`)
- `headers` - HTTP headers
- `body` - Additional request body fields
- `credentials` - Fetch credentials mode
- `fetch` - Custom fetch implementation
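For example, the `fetch` option can wrap the default implementation; a minimal sketch that logs every outgoing request (the logging wrapper is illustrative, not part of the package):

```typescript
import { LangGraphChatTransport } from '@keboola/langgraph-chat-transport';

// Wrap the global fetch to observe each streaming request.
const loggingFetch: typeof fetch = async (input, init) => {
  console.log('LangGraph request:', input);
  return fetch(input, init);
};

const transport = new LangGraphChatTransport({
  api: '/api/langgraph/stream',
  fetch: loggingFetch,
});
```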
## 🔄 Message Format Translation

LangGraph Input:

```json
{
  "type": "ai",
  "content": [
    {
      "type": "text",
      "text": "Hello, I can help you with that..."
    },
    {
      "type": "tool_use",
      "id": "tool_1",
      "name": "search",
      "input": { "query": "example" }
    }
  ]
}
```

Vercel AI SDK Output:

```json
[
  { "type": "text-start", "id": "text-0" },
  { "type": "text-delta", "id": "text-0", "delta": "Hello, I can help..." },
  { "type": "text-end", "id": "text-0" },
  {
    "type": "tool-input-available",
    "toolCallId": "tool_1",
    "toolName": "search",
    "input": { "query": "example" }
  }
]
```
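On the wire, the LangGraph message above arrives as Server-Sent Events frames, which the transport parses before emitting the chunks shown. An illustrative sketch of one frame (event names and payload shape vary by LangGraph deployment and stream mode):

```text
event: messages/partial
data: [{"type": "ai", "content": [{"type": "text", "text": "Hello, I can help"}]}]
```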
## 🚨 Error Handling

```tsx
const { messages, error, regenerate } = useChat({
  transport,
  onError: (error) => {
    console.error('Chat error:', error);
    // Report to your error tracking service
  },
});

if (error) {
  return (
    <div>
      <p>Error: {error.message}</p>
      <button onClick={() => regenerate()}>Retry</button>
    </div>
  );
}
```

## 🌐 Express.js Server
```typescript
import express from 'express';
import { Readable } from 'node:stream';

const app = express();
app.use(express.json());

app.post('/api/langgraph/stream', async (req, res) => {
  const { messages } = req.body;
  const response = await fetch('https://your-langgraph-api.com/stream', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ messages }),
  });

  res.setHeader('Content-Type', 'text/event-stream');
  res.setHeader('Cache-Control', 'no-cache');

  // fetch() returns a Web ReadableStream (no .pipe); convert it to a Node
  // stream first. The cast bridges DOM vs. Node web-stream typings.
  Readable.fromWeb(response.body as any).pipe(res);
});
```

## 📋 Requirements

- Node.js 20+
- Vercel AI SDK 5.0 Beta+
- A LangGraph API endpoint that streams SSE responses
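To check the last requirement end-to-end, you can probe your proxy route with `curl`; a sketch against the Next.js route above (`-N` disables buffering so events print as they arrive, and the exact message shape your backend accepts may differ):

```bash
curl -N -X POST http://localhost:3000/api/langgraph/stream \
  -H 'Content-Type: application/json' \
  -d '{"messages": [{"type": "human", "content": "Hello"}]}'
```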
## 📄 License

MIT licensed; see the LICENSE file.