feat: add Vercel AI SDK integration example (Next.js + runTeam)
Next.js App Router demo showing OMA + Vercel AI SDK working together: - Backend: OMA runTeam() orchestrates researcher + writer agents - Frontend: AI SDK useChat hook for streaming chat UI - Uses DeepSeek via OpenAI-compatible provider
This commit is contained in:
parent
93795db09f
commit
8ecfc1504c
|
|
@ -0,0 +1,5 @@
|
|||
node_modules/
|
||||
.next/
|
||||
.env
|
||||
.env.local
|
||||
*.tsbuildinfo
|
||||
|
|
@ -0,0 +1,59 @@
|
|||
# with-vercel-ai-sdk
|
||||
|
||||
A Next.js demo showing **open-multi-agent** (OMA) and **Vercel AI SDK** working together:
|
||||
|
||||
- **OMA** orchestrates a research team (researcher agent + writer agent) via `runTeam()`
|
||||
- **AI SDK** streams the result to a chat UI via `useChat` + `streamText`
|
||||
|
||||
## How it works
|
||||
|
||||
```
|
||||
User message
|
||||
│
|
||||
▼
|
||||
API route (app/api/chat/route.ts)
|
||||
│
|
||||
├─ Phase 1: OMA runTeam()
|
||||
│ coordinator decomposes goal → researcher gathers info → writer drafts article
|
||||
│
|
||||
└─ Phase 2: AI SDK streamText()
|
||||
streams the team's output to the browser
|
||||
│
|
||||
▼
|
||||
Chat UI (app/page.tsx) — useChat hook renders streamed response
|
||||
```
|
||||
|
||||
## Setup
|
||||
|
||||
```bash
|
||||
# 1. From repo root, install OMA dependencies
|
||||
cd ../..
|
||||
npm install
|
||||
|
||||
# 2. Back to this example
|
||||
cd examples/with-vercel-ai-sdk
|
||||
npm install
|
||||
|
||||
# 3. Set your API key
|
||||
export DEEPSEEK_API_KEY=sk-...
|
||||
|
||||
# 4. Run
|
||||
npm run dev
|
||||
```
|
||||
|
||||
`npm run dev` automatically builds OMA before starting Next.js (via the `predev` script).
|
||||
|
||||
Open [http://localhost:3000](http://localhost:3000), type a topic, and watch the research team work.
|
||||
|
||||
## Prerequisites
|
||||
|
||||
- Node.js >= 18
|
||||
- `DEEPSEEK_API_KEY` environment variable (used by both OMA and AI SDK)
|
||||
|
||||
## Key files
|
||||
|
||||
| File | Role |
|
||||
|------|------|
|
||||
| `app/api/chat/route.ts` | Backend — OMA orchestration + AI SDK streaming |
|
||||
| `app/page.tsx` | Frontend — chat UI with `useChat` hook |
|
||||
| `package.json` | References OMA via `file:../../` (local link) |
|
||||
|
|
@ -0,0 +1,91 @@
|
|||
import { streamText, convertToModelMessages, type UIMessage } from 'ai'
|
||||
import { createOpenAICompatible } from '@ai-sdk/openai-compatible'
|
||||
import { OpenMultiAgent } from '@jackchen_me/open-multi-agent'
|
||||
import type { AgentConfig } from '@jackchen_me/open-multi-agent'
|
||||
|
||||
export const maxDuration = 120
|
||||
|
||||
// --- DeepSeek via OpenAI-compatible API ---
|
||||
const DEEPSEEK_BASE_URL = 'https://api.deepseek.com'
|
||||
const DEEPSEEK_MODEL = 'deepseek-chat'
|
||||
|
||||
const deepseek = createOpenAICompatible({
|
||||
name: 'deepseek',
|
||||
baseURL: `${DEEPSEEK_BASE_URL}/v1`,
|
||||
apiKey: process.env.DEEPSEEK_API_KEY,
|
||||
})
|
||||
|
||||
const researcher: AgentConfig = {
|
||||
name: 'researcher',
|
||||
model: DEEPSEEK_MODEL,
|
||||
provider: 'openai',
|
||||
baseURL: DEEPSEEK_BASE_URL,
|
||||
apiKey: process.env.DEEPSEEK_API_KEY,
|
||||
systemPrompt: `You are a research specialist. Given a topic, provide thorough, factual research
|
||||
with key findings, relevant data points, and important context.
|
||||
Be concise but comprehensive. Output structured notes, not prose.`,
|
||||
maxTurns: 3,
|
||||
temperature: 0.2,
|
||||
}
|
||||
|
||||
const writer: AgentConfig = {
|
||||
name: 'writer',
|
||||
model: DEEPSEEK_MODEL,
|
||||
provider: 'openai',
|
||||
baseURL: DEEPSEEK_BASE_URL,
|
||||
apiKey: process.env.DEEPSEEK_API_KEY,
|
||||
systemPrompt: `You are an expert writer. Using research from team members (available in shared memory),
|
||||
write a well-structured, engaging article with clear headings and concise paragraphs.
|
||||
Do not repeat raw research — synthesize it into readable prose.`,
|
||||
maxTurns: 3,
|
||||
temperature: 0.4,
|
||||
}
|
||||
|
||||
function extractText(message: UIMessage): string {
|
||||
return message.parts
|
||||
.filter((p): p is { type: 'text'; text: string } => p.type === 'text')
|
||||
.map((p) => p.text)
|
||||
.join('')
|
||||
}
|
||||
|
||||
export async function POST(req: Request) {
|
||||
const { messages }: { messages: UIMessage[] } = await req.json()
|
||||
const lastText = extractText(messages.at(-1)!)
|
||||
|
||||
// --- Phase 1: OMA multi-agent orchestration ---
|
||||
const orchestrator = new OpenMultiAgent({
|
||||
defaultModel: DEEPSEEK_MODEL,
|
||||
defaultProvider: 'openai',
|
||||
defaultBaseURL: DEEPSEEK_BASE_URL,
|
||||
defaultApiKey: process.env.DEEPSEEK_API_KEY,
|
||||
})
|
||||
|
||||
const team = orchestrator.createTeam('research-writing', {
|
||||
name: 'research-writing',
|
||||
agents: [researcher, writer],
|
||||
sharedMemory: true,
|
||||
})
|
||||
|
||||
const teamResult = await orchestrator.runTeam(
|
||||
team,
|
||||
`Research and write an article about: ${lastText}`,
|
||||
)
|
||||
|
||||
const teamOutput = teamResult.agentResults.get('coordinator')?.output ?? ''
|
||||
|
||||
// --- Phase 2: Stream result via Vercel AI SDK ---
|
||||
const result = streamText({
|
||||
model: deepseek(DEEPSEEK_MODEL),
|
||||
system: `You are presenting research from a multi-agent team (researcher + writer).
|
||||
The team has already done the work. Your only job is to relay their output to the user
|
||||
in a well-formatted way. Keep the content faithful to the team output below.
|
||||
At the very end, add a one-line note that this was produced by a researcher agent
|
||||
and a writer agent collaborating via open-multi-agent.
|
||||
|
||||
## Team Output
|
||||
${teamOutput}`,
|
||||
messages: await convertToModelMessages(messages),
|
||||
})
|
||||
|
||||
return result.toUIMessageStreamResponse()
|
||||
}
|
||||
|
|
@ -0,0 +1,14 @@
|
|||
import type { Metadata } from 'next'
|
||||
|
||||
export const metadata: Metadata = {
|
||||
title: 'OMA + Vercel AI SDK',
|
||||
description: 'Multi-agent research team powered by open-multi-agent, streamed via Vercel AI SDK',
|
||||
}
|
||||
|
||||
export default function RootLayout({ children }: { children: React.ReactNode }) {
|
||||
return (
|
||||
<html lang="en">
|
||||
<body style={{ margin: 0, background: '#fafafa' }}>{children}</body>
|
||||
</html>
|
||||
)
|
||||
}
|
||||
|
|
@ -0,0 +1,97 @@
|
|||
'use client'
|
||||
|
||||
import { useState } from 'react'
|
||||
import { useChat } from '@ai-sdk/react'
|
||||
|
||||
export default function Home() {
|
||||
const { messages, sendMessage, status, error } = useChat()
|
||||
const [input, setInput] = useState('')
|
||||
|
||||
const isLoading = status === 'submitted' || status === 'streaming'
|
||||
|
||||
const handleSubmit = async (e: React.FormEvent) => {
|
||||
e.preventDefault()
|
||||
if (!input.trim() || isLoading) return
|
||||
const text = input
|
||||
setInput('')
|
||||
await sendMessage({ text })
|
||||
}
|
||||
|
||||
return (
|
||||
<main
|
||||
style={{
|
||||
maxWidth: 720,
|
||||
margin: '0 auto',
|
||||
padding: '32px 16px',
|
||||
fontFamily: 'system-ui, -apple-system, sans-serif',
|
||||
}}
|
||||
>
|
||||
<h1 style={{ fontSize: 22, marginBottom: 4 }}>Research Team</h1>
|
||||
<p style={{ color: '#666', fontSize: 14, marginBottom: 28 }}>
|
||||
Enter a topic. A <strong>researcher</strong> agent gathers information, a{' '}
|
||||
<strong>writer</strong> agent composes an article — orchestrated by
|
||||
open-multi-agent, streamed via Vercel AI SDK.
|
||||
</p>
|
||||
|
||||
<div style={{ minHeight: 120 }}>
|
||||
{messages.map((m) => (
|
||||
<div key={m.id} style={{ marginBottom: 24, lineHeight: 1.7 }}>
|
||||
<div style={{ fontWeight: 600, fontSize: 13, color: '#999', marginBottom: 4 }}>
|
||||
{m.role === 'user' ? 'You' : 'Research Team'}
|
||||
</div>
|
||||
<div style={{ whiteSpace: 'pre-wrap', fontSize: 15 }}>
|
||||
{m.parts
|
||||
.filter((part): part is { type: 'text'; text: string } => part.type === 'text')
|
||||
.map((part) => part.text)
|
||||
.join('')}
|
||||
</div>
|
||||
</div>
|
||||
))}
|
||||
|
||||
{isLoading && status === 'submitted' && (
|
||||
<div style={{ color: '#888', fontSize: 14, padding: '8px 0' }}>
|
||||
Agents are collaborating — this may take a minute...
|
||||
</div>
|
||||
)}
|
||||
|
||||
{error && (
|
||||
<div style={{ color: '#c00', fontSize: 14, padding: '8px 0' }}>
|
||||
Error: {error.message}
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
|
||||
<form onSubmit={handleSubmit} style={{ display: 'flex', gap: 8, marginTop: 32 }}>
|
||||
<input
|
||||
value={input}
|
||||
onChange={(e) => setInput(e.target.value)}
|
||||
placeholder="Enter a topic to research..."
|
||||
disabled={isLoading}
|
||||
style={{
|
||||
flex: 1,
|
||||
padding: '10px 14px',
|
||||
borderRadius: 8,
|
||||
border: '1px solid #ddd',
|
||||
fontSize: 15,
|
||||
outline: 'none',
|
||||
}}
|
||||
/>
|
||||
<button
|
||||
type="submit"
|
||||
disabled={isLoading || !input.trim()}
|
||||
style={{
|
||||
padding: '10px 20px',
|
||||
borderRadius: 8,
|
||||
border: 'none',
|
||||
background: isLoading ? '#ccc' : '#111',
|
||||
color: '#fff',
|
||||
cursor: isLoading ? 'not-allowed' : 'pointer',
|
||||
fontSize: 15,
|
||||
}}
|
||||
>
|
||||
Send
|
||||
</button>
|
||||
</form>
|
||||
</main>
|
||||
)
|
||||
}
|
||||
|
|
@ -0,0 +1,6 @@
|
|||
/// <reference types="next" />
|
||||
/// <reference types="next/image-types/global" />
|
||||
import "./.next/dev/types/routes.d.ts";
|
||||
|
||||
// NOTE: This file should not be edited
|
||||
// see https://nextjs.org/docs/app/api-reference/config/typescript for more information.
|
||||
|
|
@ -0,0 +1,7 @@
|
|||
import type { NextConfig } from 'next'
|
||||
|
||||
const nextConfig: NextConfig = {
|
||||
serverExternalPackages: ['@jackchen_me/open-multi-agent'],
|
||||
}
|
||||
|
||||
export default nextConfig
|
||||
File diff suppressed because it is too large
Load Diff
|
|
@ -0,0 +1,25 @@
|
|||
{
|
||||
"name": "with-vercel-ai-sdk",
|
||||
"private": true,
|
||||
"scripts": {
|
||||
"predev": "cd ../.. && npm run build",
|
||||
"dev": "next dev",
|
||||
"build": "next build",
|
||||
"start": "next start"
|
||||
},
|
||||
"dependencies": {
|
||||
"@ai-sdk/openai-compatible": "^2.0.41",
|
||||
"@ai-sdk/react": "^3.0.0",
|
||||
"@jackchen_me/open-multi-agent": "file:../../",
|
||||
"ai": "^6.0.0",
|
||||
"next": "^16.0.0",
|
||||
"react": "^19.0.0",
|
||||
"react-dom": "^19.0.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/node": "^22.0.0",
|
||||
"@types/react": "^19.0.0",
|
||||
"@types/react-dom": "^19.0.0",
|
||||
"typescript": "^5.6.0"
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,41 @@
|
|||
{
|
||||
"compilerOptions": {
|
||||
"target": "ES2022",
|
||||
"lib": [
|
||||
"dom",
|
||||
"dom.iterable",
|
||||
"ES2022"
|
||||
],
|
||||
"allowJs": true,
|
||||
"skipLibCheck": true,
|
||||
"strict": true,
|
||||
"noEmit": true,
|
||||
"esModuleInterop": true,
|
||||
"module": "ESNext",
|
||||
"moduleResolution": "bundler",
|
||||
"resolveJsonModule": true,
|
||||
"isolatedModules": true,
|
||||
"jsx": "react-jsx",
|
||||
"incremental": true,
|
||||
"plugins": [
|
||||
{
|
||||
"name": "next"
|
||||
}
|
||||
],
|
||||
"paths": {
|
||||
"@/*": [
|
||||
"./*"
|
||||
]
|
||||
}
|
||||
},
|
||||
"include": [
|
||||
"next-env.d.ts",
|
||||
"**/*.ts",
|
||||
"**/*.tsx",
|
||||
".next/types/**/*.ts",
|
||||
".next/dev/types/**/*.ts"
|
||||
],
|
||||
"exclude": [
|
||||
"node_modules"
|
||||
]
|
||||
}
|
||||
Loading…
Reference in New Issue