feat: add baseURL and apiKey support for OpenAI-compatible APIs

Enable connecting to any OpenAI-compatible API (Ollama, vLLM, LM Studio,
etc.) by adding baseURL and apiKey fields to AgentConfig and
OrchestratorConfig, threaded through to adapter constructors.

- OpenAIAdapter and AnthropicAdapter accept optional baseURL
- createAdapter() forwards baseURL to both adapters, warns if used with copilot
- All execution paths (runAgent, runTeam coordinator, buildPool) merge defaults
- Fully backward compatible — omitting new fields preserves existing behavior
This commit is contained in:
JackChen 2026-04-02 19:33:10 +08:00
parent 7acd450707
commit 62d6fa9e26
6 changed files with 37 additions and 8 deletions

View File

@@ -109,7 +109,7 @@ export class Agent {
}
const provider = this.config.provider ?? 'anthropic'
const adapter = await createAdapter(provider)
const adapter = await createAdapter(provider, this.config.apiKey, this.config.baseURL)
const runnerOptions: RunnerOptions = {
model: this.config.model,

View File

@@ -54,24 +54,29 @@ export type SupportedProvider = 'anthropic' | 'copilot' | 'openai'
*
* @param provider - Which LLM provider to target.
* @param apiKey - Optional API key override; falls back to env var.
* @param baseURL - Optional base URL for OpenAI-compatible APIs (Ollama, vLLM, etc.).
* @throws {Error} When the provider string is not recognised.
*/
export async function createAdapter(
provider: SupportedProvider,
apiKey?: string,
baseURL?: string,
): Promise<LLMAdapter> {
switch (provider) {
case 'anthropic': {
const { AnthropicAdapter } = await import('./anthropic.js')
return new AnthropicAdapter(apiKey)
return new AnthropicAdapter(apiKey, baseURL)
}
case 'copilot': {
if (baseURL) {
console.warn('[open-multi-agent] baseURL is not supported for the copilot provider and will be ignored.')
}
const { CopilotAdapter } = await import('./copilot.js')
return new CopilotAdapter(apiKey)
}
case 'openai': {
const { OpenAIAdapter } = await import('./openai.js')
return new OpenAIAdapter(apiKey)
return new OpenAIAdapter(apiKey, baseURL)
}
default: {
// The `never` cast here makes TypeScript enforce exhaustiveness.

View File

@@ -189,9 +189,10 @@ export class AnthropicAdapter implements LLMAdapter {
readonly #client: Anthropic
constructor(apiKey?: string) {
constructor(apiKey?: string, baseURL?: string) {
this.#client = new Anthropic({
apiKey: apiKey ?? process.env['ANTHROPIC_API_KEY'],
baseURL,
})
}

View File

@@ -69,9 +69,10 @@ export class OpenAIAdapter implements LLMAdapter {
readonly #client: OpenAI
constructor(apiKey?: string) {
constructor(apiKey?: string, baseURL?: string) {
this.#client = new OpenAI({
apiKey: apiKey ?? process.env['OPENAI_API_KEY'],
baseURL,
})
}

View File

@@ -341,8 +341,8 @@ async function buildTaskPrompt(task: Task, team: Team): Promise<string> {
*/
export class OpenMultiAgent {
private readonly config: Required<
Omit<OrchestratorConfig, 'onProgress'>
> & Pick<OrchestratorConfig, 'onProgress'>
Omit<OrchestratorConfig, 'onProgress' | 'defaultBaseURL' | 'defaultApiKey'>
> & Pick<OrchestratorConfig, 'onProgress' | 'defaultBaseURL' | 'defaultApiKey'>
private readonly teams: Map<string, Team> = new Map()
private completedTaskCount = 0
@@ -360,6 +360,8 @@ export class OpenMultiAgent {
maxConcurrency: config.maxConcurrency ?? DEFAULT_MAX_CONCURRENCY,
defaultModel: config.defaultModel ?? DEFAULT_MODEL,
defaultProvider: config.defaultProvider ?? 'anthropic',
defaultBaseURL: config.defaultBaseURL,
defaultApiKey: config.defaultApiKey,
onProgress: config.onProgress,
}
}
@@ -405,7 +407,13 @@ export class OpenMultiAgent {
* @param prompt - The user prompt to send.
*/
async runAgent(config: AgentConfig, prompt: string): Promise<AgentRunResult> {
const agent = buildAgent(config)
const effective: AgentConfig = {
...config,
provider: config.provider ?? this.config.defaultProvider,
baseURL: config.baseURL ?? this.config.defaultBaseURL,
apiKey: config.apiKey ?? this.config.defaultApiKey,
}
const agent = buildAgent(effective)
this.config.onProgress?.({
type: 'agent_start',
agent: config.name,
@@ -462,6 +470,8 @@ export class OpenMultiAgent {
name: 'coordinator',
model: this.config.defaultModel,
provider: this.config.defaultProvider,
baseURL: this.config.defaultBaseURL,
apiKey: this.config.defaultApiKey,
systemPrompt: this.buildCoordinatorSystemPrompt(agentConfigs),
maxTurns: 3,
}
@@ -792,6 +802,8 @@ export class OpenMultiAgent {
...config,
model: config.model,
provider: config.provider ?? this.config.defaultProvider,
baseURL: config.baseURL ?? this.config.defaultBaseURL,
apiKey: config.apiKey ?? this.config.defaultApiKey,
}
pool.add(buildAgent(effective))
}

View File

@@ -187,6 +187,14 @@ export interface AgentConfig {
readonly name: string
readonly model: string
readonly provider?: 'anthropic' | 'copilot' | 'openai'
/**
* Custom base URL for OpenAI-compatible APIs (Ollama, vLLM, LM Studio, etc.).
* Note: local servers that don't require auth still need `apiKey` set to a
* non-empty placeholder (e.g. `'ollama'`) because the OpenAI SDK validates it.
*/
readonly baseURL?: string
/** API key override; falls back to the provider's standard env var. */
readonly apiKey?: string
readonly systemPrompt?: string
/** Names of tools (from the tool registry) available to this agent. */
readonly tools?: readonly string[]
@@ -286,6 +294,8 @@ export interface OrchestratorConfig {
readonly maxConcurrency?: number
readonly defaultModel?: string
readonly defaultProvider?: 'anthropic' | 'copilot' | 'openai'
readonly defaultBaseURL?: string
readonly defaultApiKey?: string
onProgress?: (event: OrchestratorEvent) => void
}