test: extend LLM adapter coverage for issue #54 (#144)

This commit is contained in:
jadegold55 2026-04-21 22:52:33 -04:00 committed by GitHub
parent 99d9d7f52e
commit 910ed0592e
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
3 changed files with 133 additions and 0 deletions

View File

@@ -115,6 +115,62 @@ describe('CopilotAdapter', () => {
// =========================================================================
describe('token management', () => {
// End-to-end device-flow auth: with no GitHub token available the adapter
// must request a device code, report it via onDeviceCode, obtain the OAuth
// access token, exchange it for a Copilot session token, and use that
// session token as the OpenAI client's apiKey.
it('uses the device flow when no GitHub token is available', async () => {
// Fake timers so the device-flow polling delay resolves without real waiting.
vi.useFakeTimers()
const onDeviceCode = vi.fn()
// Three mocked fetch responses, consumed in call order:
// 1) device-code request, 2) access-token poll, 3) session-token exchange.
globalThis.fetch = vi.fn()
.mockResolvedValueOnce({
ok: true,
json: () => Promise.resolve({
device_code: 'device-code',
user_code: 'ABCD-EFGH',
verification_uri: 'https://github.com/login/device',
interval: 0, // zero polling interval keeps the test fast under fake timers
expires_in: 600,
}),
})
.mockResolvedValueOnce({
ok: true,
json: () => Promise.resolve({ access_token: 'oauth_token' }),
})
.mockResolvedValueOnce({
ok: true,
json: () => Promise.resolve({
token: 'session_from_device_flow',
expires_at: Math.floor(Date.now() / 1000) + 3600,
}),
text: () => Promise.resolve(''),
})
const adapter = new CopilotAdapter({ onDeviceCode })
mockCreate.mockResolvedValue(makeCompletion())
// Start the chat (which triggers auth), then flush all pending timers so
// the device-flow sequence completes before the response is awaited.
const responsePromise = adapter.chat([textMsg('user', 'Hi')], chatOpts())
await vi.runAllTimersAsync()
await responsePromise
// The verification URI and user code must be surfaced to the caller.
expect(onDeviceCode).toHaveBeenCalledWith(
'https://github.com/login/device',
'ABCD-EFGH',
)
// The third fetch is the Copilot session-token exchange, authorized with
// the OAuth token obtained in step 2.
expect(globalThis.fetch).toHaveBeenNthCalledWith(
3,
'https://api.github.com/copilot_internal/v2/token',
expect.objectContaining({
headers: expect.objectContaining({
Authorization: 'token oauth_token',
}),
}),
)
// The session token — not the OAuth token — becomes the OpenAI apiKey.
expect(OpenAIMock).toHaveBeenCalledWith(
expect.objectContaining({
apiKey: 'session_from_device_flow',
}),
)
// Restore real timers so later tests are unaffected.
vi.useRealTimers()
})
it('exchanges GitHub token for Copilot session token', async () => {
const fetchMock = mockFetchForToken('session_xyz')
globalThis.fetch = fetchMock
@@ -344,6 +400,23 @@ describe('CopilotAdapter', () => {
expect(events.filter(e => e.type === 'error')).toHaveLength(1)
})
// A streamed tool-call delta whose arguments are not valid JSON must still
// surface as a tool_use event, with the input falling back to an empty object.
it('handles malformed streamed tool arguments JSON', async () => {
  const brokenToolChunk = {
    id: 'c1', model: 'gpt-4o',
    choices: [{ index: 0, delta: { tool_calls: [{ index: 0, id: 'call_1', function: { name: 'search', arguments: '{broken' } }] }, finish_reason: 'tool_calls' }],
    usage: null,
  }
  const usageChunk = { id: 'c1', model: 'gpt-4o', choices: [], usage: { prompt_tokens: 5, completion_tokens: 3 } }
  mockCreate.mockResolvedValue(makeChunks([brokenToolChunk, usageChunk]))
  const emitted = await collectEvents(adapter.stream([textMsg('user', 'Hi')], chatOpts()))
  const toolUses = emitted.filter(e => e.type === 'tool_use')
  expect(toolUses).toHaveLength(1)
  expect((toolUses[0].data as ToolUseBlock).input).toEqual({})
})
})
// =========================================================================

View File

@@ -120,6 +120,29 @@ describe('GeminiAdapter (contract)', () => {
expect(parts[0].functionResponse.name).toBe('unknown_id')
})
// Object-valued tool_result content must reach Gemini serialized as a JSON
// string, not passed through as a raw object.
it('serializes non-string tool_result content to JSON', async () => {
  mockGenerateContent.mockResolvedValue(makeGeminiResponse([{ text: 'ok' }]))
  await adapter.chat(
    [{
      role: 'user',
      content: [{
        type: 'tool_result',
        tool_use_id: 'call_1',
        content: { answer: 42 } as never,
        is_error: false,
      } as never],
    }],
    chatOpts(),
  )
  // Inspect the request the adapter actually sent to Gemini.
  const [request] = mockGenerateContent.mock.calls[0]
  const firstPart = request.contents[0].parts[0]
  expect(firstPart.functionResponse.response).toEqual({
    content: '{"answer":42}',
    isError: false,
  })
})
it('converts image blocks to inlineData parts', async () => {
mockGenerateContent.mockResolvedValue(makeGeminiResponse([{ text: 'ok' }]))
@@ -265,6 +288,17 @@ describe('GeminiAdapter (contract)', () => {
expect(result.content).toEqual([])
})
// An unrecognized content block type must be rejected rather than silently
// dropped from the converted request.
it('throws for unsupported message block types', async () => {
  mockGenerateContent.mockResolvedValue(makeGeminiResponse([{ text: 'ok' }]))
  const messages = [{
    role: 'user' as const,
    content: [{ type: 'unsupported' } as never],
  }]
  await expect(adapter.chat(messages, chatOpts()))
    .rejects.toThrow('Unhandled content block type')
})
})
// =========================================================================

View File

@@ -355,5 +355,31 @@ describe('OpenAIAdapter', () => {
expect((toolEvents[0].data as ToolUseBlock).name).toBe('search')
expect((toolEvents[1].data as ToolUseBlock).name).toBe('read')
})
// When the model emits no native tool-call deltas, a fenced JSON tool
// invocation embedded in the streamed text must be recovered as a tool_use
// event, and the final stop reason reported as 'tool_use'.
it('falls back to extracting tool calls from streamed text when no native tool deltas exist', async () => {
  const fencedToolText = '```json\n{"name":"search","input":{"query":"fallback"}}\n```'
  const usageOnlyChunk = { id: 'chatcmpl-123', model: 'gpt-4o', choices: [], usage: { prompt_tokens: 6, completion_tokens: 4 } }
  mockCreate.mockResolvedValue(makeChunks([textChunk(fencedToolText, 'stop'), usageOnlyChunk]))
  const stream = adapter.stream(
    [textMsg('user', 'Search for fallback handling')],
    chatOpts({ tools: [toolDef('search')] }),
  )
  const emitted = await collectEvents(stream)
  const toolUses = emitted.filter(e => e.type === 'tool_use')
  expect(toolUses).toHaveLength(1)
  expect(toolUses[0].data).toEqual({
    type: 'tool_use',
    id: expect.any(String), // synthetic id generated by the fallback parser
    name: 'search',
    input: { query: 'fallback' },
  })
  const doneEvent = emitted.find(e => e.type === 'done')
  expect((doneEvent!.data as LLMResponse).stop_reason).toBe('tool_use')
})
})
})