test(server): fix LLM provider tests failing due to mocks
parent 50501aef56
commit c15ae293aa
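
For orientation, here is a minimal Vitest sketch of the constructor-mock pattern the diff below moves to: the module factory exposes only a bare vi.fn(), each test wires up the instance shape via mockImplementation, and assertions run directly against the imported constructor. The describe/it names and option values are illustrative, not taken from the repository.

    import { beforeEach, describe, expect, it, vi } from 'vitest';
    import Anthropic from '@anthropic-ai/sdk';

    // The module factory exposes only a bare constructor mock.
    vi.mock('@anthropic-ai/sdk', () => ({ default: vi.fn() }));

    describe('constructor mocking pattern', () => {
        const mockInstance = {
            messages: { create: vi.fn().mockResolvedValue({ content: [] }) }
        };

        beforeEach(() => {
            vi.clearAllMocks();
            // Make `new Anthropic(...)` hand back the plain mock instance.
            (Anthropic as any).mockImplementation(function (this: any) {
                return mockInstance;
            });
        });

        it('lets tests assert against the constructor directly', () => {
            new (Anthropic as any)({ apiKey: 'test-key' });
            expect(Anthropic).toHaveBeenCalledWith({ apiKey: 'test-key' });
        });
    });
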
@@ -31,49 +31,7 @@ vi.mock('./providers.js', () => ({
 }));

 vi.mock('@anthropic-ai/sdk', () => {
-    const mockStream = {
-        [Symbol.asyncIterator]: async function* () {
-            yield {
-                type: 'content_block_delta',
-                delta: { text: 'Hello' }
-            };
-            yield {
-                type: 'content_block_delta',
-                delta: { text: ' world' }
-            };
-            yield {
-                type: 'message_delta',
-                delta: { stop_reason: 'end_turn' }
-            };
-        }
-    };
-
-    class MockAnthropic {
-        messages = {
-            create: vi.fn().mockImplementation((params) => {
-                if (params.stream) {
-                    return Promise.resolve(mockStream);
-                }
-                return Promise.resolve({
-                    id: 'msg_123',
-                    type: 'message',
-                    role: 'assistant',
-                    content: [{
-                        type: 'text',
-                        text: 'Hello! How can I help you today?'
-                    }],
-                    model: 'claude-3-opus-20240229',
-                    stop_reason: 'end_turn',
-                    stop_sequence: null,
-                    usage: {
-                        input_tokens: 10,
-                        output_tokens: 25
-                    }
-                });
-            })
-        };
-    }
-
+    const MockAnthropic = vi.fn();
     return { default: MockAnthropic };
 });

@@ -85,7 +43,6 @@ describe('AnthropicService', () => {
         vi.clearAllMocks();

         // Get the mocked Anthropic instance before creating the service
-        const AnthropicMock = vi.mocked(Anthropic);
         mockAnthropicInstance = {
             messages: {
                 create: vi.fn().mockImplementation((params) => {
@@ -127,8 +84,8 @@ describe('AnthropicService', () => {
             }
         };

-        AnthropicMock.mockImplementation(function () {
-            Object.assign(this, mockAnthropicInstance);
+        (Anthropic as any).mockImplementation(function(this: any) {
+            return mockAnthropicInstance;
         });

         service = new AnthropicService();
@@ -355,14 +312,13 @@ describe('AnthropicService', () => {
             vi.mocked(providers.getAnthropicOptions).mockReturnValueOnce(mockOptions);

             // Spy on Anthropic constructor
-            const AnthropicMock = vi.mocked(Anthropic);
-            AnthropicMock.mockClear();
+            (Anthropic as any).mockClear();

             // Create new service to trigger client creation
             const newService = new AnthropicService();
             await newService.generateChatCompletion(messages);

-            expect(AnthropicMock).toHaveBeenCalledWith({
+            expect(Anthropic).toHaveBeenCalledWith({
                 apiKey: 'test-key',
                 baseURL: 'https://api.anthropic.com',
                 defaultHeaders: {
@@ -382,14 +338,13 @@ describe('AnthropicService', () => {
             vi.mocked(providers.getAnthropicOptions).mockReturnValueOnce(mockOptions);

             // Spy on Anthropic constructor
-            const AnthropicMock = vi.mocked(Anthropic);
-            AnthropicMock.mockClear();
+            (Anthropic as any).mockClear();

             // Create new service to trigger client creation
             const newService = new AnthropicService();
             await newService.generateChatCompletion(messages);

-            expect(AnthropicMock).toHaveBeenCalledWith({
+            expect(Anthropic).toHaveBeenCalledWith({
                 apiKey: 'test-key',
                 baseURL: 'https://api.anthropic.com',
                 defaultHeaders: {
@@ -65,63 +65,7 @@ vi.mock('./stream_handler.js', () => ({
 }));

 vi.mock('ollama', () => {
-    const mockStream = {
-        [Symbol.asyncIterator]: async function* () {
-            yield {
-                message: {
-                    role: 'assistant',
-                    content: 'Hello'
-                },
-                done: false
-            };
-            yield {
-                message: {
-                    role: 'assistant',
-                    content: ' world'
-                },
-                done: true
-            };
-        }
-    };
-
-    class MockOllama {
-        chat = vi.fn().mockImplementation((params) => {
-            if (params.stream) {
-                return Promise.resolve(mockStream);
-            }
-            return Promise.resolve({
-                message: {
-                    role: 'assistant',
-                    content: 'Hello! How can I help you today?'
-                },
-                created_at: '2024-01-01T00:00:00Z',
-                model: 'llama2',
-                done: true
-            });
-        });
-        show = vi.fn().mockResolvedValue({
-            modelfile: 'FROM llama2',
-            parameters: {},
-            template: '',
-            details: {
-                format: 'gguf',
-                family: 'llama',
-                families: ['llama'],
-                parameter_size: '7B',
-                quantization_level: 'Q4_0'
-            }
-        });
-        list = vi.fn().mockResolvedValue({
-            models: [
-                {
-                    name: 'llama2:latest',
-                    modified_at: '2024-01-01T00:00:00Z',
-                    size: 3800000000
-                }
-            ]
-        });
-    }
-
+    const MockOllama = vi.fn();
     return { Ollama: MockOllama };
 });

@@ -141,7 +85,6 @@ describe('OllamaService', () => {
         vi.clearAllMocks();

         // Create the mock instance before creating the service
-        const OllamaMock = vi.mocked(Ollama);
         mockOllamaInstance = {
             chat: vi.fn().mockImplementation((params) => {
                 if (params.stream) {
@@ -197,8 +140,9 @@ describe('OllamaService', () => {
             })
         };

-        OllamaMock.mockImplementation(function () {
-            Object.assign(this, mockOllamaInstance);
+        // Mock the Ollama constructor to return our mock instance
+        (Ollama as any).mockImplementation(function(this: any) {
+            return mockOllamaInstance;
         });

         service = new OllamaService();
@@ -401,8 +345,7 @@ describe('OllamaService', () => {
             vi.mocked(providers.getOllamaOptions).mockResolvedValueOnce(mockOptions);

             // Spy on Ollama constructor
-            const OllamaMock = vi.mocked(Ollama);
-            OllamaMock.mockClear();
+            (Ollama as any).mockClear();

             // Create new service to trigger client creation
             const newService = new OllamaService();
@@ -416,7 +359,7 @@ describe('OllamaService', () => {

             await newService.generateChatCompletion(messages);

-            expect(OllamaMock).toHaveBeenCalledWith({
+            expect(Ollama).toHaveBeenCalledWith({
                 host: 'http://localhost:11434',
                 fetch: expect.any(Function)
             });
@@ -576,15 +519,14 @@ describe('OllamaService', () => {
             };
             vi.mocked(providers.getOllamaOptions).mockResolvedValue(mockOptions);

-            const OllamaMock = vi.mocked(Ollama);
-            OllamaMock.mockClear();
+            (Ollama as any).mockClear();

             // Make two calls
             await service.generateChatCompletion([{ role: 'user', content: 'Hello' }]);
             await service.generateChatCompletion([{ role: 'user', content: 'Hi' }]);

             // Should only create client once
-            expect(OllamaMock).toHaveBeenCalledTimes(1);
+            expect(Ollama).toHaveBeenCalledTimes(1);
         });
     });
 });
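
Because the module factories no longer ship a canned streaming response, streaming behaviour now has to be modelled on the per-test instance mock. A rough sketch of what that can look like for the Ollama client, assuming the same Vitest setup; the chunk contents, model name, and host below are illustrative:

    import { expect, it, vi } from 'vitest';
    import { Ollama } from 'ollama';

    vi.mock('ollama', () => ({ Ollama: vi.fn() }));

    it('streams chunks from the mocked chat call', async () => {
        // Async iterable standing in for Ollama's streaming response.
        const mockStream = {
            [Symbol.asyncIterator]: async function* () {
                yield { message: { role: 'assistant', content: 'Hello' }, done: false };
                yield { message: { role: 'assistant', content: ' world' }, done: true };
            }
        };

        const mockInstance = {
            chat: vi.fn().mockImplementation((params: { stream?: boolean }) =>
                params.stream
                    ? Promise.resolve(mockStream)
                    : Promise.resolve({ message: { role: 'assistant', content: 'Hello world' }, done: true })
            )
        };
        (Ollama as any).mockImplementation(function (this: any) {
            return mockInstance;
        });

        const client = new (Ollama as any)({ host: 'http://localhost:11434' });
        let text = '';
        for await (const chunk of await client.chat({ model: 'llama2', messages: [], stream: true })) {
            text += chunk.message.content;
        }
        expect(text).toBe('Hello world');
    });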