mirror of https://github.com/zadam/trilium.git
synced 2025-11-21 16:14:23 +01:00
test: fix typecheck issues by using classes
parent c9424d6f8d
commit 50501aef56
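The pattern behind the change: the old mocks were created with vi.fn().mockImplementation(function () { this.x = ...; }), where strict TypeScript flags the implicitly-any `this` and gives the constructed instances no statically known members. Declaring each mock as a class keeps the runtime behaviour while giving the instance a checkable shape. A minimal sketch of the class form against a hypothetical module path (not one of the files touched here):

    import { vi, it, expect } from 'vitest';

    // Class-based mock: field initializers produce fresh vi.fn() mocks per instance,
    // and the instance type is inferred, so no implicit-any `this` under strict tsc.
    class MockSomeService {
        execute = vi.fn();
    }

    // Hypothetical path, purely for illustration.
    vi.mock('./some_service.js', () => ({ SomeService: MockSomeService }));

    it('exposes a typed, callable mock', () => {
        const instance = new MockSomeService();
        instance.execute('hello');
        expect(instance.execute).toHaveBeenCalledWith('hello');
    });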
@@ -52,9 +52,9 @@ vi.mock("../../services/llm/ai_service_manager.js", () => ({
 
 // Mock chat pipeline
 const mockChatPipelineExecute = vi.fn();
-const MockChatPipeline = vi.fn().mockImplementation(function () {
-    this.execute = mockChatPipelineExecute;
-});
+class MockChatPipeline {
+    execute = mockChatPipelineExecute;
+}
 vi.mock("../../services/llm/pipeline/chat_pipeline.js", () => ({
     ChatPipeline: MockChatPipeline
 }));
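Because mockChatPipelineExecute is captured at module scope, every MockChatPipeline instance the code under test constructs shares the same spy, so a test can assert on pipeline execution without a handle on the instance itself. A hedged sketch of such an assertion (the argument shape is an assumption, not taken from the test file):

    // After the route or service under test has processed a chat request:
    expect(mockChatPipelineExecute).toHaveBeenCalledTimes(1);
    expect(mockChatPipelineExecute).toHaveBeenCalledWith(
        expect.objectContaining({ query: expect.any(String) }) // illustrative input shape
    );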
@@ -34,26 +34,29 @@ vi.mock('../log.js', () => ({
     }
 }));
 
-vi.mock('./providers/anthropic_service.js', () => ({
-    AnthropicService: vi.fn().mockImplementation(function () {
-        this.isAvailable = vi.fn().mockReturnValue(true);
-        this.generateChatCompletion = vi.fn();
-    })
-}));
+vi.mock('./providers/anthropic_service.js', () => {
+    class AnthropicService {
+        isAvailable = vi.fn().mockReturnValue(true);
+        generateChatCompletion = vi.fn();
+    }
+    return { AnthropicService };
+});
 
-vi.mock('./providers/openai_service.js', () => ({
-    OpenAIService: vi.fn().mockImplementation(function () {
-        this.isAvailable = vi.fn().mockReturnValue(true);
-        this.generateChatCompletion = vi.fn();
-    })
-}));
+vi.mock('./providers/openai_service.js', () => {
+    class OpenAIService {
+        isAvailable = vi.fn().mockReturnValue(true);
+        generateChatCompletion = vi.fn();
+    }
+    return { OpenAIService };
+});
 
-vi.mock('./providers/ollama_service.js', () => ({
-    OllamaService: vi.fn().mockImplementation(function () {
-        this.isAvailable = vi.fn().mockReturnValue(true);
-        this.generateChatCompletion = vi.fn();
-    })
-}));
+vi.mock('./providers/ollama_service.js', () => {
+    class OllamaService {
+        isAvailable = vi.fn().mockReturnValue(true);
+        generateChatCompletion = vi.fn();
+    }
+    return { OllamaService };
+});
 
 vi.mock('./config/configuration_helpers.js', () => ({
     getSelectedProvider: vi.fn(),
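Each provider mock instance gets its own vi.fn() fields, so a single test can change the defaults without leaking into other tests. A sketch, assuming the real constructors accept no required arguments (adjust the call if they do); vi.mocked() is only a typing helper here, since the fields are already mocks at runtime:

    import { OpenAIService } from './providers/openai_service.js';

    const service = new OpenAIService();
    vi.mocked(service.isAvailable).mockReturnValue(false);
    vi.mocked(service.generateChatCompletion).mockResolvedValue({ text: 'stubbed' } as any);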
@@ -38,11 +38,12 @@ vi.mock('../pipeline/chat_pipeline.js', () => ({
     }))
 }));
 
-vi.mock('./handlers/tool_handler.js', () => ({
-    ToolHandler: vi.fn().mockImplementation(function () {
-        this.handleToolCalls = vi.fn()
-    })
-}));
+vi.mock('./handlers/tool_handler.js', () => {
+    class ToolHandler {
+        handleToolCalls = vi.fn()
+    }
+    return { ToolHandler };
+});
 
 vi.mock('../chat_storage_service.js', () => ({
     default: {
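One behavioural difference to keep in mind: with vi.fn().mockImplementation(...) the constructor itself was a spy, so tests could assert that ToolHandler was instantiated; a plain class records nothing. If that kind of assertion is ever needed, a small hybrid keeps both the typed instance and the constructor spy; a sketch with hypothetical names:

    const toolHandlerConstructed = vi.fn();

    class ToolHandler {
        handleToolCalls = vi.fn();
        constructor(...args: unknown[]) {
            // Record constructor calls so tests can still assert instantiation.
            toolHandlerConstructed(...args);
        }
    }

    // In a test: expect(toolHandlerConstructed).toHaveBeenCalledTimes(1);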
@@ -35,24 +35,28 @@ vi.mock('./constants/llm_prompt_constants.js', () => ({
     }
 }));
 
-vi.mock('./pipeline/chat_pipeline.js', () => ({
-    ChatPipeline: vi.fn().mockImplementation(function (config) {
-        Object.assign(this, {
-            config,
-            execute: vi.fn(),
-            getMetrics: vi.fn(),
-            resetMetrics: vi.fn(),
-            stages: {
-                contextExtraction: {
-                    execute: vi.fn()
-                },
-                semanticContextExtraction: {
-                    execute: vi.fn()
-                }
-            }
-        });
-    })
-}));
+vi.mock('./pipeline/chat_pipeline.js', () => {
+    class ChatPipeline {
+        config: any;
+
+        constructor(config: any) {
+            this.config = config;
+        }
+
+        execute = vi.fn();
+        getMetrics = vi.fn();
+        resetMetrics = vi.fn();
+        stages = {
+            contextExtraction: {
+                execute: vi.fn()
+            },
+            semanticContextExtraction: {
+                execute: vi.fn()
+            }
+        }
+    }
+    return { ChatPipeline };
+});
 
 vi.mock('./ai_service_manager.js', () => ({
     default: {
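The class keeps what Object.assign(this, { config, ... }) used to do: the constructor argument is preserved on config, and the stage mocks are plain fields. A sketch of inspecting a pipeline that a test builds itself; the option name is illustrative and the casts sidestep the real ChatPipeline typings:

    import { ChatPipeline } from './pipeline/chat_pipeline.js';

    const pipeline = new ChatPipeline({ maxToolCallIterations: 3 } as any);
    expect((pipeline as any).config).toEqual({ maxToolCallIterations: 3 });
    expect((pipeline as any).stages.semanticContextExtraction.execute).toBeDefined();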
@@ -46,11 +46,12 @@ vi.mock('../../ai_service_manager.js', () => ({
     }
 }));
 
-vi.mock('../index.js', () => ({
-    ContextExtractor: vi.fn().mockImplementation(function () {
-        this.findRelevantNotes = vi.fn().mockResolvedValue([])
-    })
-}));
+vi.mock('../index.js', () => {
+    class ContextExtractor {
+        findRelevantNotes = vi.fn().mockResolvedValue([])
+    }
+    return { ContextExtractor };
+});
 
 describe('ContextService', () => {
     let service: ContextService;
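findRelevantNotes defaults to resolving with an empty array; a test that needs context hits can override it for a single call and let later calls fall back to the default. A sketch (the note shape is an assumption):

    import { ContextExtractor } from '../index.js';

    const extractor = new ContextExtractor();
    vi.mocked(extractor.findRelevantNotes).mockResolvedValueOnce([
        { noteId: 'abc123', title: 'Relevant note' }
    ] as any);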
@@ -48,8 +48,8 @@ vi.mock('@anthropic-ai/sdk', () => {
         }
     };
 
-    const mockAnthropic = vi.fn().mockImplementation(function () {
-        this.messages = {
+    class MockAnthropic {
+        messages = {
             create: vi.fn().mockImplementation((params) => {
                 if (params.stream) {
                     return Promise.resolve(mockStream);
@@ -72,9 +72,9 @@ vi.mock('@anthropic-ai/sdk', () => {
                 });
             })
         };
-    });
+    }
 
-    return { default: mockAnthropic };
+    return { default: MockAnthropic };
 });
 
 describe('AnthropicService', () => {
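The mocked SDK's messages.create branches on params.stream and hands back the shared mockStream for streaming requests, so one mock serves both code paths. A compact, self-contained restatement of the idea (the stream contents and response shape are illustrative, not copied from the test file):

    import { vi, it, expect } from 'vitest';

    // Stand-in for the test file's mockStream: an async iterable of chunks.
    const demoStream = {
        async *[Symbol.asyncIterator]() {
            yield { type: 'content_block_delta', delta: { text: 'Hello' } };
        }
    };

    class DemoAnthropic {
        messages = {
            create: vi.fn().mockImplementation((params: { stream?: boolean }) =>
                params.stream
                    ? Promise.resolve(demoStream)                                     // streaming path
                    : Promise.resolve({ content: [{ type: 'text', text: 'Hello' }] }) // non-streaming path
            )
        };
    }

    it('serves streaming and non-streaming calls from the same mock', async () => {
        const client = new DemoAnthropic();
        for await (const chunk of (await client.messages.create({ stream: true })) as AsyncIterable<unknown>) {
            expect(chunk).toBeTruthy();
        }
        expect(await client.messages.create({})).toHaveProperty('content');
    });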
@@ -29,12 +29,12 @@ vi.mock('./providers.js', () => ({
     getOllamaOptions: vi.fn()
 }));
 
-vi.mock('../formatters/ollama_formatter.js', () => ({
-    OllamaMessageFormatter: vi.fn().mockImplementation(function () {
-        this.formatMessages = vi.fn().mockReturnValue([
+vi.mock('../formatters/ollama_formatter.js', () => {
+    class MockFormatter {
+        formatMessages = vi.fn().mockReturnValue([
             { role: 'user', content: 'Hello' }
         ]);
-        this.formatResponse = vi.fn().mockReturnValue({
+        formatResponse = vi.fn().mockReturnValue({
             text: 'Hello! How can I help you today?',
             provider: 'Ollama',
             model: 'llama2',
@@ -45,8 +45,9 @@ vi.mock('../formatters/ollama_formatter.js', () => ({
             },
             tool_calls: null
         });
-    })
-}));
+    }
+    return { OllamaMessageFormatter: MockFormatter };
+});
 
 vi.mock('../tools/tool_registry.js', () => ({
     default: {
@@ -83,8 +84,8 @@ vi.mock('ollama', () => {
         }
     };
 
-    const mockOllama = vi.fn().mockImplementation(function () {
-        this.chat = vi.fn().mockImplementation((params) => {
+    class MockOllama {
+        chat = vi.fn().mockImplementation((params) => {
             if (params.stream) {
                 return Promise.resolve(mockStream);
             }
@@ -98,7 +99,7 @@ vi.mock('ollama', () => {
                 done: true
             });
         });
-        this.show = vi.fn().mockResolvedValue({
+        show = vi.fn().mockResolvedValue({
             modelfile: 'FROM llama2',
             parameters: {},
             template: '',
@@ -110,7 +111,7 @@ vi.mock('ollama', () => {
             quantization_level: 'Q4_0'
             }
         });
-        this.list = vi.fn().mockResolvedValue({
+        list = vi.fn().mockResolvedValue({
             models: [
                 {
                     name: 'llama2:latest',
@@ -118,10 +119,10 @@ vi.mock('ollama', () => {
                     size: 3800000000
                 }
             ]
-        })
-    });
+        });
+    }
 
-    return { Ollama: mockOllama };
+    return { Ollama: MockOllama };
 });
 
 // Mock global fetch
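MockOllama bundles chat, show, and list behind canned data, so model-discovery code can be exercised without a running Ollama server. A sketch of consuming the mocked client inside this test file, where the vi.mock above is in effect (the show argument follows the real SDK's { model } request shape):

    import { Ollama } from 'ollama';

    it('sees the canned model metadata', async () => {
        const client = new Ollama();

        const { models } = await client.list();
        expect(models[0].name).toBe('llama2:latest');

        const info = await client.show({ model: 'llama2' });
        expect(info.modelfile).toBe('FROM llama2');
    });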