mirror of https://github.com/zadam/trilium.git
fix hardcoded values part 3
This commit is contained in:
parent 0d7cfe8061
commit 284ba096d0
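This is part 3 of replacing hardcoded literals with named constants from the shared search_constants module. The constant names below come straight from the hunks; the grouping and values in this sketch are assumptions reconstructed from the literals being removed, not a copy of the real file:

// Hypothetical shape of search_constants.ts, inferred from the hunks in this commit.
// Values are the literals each constant replaces.
export const SEARCH_CONSTANTS = {
    CONTEXT: {
        MAX_PARENT_DEPTH: 3,
        MAX_CHILDREN: 10,
        MAX_LINKS: 10,
        MAX_SIMILAR_NOTES: 5,
        MAX_CONTENT_LENGTH: 2000,
        MAX_POINTS: 5,
        MAX_RELATIONS: 10
    },
    VECTOR_SEARCH: {
        DEFAULT_MAX_RESULTS: 10,
        DEFAULT_THRESHOLD: 0.6
    },
    TEMPERATURE: {
        DEFAULT: 0.7,
        VECTOR_SEARCH: 0.3
    },
    LIMITS: {
        VECTOR_SEARCH_MAX_TOKENS: 500
    }
} as const;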
@@ -10,6 +10,7 @@ import { chunkContent, semanticChunking } from './content_chunking.js';
 import type { ContentChunk, ChunkOptions } from './content_chunking.js';
 import { summarizeContent, extractKeyPoints } from './summarization.js';
 import { getParentNotes, getParentContext, getChildContext, getLinkedNotesContext } from './hierarchy.js';
+import { SEARCH_CONSTANTS } from '../constants/search_constants.js';
 
 /**
  * Get semantic context
@@ -109,11 +110,11 @@ const DEFAULT_CONTEXT_OPTIONS: Required<ContextOptions> = {
     includeLinks: true,
     includeSimilar: false,
     includeContent: true,
-    maxParentDepth: 3,
-    maxChildren: 10,
-    maxLinks: 10,
-    maxSimilarNotes: 5,
-    maxContentLength: 2000
+    maxParentDepth: SEARCH_CONSTANTS.CONTEXT.MAX_PARENT_DEPTH,
+    maxChildren: SEARCH_CONSTANTS.CONTEXT.MAX_CHILDREN,
+    maxLinks: SEARCH_CONSTANTS.CONTEXT.MAX_LINKS,
+    maxSimilarNotes: SEARCH_CONSTANTS.CONTEXT.MAX_SIMILAR_NOTES,
+    maxContentLength: SEARCH_CONSTANTS.CONTEXT.MAX_CONTENT_LENGTH
 };
 
 /**
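Note that the object edited above is typed Required<ContextOptions>, which is what keeps this kind of refactor honest: every optional field of ContextOptions must receive a default, so dropping or misspelling a key fails to compile. A minimal sketch with the interface trimmed to two fields:

interface ContextOptions {
    maxParentDepth?: number;
    maxChildren?: number;
}

const DEFAULT_CONTEXT_OPTIONS: Required<ContextOptions> = {
    maxParentDepth: 3,
    maxChildren: 10
    // omitting either key here is a compile error: Required<T> keeps
    // the defaults object in sync with the options interface
};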
@@ -264,7 +265,7 @@ export class ContextExtractor {
      */
     static extractKeyPoints(
         content: string,
-        maxPoints: number = 5
+        maxPoints: number = SEARCH_CONSTANTS.CONTEXT.MAX_POINTS
     ): string[] {
         return extractKeyPoints(content, maxPoints);
     }
@@ -274,7 +275,7 @@ export class ContextExtractor {
      */
     extractKeyPoints(
         content: string,
-        maxPoints: number = 5
+        maxPoints: number = SEARCH_CONSTANTS.CONTEXT.MAX_POINTS
     ): string[] {
         return ContextExtractor.extractKeyPoints(content, maxPoints);
     }
@@ -284,7 +285,7 @@ export class ContextExtractor {
      */
     static async getParentNotes(
         noteId: string,
-        maxParents: number = 5
+        maxParents: number = SEARCH_CONSTANTS.CONTEXT.MAX_POINTS
     ): Promise<{id: string, title: string}[]> {
         return getParentNotes(noteId, maxParents);
     }
@@ -294,7 +295,7 @@ export class ContextExtractor {
      */
     async getParentNotes(
         noteId: string,
-        maxParents: number = 5
+        maxParents: number = SEARCH_CONSTANTS.CONTEXT.MAX_POINTS
     ): Promise<{id: string, title: string}[]> {
         return ContextExtractor.getParentNotes(noteId, maxParents);
     }
@@ -304,8 +305,8 @@ export class ContextExtractor {
      */
     static async getParentContext(
         noteId: string,
-        maxDepth: number = 3,
-        maxParents: number = 3
+        maxDepth: number = SEARCH_CONSTANTS.CONTEXT.MAX_PARENT_DEPTH,
+        maxParents: number = SEARCH_CONSTANTS.CONTEXT.MAX_PARENT_DEPTH
     ): Promise<string> {
         return getParentContext(noteId, maxDepth, maxParents);
     }
@@ -315,8 +316,8 @@ export class ContextExtractor {
      */
     async getParentContext(
         noteId: string,
-        maxDepth: number = 3,
-        maxParents: number = 3
+        maxDepth: number = SEARCH_CONSTANTS.CONTEXT.MAX_PARENT_DEPTH,
+        maxParents: number = SEARCH_CONSTANTS.CONTEXT.MAX_PARENT_DEPTH
     ): Promise<string> {
         return ContextExtractor.getParentContext(noteId, maxDepth, maxParents);
     }
@@ -326,7 +327,7 @@ export class ContextExtractor {
      */
     static async getChildContext(
         noteId: string,
-        maxChildren: number = 10,
+        maxChildren: number = SEARCH_CONSTANTS.CONTEXT.MAX_CHILDREN,
         includeContent: boolean = false
     ): Promise<string> {
         return getChildContext(noteId, maxChildren, includeContent);
@@ -337,7 +338,7 @@ export class ContextExtractor {
      */
     async getChildContext(
         noteId: string,
-        maxChildren: number = 10,
+        maxChildren: number = SEARCH_CONSTANTS.CONTEXT.MAX_CHILDREN,
         includeContent: boolean = false
     ): Promise<string> {
         return ContextExtractor.getChildContext(noteId, maxChildren, includeContent);
@@ -348,7 +349,7 @@ export class ContextExtractor {
      */
     static async getLinkedNotesContext(
         noteId: string,
-        maxRelations: number = 10
+        maxRelations: number = SEARCH_CONSTANTS.CONTEXT.MAX_RELATIONS
     ): Promise<string> {
         return getLinkedNotesContext(noteId, maxRelations);
     }
@@ -358,7 +359,7 @@ export class ContextExtractor {
      */
     async getLinkedNotesContext(
         noteId: string,
-        maxRelations: number = 10
+        maxRelations: number = SEARCH_CONSTANTS.CONTEXT.MAX_RELATIONS
     ): Promise<string> {
         return ContextExtractor.getLinkedNotesContext(noteId, maxRelations);
     }
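Every remaining hunk in this file has the same shape: an instance method delegates to a static method, which delegates to a module function, and only the literal default parameter changes. Since JavaScript evaluates default parameter expressions at call time, swapping a literal for a constant lookup is behavior-preserving. A self-contained sketch of the pattern (the splitting logic is a naive stand-in, not the real extractor):

const CONTEXT = { MAX_POINTS: 5 }; // stands in for SEARCH_CONSTANTS.CONTEXT

// take the first N sentences as "key points"
function extractKeyPoints(content: string, maxPoints: number = CONTEXT.MAX_POINTS): string[] {
    return content.split(/(?<=[.!?])\s+/).slice(0, maxPoints);
}

console.log(extractKeyPoints("One. Two. Three. Four. Five. Six.").length); // 5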
@@ -17,6 +17,7 @@ import providerManager from '../modules/provider_manager.js';
 import cacheManager from '../modules/cache_manager.js';
 import type { NoteSearchResult } from '../../interfaces/context_interfaces.js';
 import type { LLMServiceInterface } from '../../interfaces/agent_tool_interfaces.js';
+import { SEARCH_CONSTANTS } from '../../constants/search_constants.js';
 
 export interface VectorSearchOptions {
     maxResults?: number;
@@ -50,8 +51,8 @@ export class VectorSearchService {
         options: VectorSearchOptions = {}
     ): Promise<NoteSearchResult[]> {
         const {
-            maxResults = 10,
-            threshold = 0.6,
+            maxResults = SEARCH_CONSTANTS.VECTOR_SEARCH.DEFAULT_MAX_RESULTS,
+            threshold = SEARCH_CONSTANTS.VECTOR_SEARCH.DEFAULT_THRESHOLD,
             useEnhancedQueries = false,
             summarizeContent = false,
             llmService = null
@@ -227,8 +228,8 @@ export class VectorSearchService {
 
         // Request summarization with safeguards to prevent recursion
         const result = await llmService.generateChatCompletion(messages, {
-            temperature: 0.3,
-            maxTokens: 500,
+            temperature: SEARCH_CONSTANTS.TEMPERATURE.VECTOR_SEARCH,
+            maxTokens: SEARCH_CONSTANTS.LIMITS.VECTOR_SEARCH_MAX_TOKENS,
             // Use any to bypass type checking for these special options
             // that are recognized by the LLM service but not in the interface
             ...(({
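The inline comments in this hunk point at a second pattern worth seeing in isolation: options the LLM service accepts but the typed interface does not declare are spread into the call behind an any cast. A generic sketch of that escape hatch; the extra flag name is invented for illustration:

interface CompletionOptions {
    temperature?: number;
    maxTokens?: number;
}

declare function generateChatCompletion(messages: string[], opts: CompletionOptions): Promise<string>;

// hypothetical flag the service recognizes but the interface omits
generateChatCompletion(['hello'], {
    temperature: 0.3,
    maxTokens: 500,
    ...(({ preventRecursion: true }) as any)
});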
@@ -262,7 +263,7 @@ export class VectorSearchService {
     private async findNotesInBranch(
         embedding: Float32Array,
         contextNoteId: string,
-        limit = 5
+        limit = SEARCH_CONSTANTS.CONTEXT.MAX_SIMILAR_NOTES
     ): Promise<{noteId: string, similarity: number}[]> {
         try {
             // Get all notes in the subtree
@@ -360,9 +361,9 @@ export class VectorSearchService {
             const parentNotes = note.getParentNotes();
             let currentNote = parentNotes.length > 0 ? parentNotes[0] : null;
 
-            // Build path up to 3 levels
+            // Build path up to the maximum parent depth
             let level = 0;
-            while (currentNote && level < 3) {
+            while (currentNote && level < SEARCH_CONSTANTS.CONTEXT.MAX_PARENT_DEPTH) {
                 path.unshift(currentNote.title);
                 const grandParents = currentNote.getParentNotes();
                 currentNote = grandParents.length > 0 ? grandParents[0] : null;
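The first hunks in this file use destructuring defaults instead of parameter defaults; the semantics are the same, and explicitly passed values, including falsy ones like 0, still win because a default only applies when the property is undefined. A compact illustration (constant values taken from the removed literals):

const VECTOR_SEARCH = { DEFAULT_MAX_RESULTS: 10, DEFAULT_THRESHOLD: 0.6 };

function search(options: { maxResults?: number; threshold?: number } = {}) {
    const {
        maxResults = VECTOR_SEARCH.DEFAULT_MAX_RESULTS,
        threshold = VECTOR_SEARCH.DEFAULT_THRESHOLD
    } = options;
    return { maxResults, threshold };
}

search();                 // { maxResults: 10, threshold: 0.6 }
search({ threshold: 0 }); // { maxResults: 10, threshold: 0 } (an explicit 0 is kept)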
@@ -17,6 +17,7 @@ import {
     createOllamaOptions
 } from './provider_options.js';
 import { PROVIDER_CONSTANTS } from '../constants/provider_constants.js';
+import { SEARCH_CONSTANTS, MODEL_CAPABILITIES } from '../constants/search_constants.js';
 
 /**
  * Simple local embedding provider implementation
@@ -402,7 +403,7 @@ export function getOpenAIOptions(
     // Get temperature from options or global setting
     const temperature = opts.temperature !== undefined
         ? opts.temperature
-        : parseFloat(options.getOption('aiTemperature') || '0.7');
+        : parseFloat(options.getOption('aiTemperature') || String(SEARCH_CONSTANTS.TEMPERATURE.DEFAULT));
 
     return {
         // Connection settings
@@ -467,7 +468,7 @@ export function getAnthropicOptions(
     // Get temperature from options or global setting
     const temperature = opts.temperature !== undefined
         ? opts.temperature
-        : parseFloat(options.getOption('aiTemperature') || '0.7');
+        : parseFloat(options.getOption('aiTemperature') || String(SEARCH_CONSTANTS.TEMPERATURE.DEFAULT));
 
     return {
         // Connection settings
@@ -525,7 +526,7 @@ export async function getOllamaOptions(
     // Get temperature from options or global setting
     const temperature = opts.temperature !== undefined
         ? opts.temperature
-        : parseFloat(options.getOption('aiTemperature') || '0.7');
+        : parseFloat(options.getOption('aiTemperature') || String(SEARCH_CONSTANTS.TEMPERATURE.DEFAULT));
 
     // Use provided context window or get from model if not specified
     const modelContextWindow = contextWindow || await getOllamaModelContextWindow(modelName);
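All three provider option builders now share the same three-way precedence: an explicit per-request temperature wins, then the stored aiTemperature option, then the constant default. The String(...) wrapper matters because getOption returns a string and parseFloat expects one. A sketch of the resolution order, with 0.7 assumed from the literal being replaced:

const DEFAULT_TEMPERATURE = 0.7; // stands in for SEARCH_CONSTANTS.TEMPERATURE.DEFAULT

function resolveTemperature(optTemp: number | undefined, stored: string | null): number {
    return optTemp !== undefined
        ? optTemp
        : parseFloat(stored || String(DEFAULT_TEMPERATURE));
}

resolveTemperature(undefined, null);  // 0.7 (constant default)
resolveTemperature(undefined, '0.2'); // 0.2 (stored setting)
resolveTemperature(0, '0.9');         // 0 (explicit value wins)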
@@ -571,11 +572,11 @@ export async function getOllamaOptions(
 async function getOllamaModelContextWindow(modelName: string): Promise<number> {
     try {
         const baseUrl = options.getOption('ollamaBaseUrl');
-
+
         if (!baseUrl) {
             throw new Error('Ollama base URL is not configured');
         }
 
         // Use the official Ollama client
         const { Ollama } = await import('ollama');
         const client = new Ollama({ host: baseUrl });
@@ -593,19 +594,19 @@ async function getOllamaModelContextWindow(modelName: string): Promise<number> {
 
         // Default context sizes by model family if we couldn't get specific info
         if (modelName.includes('llama3')) {
-            return 8192;
+            return MODEL_CAPABILITIES['gpt-4'].contextWindowTokens;
         } else if (modelName.includes('llama2')) {
-            return 4096;
+            return MODEL_CAPABILITIES['default'].contextWindowTokens;
         } else if (modelName.includes('mistral') || modelName.includes('mixtral')) {
-            return 8192;
+            return MODEL_CAPABILITIES['gpt-4'].contextWindowTokens;
         } else if (modelName.includes('gemma')) {
-            return 8192;
+            return MODEL_CAPABILITIES['gpt-4'].contextWindowTokens;
         }
 
         // Return a reasonable default
-        return 4096;
+        return MODEL_CAPABILITIES['default'].contextWindowTokens;
     } catch (error) {
         log.info(`Error getting context window for model ${modelName}: ${error}`);
-        return 4096; // Default fallback
+        return MODEL_CAPABILITIES['default'].contextWindowTokens; // Default fallback
     }
 }
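The fallback table above routes the llama3, mistral/mixtral, and gemma families through the 'gpt-4' capability entry and llama2 plus everything else through 'default', so presumably those entries carry the 8192- and 4096-token windows the literals used to encode. A sketch under that assumption:

// Assumed shape of MODEL_CAPABILITIES, inferred from the literals this diff replaces.
const MODEL_CAPABILITIES: Record<string, { contextWindowTokens: number }> = {
    'gpt-4': { contextWindowTokens: 8192 },
    'default': { contextWindowTokens: 4096 }
};

function fallbackContextWindow(modelName: string): number {
    // same family matching as the hunk above, collapsed into one lookup
    const key =
        modelName.includes('llama3') ||
        modelName.includes('mistral') ||
        modelName.includes('mixtral') ||
        modelName.includes('gemma')
            ? 'gpt-4'
            : 'default';
    return MODEL_CAPABILITIES[key].contextWindowTokens;
}

fallbackContextWindow('llama3:8b'); // 8192
fallbackContextWindow('llama2:7b'); // 4096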