fix hardcoded values part 3

perf3ct 2025-04-15 17:46:42 +00:00
parent 0d7cfe8061
commit 284ba096d0
No known key found for this signature in database
GPG Key ID: 569C4EEC436F5232
3 changed files with 38 additions and 35 deletions

View File

@@ -10,6 +10,7 @@ import { chunkContent, semanticChunking } from './content_chunking.js';
 import type { ContentChunk, ChunkOptions } from './content_chunking.js';
 import { summarizeContent, extractKeyPoints } from './summarization.js';
 import { getParentNotes, getParentContext, getChildContext, getLinkedNotesContext } from './hierarchy.js';
+import { SEARCH_CONSTANTS } from '../constants/search_constants.js';
 
 /**
  * Get semantic context
@@ -109,11 +110,11 @@ const DEFAULT_CONTEXT_OPTIONS: Required<ContextOptions> = {
     includeLinks: true,
     includeSimilar: false,
     includeContent: true,
-    maxParentDepth: 3,
-    maxChildren: 10,
-    maxLinks: 10,
-    maxSimilarNotes: 5,
-    maxContentLength: 2000
+    maxParentDepth: SEARCH_CONSTANTS.CONTEXT.MAX_PARENT_DEPTH,
+    maxChildren: SEARCH_CONSTANTS.CONTEXT.MAX_CHILDREN,
+    maxLinks: SEARCH_CONSTANTS.CONTEXT.MAX_LINKS,
+    maxSimilarNotes: SEARCH_CONSTANTS.CONTEXT.MAX_SIMILAR_NOTES,
+    maxContentLength: SEARCH_CONSTANTS.CONTEXT.MAX_CONTENT_LENGTH
 };
 
 /**
@@ -264,7 +265,7 @@ export class ContextExtractor {
      */
     static extractKeyPoints(
         content: string,
-        maxPoints: number = 5
+        maxPoints: number = SEARCH_CONSTANTS.CONTEXT.MAX_POINTS
     ): string[] {
         return extractKeyPoints(content, maxPoints);
     }
@@ -274,7 +275,7 @@ export class ContextExtractor {
      */
     extractKeyPoints(
         content: string,
-        maxPoints: number = 5
+        maxPoints: number = SEARCH_CONSTANTS.CONTEXT.MAX_POINTS
     ): string[] {
         return ContextExtractor.extractKeyPoints(content, maxPoints);
     }
@@ -284,7 +285,7 @@ export class ContextExtractor {
      */
     static async getParentNotes(
         noteId: string,
-        maxParents: number = 5
+        maxParents: number = SEARCH_CONSTANTS.CONTEXT.MAX_POINTS
     ): Promise<{id: string, title: string}[]> {
         return getParentNotes(noteId, maxParents);
     }
@@ -294,7 +295,7 @@ export class ContextExtractor {
      */
     async getParentNotes(
         noteId: string,
-        maxParents: number = 5
+        maxParents: number = SEARCH_CONSTANTS.CONTEXT.MAX_POINTS
     ): Promise<{id: string, title: string}[]> {
         return ContextExtractor.getParentNotes(noteId, maxParents);
     }
@@ -304,8 +305,8 @@ export class ContextExtractor {
      */
     static async getParentContext(
         noteId: string,
-        maxDepth: number = 3,
-        maxParents: number = 3
+        maxDepth: number = SEARCH_CONSTANTS.CONTEXT.MAX_PARENT_DEPTH,
+        maxParents: number = SEARCH_CONSTANTS.CONTEXT.MAX_PARENT_DEPTH
     ): Promise<string> {
         return getParentContext(noteId, maxDepth, maxParents);
     }
@@ -315,8 +316,8 @@ export class ContextExtractor {
      */
     async getParentContext(
         noteId: string,
-        maxDepth: number = 3,
-        maxParents: number = 3
+        maxDepth: number = SEARCH_CONSTANTS.CONTEXT.MAX_PARENT_DEPTH,
+        maxParents: number = SEARCH_CONSTANTS.CONTEXT.MAX_PARENT_DEPTH
     ): Promise<string> {
         return ContextExtractor.getParentContext(noteId, maxDepth, maxParents);
     }
@@ -326,7 +327,7 @@ export class ContextExtractor {
      */
     static async getChildContext(
         noteId: string,
-        maxChildren: number = 10,
+        maxChildren: number = SEARCH_CONSTANTS.CONTEXT.MAX_CHILDREN,
         includeContent: boolean = false
     ): Promise<string> {
         return getChildContext(noteId, maxChildren, includeContent);
@@ -337,7 +338,7 @@ export class ContextExtractor {
      */
     async getChildContext(
         noteId: string,
-        maxChildren: number = 10,
+        maxChildren: number = SEARCH_CONSTANTS.CONTEXT.MAX_CHILDREN,
         includeContent: boolean = false
     ): Promise<string> {
         return ContextExtractor.getChildContext(noteId, maxChildren, includeContent);
@@ -348,7 +349,7 @@ export class ContextExtractor {
      */
     static async getLinkedNotesContext(
         noteId: string,
-        maxRelations: number = 10
+        maxRelations: number = SEARCH_CONSTANTS.CONTEXT.MAX_RELATIONS
     ): Promise<string> {
         return getLinkedNotesContext(noteId, maxRelations);
     }
@@ -358,7 +359,7 @@ export class ContextExtractor {
      */
     async getLinkedNotesContext(
         noteId: string,
-        maxRelations: number = 10
+        maxRelations: number = SEARCH_CONSTANTS.CONTEXT.MAX_RELATIONS
     ): Promise<string> {
         return ContextExtractor.getLinkedNotesContext(noteId, maxRelations);
     }

View File

@@ -17,6 +17,7 @@ import providerManager from '../modules/provider_manager.js';
 import cacheManager from '../modules/cache_manager.js';
 import type { NoteSearchResult } from '../../interfaces/context_interfaces.js';
 import type { LLMServiceInterface } from '../../interfaces/agent_tool_interfaces.js';
+import { SEARCH_CONSTANTS } from '../../constants/search_constants.js';
 
 export interface VectorSearchOptions {
     maxResults?: number;
@@ -50,8 +51,8 @@ export class VectorSearchService {
         options: VectorSearchOptions = {}
     ): Promise<NoteSearchResult[]> {
         const {
-            maxResults = 10,
-            threshold = 0.6,
+            maxResults = SEARCH_CONSTANTS.VECTOR_SEARCH.DEFAULT_MAX_RESULTS,
+            threshold = SEARCH_CONSTANTS.VECTOR_SEARCH.DEFAULT_THRESHOLD,
             useEnhancedQueries = false,
             summarizeContent = false,
             llmService = null
@@ -227,8 +228,8 @@ export class VectorSearchService {
 
         // Request summarization with safeguards to prevent recursion
         const result = await llmService.generateChatCompletion(messages, {
-            temperature: 0.3,
-            maxTokens: 500,
+            temperature: SEARCH_CONSTANTS.TEMPERATURE.VECTOR_SEARCH,
+            maxTokens: SEARCH_CONSTANTS.LIMITS.VECTOR_SEARCH_MAX_TOKENS,
             // Use any to bypass type checking for these special options
             // that are recognized by the LLM service but not in the interface
             ...(({
@@ -262,7 +263,7 @@ export class VectorSearchService {
     private async findNotesInBranch(
         embedding: Float32Array,
         contextNoteId: string,
-        limit = 5
+        limit = SEARCH_CONSTANTS.CONTEXT.MAX_SIMILAR_NOTES
     ): Promise<{noteId: string, similarity: number}[]> {
         try {
             // Get all notes in the subtree
@@ -360,9 +361,9 @@ export class VectorSearchService {
         const parentNotes = note.getParentNotes();
         let currentNote = parentNotes.length > 0 ? parentNotes[0] : null;
 
-        // Build path up to 3 levels
+        // Build path up to the maximum parent depth
         let level = 0;
-        while (currentNote && level < 3) {
+        while (currentNote && level < SEARCH_CONSTANTS.CONTEXT.MAX_PARENT_DEPTH) {
             path.unshift(currentNote.title);
             const grandParents = currentNote.getParentNotes();
             currentNote = grandParents.length > 0 ? grandParents[0] : null;
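The same inference applies to the vector-search constants introduced above. Assuming they keep the old inline defaults, the relevant groups would be approximately (a sketch, not the actual constants file):

// Hypothetical sketch, again assuming the constants preserve the
// inline values they replace in this file.
export const SEARCH_CONSTANTS = {
    VECTOR_SEARCH: {
        DEFAULT_MAX_RESULTS: 10,  // replaces maxResults = 10
        DEFAULT_THRESHOLD: 0.6    // replaces threshold = 0.6
    },
    TEMPERATURE: {
        VECTOR_SEARCH: 0.3        // replaces temperature: 0.3
    },
    LIMITS: {
        VECTOR_SEARCH_MAX_TOKENS: 500  // replaces maxTokens: 500
    }
    // ...plus the CONTEXT group sketched after the previous file
} as const;

findNotesInBranch borrows CONTEXT.MAX_SIMILAR_NOTES for its limit default (previously 5), which keeps the value but couples a branch-search limit to a context-extraction constant.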

View File

@@ -17,6 +17,7 @@ import {
     createOllamaOptions
 } from './provider_options.js';
 import { PROVIDER_CONSTANTS } from '../constants/provider_constants.js';
+import { SEARCH_CONSTANTS, MODEL_CAPABILITIES } from '../constants/search_constants.js';
 
 /**
  * Simple local embedding provider implementation
@@ -402,7 +403,7 @@ export function getOpenAIOptions(
     // Get temperature from options or global setting
     const temperature = opts.temperature !== undefined
         ? opts.temperature
-        : parseFloat(options.getOption('aiTemperature') || '0.7');
+        : parseFloat(options.getOption('aiTemperature') || String(SEARCH_CONSTANTS.TEMPERATURE.DEFAULT));
 
     return {
         // Connection settings
@@ -467,7 +468,7 @@ export function getAnthropicOptions(
     // Get temperature from options or global setting
     const temperature = opts.temperature !== undefined
         ? opts.temperature
-        : parseFloat(options.getOption('aiTemperature') || '0.7');
+        : parseFloat(options.getOption('aiTemperature') || String(SEARCH_CONSTANTS.TEMPERATURE.DEFAULT));
 
     return {
         // Connection settings
@@ -525,7 +526,7 @@ export async function getOllamaOptions(
     // Get temperature from options or global setting
     const temperature = opts.temperature !== undefined
         ? opts.temperature
-        : parseFloat(options.getOption('aiTemperature') || '0.7');
+        : parseFloat(options.getOption('aiTemperature') || String(SEARCH_CONSTANTS.TEMPERATURE.DEFAULT));
 
     // Use provided context window or get from model if not specified
     const modelContextWindow = contextWindow || await getOllamaModelContextWindow(modelName);
@@ -593,19 +594,19 @@ async function getOllamaModelContextWindow(modelName: string): Promise<number> {
 
         // Default context sizes by model family if we couldn't get specific info
         if (modelName.includes('llama3')) {
-            return 8192;
+            return MODEL_CAPABILITIES['gpt-4'].contextWindowTokens;
         } else if (modelName.includes('llama2')) {
-            return 4096;
+            return MODEL_CAPABILITIES['default'].contextWindowTokens;
         } else if (modelName.includes('mistral') || modelName.includes('mixtral')) {
-            return 8192;
+            return MODEL_CAPABILITIES['gpt-4'].contextWindowTokens;
         } else if (modelName.includes('gemma')) {
-            return 8192;
+            return MODEL_CAPABILITIES['gpt-4'].contextWindowTokens;
         }
 
         // Return a reasonable default
-        return 4096;
+        return MODEL_CAPABILITIES['default'].contextWindowTokens;
     } catch (error) {
         log.info(`Error getting context window for model ${modelName}: ${error}`);
-        return 4096; // Default fallback
+        return MODEL_CAPABILITIES['default'].contextWindowTokens; // Default fallback
     }
 }
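For the temperature fallback, options.getOption('aiTemperature') returns a string, so the numeric constant has to be wrapped in String(...) to keep the || fallback and the subsequent parseFloat working exactly as before. The MODEL_CAPABILITIES table is also not shown in this diff; for the replacements above to be behavior-preserving, it would need entries along these lines (hypothetical sketch, names taken from the diff):

// Hypothetical sketch of the entries getOllamaModelContextWindow relies on,
// assuming they match the context sizes they replace (8192 and 4096).
export const MODEL_CAPABILITIES: Record<string, { contextWindowTokens: number }> = {
    'gpt-4':   { contextWindowTokens: 8192 },  // stands in for llama3/mistral/mixtral/gemma
    'default': { contextWindowTokens: 4096 }   // stands in for llama2 and the fallbacks
};

export const SEARCH_CONSTANTS = {
    TEMPERATURE: {
        DEFAULT: 0.7  // replaces the '0.7' fallback in all three provider option builders
    }
    // ...
} as const;

Reusing the 'gpt-4' capability entry for Ollama model families is value-correct as long as both sides stay at 8192 tokens, but it ties those defaults to an unrelated model's entry; a dedicated constant would make the intent clearer.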