Saving chats finally works again, even if the UI is somewhat broken

wow
This commit is contained in:
perf3ct 2025-04-13 21:16:18 +00:00
parent f252f53e82
commit 9a68155edc
No known key found for this signature in database
GPG Key ID: 569C4EEC436F5232
9 changed files with 912 additions and 41 deletions

View File

@ -42,7 +42,7 @@ export async function checkSessionExists(sessionId: string): Promise<boolean> {
export async function setupStreamingResponse(
sessionId: string,
messageParams: any,
onContentUpdate: (content: string) => void,
onContentUpdate: (content: string, isDone?: boolean) => void,
onThinkingUpdate: (thinking: string) => void,
onToolExecution: (toolData: any) => void,
onComplete: () => void,
@ -131,7 +131,7 @@ export async function setupStreamingResponse(
assistantResponse += message.content;
// Update the UI immediately with each chunk
onContentUpdate(assistantResponse);
onContentUpdate(assistantResponse, false);
// Reset timeout since we got content
if (timeoutId !== null) {
@ -197,7 +197,7 @@ export async function setupStreamingResponse(
console.log(`[${responseId}] Content in done message is identical to existing response, not appending`);
}
onContentUpdate(assistantResponse);
onContentUpdate(assistantResponse, true);
}
// Clean up and resolve

View File

@ -42,6 +42,31 @@ export default class LlmChatPanel extends BasicWidget {
private onSaveData: ((data: any) => Promise<void>) | null = null;
private onGetData: (() => Promise<any>) | null = null;
private messages: MessageData[] = [];
// Context-note sources attached to the current chat; rendered via showSources() when present.
private sources: Array<{noteId: string; title: string; similarity?: number; content?: string}> = [];
// Chat-level metadata persisted along with the messages: model/provider settings,
// the log of tool executions gathered during streaming, and token usage.
// NOTE(review): shape appears to mirror the server-side ChatMetadata — confirm and keep in sync.
private metadata: {
model?: string;
provider?: string;
temperature?: number;
maxTokens?: number;
// Durable record of tool calls and their results for this chat.
toolExecutions?: Array<{
id: string;
name: string;
arguments: any;
result: any;
error?: string;
timestamp: string;
}>;
lastUpdated?: string;
// Token accounting reported by the provider, when available.
usage?: {
promptTokens?: number;
completionTokens?: number;
totalTokens?: number;
};
} = {
model: 'default',
temperature: 0.7,
toolExecutions: []
};
// Public getters and setters for private properties
public getCurrentNoteId(): string | null {
@ -136,13 +161,92 @@ export default class LlmChatPanel extends BasicWidget {
// Extract current tool execution steps if any exist
const toolSteps = extractInChatToolSteps(this.noteContextChatMessages);
// Get tool executions from both UI and any cached executions in metadata
let toolExecutions: Array<{
id: string;
name: string;
arguments: any;
result: any;
error?: string;
timestamp: string;
}> = [];
// First include any tool executions already in metadata (from streaming events)
if (this.metadata?.toolExecutions && Array.isArray(this.metadata.toolExecutions)) {
toolExecutions = [...this.metadata.toolExecutions];
console.log(`Including ${toolExecutions.length} tool executions from metadata`);
}
// Also extract any visible tool steps from the UI
const extractedExecutions = toolSteps.map(step => {
// Parse tool execution information
if (step.type === 'tool-execution') {
try {
const content = JSON.parse(step.content);
return {
id: content.toolCallId || `tool-${Date.now()}-${Math.random().toString(36).substring(2, 7)}`,
name: content.tool || 'unknown',
arguments: content.args || {},
result: content.result || {},
error: content.error,
timestamp: new Date().toISOString()
};
} catch (e) {
// If we can't parse it, create a basic record
return {
id: `tool-${Date.now()}-${Math.random().toString(36).substring(2, 7)}`,
name: 'unknown',
arguments: {},
result: step.content,
timestamp: new Date().toISOString()
};
}
} else if (step.type === 'result' && step.name) {
// Handle result steps with a name
return {
id: `tool-${Date.now()}-${Math.random().toString(36).substring(2, 7)}`,
name: step.name,
arguments: {},
result: step.content,
timestamp: new Date().toISOString()
};
}
return {
id: `tool-${Date.now()}-${Math.random().toString(36).substring(2, 7)}`,
name: 'unknown',
arguments: {},
result: 'Unrecognized tool step',
timestamp: new Date().toISOString()
};
});
// Merge the tool executions, keeping only unique IDs
const existingIds = new Set(toolExecutions.map((t: {id: string}) => t.id));
for (const exec of extractedExecutions) {
if (!existingIds.has(exec.id)) {
toolExecutions.push(exec);
existingIds.add(exec.id);
}
}
const dataToSave: ChatData = {
messages: this.messages,
sessionId: this.sessionId,
toolSteps: toolSteps
toolSteps: toolSteps,
// Add sources if we have them
sources: this.sources || [],
// Add metadata
metadata: {
model: this.metadata?.model || 'default',
provider: this.metadata?.provider || undefined,
temperature: this.metadata?.temperature || 0.7,
lastUpdated: new Date().toISOString(),
// Add tool executions
toolExecutions: toolExecutions
}
};
console.log(`Saving chat data with sessionId: ${this.sessionId} and ${toolSteps.length} tool steps`);
console.log(`Saving chat data with sessionId: ${this.sessionId}, ${toolSteps.length} tool steps, ${this.sources?.length || 0} sources, ${toolExecutions.length} tool executions`);
await this.onSaveData(dataToSave);
} catch (error) {
@ -179,6 +283,39 @@ export default class LlmChatPanel extends BasicWidget {
this.restoreInChatToolSteps(savedData.toolSteps);
}
// Load sources if available
if (savedData.sources && Array.isArray(savedData.sources)) {
this.sources = savedData.sources;
console.log(`Loaded ${this.sources.length} sources from saved data`);
// Show sources in the UI if they exist
if (this.sources.length > 0) {
this.showSources(this.sources);
}
}
// Load metadata if available
if (savedData.metadata) {
this.metadata = {
...this.metadata,
...savedData.metadata
};
// Ensure tool executions are loaded
if (savedData.metadata.toolExecutions && Array.isArray(savedData.metadata.toolExecutions)) {
console.log(`Loaded ${savedData.metadata.toolExecutions.length} tool executions from saved data`);
if (!this.metadata.toolExecutions) {
this.metadata.toolExecutions = [];
}
// Make sure we don't lose any tool executions
this.metadata.toolExecutions = savedData.metadata.toolExecutions;
}
console.log(`Loaded metadata from saved data:`, this.metadata);
}
// Load session ID if available
if (savedData.sessionId) {
try {
@ -188,6 +325,53 @@ export default class LlmChatPanel extends BasicWidget {
if (sessionExists) {
console.log(`Restored session ${savedData.sessionId}`);
this.sessionId = savedData.sessionId;
// If we successfully restored a session, also fetch the latest session data
try {
const sessionData = await server.get<{
metadata?: {
model?: string;
provider?: string;
temperature?: number;
maxTokens?: number;
toolExecutions?: Array<{
id: string;
name: string;
arguments: any;
result: any;
error?: string;
timestamp: string;
}>;
lastUpdated?: string;
usage?: {
promptTokens?: number;
completionTokens?: number;
totalTokens?: number;
};
};
sources?: Array<{
noteId: string;
title: string;
similarity?: number;
content?: string;
}>;
}>(`llm/sessions/${savedData.sessionId}`);
if (sessionData && sessionData.metadata) {
// Update our metadata with the latest from the server
this.metadata = {
...this.metadata,
...sessionData.metadata
};
console.log(`Updated metadata from server for session ${savedData.sessionId}`);
// If server has sources, update those too
if (sessionData.sources && sessionData.sources.length > 0) {
this.sources = sessionData.sources;
}
}
} catch (fetchError) {
console.warn(`Could not fetch latest session data: ${fetchError}`);
}
} else {
console.log(`Saved session ${savedData.sessionId} not found, will create new one`);
this.sessionId = null;
@ -466,13 +650,25 @@ export default class LlmChatPanel extends BasicWidget {
// If the POST request returned content directly, display it
if (postResponse && postResponse.content) {
this.processAssistantResponse(postResponse.content);
// Store metadata from the response
if (postResponse.metadata) {
console.log("Received metadata from response:", postResponse.metadata);
this.metadata = {
...this.metadata,
...postResponse.metadata
};
}
// If there are sources, show them
// Store sources from the response
if (postResponse.sources && postResponse.sources.length > 0) {
console.log(`Received ${postResponse.sources.length} sources from response`);
this.sources = postResponse.sources;
this.showSources(postResponse.sources);
}
// Process the assistant response
this.processAssistantResponse(postResponse.content, postResponse);
hideLoadingIndicator(this.loadingIndicator);
return true;
}
@ -487,7 +683,7 @@ export default class LlmChatPanel extends BasicWidget {
/**
* Process an assistant response - add to UI and save
*/
private async processAssistantResponse(content: string) {
private async processAssistantResponse(content: string, fullResponse?: any) {
// Add the response to the chat UI
this.addMessageToChat('assistant', content);
@ -498,6 +694,21 @@ export default class LlmChatPanel extends BasicWidget {
timestamp: new Date()
});
// If we received tool execution information, add it to metadata
if (fullResponse?.metadata?.toolExecutions) {
console.log(`Storing ${fullResponse.metadata.toolExecutions.length} tool executions from response`);
// Make sure our metadata has toolExecutions
if (!this.metadata.toolExecutions) {
this.metadata.toolExecutions = [];
}
// Add new tool executions
this.metadata.toolExecutions = [
...this.metadata.toolExecutions,
...fullResponse.metadata.toolExecutions
];
}
// Save to note
this.saveCurrentData().catch(err => {
console.error("Failed to save assistant response to note:", err);
@ -512,12 +723,94 @@ export default class LlmChatPanel extends BasicWidget {
throw new Error("No session ID available");
}
// Store tool executions captured during streaming
const toolExecutionsCache: Array<{
id: string;
name: string;
arguments: any;
result: any;
error?: string;
timestamp: string;
}> = [];
return setupStreamingResponse(
this.sessionId,
messageParams,
// Content update handler
(content: string) => {
this.updateStreamingUI(content);
(content: string, isDone: boolean = false) => {
this.updateStreamingUI(content, isDone);
// Update session data with additional metadata when streaming is complete
if (isDone) {
// Update our metadata with info from the server
server.get<{
metadata?: {
model?: string;
provider?: string;
temperature?: number;
maxTokens?: number;
toolExecutions?: Array<{
id: string;
name: string;
arguments: any;
result: any;
error?: string;
timestamp: string;
}>;
lastUpdated?: string;
usage?: {
promptTokens?: number;
completionTokens?: number;
totalTokens?: number;
};
};
sources?: Array<{
noteId: string;
title: string;
similarity?: number;
content?: string;
}>;
}>(`llm/sessions/${this.sessionId}`)
.then((sessionData) => {
console.log("Got updated session data:", sessionData);
// Store metadata
if (sessionData.metadata) {
this.metadata = {
...this.metadata,
...sessionData.metadata
};
}
// Store sources
if (sessionData.sources && sessionData.sources.length > 0) {
this.sources = sessionData.sources;
this.showSources(sessionData.sources);
}
// Make sure we include the cached tool executions
if (toolExecutionsCache.length > 0) {
console.log(`Including ${toolExecutionsCache.length} cached tool executions in metadata`);
if (!this.metadata.toolExecutions) {
this.metadata.toolExecutions = [];
}
// Add any tool executions from our cache that aren't already in metadata
const existingIds = new Set((this.metadata.toolExecutions || []).map((t: {id: string}) => t.id));
for (const toolExec of toolExecutionsCache) {
if (!existingIds.has(toolExec.id)) {
this.metadata.toolExecutions.push(toolExec);
existingIds.add(toolExec.id);
}
}
}
// Save the updated data to the note
this.saveCurrentData()
.catch(err => console.error("Failed to save data after streaming completed:", err));
})
.catch(err => console.error("Error fetching session data after streaming:", err));
}
},
// Thinking update handler
(thinking: string) => {
@ -526,6 +819,38 @@ export default class LlmChatPanel extends BasicWidget {
// Tool execution handler
(toolData: any) => {
this.showToolExecutionInfo(toolData);
// Cache tools we see during streaming to include them in the final saved data
if (toolData && toolData.action === 'result' && toolData.tool) {
// Create a tool execution record
const toolExec = {
id: toolData.toolCallId || `tool-${Date.now()}-${Math.random().toString(36).substring(2, 7)}`,
name: toolData.tool,
arguments: toolData.args || {},
result: toolData.result || {},
error: toolData.error,
timestamp: new Date().toISOString()
};
// Add to both our local cache for immediate saving and to metadata for later saving
toolExecutionsCache.push(toolExec);
// Initialize toolExecutions array if it doesn't exist
if (!this.metadata.toolExecutions) {
this.metadata.toolExecutions = [];
}
// Add tool execution to our metadata
this.metadata.toolExecutions.push(toolExec);
console.log(`Cached tool execution for ${toolData.tool} to be saved later`);
// Save immediately after receiving a tool execution
// This ensures we don't lose tool execution data if streaming fails
this.saveCurrentData().catch(err => {
console.error("Failed to save tool execution data:", err);
});
}
},
// Complete handler
() => {
@ -541,9 +866,9 @@ export default class LlmChatPanel extends BasicWidget {
/**
* Update the UI with streaming content
*/
private updateStreamingUI(assistantResponse: string) {
private updateStreamingUI(assistantResponse: string, isDone: boolean = false) {
const logId = `LlmChatPanel-${Date.now()}`;
console.log(`[${logId}] Updating UI with response text: ${assistantResponse.length} chars`);
console.log(`[${logId}] Updating UI with response text: ${assistantResponse.length} chars, isDone=${isDone}`);
if (!this.noteContextChatMessages) {
console.error(`[${logId}] noteContextChatMessages element not available`);
@ -587,6 +912,31 @@ export default class LlmChatPanel extends BasicWidget {
this.addMessageToChat('assistant', assistantResponse);
console.log(`[${logId}] Successfully added new assistant message`);
}
// Update messages array only if this is the first update or the final update
if (!this.messages.some(m => m.role === 'assistant') || isDone) {
// Add or update the assistant message in our local array
const existingIndex = this.messages.findIndex(m => m.role === 'assistant');
if (existingIndex >= 0) {
// Update existing message
this.messages[existingIndex].content = assistantResponse;
} else {
// Add new message
this.messages.push({
role: 'assistant',
content: assistantResponse,
timestamp: new Date()
});
}
// If this is the final update, save the data
if (isDone) {
console.log(`[${logId}] Streaming finished, saving data to note`);
this.saveCurrentData().catch(err => {
console.error(`[${logId}] Failed to save streaming response to note:`, err);
});
}
}
}
// Always try to scroll to the latest content

View File

@ -29,4 +29,25 @@ export interface ChatData {
messages: MessageData[];
// Server-side session id; null until a session is created or restored.
sessionId: string | null;
// Tool execution steps currently visible in the chat UI.
toolSteps: ToolExecutionStep[];
// Context notes that informed the assistant's answers, if any.
sources?: Array<{
noteId: string;
title: string;
similarity?: number;
content?: string;
}>;
// Optional metadata saved with the chat (model settings, tool log, timestamps).
metadata?: {
model?: string;
provider?: string;
temperature?: number;
maxTokens?: number;
lastUpdated?: string;
// Tool executions recorded during the conversation.
toolExecutions?: Array<{
id: string;
name: string;
arguments: any;
result: any;
error?: string;
timestamp: string;
}>;
};
}

View File

@ -99,6 +99,7 @@ export interface ChatCompletionOptions {
useAdvancedContext?: boolean; // Whether to use advanced context enrichment
toolExecutionStatus?: any[]; // Status information about executed tools for feedback
providerMetadata?: ModelMetadata; // Metadata about the provider and model capabilities
sessionId?: string; // Session ID for storing tool execution results
/**
* Maximum number of tool execution iterations

View File

@ -314,7 +314,7 @@ export class AIServiceManager implements IAIServiceManager {
/**
 * No-op retained for backward compatibility; agent tools are created in the constructor.
 */
async initializeAgentTools(): Promise<void> {
// Agent tools are already initialized in the constructor
// This method is kept for backward compatibility
// NOTE(review): the debug- and info-level calls below are the pre-/post-image of a
// diff hunk rendered together — only the info-level call should exist in the applied file; confirm.
log.debug("initializeAgentTools called, but tools are already initialized in constructor");
log.info("initializeAgentTools called, but tools are already initialized in constructor");
}
/**

View File

@ -139,10 +139,16 @@ export class ChatService {
// Select pipeline to use
const pipeline = this.getPipeline();
// Include sessionId in the options for tool execution tracking
const pipelineOptions = {
...(options || session.options || {}),
sessionId: session.id
};
// Execute the pipeline
const response = await pipeline.execute({
messages: session.messages,
options: options || session.options || {},
options: pipelineOptions,
query: content,
streamCallback
});
@ -156,8 +162,21 @@ export class ChatService {
session.messages.push(assistantMessage);
session.isStreaming = false;
// Save the complete conversation
await chatStorageService.updateChat(session.id, session.messages);
// Save metadata about the response
const metadata = {
model: response.model,
provider: response.provider,
usage: response.usage
};
// If there are tool calls, make sure they're stored in metadata
if (response.tool_calls && response.tool_calls.length > 0) {
// Let the storage service extract and save tool executions
// The tool results are already in the messages
}
// Save the complete conversation with metadata
await chatStorageService.updateChat(session.id, session.messages, undefined, metadata);
// If first message, update the title based on content
if (session.messages.length <= 2 && (!session.title || session.title === 'New Chat')) {
@ -228,10 +247,16 @@ export class ChatService {
const pipelineType = showThinking ? 'agent' : 'default';
const pipeline = this.getPipeline(pipelineType);
// Include sessionId in the options for tool execution tracking
const pipelineOptions = {
...(options || session.options || {}),
sessionId: session.id
};
// Execute the pipeline with note context
const response = await pipeline.execute({
messages: session.messages,
options: options || session.options || {},
options: pipelineOptions,
noteId,
query: content,
showThinking,
@ -247,8 +272,22 @@ export class ChatService {
session.messages.push(assistantMessage);
session.isStreaming = false;
// Save the complete conversation
await chatStorageService.updateChat(session.id, session.messages);
// Save metadata about the response
const metadata = {
model: response.model,
provider: response.provider,
usage: response.usage,
contextNoteId: noteId // Store the note ID used for context
};
// If there are tool calls, make sure they're stored in metadata
if (response.tool_calls && response.tool_calls.length > 0) {
// Let the storage service extract and save tool executions
// The tool results are already in the messages
}
// Save the complete conversation with metadata
await chatStorageService.updateChat(session.id, session.messages, undefined, metadata);
// If first message, update the title
if (session.messages.length <= 2 && (!session.title || session.title === 'New Chat')) {
@ -312,7 +351,29 @@ export class ChatService {
};
session.messages.push(contextMessage);
await chatStorageService.updateChat(session.id, session.messages);
// Store the context note id in metadata
const metadata = {
contextNoteId: noteId
};
// Check if the context extraction result has sources
// Note: We're adding a defensive check since TypeScript doesn't know about this property
const contextSources = (contextResult as any).sources || [];
if (contextSources && contextSources.length > 0) {
// Convert the sources to the format expected by recordSources
const sources = contextSources.map((source: any) => ({
noteId: source.noteId,
title: source.title,
similarity: source.similarity,
content: source.content
}));
// Store these sources in metadata
await chatStorageService.recordSources(session.id, sources);
}
await chatStorageService.updateChat(session.id, session.messages, undefined, metadata);
return session;
}
@ -343,7 +404,29 @@ export class ChatService {
};
session.messages.push(contextMessage);
await chatStorageService.updateChat(session.id, session.messages);
// Store the context note id and query in metadata
const metadata = {
contextNoteId: noteId
};
// Check if the semantic context extraction result has sources
// Note: We're adding a defensive check since TypeScript doesn't know about this property
const contextSources = (contextResult as any).sources || [];
if (contextSources && contextSources.length > 0) {
// Convert the sources to the format expected by recordSources
const sources = contextSources.map((source: any) => ({
noteId: source.noteId,
title: source.title,
similarity: source.similarity,
content: source.content
}));
// Store these sources in metadata
await chatStorageService.recordSources(session.id, sources);
}
await chatStorageService.updateChat(session.id, session.messages, undefined, metadata);
return session;
}

View File

@ -2,7 +2,9 @@ import notes from '../notes.js';
import sql from '../sql.js';
import attributes from '../attributes.js';
import type { Message } from './ai_interface.js';
import type { ToolCall } from './tools/tool_interfaces.js';
import { t } from 'i18next';
import log from '../log.js';
interface StoredChat {
id: string;
@ -11,6 +13,39 @@ interface StoredChat {
noteId?: string;
createdAt: Date;
updatedAt: Date;
metadata?: ChatMetadata;
}
/**
 * Metadata stored alongside a chat's messages in the persisted note content.
 */
interface ChatMetadata {
// Context notes used to answer queries in this chat.
sources?: Array<{
noteId: string;
title: string;
similarity?: number;
path?: string;
branchId?: string;
content?: string;
}>;
model?: string;
provider?: string;
// Note id that supplied context for this chat, when applicable.
contextNoteId?: string;
// Durable record of tool calls and their results.
toolExecutions?: Array<ToolExecution>;
// Token accounting reported by the provider, when available.
usage?: {
promptTokens?: number;
completionTokens?: number;
totalTokens?: number;
};
temperature?: number;
maxTokens?: number;
}
/**
 * A single recorded tool invocation paired with its result (or error).
 */
interface ToolExecution {
id: string;
name: string;
// Arguments as parsed JSON, or the raw string when parsing failed.
arguments: Record<string, any> | string;
// Result as parsed JSON when the response looked structured, otherwise raw text.
result: string | Record<string, any>;
error?: string;
timestamp: Date;
// Duration in ms when known; recordToolExecution currently records 0 (no start time threaded through).
executionTime?: number;
}
/**
@ -56,7 +91,7 @@ export class ChatStorageService {
/**
* Create a new chat
*/
async createChat(title: string, messages: Message[] = []): Promise<StoredChat> {
async createChat(title: string, messages: Message[] = [], metadata?: ChatMetadata): Promise<StoredChat> {
const rootNoteId = await this.getOrCreateChatRoot();
const now = new Date();
@ -67,6 +102,7 @@ export class ChatStorageService {
mime: ChatStorageService.CHAT_MIME,
content: JSON.stringify({
messages,
metadata: metadata || {},
createdAt: now,
updatedAt: now
}, null, 2)
@ -84,7 +120,8 @@ export class ChatStorageService {
messages,
noteId: note.noteId,
createdAt: now,
updatedAt: now
updatedAt: now,
metadata: metadata || {}
};
}
@ -104,9 +141,22 @@ export class ChatStorageService {
return chats.map(chat => {
let messages: Message[] = [];
let metadata: ChatMetadata = {};
let createdAt = new Date(chat.dateCreated);
let updatedAt = new Date(chat.dateModified);
try {
const content = JSON.parse(chat.content);
messages = content.messages || [];
metadata = content.metadata || {};
// Use stored dates if available
if (content.createdAt) {
createdAt = new Date(content.createdAt);
}
if (content.updatedAt) {
updatedAt = new Date(content.updatedAt);
}
} catch (e) {
console.error('Failed to parse chat content:', e);
}
@ -116,8 +166,9 @@ export class ChatStorageService {
title: chat.title,
messages,
noteId: chat.noteId,
createdAt: new Date(chat.dateCreated),
updatedAt: new Date(chat.dateModified)
createdAt,
updatedAt,
metadata
};
});
}
@ -139,9 +190,22 @@ export class ChatStorageService {
}
let messages: Message[] = [];
let metadata: ChatMetadata = {};
let createdAt = new Date(chat.dateCreated);
let updatedAt = new Date(chat.dateModified);
try {
const content = JSON.parse(chat.content);
messages = content.messages || [];
metadata = content.metadata || {};
// Use stored dates if available
if (content.createdAt) {
createdAt = new Date(content.createdAt);
}
if (content.updatedAt) {
updatedAt = new Date(content.updatedAt);
}
} catch (e) {
console.error('Failed to parse chat content:', e);
}
@ -151,15 +215,21 @@ export class ChatStorageService {
title: chat.title,
messages,
noteId: chat.noteId,
createdAt: new Date(chat.dateCreated),
updatedAt: new Date(chat.dateModified)
createdAt,
updatedAt,
metadata
};
}
/**
* Update messages in a chat
*/
async updateChat(chatId: string, messages: Message[], title?: string): Promise<StoredChat | null> {
async updateChat(
chatId: string,
messages: Message[],
title?: string,
metadata?: ChatMetadata
): Promise<StoredChat | null> {
const chat = await this.getChat(chatId);
if (!chat) {
@ -167,12 +237,20 @@ export class ChatStorageService {
}
const now = new Date();
const updatedMetadata = {...(chat.metadata || {}), ...(metadata || {})};
// Extract and store tool calls from the messages
const toolExecutions = this.extractToolExecutionsFromMessages(messages, updatedMetadata.toolExecutions || []);
if (toolExecutions.length > 0) {
updatedMetadata.toolExecutions = toolExecutions;
}
// Update content directly using SQL since we don't have a method for this in the notes service
await sql.execute(
`UPDATE note_contents SET content = ? WHERE noteId = ?`,
[JSON.stringify({
messages,
metadata: updatedMetadata,
createdAt: chat.createdAt,
updatedAt: now
}, null, 2), chatId]
@ -190,7 +268,8 @@ export class ChatStorageService {
...chat,
title: title || chat.title,
messages,
updatedAt: now
updatedAt: now,
metadata: updatedMetadata
};
}
@ -211,6 +290,160 @@ export class ChatStorageService {
return false;
}
}
/**
 * Persist a single tool execution (success or failure) into a chat's metadata.
 *
 * @param chatId - id of the chat note to update
 * @param toolName - name of the executed tool
 * @param toolId - unique id of this tool call
 * @param args - tool arguments (parsed object or raw string)
 * @param result - tool output (parsed object or raw string)
 * @param error - error text for failed executions, if any
 * @returns true when the execution was recorded, false when the chat is missing or saving failed
 */
async recordToolExecution(
    chatId: string,
    toolName: string,
    toolId: string,
    args: Record<string, any> | string,
    result: string | Record<string, any>,
    error?: string
): Promise<boolean> {
    try {
        const chat = await this.getChat(chatId);
        if (!chat) {
            return false;
        }

        const execution: ToolExecution = {
            id: toolId,
            name: toolName,
            arguments: args,
            result,
            error,
            timestamp: new Date(),
            executionTime: 0 // Could track this if we passed in a start time
        };

        // Append to whatever executions are already recorded for this chat.
        const executions = [...(chat.metadata?.toolExecutions || []), execution];

        await this.updateChat(
            chatId,
            chat.messages,
            undefined, // Don't change title
            {
                ...chat.metadata,
                toolExecutions: executions
            }
        );

        return true;
    } catch (e) {
        log.error(`Failed to record tool execution: ${e}`);
        return false;
    }
}
/**
 * Derive tool-execution records from a message transcript.
 *
 * Pairs each assistant tool_call with the 'tool'-role message answering it
 * (matched by tool_call_id) and appends any pairs not already present (by id)
 * to the existing records. This keeps a durable log of all tool calls even if
 * the message history is later truncated.
 */
private extractToolExecutionsFromMessages(
    messages: Message[],
    existingToolExecutions: ToolExecution[] = []
): ToolExecution[] {
    const collected = [...existingToolExecutions];
    const seenIds = new Set(existingToolExecutions.map(t => t.id));

    // Index tool responses by the tool_call_id they answer.
    const responsesById = new Map<string, string>();
    for (const msg of messages) {
        if (msg.role === 'tool' && msg.tool_call_id) {
            responsesById.set(msg.tool_call_id, msg.content);
        }
    }

    // Walk assistant messages and pair each tool call with its response.
    for (const msg of messages) {
        if (msg.role !== 'assistant' || !Array.isArray(msg.tool_calls)) {
            continue;
        }

        for (const call of msg.tool_calls as ToolCall[]) {
            if (!call.id || seenIds.has(call.id)) {
                continue;
            }
            const rawResponse = responsesById.get(call.id);
            if (!rawResponse) {
                continue; // Skip calls with no recorded response.
            }

            // Arguments may arrive as a JSON string; prefer the parsed form.
            let parsedArgs: Record<string, any> | string = call.function.arguments;
            if (typeof parsedArgs === 'string') {
                try {
                    parsedArgs = JSON.parse(parsedArgs);
                } catch (e) {
                    // Keep the original string when it is not valid JSON.
                }
            }

            // Likewise decode structured results when the payload looks like JSON.
            let parsedResult: string | Record<string, any> = rawResponse;
            const trimmed = rawResponse.trim();
            if (trimmed.startsWith('{') || trimmed.startsWith('[')) {
                try {
                    parsedResult = JSON.parse(rawResponse);
                } catch (e) {
                    parsedResult = rawResponse;
                }
            }

            const failed = rawResponse.startsWith('Error:');
            collected.push({
                id: call.id,
                name: call.function.name,
                arguments: parsedArgs,
                result: parsedResult,
                error: failed ? rawResponse.substring('Error:'.length).trim() : undefined,
                timestamp: new Date()
            });
            seenIds.add(call.id);
        }
    }

    return collected;
}
/**
 * Persist the set of context-note sources used by a chat.
 * Replaces any previously recorded sources in the chat's metadata.
 *
 * @param chatId - id of the chat note to update
 * @param sources - note references (with optional similarity/path/content)
 * @returns true when saved, false when the chat is missing or saving failed
 */
async recordSources(
    chatId: string,
    sources: Array<{
        noteId: string;
        title: string;
        similarity?: number;
        path?: string;
        branchId?: string;
        content?: string;
    }>
): Promise<boolean> {
    try {
        const chat = await this.getChat(chatId);
        if (!chat) {
            return false;
        }

        // Merge the sources into existing metadata without touching the title.
        await this.updateChat(
            chatId,
            chat.messages,
            undefined, // Don't change title
            {
                ...chat.metadata,
                sources
            }
        );

        return true;
    } catch (e) {
        log.error(`Failed to record sources: ${e}`);
        return false;
    }
}
}
// Singleton instance

View File

@ -3,6 +3,7 @@ import log from '../../../log.js';
import type { StreamCallback, ToolExecutionInput } from '../interfaces.js';
import { BasePipelineStage } from '../pipeline_stage.js';
import toolRegistry from '../../tools/tool_registry.js';
import chatStorageService from '../../chat_storage_service.js';
/**
* Pipeline stage for handling LLM tool calling
@ -172,6 +173,22 @@ export class ToolCallingStage extends BasePipelineStage<ToolExecutionInput, { re
const executionTime = Date.now() - executionStart;
log.info(`================ TOOL EXECUTION COMPLETED in ${executionTime}ms ================`);
// Record this successful tool execution if there's a sessionId available
if (input.options?.sessionId) {
try {
await chatStorageService.recordToolExecution(
input.options.sessionId,
toolCall.function.name,
toolCall.id || `tool-${Date.now()}-${Math.random().toString(36).substring(2, 9)}`,
args,
result,
undefined // No error for successful execution
);
} catch (storageError) {
log.error(`Failed to record tool execution in chat storage: ${storageError}`);
}
}
// Emit tool completion event if streaming is enabled
if (streamCallback) {
const toolExecutionData = {
@ -190,6 +207,22 @@ export class ToolCallingStage extends BasePipelineStage<ToolExecutionInput, { re
const executionTime = Date.now() - executionStart;
log.error(`================ TOOL EXECUTION FAILED in ${executionTime}ms: ${execError.message} ================`);
// Record this failed tool execution if there's a sessionId available
if (input.options?.sessionId) {
try {
await chatStorageService.recordToolExecution(
input.options.sessionId,
toolCall.function.name,
toolCall.id || `tool-${Date.now()}-${Math.random().toString(36).substring(2, 9)}`,
args,
"", // No result for failed execution
execError.message || String(execError)
);
} catch (storageError) {
log.error(`Failed to record tool execution error in chat storage: ${storageError}`);
}
}
// Emit tool error event if streaming is enabled
if (streamCallback) {
const toolExecutionData = {

View File

@ -476,7 +476,9 @@ class RestChatService {
model: session.metadata.model,
// Set stream based on request type, but ensure it's explicitly a boolean value
// GET requests or format=stream parameter indicates streaming should be used
stream: !!(req.method === 'GET' || req.query.format === 'stream' || req.query.stream === 'true')
stream: !!(req.method === 'GET' || req.query.format === 'stream' || req.query.stream === 'true'),
// Include sessionId for tracking tool executions
sessionId: sessionId
};
// Log the options to verify what's being sent to the pipeline
@ -491,6 +493,9 @@ class RestChatService {
// Create a stream callback wrapper
// This will ensure we properly handle all streaming messages
let messageContent = '';
// Used to track tool call responses for metadata storage
const toolResponseMap = new Map<string, string>();
let streamFinished = false;
// Prepare the pipeline input
@ -621,10 +626,27 @@ class RestChatService {
timestamp: new Date()
});
// Return the response
// Extract sources if they're available
const sources = (response as any).sources || [];
// Store sources in the session metadata if they're present
if (sources.length > 0) {
session.metadata.sources = sources;
log.info(`Stored ${sources.length} sources in session metadata`);
}
// Return the response with complete metadata
return {
content: response.text || '',
sources: (response as any).sources || []
sources: sources,
metadata: {
model: response.model || session.metadata.model,
provider: response.provider || session.metadata.provider,
temperature: session.metadata.temperature,
maxTokens: session.metadata.maxTokens,
lastUpdated: new Date().toISOString(),
toolExecutions: session.metadata.toolExecutions || []
}
};
} else {
// For streaming requests, we've already sent the response
@ -1132,12 +1154,19 @@ class RestChatService {
});
}
// Store the response in the session
session.messages.push({
// Store the response in the session with tool_calls if present
const assistantMessage: any = {
role: 'assistant',
content: messageContent,
timestamp: new Date()
});
};
// If there were tool calls, store them with the message
if (response.tool_calls && response.tool_calls.length > 0) {
assistantMessage.tool_calls = response.tool_calls;
}
session.messages.push(assistantMessage);
return;
} catch (toolError) {
@ -1159,6 +1188,18 @@ class RestChatService {
if (response.stream) {
log.info(`Provider ${service.getName()} supports streaming via stream() method`);
// Store information about the model and provider in session metadata
session.metadata.model = response.model || session.metadata.model;
session.metadata.provider = response.provider || session.metadata.provider;
session.metadata.lastUpdated = new Date().toISOString();
// If response has tool_calls, capture those for later storage in metadata
if (response.tool_calls && response.tool_calls.length > 0) {
log.info(`Storing ${response.tool_calls.length} initial tool calls in session metadata`);
// We'll complete this information when we get the tool results
session.metadata.pendingToolCalls = response.tool_calls;
}
try {
await response.stream(async (chunk: StreamChunk) => {
if (chunk.text) {
@ -1205,6 +1246,41 @@ class RestChatService {
if (chunk.done) {
log.info(`Stream completed from ${service.getName()}, total content: ${messageContent.length} chars`);
// Store tool executions from the conversation into metadata
if (session.metadata.pendingToolCalls) {
const toolExecutions = session.metadata.toolExecutions || [];
// We don't have a toolResponseMap available at this scope
// Just record the pending tool calls with minimal information
for (const toolCall of session.metadata.pendingToolCalls) {
if (!toolCall.id) continue;
// Parse arguments
let args = toolCall.function.arguments;
if (typeof args === 'string') {
try {
args = JSON.parse(args);
} catch {
// Keep as string if not valid JSON
}
}
// Add to tool executions with minimal info
toolExecutions.push({
id: toolCall.id,
name: toolCall.function.name,
arguments: args,
result: "Result not captured in streaming mode",
timestamp: new Date().toISOString()
});
}
// Update session metadata
session.metadata.toolExecutions = toolExecutions;
delete session.metadata.pendingToolCalls;
log.info(`Stored ${toolExecutions.length} tool executions in session metadata`);
}
// Only send final done message if it wasn't already sent with content
// This ensures we don't duplicate the content but still mark completion
if (!chunk.text) {
@ -1397,6 +1473,40 @@ class RestChatService {
/**
* Build context from relevant notes
*/
/**
* Record a tool execution in the session metadata
*/
private recordToolExecution(sessionId: string, tool: any, result: string, error?: string): void {
if (!sessionId) return;
const session = sessions.get(sessionId);
if (!session) return;
try {
const toolExecutions = session.metadata.toolExecutions || [];
// Format tool execution record
const execution = {
id: tool.id || `tool-${Date.now()}-${Math.random().toString(36).substring(2, 7)}`,
name: tool.function?.name || 'unknown',
arguments: typeof tool.function?.arguments === 'string'
? (() => { try { return JSON.parse(tool.function.arguments); } catch { return tool.function.arguments; } })()
: tool.function?.arguments || {},
result: result,
error: error,
timestamp: new Date().toISOString()
};
// Add to tool executions
toolExecutions.push(execution);
session.metadata.toolExecutions = toolExecutions;
log.info(`Recorded tool execution for ${execution.name} in session ${sessionId}`);
} catch (err) {
log.error(`Failed to record tool execution: ${err}`);
}
}
buildContextFromNotes(sources: NoteSource[], query: string): string {
if (!sources || sources.length === 0) {
return query || '';
@ -1466,7 +1576,10 @@ class RestChatService {
temperature: options.temperature,
maxTokens: options.maxTokens,
model: options.model,
provider: options.provider
provider: options.provider,
sources: [],
toolExecutions: [],
lastUpdated: now.toISOString()
}
});
@ -1494,14 +1607,25 @@ class RestChatService {
throw new Error(`Session with ID ${sessionId} not found`);
}
// Return session without internal metadata
// Return session with metadata and additional fields
return {
id: session.id,
title: session.title,
createdAt: session.createdAt,
lastActive: session.lastActive,
messages: session.messages,
noteContext: session.noteContext
noteContext: session.noteContext,
// Include additional fields for the frontend
sources: session.metadata.sources || [],
metadata: {
model: session.metadata.model,
provider: session.metadata.provider,
temperature: session.metadata.temperature,
maxTokens: session.metadata.maxTokens,
lastUpdated: session.lastActive.toISOString(),
// Include simplified tool executions if available
toolExecutions: session.metadata.toolExecutions || []
}
};
} catch (error: any) {
log.error(`Error getting LLM session: ${error.message || 'Unknown error'}`);
@ -1532,7 +1656,7 @@ class RestChatService {
session.noteContext = updates.noteContext;
}
// Update metadata
// Update basic metadata
if (updates.temperature !== undefined) {
session.metadata.temperature = updates.temperature;
}
@ -1549,13 +1673,39 @@ class RestChatService {
session.metadata.provider = updates.provider;
}
// Handle new extended metadata from the frontend
if (updates.metadata) {
// Update various metadata fields but keep existing ones
session.metadata = {
...session.metadata,
...updates.metadata,
// Make sure timestamp is updated
lastUpdated: new Date().toISOString()
};
}
// Handle sources as a top-level field
if (updates.sources && Array.isArray(updates.sources)) {
session.metadata.sources = updates.sources;
}
// Handle tool executions from frontend
if (updates.toolExecutions && Array.isArray(updates.toolExecutions)) {
session.metadata.toolExecutions = updates.toolExecutions;
} else if (updates.metadata?.toolExecutions && Array.isArray(updates.metadata.toolExecutions)) {
session.metadata.toolExecutions = updates.metadata.toolExecutions;
}
// Update timestamp
session.lastActive = new Date();
return {
id: session.id,
title: session.title,
updatedAt: session.lastActive
updatedAt: session.lastActive,
// Include updated metadata in response
metadata: session.metadata,
sources: session.metadata.sources || []
};
} catch (error: any) {
log.error(`Error updating LLM session: ${error.message || 'Unknown error'}`);