diff --git a/apps/client/src/components/app_context.ts b/apps/client/src/components/app_context.ts
index cba5c91af..ce33d1447 100644
--- a/apps/client/src/components/app_context.ts
+++ b/apps/client/src/components/app_context.ts
@@ -116,7 +116,7 @@ export type CommandMappings = {
     openedFileUpdated: CommandData & {
         entityType: string;
         entityId: string;
-        lastModifiedMs: number;
+        lastModifiedMs?: number;
         filePath: string;
     };
     focusAndSelectTitle: CommandData & {
diff --git a/apps/client/src/services/branches.ts b/apps/client/src/services/branches.ts
index 86ec8e9a8..b1231e598 100644
--- a/apps/client/src/services/branches.ts
+++ b/apps/client/src/services/branches.ts
@@ -210,7 +210,7 @@ function makeToast(id: string, message: string): ToastOptions {
 }
 
 ws.subscribeToMessages(async (message) => {
-    if (message.taskType !== "deleteNotes") {
+    if (!("taskType" in message) || message.taskType !== "deleteNotes") {
         return;
     }
 
@@ -228,7 +228,7 @@ ws.subscribeToMessages(async (message) => {
 });
 
 ws.subscribeToMessages(async (message) => {
-    if (message.taskType !== "undeleteNotes") {
+    if (!("taskType" in message) || message.taskType !== "undeleteNotes") {
         return;
     }
 
diff --git a/apps/client/src/services/file_watcher.ts b/apps/client/src/services/file_watcher.ts
index cda3c2852..c3df01b0e 100644
--- a/apps/client/src/services/file_watcher.ts
+++ b/apps/client/src/services/file_watcher.ts
@@ -1,16 +1,8 @@
 import ws from "./ws.js";
 import appContext from "../components/app_context.js";
+import { OpenedFileUpdateStatus } from "@triliumnext/commons";
 
-// TODO: Deduplicate
-interface Message {
-    type: string;
-    entityType: string;
-    entityId: string;
-    lastModifiedMs: number;
-    filePath: string;
-}
-
-const fileModificationStatus: Record<string, Record<string, Message>> = {
+const fileModificationStatus: Record<string, Record<string, OpenedFileUpdateStatus>> = {
     notes: {},
     attachments: {}
 };
@@ -39,7 +31,7 @@ function ignoreModification(entityType: string, entityId: string) {
     delete fileModificationStatus[entityType][entityId];
 }
 
-ws.subscribeToMessages(async (message: Message) => {
+ws.subscribeToMessages(async message => {
     if (message.type !== "openedFileUpdated") {
         return;
     }
diff --git a/apps/client/src/services/import.ts b/apps/client/src/services/import.ts
index 035bed6a6..6121ab422 100644
--- a/apps/client/src/services/import.ts
+++ b/apps/client/src/services/import.ts
@@ -4,6 +4,7 @@ import ws from "./ws.js";
 import utils from "./utils.js";
 import appContext from "../components/app_context.js";
 import { t } from "./i18n.js";
+import { WebSocketMessage } from "@triliumnext/commons";
 
 type BooleanLike = boolean | "true" | "false";
 
@@ -66,7 +67,7 @@ function makeToast(id: string, message: string): ToastOptions {
 }
 
 ws.subscribeToMessages(async (message) => {
-    if (message.taskType !== "importNotes") {
+    if (!("taskType" in message) || message.taskType !== "importNotes") {
         return;
     }
 
@@ -81,14 +82,14 @@ ws.subscribeToMessages(async (message) => {
 
         toastService.showPersistent(toast);
 
-        if (message.result.importedNoteId) {
+        if (typeof message.result === "object" && message.result.importedNoteId) {
             await appContext.tabManager.getActiveContext()?.setNote(message.result.importedNoteId);
         }
     }
 });
 
-ws.subscribeToMessages(async (message) => {
-    if (message.taskType !== "importAttachments") {
+ws.subscribeToMessages(async (message: WebSocketMessage) => {
+    if (!("taskType" in message) || message.taskType !== "importAttachments") {
         return;
     }
 
@@ -103,7 +104,7 @@ ws.subscribeToMessages(async (message) => {
 
         toastService.showPersistent(toast);
 
-        if (message.result.parentNoteId) {
message.result === "object" && message.result.parentNoteId) { await appContext.tabManager.getActiveContext()?.setNote(message.result.importedNoteId, { viewScope: { viewMode: "attachments" diff --git a/apps/client/src/services/protected_session.ts b/apps/client/src/services/protected_session.ts index fc34a805f..94148a455 100644 --- a/apps/client/src/services/protected_session.ts +++ b/apps/client/src/services/protected_session.ts @@ -107,7 +107,7 @@ function makeToast(message: Message, title: string, text: string): ToastOptions } ws.subscribeToMessages(async (message) => { - if (message.taskType !== "protectNotes") { + if (!("taskType" in message) || message.taskType !== "protectNotes") { return; } diff --git a/apps/client/src/services/ws.ts b/apps/client/src/services/ws.ts index dcd63e577..b873289b4 100644 --- a/apps/client/src/services/ws.ts +++ b/apps/client/src/services/ws.ts @@ -6,8 +6,9 @@ import frocaUpdater from "./froca_updater.js"; import appContext from "../components/app_context.js"; import { t } from "./i18n.js"; import type { EntityChange } from "../server_types.js"; +import { WebSocketMessage } from "@triliumnext/commons"; -type MessageHandler = (message: any) => void; +type MessageHandler = (message: WebSocketMessage) => void; const messageHandlers: MessageHandler[] = []; let ws: WebSocket; diff --git a/apps/client/src/widgets/dialogs/export.tsx b/apps/client/src/widgets/dialogs/export.tsx index 594104b66..441b86315 100644 --- a/apps/client/src/widgets/dialogs/export.tsx +++ b/apps/client/src/widgets/dialogs/export.tsx @@ -140,7 +140,7 @@ ws.subscribeToMessages(async (message) => { }; } - if (message.taskType !== "export") { + if (!("taskType" in message) || message.taskType !== "export") { return; } @@ -155,4 +155,4 @@ ws.subscribeToMessages(async (message) => { toastService.showPersistent(toast); } -}); \ No newline at end of file +}); diff --git a/apps/client/src/widgets/sync_status.ts b/apps/client/src/widgets/sync_status.ts index ce26b6078..d6bd8b675 100644 --- a/apps/client/src/widgets/sync_status.ts +++ b/apps/client/src/widgets/sync_status.ts @@ -5,6 +5,7 @@ import options from "../services/options.js"; import syncService from "../services/sync.js"; import { escapeQuotes } from "../services/utils.js"; import { Tooltip } from "bootstrap"; +import { WebSocketMessage } from "@triliumnext/commons"; const TPL = /*html*/`
@@ -117,8 +118,7 @@ export default class SyncStatusWidget extends BasicWidget {
         this.$widget.find(`.sync-status-${className}`).show();
     }
 
-    // TriliumNextTODO: Use Type Message from "services/ws.ts"
-    processMessage(message: { type: string; lastSyncedPush: number; data: { lastSyncedPush: number } }) {
+    processMessage(message: WebSocketMessage) {
         if (message.type === "sync-pull-in-progress") {
             this.syncState = "in-progress";
             this.lastSyncedPush = message.lastSyncedPush;
diff --git a/apps/client/src/widgets/watched_file_update_status.ts b/apps/client/src/widgets/watched_file_update_status.ts
index 5181c333b..efec3d2af 100644
--- a/apps/client/src/widgets/watched_file_update_status.ts
+++ b/apps/client/src/widgets/watched_file_update_status.ts
@@ -73,13 +73,13 @@ export default class WatchedFileUpdateStatusWidget extends NoteContextAwareWidge
     async refreshWithNote(note: FNote) {
         const { entityType, entityId } = this.getEntity();
 
-        if (!entityType || !entityId) {
-            return;
-        }
+        if (!entityType || !entityId) return;
 
         const status = fileWatcher.getFileModificationStatus(entityType, entityId);
 
         this.$filePath.text(status.filePath);
-        this.$fileLastModified.text(dayjs.unix(status.lastModifiedMs / 1000).format("HH:mm:ss"));
+        if (status.lastModifiedMs) {
+            this.$fileLastModified.text(dayjs.unix(status.lastModifiedMs / 1000).format("HH:mm:ss"));
+        }
     }
 
     getEntity() {
diff --git a/apps/server/src/routes/api/llm.ts b/apps/server/src/routes/api/llm.ts
index a15660676..78573ceda 100644
--- a/apps/server/src/routes/api/llm.ts
+++ b/apps/server/src/routes/api/llm.ts
@@ -1,18 +1,9 @@
 import type { Request, Response } from "express";
 import log from "../../services/log.js";
-import options from "../../services/options.js";
 import restChatService from "../../services/llm/rest_chat_service.js";
 import chatStorageService from '../../services/llm/chat_storage_service.js';
-
-// Define basic interfaces
-interface ChatMessage {
-    role: 'user' | 'assistant' | 'system';
-    content: string;
-    timestamp?: Date;
-}
-
-
+import { WebSocketMessage } from "@triliumnext/commons";
 
 /**
  * @swagger
@@ -419,7 +410,7 @@ async function sendMessage(req: Request, res: Response) {
  */
 async function streamMessage(req: Request, res: Response) {
     log.info("=== Starting streamMessage ===");
-    
+
     try {
         const chatNoteId = req.params.chatNoteId;
         const { content, useAdvancedContext, showThinking, mentions } = req.body;
@@ -434,7 +425,7 @@ async function streamMessage(req: Request, res: Response) {
             (res as any).triliumResponseHandled = true;
             return;
         }
-        
+
         // Send immediate success response
         res.status(200).json({
             success: true,
@@ -442,12 +433,12 @@ async function streamMessage(req: Request, res: Response) {
         });
         // Mark response as handled to prevent further processing
         (res as any).triliumResponseHandled = true;
-        
+
         // Start background streaming process after sending response
        handleStreamingProcess(chatNoteId, content, useAdvancedContext, showThinking, mentions)
            .catch(error => {
                log.error(`Background streaming error: ${error.message}`);
-                
+
                // Send error via WebSocket since HTTP response was already sent
                import('../../services/ws.js').then(wsModule => {
                    wsModule.default.sendMessageToAllClients({
@@ -460,11 +451,11 @@ async function streamMessage(req: Request, res: Response) {
                    log.error(`Could not send WebSocket error: ${wsError}`);
                });
            });
-        
+
    } catch (error) {
        // Handle any synchronous errors
        log.error(`Synchronous error in streamMessage: ${error}`);
-        
+
        if (!res.headersSent) {
            res.status(500).json({
                success: false,
@@ -481,21 +472,21 @@ async function streamMessage(req: Request, res: Response) {
  * This is separate from the HTTP request/response cycle
  */
 async function handleStreamingProcess(
-    chatNoteId: string, 
-    content: string, 
-    useAdvancedContext: boolean, 
-    showThinking: boolean, 
+    chatNoteId: string,
+    content: string,
+    useAdvancedContext: boolean,
+    showThinking: boolean,
     mentions: any[]
 ) {
     log.info("=== Starting background streaming process ===");
-    
+
     // Get or create chat directly from storage
     let chat = await chatStorageService.getChat(chatNoteId);
     if (!chat) {
         chat = await chatStorageService.createChat('New Chat');
         log.info(`Created new chat with ID: ${chat.id} for stream request`);
     }
-    
+
     // Add the user message to the chat immediately
     chat.messages.push({
         role: 'user',
@@ -544,9 +535,9 @@ async function handleStreamingProcess(
             thinking: showThinking ? 'Initializing streaming LLM response...' : undefined
         });
 
-        // Instead of calling the complex handleSendMessage service, 
+        // Instead of calling the complex handleSendMessage service,
         // let's implement streaming directly to avoid response conflicts
-        
+
         try {
             // Check if AI is enabled
             const optionsModule = await import('../../services/options.js');
@@ -570,7 +561,7 @@ async function handleStreamingProcess(
             // Get selected model
             const { getSelectedModelConfig } = await import('../../services/llm/config/configuration_helpers.js');
             const modelConfig = await getSelectedModelConfig();
-            
+
             if (!modelConfig) {
                 throw new Error("No valid AI model configuration found");
             }
@@ -590,7 +581,7 @@ async function handleStreamingProcess(
                     chatNoteId: chatNoteId
                 },
                 streamCallback: (data, done, rawChunk) => {
-                    const message = {
+                    const message: WebSocketMessage = {
                         type: 'llm-stream' as const,
                         chatNoteId: chatNoteId,
                         done: done
@@ -634,7 +625,7 @@ async function handleStreamingProcess(
 
             // Execute the pipeline
             await pipeline.execute(pipelineInput);
-            
+
         } catch (error: any) {
             log.error(`Error in direct streaming: ${error.message}`);
             wsService.sendMessageToAllClients({
diff --git a/apps/server/src/services/import/mime.ts b/apps/server/src/services/import/mime.ts
index 479bd3494..cce580a08 100644
--- a/apps/server/src/services/import/mime.ts
+++ b/apps/server/src/services/import/mime.ts
@@ -2,8 +2,7 @@
 
 import mimeTypes from "mime-types";
 import path from "path";
-import type { TaskData } from "../task_context_interface.js";
-import type { NoteType } from "@triliumnext/commons";
+import type { NoteType, TaskData } from "@triliumnext/commons";
 
 const CODE_MIME_TYPES = new Set([
     "application/json",
diff --git a/apps/server/src/services/llm/chat/handlers/stream_handler.ts b/apps/server/src/services/llm/chat/handlers/stream_handler.ts
index 3aeb26d83..1ebca5d5a 100644
--- a/apps/server/src/services/llm/chat/handlers/stream_handler.ts
+++ b/apps/server/src/services/llm/chat/handlers/stream_handler.ts
@@ -4,7 +4,6 @@ import log from "../../../log.js";
 
 import type { Response } from "express";
 import type { StreamChunk } from "../../ai_interface.js";
-import type { LLMStreamMessage } from "../../interfaces/chat_ws_messages.js";
 import type { ChatSession } from "../../interfaces/chat_session.js";
 
 /**
@@ -46,7 +45,7 @@ export class StreamHandler {
             type: 'llm-stream',
             chatNoteId,
             thinking: 'Preparing response...'
-        } as LLMStreamMessage);
+        });
 
         try {
             // Import the tool handler
@@ -66,7 +65,7 @@ export class StreamHandler {
                     type: 'llm-stream',
                     chatNoteId,
                     thinking: 'Analyzing tools needed for this request...'
-                } as LLMStreamMessage);
+                });
 
                 try {
                     // Execute the tools
@@ -82,7 +81,7 @@ export class StreamHandler {
                             tool: toolResult.name,
                             result: toolResult.content.substring(0, 100) + (toolResult.content.length > 100 ? '...' : '')
                         }
-                    } as LLMStreamMessage);
+                    });
                 }
 
                 // Make follow-up request with tool results
@@ -123,7 +122,7 @@ export class StreamHandler {
                         chatNoteId,
                         error: `Error executing tools: ${toolError instanceof Error ? toolError.message : 'Unknown error'}`,
                         done: true
-                    } as LLMStreamMessage);
+                    });
                 }
             } else if (response.stream) {
                 // Handle standard streaming through the stream() method
@@ -152,7 +151,7 @@ export class StreamHandler {
                     chatNoteId,
                     content: messageContent,
                     done: true
-                } as LLMStreamMessage);
+                });
 
                 log.info(`Complete response sent`);
 
@@ -174,14 +173,14 @@ export class StreamHandler {
                 type: 'llm-stream',
                 chatNoteId,
                 error: `Error generating response: ${streamingError instanceof Error ? streamingError.message : 'Unknown error'}`
-            } as LLMStreamMessage);
+            });
 
             // Signal completion
             wsService.sendMessageToAllClients({
                 type: 'llm-stream',
                 chatNoteId,
                 done: true
-            } as LLMStreamMessage);
+            });
         }
     }
 
@@ -218,7 +217,7 @@ export class StreamHandler {
                     done: !!chunk.done, // Include done flag with each chunk
                     // Include any raw data from the provider that might contain thinking/tool info
                     ...(chunk.raw ? { raw: chunk.raw } : {})
-                } as LLMStreamMessage);
+                });
 
                 // Log the first chunk (useful for debugging)
                 if (messageContent.length === chunk.text.length) {
@@ -232,7 +231,7 @@ export class StreamHandler {
                     type: 'llm-stream',
                     chatNoteId,
                     thinking: chunk.raw.thinking
-                } as LLMStreamMessage);
+                });
             }
 
             // If the provider indicates tool execution, relay that
@@ -241,7 +240,7 @@ export class StreamHandler {
                     type: 'llm-stream',
                     chatNoteId,
                     toolExecution: chunk.raw.toolExecution
-                } as LLMStreamMessage);
+                });
             }
 
             // Handle direct tool_calls in the response (for OpenAI)
@@ -252,7 +251,7 @@ export class StreamHandler {
                 wsService.sendMessageToAllClients({
                     type: 'tool_execution_start',
                     chatNoteId
-                } as LLMStreamMessage);
+                });
 
                 // Process each tool call
                 for (const toolCall of chunk.tool_calls) {
@@ -277,7 +276,7 @@ export class StreamHandler {
                             toolCallId: toolCall.id,
                             args: args
                         }
-                    } as LLMStreamMessage);
+                    });
                 }
             }
 
@@ -337,7 +336,7 @@ export class StreamHandler {
                     type: 'llm-stream',
                     chatNoteId,
                     done: true
-                } as LLMStreamMessage);
+                });
             }
 
             // Store the full response in the session
@@ -360,7 +359,7 @@ export class StreamHandler {
                 chatNoteId,
                 error: `Error during streaming: ${streamError instanceof Error ? streamError.message : 'Unknown error'}`,
                 done: true
-            } as LLMStreamMessage);
+            });
 
             throw streamError;
         }
diff --git a/apps/server/src/services/llm/chat/index.ts b/apps/server/src/services/llm/chat/index.ts
index 79b587a09..622f65374 100644
--- a/apps/server/src/services/llm/chat/index.ts
+++ b/apps/server/src/services/llm/chat/index.ts
@@ -7,7 +7,6 @@ import { ToolHandler } from './handlers/tool_handler.js';
 import { StreamHandler } from './handlers/stream_handler.js';
 import * as messageFormatter from './utils/message_formatter.js';
 import type { ChatSession, ChatMessage, NoteSource } from '../interfaces/chat_session.js';
-import type { LLMStreamMessage } from '../interfaces/chat_ws_messages.js';
 
 // Export components
 export {
@@ -22,6 +21,5 @@ export {
 export type {
     ChatSession,
     ChatMessage,
-    NoteSource,
-    LLMStreamMessage
+    NoteSource
 };
diff --git a/apps/server/src/services/llm/chat/rest_chat_service.ts b/apps/server/src/services/llm/chat/rest_chat_service.ts
index 5bf57c042..45af7e944 100644
--- a/apps/server/src/services/llm/chat/rest_chat_service.ts
+++ b/apps/server/src/services/llm/chat/rest_chat_service.ts
@@ -4,18 +4,15 @@
  */
 import log from "../../log.js";
 import type { Request, Response } from "express";
-import type { Message, ChatCompletionOptions } from "../ai_interface.js";
+import type { Message } from "../ai_interface.js";
 import aiServiceManager from "../ai_service_manager.js";
 import { ChatPipeline } from "../pipeline/chat_pipeline.js";
 import type { ChatPipelineInput } from "../pipeline/interfaces.js";
 import options from "../../options.js";
 import { ToolHandler } from "./handlers/tool_handler.js";
-import type { LLMStreamMessage } from "../interfaces/chat_ws_messages.js";
 import chatStorageService from '../chat_storage_service.js';
-import {
-    isAIEnabled,
-    getSelectedModelConfig,
-} from '../config/configuration_helpers.js';
+import { getSelectedModelConfig } from '../config/configuration_helpers.js';
+import { WebSocketMessage } from "@triliumnext/commons";
 
 /**
  * Simplified service to handle chat API interactions
@@ -79,7 +76,7 @@ class RestChatService {
             throw new Error("Database is not initialized");
         }
 
-        // Get or create AI service - will throw meaningful error if not possible 
+        // Get or create AI service - will throw meaningful error if not possible
         await aiServiceManager.getOrCreateAnyService();
 
         // Load or create chat directly from storage
@@ -204,7 +201,7 @@ class RestChatService {
         accumulatedContentRef: { value: string },
         chat: { id: string; messages: Message[]; title: string }
     ) {
-        const message: LLMStreamMessage = {
+        const message: WebSocketMessage = {
             type: 'llm-stream',
             chatNoteId: chatNoteId,
             done: done
@@ -237,7 +234,7 @@ class RestChatService {
 
         // Send WebSocket message
         wsService.sendMessageToAllClients(message);
-        
+
         // When streaming is complete, save the accumulated content to the chat note
         if (done) {
             try {
@@ -248,7 +245,7 @@ class RestChatService {
                     role: 'assistant',
                     content: accumulatedContentRef.value
                 });
-                
+
                 // Save the updated chat back to storage
                 await chatStorageService.updateChat(chat.id, chat.messages, chat.title);
                 log.info(`Saved streaming assistant response: ${accumulatedContentRef.value.length} characters`);
@@ -257,7 +254,7 @@ class RestChatService {
                 // Log error but don't break the response flow
                 log.error(`Error saving streaming response: ${error}`);
             }
-            
+
             // Note: For WebSocket-only streaming, we don't end the HTTP response here
             // since it was already handled by the calling endpoint
         }
diff --git a/apps/server/src/services/llm/interfaces/chat_ws_messages.ts b/apps/server/src/services/llm/interfaces/chat_ws_messages.ts
deleted file mode 100644
index f75d399f4..000000000
--- a/apps/server/src/services/llm/interfaces/chat_ws_messages.ts
+++ /dev/null
@@ -1,24 +0,0 @@
-/**
- * Interfaces for WebSocket LLM streaming messages
- */
-
-/**
- * Interface for WebSocket LLM streaming messages
- */
-export interface LLMStreamMessage {
-    type: 'llm-stream' | 'tool_execution_start' | 'tool_result' | 'tool_execution_error' | 'tool_completion_processing';
-    chatNoteId: string;
-    content?: string;
-    thinking?: string;
-    toolExecution?: {
-        action?: string;
-        tool?: string;
-        toolCallId?: string;
-        result?: string | Record<string, unknown>;
-        error?: string;
-        args?: Record<string, unknown>;
-    };
-    done?: boolean;
-    error?: string;
-    raw?: unknown;
-}
diff --git a/apps/server/src/services/task_context.ts b/apps/server/src/services/task_context.ts
index f83154147..7cef303f8 100644
--- a/apps/server/src/services/task_context.ts
+++ b/apps/server/src/services/task_context.ts
@@ -1,6 +1,6 @@
 "use strict";
 
-import type { TaskData } from "./task_context_interface.js";
+import type { TaskData } from "@triliumnext/commons";
 import ws from "./ws.js";
 
 // taskId => TaskContext
@@ -61,7 +61,7 @@ class TaskContext {
             taskId: this.taskId,
             taskType: this.taskType,
             data: this.data,
-            message: message
+            message
         });
     }
 
@@ -71,7 +71,7 @@ class TaskContext {
             taskId: this.taskId,
             taskType: this.taskType,
             data: this.data,
-            result: result
+            result
         });
     }
 }
diff --git a/apps/server/src/services/task_context_interface.ts b/apps/server/src/services/task_context_interface.ts
deleted file mode 100644
index 3c359d742..000000000
--- a/apps/server/src/services/task_context_interface.ts
+++ /dev/null
@@ -1,7 +0,0 @@
-export interface TaskData {
-    safeImport?: boolean;
-    textImportedAsText?: boolean;
-    codeImportedAsCode?: boolean;
-    shrinkImages?: boolean;
-    replaceUnderscoresWithSpaces?: boolean;
-}
diff --git a/apps/server/src/services/ws.ts b/apps/server/src/services/ws.ts
index 71e94707e..f89b44869 100644
--- a/apps/server/src/services/ws.ts
+++ b/apps/server/src/services/ws.ts
@@ -1,5 +1,5 @@
 import { WebSocketServer as WebSocketServer, WebSocket } from "ws";
-import { isDev, isElectron, randomString } from "./utils.js";
+import { isElectron, randomString } from "./utils.js";
 import log from "./log.js";
 import sql from "./sql.js";
 import cls from "./cls.js";
@@ -15,52 +15,6 @@ import { WebSocketMessage, type EntityChange } from "@triliumnext/commons";
 let webSocketServer!: WebSocketServer;
 let lastSyncedPush: number | null = null;
 
-interface Message {
-    type: string;
-    data?: {
-        lastSyncedPush?: number | null;
-        entityChanges?: any[];
-        shrinkImages?: boolean;
-    } | null;
-    lastSyncedPush?: number | null;
-
-    progressCount?: number;
-    taskId?: string;
-    taskType?: string | null;
-    message?: string;
-    reason?: string;
-    result?: string | Record<string, unknown>;
-
-    script?: string;
-    params?: any[];
-    noteId?: string;
-    messages?: string[];
-    startNoteId?: string;
-    currentNoteId?: string;
-    entityType?: string;
-    entityId?: string;
-    originEntityName?: "notes";
-    originEntityId?: string | null;
-    lastModifiedMs?: number;
-    filePath?: string;
-
-    // LLM streaming specific fields
-    chatNoteId?: string;
-    content?: string;
-    thinking?: string;
-    toolExecution?: {
-        action?: string;
-        tool?: string;
-        toolCallId?: string;
-        result?: string | Record<string, unknown>;
-        error?: string;
-        args?: Record<string, unknown>;
-    };
-    done?: boolean;
-    error?: string;
-    raw?: unknown;
-}
-
 type SessionParser = (req: IncomingMessage, params: {}, cb: () => void) => void;
 
 function init(httpServer: HttpServer, sessionParser: SessionParser) {
     webSocketServer = new WebSocketServer({
@@ -106,7 +60,7 @@ Stack: ${message.stack}`);
         });
     });
 }
 
-function sendMessage(client: WebSocket, message: Message) {
+function sendMessage(client: WebSocket, message: WebSocketMessage) {
     const jsonStr = JSON.stringify(message);
 
     if (client.readyState === WebSocket.OPEN) {
@@ -114,7 +68,7 @@ Stack: ${message.stack}`);
     }
 }
 
-function sendMessageToAllClients(message: Message) {
+function sendMessageToAllClients(message: WebSocketMessage) {
     const jsonStr = JSON.stringify(message);
 
     if (webSocketServer) {
diff --git a/packages/commons/src/lib/server_api.ts b/packages/commons/src/lib/server_api.ts
index 0d56685cc..2454fbf1b 100644
--- a/packages/commons/src/lib/server_api.ts
+++ b/packages/commons/src/lib/server_api.ts
@@ -270,3 +270,96 @@ export interface EntityChangeRecord {
     entityChange: EntityChange;
     entity?: EntityRow;
 }
+
+type TaskStatus<TypeT, DataT> = {
+    type: "taskProgressCount",
+    taskId: string;
+    taskType: TypeT;
+    data: DataT,
+    progressCount: number
+} | {
+    type: "taskError",
+    taskId: string;
+    taskType: TypeT;
+    data: DataT;
+    message: string;
+} | {
+    type: "taskSucceeded",
+    taskId: string;
+    taskType: TypeT;
+    data: DataT;
+    result?: string | Record<string, unknown>
+}
+
+type TaskDefinitions =
+    TaskStatus<"protectNotes", { protect: boolean; }>
+    | TaskStatus<"importNotes", null>
+    | TaskStatus<"importAttachments", null>
+    | TaskStatus<"deleteNotes", null>
+    | TaskStatus<"undeleteNotes", null>
+    | TaskStatus<"export", null>
+;
+
+export interface OpenedFileUpdateStatus {
+    entityType: string;
+    entityId: string;
+    lastModifiedMs?: number;
+    filePath: string;
+}
+
+export type WebSocketMessage = TaskDefinitions | {
+    type: "ping"
+} | {
+    type: "frontend-update",
+    data: {
+        lastSyncedPush: number,
+        entityChanges: EntityChange[]
+    }
+} | {
+    type: "openNote",
+    noteId: string
+} | OpenedFileUpdateStatus & {
+    type: "openedFileUpdated"
+} | {
+    type: "protectedSessionLogin"
+} | {
+    type: "protectedSessionLogout"
+} | {
+    type: "toast",
+    message: string;
+} | {
+    type: "api-log-messages",
+    noteId: string,
+    messages: string[]
+} | {
+    type: "execute-script";
+    script: string;
+    params: unknown[];
+    startNoteId?: string;
+    currentNoteId: string;
+    originEntityName: string;
+    originEntityId?: string | null;
+} | {
+    type: "reload-frontend";
+    reason: string;
+} | {
+    type: "sync-pull-in-progress" | "sync-push-in-progress" | "sync-finished" | "sync-failed";
+    lastSyncedPush: number;
+} | {
+    type: "consistency-checks-failed"
+} | {
+    type: "llm-stream",
+    chatNoteId: string;
+    done?: boolean;
+    error?: string;
+    thinking?: string;
+    content?: string;
+    toolExecution?: {
+        action?: string;
+        tool?: string;
+        toolCallId?: string;
+        result?: string | Record<string, unknown>;
+        error?: string;
+        args?: Record<string, unknown>;
+    }
+}