why does it work now

This commit is contained in:
perf3ct 2025-04-16 21:09:44 +00:00
parent ec6214bee0
commit daccadd5e0
No known key found for this signature in database
GPG Key ID: 569C4EEC436F5232

View File

@ -66,12 +66,25 @@ export class OpenAIService extends BaseAIService {
params.tool_choice = providerOptions.tool_choice as OpenAI.Chat.ChatCompletionToolChoiceOption;
}
// Log the request parameters
console.log('OpenAI API Request:', JSON.stringify({
endpoint: 'chat.completions.create',
model: params.model,
messages: params.messages,
temperature: params.temperature,
max_tokens: params.max_tokens,
stream: params.stream,
tools: params.tools,
tool_choice: params.tool_choice
}, null, 2));
// If streaming is requested
if (providerOptions.stream) {
params.stream = true;

// Get stream from OpenAI SDK
const stream = await client.chat.completions.create(params);
console.log('OpenAI API Stream Started');
// Create a closure to hold accumulated tool calls // Create a closure to hold accumulated tool calls
let accumulatedToolCalls: any[] = [];
@ -90,6 +103,9 @@ export class OpenAIService extends BaseAIService {
// Process the stream
if (Symbol.asyncIterator in stream) {
for await (const chunk of stream as AsyncIterable<OpenAI.Chat.ChatCompletionChunk>) {
// Log each chunk received from OpenAI
console.log('OpenAI API Stream Chunk:', JSON.stringify(chunk, null, 2));
const content = chunk.choices[0]?.delta?.content || '';
const isDone = !!chunk.choices[0]?.finish_reason;
@ -153,12 +169,17 @@ export class OpenAIService extends BaseAIService {
await callback(streamChunk);

if (isDone) {
console.log('OpenAI API Stream Complete. Final text length:', completeText.length);
if (accumulatedToolCalls.length > 0) {
console.log('OpenAI API Tool Calls:', JSON.stringify(accumulatedToolCalls, null, 2));
}
break;
}
}
} else {
// Fallback for non-iterable response
console.warn('Stream is not iterable, falling back to non-streaming response');
console.log('OpenAI API Non-iterable Stream Response:', JSON.stringify(stream, null, 2));
if ('choices' in stream) {
const content = stream.choices[0]?.message?.content || '';
@ -168,6 +189,7 @@ export class OpenAIService extends BaseAIService {
const toolCalls = stream.choices[0]?.message?.tool_calls;
if (toolCalls) {
response.tool_calls = toolCalls;
console.log('OpenAI API Tool Calls in Non-iterable Response:', JSON.stringify(toolCalls, null, 2));
}

await callback({
@ -198,6 +220,9 @@ export class OpenAIService extends BaseAIService {
const completion = await client.chat.completions.create(params);
// Log the full response from OpenAI
console.log('OpenAI API Response:', JSON.stringify(completion, null, 2));
if (!('choices' in completion)) {
throw new Error('Unexpected response format from OpenAI API');
}