Initial commit: KidsAI Explorer with complete functionality
- Complete KidsAI Explorer application
- Multi-language support (English/German)
- AI-powered educational guidance using OpenAI
- Interactive chat interface for children
- Fixes for placeholder translations
- Mobile-responsive design
- Educational framework for critical thinking
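The commit body mentions AI-powered educational guidance via OpenAI, but none of that application code appears in this diff, which only vendors the SDK. A minimal, hypothetical sketch of such a call (the helper name askExplorer, the model choice, and the prompt wording are assumptions, not taken from the repository) might look like:

import OpenAI from 'openai';

const client = new OpenAI({ apiKey: process.env.OPENAI_API_KEY });

// Hypothetical helper: asks for child-friendly guidance in the requested language.
async function askExplorer(question, language = 'en') {
    const completion = await client.chat.completions.create({
        model: 'gpt-4o-mini', // assumed model choice
        messages: [
            {
                role: 'system',
                content: `You are a friendly learning guide who encourages children to think critically. Reply in ${language === 'de' ? 'German' : 'English'}.`,
            },
            { role: 'user', content: question },
        ],
    });
    return completion.choices[0]?.message?.content ?? '';
}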
node_modules/openai/lib/AbstractChatCompletionRunner.mjs (new normal file, 287 lines added; generated, vendored)
@@ -0,0 +1,287 @@
var _AbstractChatCompletionRunner_instances, _AbstractChatCompletionRunner_getFinalContent, _AbstractChatCompletionRunner_getFinalMessage, _AbstractChatCompletionRunner_getFinalFunctionToolCall, _AbstractChatCompletionRunner_getFinalFunctionToolCallResult, _AbstractChatCompletionRunner_calculateTotalUsage, _AbstractChatCompletionRunner_validateParams, _AbstractChatCompletionRunner_stringifyFunctionCallResult;
import { __classPrivateFieldGet } from "../internal/tslib.mjs";
import { OpenAIError } from "../error.mjs";
import { isRunnableFunctionWithParse, } from "./RunnableFunction.mjs";
import { isAssistantMessage, isToolMessage } from "./chatCompletionUtils.mjs";
import { EventStream } from "./EventStream.mjs";
import { isAutoParsableTool, parseChatCompletion } from "../lib/parser.mjs";
const DEFAULT_MAX_CHAT_COMPLETIONS = 10;
export class AbstractChatCompletionRunner extends EventStream {
    constructor() {
        super(...arguments);
        _AbstractChatCompletionRunner_instances.add(this);
        this._chatCompletions = [];
        this.messages = [];
    }
    _addChatCompletion(chatCompletion) {
        this._chatCompletions.push(chatCompletion);
        this._emit('chatCompletion', chatCompletion);
        const message = chatCompletion.choices[0]?.message;
        if (message)
            this._addMessage(message);
        return chatCompletion;
    }
    _addMessage(message, emit = true) {
        if (!('content' in message))
            message.content = null;
        this.messages.push(message);
        if (emit) {
            this._emit('message', message);
            if (isToolMessage(message) && message.content) {
                // Note, this assumes that {role: 'tool', content: …} is always the result of a call of tool of type=function.
                this._emit('functionToolCallResult', message.content);
            }
            else if (isAssistantMessage(message) && message.tool_calls) {
                for (const tool_call of message.tool_calls) {
                    if (tool_call.type === 'function') {
                        this._emit('functionToolCall', tool_call.function);
                    }
                }
            }
        }
    }
    /**
     * @returns a promise that resolves with the final ChatCompletion, or rejects
     * if an error occurred or the stream ended prematurely without producing a ChatCompletion.
     */
    async finalChatCompletion() {
        await this.done();
        const completion = this._chatCompletions[this._chatCompletions.length - 1];
        if (!completion)
            throw new OpenAIError('stream ended without producing a ChatCompletion');
        return completion;
    }
    /**
     * @returns a promise that resolves with the content of the final ChatCompletionMessage, or rejects
     * if an error occurred or the stream ended prematurely without producing a ChatCompletionMessage.
     */
    async finalContent() {
        await this.done();
        return __classPrivateFieldGet(this, _AbstractChatCompletionRunner_instances, "m", _AbstractChatCompletionRunner_getFinalContent).call(this);
    }
    /**
     * @returns a promise that resolves with the final assistant ChatCompletionMessage response,
     * or rejects if an error occurred or the stream ended prematurely without producing a ChatCompletionMessage.
     */
    async finalMessage() {
        await this.done();
        return __classPrivateFieldGet(this, _AbstractChatCompletionRunner_instances, "m", _AbstractChatCompletionRunner_getFinalMessage).call(this);
    }
    /**
     * @returns a promise that resolves with the content of the final FunctionCall, or rejects
     * if an error occurred or the stream ended prematurely without producing a ChatCompletionMessage.
     */
    async finalFunctionToolCall() {
        await this.done();
        return __classPrivateFieldGet(this, _AbstractChatCompletionRunner_instances, "m", _AbstractChatCompletionRunner_getFinalFunctionToolCall).call(this);
    }
    async finalFunctionToolCallResult() {
        await this.done();
        return __classPrivateFieldGet(this, _AbstractChatCompletionRunner_instances, "m", _AbstractChatCompletionRunner_getFinalFunctionToolCallResult).call(this);
    }
    async totalUsage() {
        await this.done();
        return __classPrivateFieldGet(this, _AbstractChatCompletionRunner_instances, "m", _AbstractChatCompletionRunner_calculateTotalUsage).call(this);
    }
    allChatCompletions() {
        return [...this._chatCompletions];
    }
    _emitFinal() {
        const completion = this._chatCompletions[this._chatCompletions.length - 1];
        if (completion)
            this._emit('finalChatCompletion', completion);
        const finalMessage = __classPrivateFieldGet(this, _AbstractChatCompletionRunner_instances, "m", _AbstractChatCompletionRunner_getFinalMessage).call(this);
        if (finalMessage)
            this._emit('finalMessage', finalMessage);
        const finalContent = __classPrivateFieldGet(this, _AbstractChatCompletionRunner_instances, "m", _AbstractChatCompletionRunner_getFinalContent).call(this);
        if (finalContent)
            this._emit('finalContent', finalContent);
        const finalFunctionCall = __classPrivateFieldGet(this, _AbstractChatCompletionRunner_instances, "m", _AbstractChatCompletionRunner_getFinalFunctionToolCall).call(this);
        if (finalFunctionCall)
            this._emit('finalFunctionToolCall', finalFunctionCall);
        const finalFunctionCallResult = __classPrivateFieldGet(this, _AbstractChatCompletionRunner_instances, "m", _AbstractChatCompletionRunner_getFinalFunctionToolCallResult).call(this);
        if (finalFunctionCallResult != null)
            this._emit('finalFunctionToolCallResult', finalFunctionCallResult);
        if (this._chatCompletions.some((c) => c.usage)) {
            this._emit('totalUsage', __classPrivateFieldGet(this, _AbstractChatCompletionRunner_instances, "m", _AbstractChatCompletionRunner_calculateTotalUsage).call(this));
        }
    }
    async _createChatCompletion(client, params, options) {
        const signal = options?.signal;
        if (signal) {
            if (signal.aborted)
                this.controller.abort();
            signal.addEventListener('abort', () => this.controller.abort());
        }
        __classPrivateFieldGet(this, _AbstractChatCompletionRunner_instances, "m", _AbstractChatCompletionRunner_validateParams).call(this, params);
        const chatCompletion = await client.chat.completions.create({ ...params, stream: false }, { ...options, signal: this.controller.signal });
        this._connected();
        return this._addChatCompletion(parseChatCompletion(chatCompletion, params));
    }
    async _runChatCompletion(client, params, options) {
        for (const message of params.messages) {
            this._addMessage(message, false);
        }
        return await this._createChatCompletion(client, params, options);
    }
    async _runTools(client, params, options) {
        const role = 'tool';
        const { tool_choice = 'auto', stream, ...restParams } = params;
        const singleFunctionToCall = typeof tool_choice !== 'string' && tool_choice?.function?.name;
        const { maxChatCompletions = DEFAULT_MAX_CHAT_COMPLETIONS } = options || {};
        // TODO(someday): clean this logic up
        const inputTools = params.tools.map((tool) => {
            if (isAutoParsableTool(tool)) {
                if (!tool.$callback) {
                    throw new OpenAIError('Tool given to `.runTools()` that does not have an associated function');
                }
                return {
                    type: 'function',
                    function: {
                        function: tool.$callback,
                        name: tool.function.name,
                        description: tool.function.description || '',
                        parameters: tool.function.parameters,
                        parse: tool.$parseRaw,
                        strict: true,
                    },
                };
            }
            return tool;
        });
        const functionsByName = {};
        for (const f of inputTools) {
            if (f.type === 'function') {
                functionsByName[f.function.name || f.function.function.name] = f.function;
            }
        }
        const tools = 'tools' in params ?
            inputTools.map((t) => t.type === 'function' ?
                {
                    type: 'function',
                    function: {
                        name: t.function.name || t.function.function.name,
                        parameters: t.function.parameters,
                        description: t.function.description,
                        strict: t.function.strict,
                    },
                }
                : t)
            : undefined;
        for (const message of params.messages) {
            this._addMessage(message, false);
        }
        for (let i = 0; i < maxChatCompletions; ++i) {
            const chatCompletion = await this._createChatCompletion(client, {
                ...restParams,
                tool_choice,
                tools,
                messages: [...this.messages],
            }, options);
            const message = chatCompletion.choices[0]?.message;
            if (!message) {
                throw new OpenAIError(`missing message in ChatCompletion response`);
            }
            if (!message.tool_calls?.length) {
                return;
            }
            for (const tool_call of message.tool_calls) {
                if (tool_call.type !== 'function')
                    continue;
                const tool_call_id = tool_call.id;
                const { name, arguments: args } = tool_call.function;
                const fn = functionsByName[name];
                if (!fn) {
                    const content = `Invalid tool_call: ${JSON.stringify(name)}. Available options are: ${Object.keys(functionsByName)
                        .map((name) => JSON.stringify(name))
                        .join(', ')}. Please try again`;
                    this._addMessage({ role, tool_call_id, content });
                    continue;
                }
                else if (singleFunctionToCall && singleFunctionToCall !== name) {
                    const content = `Invalid tool_call: ${JSON.stringify(name)}. ${JSON.stringify(singleFunctionToCall)} requested. Please try again`;
                    this._addMessage({ role, tool_call_id, content });
                    continue;
                }
                let parsed;
                try {
                    parsed = isRunnableFunctionWithParse(fn) ? await fn.parse(args) : args;
                }
                catch (error) {
                    const content = error instanceof Error ? error.message : String(error);
                    this._addMessage({ role, tool_call_id, content });
                    continue;
                }
                // @ts-expect-error it can't rule out `never` type.
                const rawContent = await fn.function(parsed, this);
                const content = __classPrivateFieldGet(this, _AbstractChatCompletionRunner_instances, "m", _AbstractChatCompletionRunner_stringifyFunctionCallResult).call(this, rawContent);
                this._addMessage({ role, tool_call_id, content });
                if (singleFunctionToCall) {
                    return;
                }
            }
        }
        return;
    }
}
_AbstractChatCompletionRunner_instances = new WeakSet(), _AbstractChatCompletionRunner_getFinalContent = function _AbstractChatCompletionRunner_getFinalContent() {
    return __classPrivateFieldGet(this, _AbstractChatCompletionRunner_instances, "m", _AbstractChatCompletionRunner_getFinalMessage).call(this).content ?? null;
}, _AbstractChatCompletionRunner_getFinalMessage = function _AbstractChatCompletionRunner_getFinalMessage() {
    let i = this.messages.length;
    while (i-- > 0) {
        const message = this.messages[i];
        if (isAssistantMessage(message)) {
            // TODO: support audio here
            const ret = {
                ...message,
                content: message.content ?? null,
                refusal: message.refusal ?? null,
            };
            return ret;
        }
    }
    throw new OpenAIError('stream ended without producing a ChatCompletionMessage with role=assistant');
}, _AbstractChatCompletionRunner_getFinalFunctionToolCall = function _AbstractChatCompletionRunner_getFinalFunctionToolCall() {
    for (let i = this.messages.length - 1; i >= 0; i--) {
        const message = this.messages[i];
        if (isAssistantMessage(message) && message?.tool_calls?.length) {
            return message.tool_calls.at(-1)?.function;
        }
    }
    return;
}, _AbstractChatCompletionRunner_getFinalFunctionToolCallResult = function _AbstractChatCompletionRunner_getFinalFunctionToolCallResult() {
    for (let i = this.messages.length - 1; i >= 0; i--) {
        const message = this.messages[i];
        if (isToolMessage(message) &&
            message.content != null &&
            typeof message.content === 'string' &&
            this.messages.some((x) => x.role === 'assistant' &&
                x.tool_calls?.some((y) => y.type === 'function' && y.id === message.tool_call_id))) {
            return message.content;
        }
    }
    return;
}, _AbstractChatCompletionRunner_calculateTotalUsage = function _AbstractChatCompletionRunner_calculateTotalUsage() {
    const total = {
        completion_tokens: 0,
        prompt_tokens: 0,
        total_tokens: 0,
    };
    for (const { usage } of this._chatCompletions) {
        if (usage) {
            total.completion_tokens += usage.completion_tokens;
            total.prompt_tokens += usage.prompt_tokens;
            total.total_tokens += usage.total_tokens;
        }
    }
    return total;
}, _AbstractChatCompletionRunner_validateParams = function _AbstractChatCompletionRunner_validateParams(params) {
    if (params.n != null && params.n > 1) {
        throw new OpenAIError('ChatCompletion convenience helpers only support n=1 at this time. To use n>1, please use chat.completions.create() directly.');
    }
}, _AbstractChatCompletionRunner_stringifyFunctionCallResult = function _AbstractChatCompletionRunner_stringifyFunctionCallResult(rawContent) {
    return (typeof rawContent === 'string' ? rawContent
        : rawContent === undefined ? 'undefined'
            : JSON.stringify(rawContent));
};
//# sourceMappingURL=AbstractChatCompletionRunner.mjs.map
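For orientation, the class above backs the SDK's automatic tool-calling helpers: _runTools repeatedly calls the API, executes the matching runnable function for each tool call, and feeds the stringified result back as a role 'tool' message until the model answers without tool calls or maxChatCompletions is reached. A minimal consumer sketch, assuming the helper is exposed as client.chat.completions.runTools in this SDK version (older releases expose it under client.beta.chat.completions) and using a hypothetical getCapital tool:

import OpenAI from 'openai';

const client = new OpenAI({ apiKey: process.env.OPENAI_API_KEY });

const runner = client.chat.completions.runTools({
    model: 'gpt-4o-mini', // assumed model choice
    messages: [{ role: 'user', content: 'What is the capital of Germany?' }],
    tools: [
        {
            type: 'function',
            function: {
                name: 'getCapital',
                description: 'Look up the capital city of a country.',
                parameters: {
                    type: 'object',
                    properties: { country: { type: 'string' } },
                    required: ['country'],
                },
                parse: JSON.parse,
                // Hypothetical tool implementation; receives the parsed arguments.
                function: async ({ country }) => (country === 'Germany' ? 'Berlin' : 'unknown'),
            },
        },
    ],
});

// These event names mirror the _emit calls in _addMessage above.
runner.on('functionToolCall', (call) => console.log('tool call:', call.name));
runner.on('functionToolCallResult', (result) => console.log('tool result:', result));

console.log(await runner.finalContent());

Because the tool returns a plain string here, _AbstractChatCompletionRunner_stringifyFunctionCallResult passes it through unchanged before it is appended as the tool message.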