Initial commit: KidsAI Explorer with complete functionality
- Complete KidsAI Explorer application
- Multi-language support (English/German)
- AI-powered educational guidance using OpenAI
- Interactive chat interface for children
- Proper placeholder translation fixes
- Mobile-responsive design
- Educational framework for critical thinking
node_modules/openai/lib/responses/ResponseStream.mjs (new file, 236 lines, generated, vendored)
@@ -0,0 +1,236 @@
var _ResponseStream_instances, _ResponseStream_params, _ResponseStream_currentResponseSnapshot, _ResponseStream_finalResponse, _ResponseStream_beginRequest, _ResponseStream_addEvent, _ResponseStream_endRequest, _ResponseStream_accumulateResponse;
import { __classPrivateFieldGet, __classPrivateFieldSet } from "../../internal/tslib.mjs";
import { APIUserAbortError, OpenAIError } from "../../error.mjs";
import { EventStream } from "../EventStream.mjs";
import { maybeParseResponse } from "../ResponsesParser.mjs";
export class ResponseStream extends EventStream {
    constructor(params) {
        super();
        _ResponseStream_instances.add(this);
        _ResponseStream_params.set(this, void 0);
        _ResponseStream_currentResponseSnapshot.set(this, void 0);
        _ResponseStream_finalResponse.set(this, void 0);
        __classPrivateFieldSet(this, _ResponseStream_params, params, "f");
    }
    static createResponse(client, params, options) {
        const runner = new ResponseStream(params);
        runner._run(() => runner._createOrRetrieveResponse(client, params, {
            ...options,
            headers: { ...options?.headers, 'X-Stainless-Helper-Method': 'stream' },
        }));
        return runner;
    }
    async _createOrRetrieveResponse(client, params, options) {
        const signal = options?.signal;
        if (signal) {
            if (signal.aborted)
                this.controller.abort();
            signal.addEventListener('abort', () => this.controller.abort());
        }
        __classPrivateFieldGet(this, _ResponseStream_instances, "m", _ResponseStream_beginRequest).call(this);
        let stream;
        let starting_after = null;
        if ('response_id' in params) {
            stream = await client.responses.retrieve(params.response_id, { stream: true }, { ...options, signal: this.controller.signal, stream: true });
            starting_after = params.starting_after ?? null;
        }
        else {
            stream = await client.responses.create({ ...params, stream: true }, { ...options, signal: this.controller.signal });
        }
        this._connected();
        for await (const event of stream) {
            __classPrivateFieldGet(this, _ResponseStream_instances, "m", _ResponseStream_addEvent).call(this, event, starting_after);
        }
        if (stream.controller.signal?.aborted) {
            throw new APIUserAbortError();
        }
        return __classPrivateFieldGet(this, _ResponseStream_instances, "m", _ResponseStream_endRequest).call(this);
    }
    [(_ResponseStream_params = new WeakMap(), _ResponseStream_currentResponseSnapshot = new WeakMap(), _ResponseStream_finalResponse = new WeakMap(), _ResponseStream_instances = new WeakSet(), _ResponseStream_beginRequest = function _ResponseStream_beginRequest() {
        if (this.ended)
            return;
        __classPrivateFieldSet(this, _ResponseStream_currentResponseSnapshot, undefined, "f");
    }, _ResponseStream_addEvent = function _ResponseStream_addEvent(event, starting_after) {
        if (this.ended)
            return;
        const maybeEmit = (name, event) => {
            if (starting_after == null || event.sequence_number > starting_after) {
                this._emit(name, event);
            }
        };
        const response = __classPrivateFieldGet(this, _ResponseStream_instances, "m", _ResponseStream_accumulateResponse).call(this, event);
        maybeEmit('event', event);
        switch (event.type) {
            case 'response.output_text.delta': {
                const output = response.output[event.output_index];
                if (!output) {
                    throw new OpenAIError(`missing output at index ${event.output_index}`);
                }
                if (output.type === 'message') {
                    const content = output.content[event.content_index];
                    if (!content) {
                        throw new OpenAIError(`missing content at index ${event.content_index}`);
                    }
                    if (content.type !== 'output_text') {
                        throw new OpenAIError(`expected content to be 'output_text', got ${content.type}`);
                    }
                    maybeEmit('response.output_text.delta', {
                        ...event,
                        snapshot: content.text,
                    });
                }
                break;
            }
            case 'response.function_call_arguments.delta': {
                const output = response.output[event.output_index];
                if (!output) {
                    throw new OpenAIError(`missing output at index ${event.output_index}`);
                }
                if (output.type === 'function_call') {
                    maybeEmit('response.function_call_arguments.delta', {
                        ...event,
                        snapshot: output.arguments,
                    });
                }
                break;
            }
            default:
                maybeEmit(event.type, event);
                break;
        }
    }, _ResponseStream_endRequest = function _ResponseStream_endRequest() {
        if (this.ended) {
            throw new OpenAIError(`stream has ended, this shouldn't happen`);
        }
        const snapshot = __classPrivateFieldGet(this, _ResponseStream_currentResponseSnapshot, "f");
        if (!snapshot) {
            throw new OpenAIError(`request ended without sending any events`);
        }
        __classPrivateFieldSet(this, _ResponseStream_currentResponseSnapshot, undefined, "f");
        const parsedResponse = finalizeResponse(snapshot, __classPrivateFieldGet(this, _ResponseStream_params, "f"));
        __classPrivateFieldSet(this, _ResponseStream_finalResponse, parsedResponse, "f");
        return parsedResponse;
    }, _ResponseStream_accumulateResponse = function _ResponseStream_accumulateResponse(event) {
        let snapshot = __classPrivateFieldGet(this, _ResponseStream_currentResponseSnapshot, "f");
        if (!snapshot) {
            if (event.type !== 'response.created') {
                throw new OpenAIError(`When snapshot hasn't been set yet, expected 'response.created' event, got ${event.type}`);
            }
            snapshot = __classPrivateFieldSet(this, _ResponseStream_currentResponseSnapshot, event.response, "f");
            return snapshot;
        }
        switch (event.type) {
            case 'response.output_item.added': {
                snapshot.output.push(event.item);
                break;
            }
            case 'response.content_part.added': {
                const output = snapshot.output[event.output_index];
                if (!output) {
                    throw new OpenAIError(`missing output at index ${event.output_index}`);
                }
                if (output.type === 'message') {
                    output.content.push(event.part);
                }
                break;
            }
            case 'response.output_text.delta': {
                const output = snapshot.output[event.output_index];
                if (!output) {
                    throw new OpenAIError(`missing output at index ${event.output_index}`);
                }
                if (output.type === 'message') {
                    const content = output.content[event.content_index];
                    if (!content) {
                        throw new OpenAIError(`missing content at index ${event.content_index}`);
                    }
                    if (content.type !== 'output_text') {
                        throw new OpenAIError(`expected content to be 'output_text', got ${content.type}`);
                    }
                    content.text += event.delta;
                }
                break;
            }
            case 'response.function_call_arguments.delta': {
                const output = snapshot.output[event.output_index];
                if (!output) {
                    throw new OpenAIError(`missing output at index ${event.output_index}`);
                }
                if (output.type === 'function_call') {
                    output.arguments += event.delta;
                }
                break;
            }
            case 'response.completed': {
                __classPrivateFieldSet(this, _ResponseStream_currentResponseSnapshot, event.response, "f");
                break;
            }
        }
        return snapshot;
    }, Symbol.asyncIterator)]() {
        const pushQueue = [];
        const readQueue = [];
        let done = false;
        this.on('event', (event) => {
            const reader = readQueue.shift();
            if (reader) {
                reader.resolve(event);
            }
            else {
                pushQueue.push(event);
            }
        });
        this.on('end', () => {
            done = true;
            for (const reader of readQueue) {
                reader.resolve(undefined);
            }
            readQueue.length = 0;
        });
        this.on('abort', (err) => {
            done = true;
            for (const reader of readQueue) {
                reader.reject(err);
            }
            readQueue.length = 0;
        });
        this.on('error', (err) => {
            done = true;
            for (const reader of readQueue) {
                reader.reject(err);
            }
            readQueue.length = 0;
        });
        return {
            next: async () => {
                if (!pushQueue.length) {
                    if (done) {
                        return { value: undefined, done: true };
                    }
                    return new Promise((resolve, reject) => readQueue.push({ resolve, reject })).then((event) => (event ? { value: event, done: false } : { value: undefined, done: true }));
                }
                const event = pushQueue.shift();
                return { value: event, done: false };
            },
            return: async () => {
                this.abort();
                return { value: undefined, done: true };
            },
        };
    }
    /**
     * @returns a promise that resolves with the final Response, or rejects
     * if an error occurred or the stream ended prematurely without producing a Response.
     */
    async finalResponse() {
        await this.done();
        const response = __classPrivateFieldGet(this, _ResponseStream_finalResponse, "f");
        if (!response)
            throw new OpenAIError('stream ended without producing a Response');
        return response;
    }
}
function finalizeResponse(snapshot, params) {
    return maybeParseResponse(snapshot, params);
}
//# sourceMappingURL=ResponseStream.mjs.map
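For context, here is a minimal sketch of how an application such as the chat interface described in the commit message might consume this vendored helper. It assumes the standard `openai` client and that `client.responses.stream()` returns a ResponseStream wired up via `createResponse()` above; the model name and prompt are illustrative placeholders, not values taken from the KidsAI Explorer code.

import OpenAI from 'openai';

const client = new OpenAI(); // reads OPENAI_API_KEY from the environment

// Hypothetical call; model and input are placeholders for this sketch.
const stream = client.responses.stream({
    model: 'gpt-4o-mini',
    input: 'Explain why the sky is blue to a curious 7-year-old.',
});

// The Symbol.asyncIterator above yields every streamed event, so text deltas
// can be forwarded to the chat UI as they arrive.
for await (const event of stream) {
    if (event.type === 'response.output_text.delta') {
        process.stdout.write(event.delta);
    }
}

// finalResponse() resolves with the Response accumulated by
// _ResponseStream_accumulateResponse/_endRequest above.
const finalResponse = await stream.finalResponse();
console.log('\nresponse id:', finalResponse.id);

Per the `'response_id' in params` branch in `_createOrRetrieveResponse`, passing `{ response_id, starting_after }` instead of creation params resumes streaming an existing response rather than creating a new one.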