Initial commit: KidsAI Explorer with complete functionality
- Complete KidsAI Explorer application
- Multi-language support (English/German)
- AI-powered educational guidance using OpenAI
- Interactive chat interface for children
- Proper placeholder translation fixes
- Mobile-responsive design
- Educational framework for critical thinking
node_modules/openai/lib/responses/EventTypes.d.mts (generated, vendored, new file, 9 lines)
@@ -0,0 +1,9 @@
import { ResponseAudioDeltaEvent, ResponseAudioDoneEvent, ResponseAudioTranscriptDeltaEvent, ResponseAudioTranscriptDoneEvent, ResponseCodeInterpreterCallCodeDeltaEvent, ResponseCodeInterpreterCallCodeDoneEvent, ResponseCodeInterpreterCallCompletedEvent, ResponseCodeInterpreterCallInProgressEvent, ResponseCodeInterpreterCallInterpretingEvent, ResponseCompletedEvent, ResponseContentPartAddedEvent, ResponseContentPartDoneEvent, ResponseCreatedEvent, ResponseErrorEvent, ResponseFailedEvent, ResponseFileSearchCallCompletedEvent, ResponseFileSearchCallInProgressEvent, ResponseFileSearchCallSearchingEvent, ResponseFunctionCallArgumentsDeltaEvent as RawResponseFunctionCallArgumentsDeltaEvent, ResponseFunctionCallArgumentsDoneEvent, ResponseInProgressEvent, ResponseOutputItemAddedEvent, ResponseOutputItemDoneEvent, ResponseRefusalDeltaEvent, ResponseRefusalDoneEvent, ResponseTextDeltaEvent as RawResponseTextDeltaEvent, ResponseTextDoneEvent, ResponseIncompleteEvent, ResponseWebSearchCallCompletedEvent, ResponseWebSearchCallInProgressEvent, ResponseWebSearchCallSearchingEvent } from "../../resources/responses/responses.mjs";
export type ResponseFunctionCallArgumentsDeltaEvent = RawResponseFunctionCallArgumentsDeltaEvent & {
    snapshot: string;
};
export type ResponseTextDeltaEvent = RawResponseTextDeltaEvent & {
    snapshot: string;
};
export type ParsedResponseStreamEvent = ResponseAudioDeltaEvent | ResponseAudioDoneEvent | ResponseAudioTranscriptDeltaEvent | ResponseAudioTranscriptDoneEvent | ResponseCodeInterpreterCallCodeDeltaEvent | ResponseCodeInterpreterCallCodeDoneEvent | ResponseCodeInterpreterCallCompletedEvent | ResponseCodeInterpreterCallInProgressEvent | ResponseCodeInterpreterCallInterpretingEvent | ResponseCompletedEvent | ResponseContentPartAddedEvent | ResponseContentPartDoneEvent | ResponseCreatedEvent | ResponseErrorEvent | ResponseFileSearchCallCompletedEvent | ResponseFileSearchCallInProgressEvent | ResponseFileSearchCallSearchingEvent | ResponseFunctionCallArgumentsDeltaEvent | ResponseFunctionCallArgumentsDoneEvent | ResponseInProgressEvent | ResponseFailedEvent | ResponseIncompleteEvent | ResponseOutputItemAddedEvent | ResponseOutputItemDoneEvent | ResponseRefusalDeltaEvent | ResponseRefusalDoneEvent | ResponseTextDeltaEvent | ResponseTextDoneEvent | ResponseWebSearchCallCompletedEvent | ResponseWebSearchCallInProgressEvent | ResponseWebSearchCallSearchingEvent;
//# sourceMappingURL=EventTypes.d.mts.map
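
The two augmented delta types above add an accumulated `snapshot` on top of the raw per-chunk events, and `ParsedResponseStreamEvent` is the union a consumer narrows on. A minimal TypeScript sketch of such a consumer follows; the `openai/lib/responses/EventTypes` import subpath and the way the event iterable is obtained are assumptions, not taken from this commit.

// Sketch: narrow the ParsedResponseStreamEvent union declared above and read the
// accumulated `snapshot` carried by the augmented text-delta events.
import type { ParsedResponseStreamEvent } from "openai/lib/responses/EventTypes";

async function collectText(events: AsyncIterable<ParsedResponseStreamEvent>): Promise<string> {
    let latest = "";
    for await (const event of events) {
        if (event.type === "response.output_text.delta") {
            // `delta` is the newest chunk; `snapshot` is the full text so far.
            latest = event.snapshot;
        }
    }
    return latest;
}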
node_modules/openai/lib/responses/EventTypes.d.mts.map (generated, vendored, new file, 1 line)
@@ -0,0 +1 @@
{"version":3,"file":"EventTypes.d.mts","sourceRoot":"","sources":["../../src/lib/responses/EventTypes.ts"],"names":[],"mappings":"OAAO,EACL,uBAAuB,EACvB,sBAAsB,EACtB,iCAAiC,EACjC,gCAAgC,EAChC,yCAAyC,EACzC,wCAAwC,EACxC,yCAAyC,EACzC,0CAA0C,EAC1C,4CAA4C,EAC5C,sBAAsB,EACtB,6BAA6B,EAC7B,4BAA4B,EAC5B,oBAAoB,EACpB,kBAAkB,EAClB,mBAAmB,EACnB,oCAAoC,EACpC,qCAAqC,EACrC,oCAAoC,EACpC,uCAAuC,IAAI,0CAA0C,EACrF,sCAAsC,EACtC,uBAAuB,EACvB,4BAA4B,EAC5B,2BAA2B,EAC3B,yBAAyB,EACzB,wBAAwB,EACxB,sBAAsB,IAAI,yBAAyB,EACnD,qBAAqB,EACrB,uBAAuB,EACvB,mCAAmC,EACnC,oCAAoC,EACpC,mCAAmC,EACpC;AAED,MAAM,MAAM,uCAAuC,GAAG,0CAA0C,GAAG;IACjG,QAAQ,EAAE,MAAM,CAAC;CAClB,CAAC;AAEF,MAAM,MAAM,sBAAsB,GAAG,yBAAyB,GAAG;IAC/D,QAAQ,EAAE,MAAM,CAAC;CAClB,CAAC;AAEF,MAAM,MAAM,yBAAyB,GACjC,uBAAuB,GACvB,sBAAsB,GACtB,iCAAiC,GACjC,gCAAgC,GAChC,yCAAyC,GACzC,wCAAwC,GACxC,yCAAyC,GACzC,0CAA0C,GAC1C,4CAA4C,GAC5C,sBAAsB,GACtB,6BAA6B,GAC7B,4BAA4B,GAC5B,oBAAoB,GACpB,kBAAkB,GAClB,oCAAoC,GACpC,qCAAqC,GACrC,oCAAoC,GACpC,uCAAuC,GACvC,sCAAsC,GACtC,uBAAuB,GACvB,mBAAmB,GACnB,uBAAuB,GACvB,4BAA4B,GAC5B,2BAA2B,GAC3B,yBAAyB,GACzB,wBAAwB,GACxB,sBAAsB,GACtB,qBAAqB,GACrB,mCAAmC,GACnC,oCAAoC,GACpC,mCAAmC,CAAC"}
node_modules/openai/lib/responses/EventTypes.d.ts (generated, vendored, new file, 9 lines)
@@ -0,0 +1,9 @@
import { ResponseAudioDeltaEvent, ResponseAudioDoneEvent, ResponseAudioTranscriptDeltaEvent, ResponseAudioTranscriptDoneEvent, ResponseCodeInterpreterCallCodeDeltaEvent, ResponseCodeInterpreterCallCodeDoneEvent, ResponseCodeInterpreterCallCompletedEvent, ResponseCodeInterpreterCallInProgressEvent, ResponseCodeInterpreterCallInterpretingEvent, ResponseCompletedEvent, ResponseContentPartAddedEvent, ResponseContentPartDoneEvent, ResponseCreatedEvent, ResponseErrorEvent, ResponseFailedEvent, ResponseFileSearchCallCompletedEvent, ResponseFileSearchCallInProgressEvent, ResponseFileSearchCallSearchingEvent, ResponseFunctionCallArgumentsDeltaEvent as RawResponseFunctionCallArgumentsDeltaEvent, ResponseFunctionCallArgumentsDoneEvent, ResponseInProgressEvent, ResponseOutputItemAddedEvent, ResponseOutputItemDoneEvent, ResponseRefusalDeltaEvent, ResponseRefusalDoneEvent, ResponseTextDeltaEvent as RawResponseTextDeltaEvent, ResponseTextDoneEvent, ResponseIncompleteEvent, ResponseWebSearchCallCompletedEvent, ResponseWebSearchCallInProgressEvent, ResponseWebSearchCallSearchingEvent } from "../../resources/responses/responses.js";
export type ResponseFunctionCallArgumentsDeltaEvent = RawResponseFunctionCallArgumentsDeltaEvent & {
    snapshot: string;
};
export type ResponseTextDeltaEvent = RawResponseTextDeltaEvent & {
    snapshot: string;
};
export type ParsedResponseStreamEvent = ResponseAudioDeltaEvent | ResponseAudioDoneEvent | ResponseAudioTranscriptDeltaEvent | ResponseAudioTranscriptDoneEvent | ResponseCodeInterpreterCallCodeDeltaEvent | ResponseCodeInterpreterCallCodeDoneEvent | ResponseCodeInterpreterCallCompletedEvent | ResponseCodeInterpreterCallInProgressEvent | ResponseCodeInterpreterCallInterpretingEvent | ResponseCompletedEvent | ResponseContentPartAddedEvent | ResponseContentPartDoneEvent | ResponseCreatedEvent | ResponseErrorEvent | ResponseFileSearchCallCompletedEvent | ResponseFileSearchCallInProgressEvent | ResponseFileSearchCallSearchingEvent | ResponseFunctionCallArgumentsDeltaEvent | ResponseFunctionCallArgumentsDoneEvent | ResponseInProgressEvent | ResponseFailedEvent | ResponseIncompleteEvent | ResponseOutputItemAddedEvent | ResponseOutputItemDoneEvent | ResponseRefusalDeltaEvent | ResponseRefusalDoneEvent | ResponseTextDeltaEvent | ResponseTextDoneEvent | ResponseWebSearchCallCompletedEvent | ResponseWebSearchCallInProgressEvent | ResponseWebSearchCallSearchingEvent;
//# sourceMappingURL=EventTypes.d.ts.map
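
The `.d.ts` copy declares the same augmented shapes for CommonJS consumers. As a small illustration, a listener typed against `ResponseTextDeltaEvent` can rely on both the raw `delta` and the added `snapshot`; the function name and import subpath below are illustrative assumptions.

import type { ResponseTextDeltaEvent } from "openai/lib/responses/EventTypes";

// Hypothetical UI hook: re-render from the snapshot instead of appending deltas,
// which stays correct even if a chunk is processed twice.
function onTextDelta(event: ResponseTextDeltaEvent): void {
    console.log(`+${event.delta.length} chars, ${event.snapshot.length} total`);
}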
node_modules/openai/lib/responses/EventTypes.d.ts.map (generated, vendored, new file, 1 line)
@@ -0,0 +1 @@
{"version":3,"file":"EventTypes.d.ts","sourceRoot":"","sources":["../../src/lib/responses/EventTypes.ts"],"names":[],"mappings":"OAAO,EACL,uBAAuB,EACvB,sBAAsB,EACtB,iCAAiC,EACjC,gCAAgC,EAChC,yCAAyC,EACzC,wCAAwC,EACxC,yCAAyC,EACzC,0CAA0C,EAC1C,4CAA4C,EAC5C,sBAAsB,EACtB,6BAA6B,EAC7B,4BAA4B,EAC5B,oBAAoB,EACpB,kBAAkB,EAClB,mBAAmB,EACnB,oCAAoC,EACpC,qCAAqC,EACrC,oCAAoC,EACpC,uCAAuC,IAAI,0CAA0C,EACrF,sCAAsC,EACtC,uBAAuB,EACvB,4BAA4B,EAC5B,2BAA2B,EAC3B,yBAAyB,EACzB,wBAAwB,EACxB,sBAAsB,IAAI,yBAAyB,EACnD,qBAAqB,EACrB,uBAAuB,EACvB,mCAAmC,EACnC,oCAAoC,EACpC,mCAAmC,EACpC;AAED,MAAM,MAAM,uCAAuC,GAAG,0CAA0C,GAAG;IACjG,QAAQ,EAAE,MAAM,CAAC;CAClB,CAAC;AAEF,MAAM,MAAM,sBAAsB,GAAG,yBAAyB,GAAG;IAC/D,QAAQ,EAAE,MAAM,CAAC;CAClB,CAAC;AAEF,MAAM,MAAM,yBAAyB,GACjC,uBAAuB,GACvB,sBAAsB,GACtB,iCAAiC,GACjC,gCAAgC,GAChC,yCAAyC,GACzC,wCAAwC,GACxC,yCAAyC,GACzC,0CAA0C,GAC1C,4CAA4C,GAC5C,sBAAsB,GACtB,6BAA6B,GAC7B,4BAA4B,GAC5B,oBAAoB,GACpB,kBAAkB,GAClB,oCAAoC,GACpC,qCAAqC,GACrC,oCAAoC,GACpC,uCAAuC,GACvC,sCAAsC,GACtC,uBAAuB,GACvB,mBAAmB,GACnB,uBAAuB,GACvB,4BAA4B,GAC5B,2BAA2B,GAC3B,yBAAyB,GACzB,wBAAwB,GACxB,sBAAsB,GACtB,qBAAqB,GACrB,mCAAmC,GACnC,oCAAoC,GACpC,mCAAmC,CAAC"}
node_modules/openai/lib/responses/EventTypes.js (generated, vendored, new file, 3 lines)
@@ -0,0 +1,3 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
//# sourceMappingURL=EventTypes.js.map
node_modules/openai/lib/responses/EventTypes.js.map (generated, vendored, new file, 1 line)
@@ -0,0 +1 @@
{"version":3,"file":"EventTypes.js","sourceRoot":"","sources":["../../src/lib/responses/EventTypes.ts"],"names":[],"mappings":""}
node_modules/openai/lib/responses/EventTypes.mjs (generated, vendored, new file, 2 lines)
@@ -0,0 +1,2 @@
export {};
//# sourceMappingURL=EventTypes.mjs.map
node_modules/openai/lib/responses/EventTypes.mjs.map (generated, vendored, new file, 1 line)
@@ -0,0 +1 @@
{"version":3,"file":"EventTypes.mjs","sourceRoot":"","sources":["../../src/lib/responses/EventTypes.ts"],"names":[],"mappings":""}
node_modules/openai/lib/responses/ResponseStream.d.mts (generated, vendored, new file, 59 lines)
@@ -0,0 +1,59 @@
import { ResponseTextConfig, type ParsedResponse, type ResponseCreateParamsBase, type ResponseStreamEvent } from "../../resources/responses/responses.mjs";
import { RequestOptions } from "../../internal/request-options.mjs";
import OpenAI from "../../index.mjs";
import { type BaseEvents, EventStream } from "../EventStream.mjs";
import { type ResponseFunctionCallArgumentsDeltaEvent, type ResponseTextDeltaEvent } from "./EventTypes.mjs";
import { ParseableToolsParams } from "../ResponsesParser.mjs";
export type ResponseStreamParams = ResponseCreateAndStreamParams | ResponseStreamByIdParams;
export type ResponseCreateAndStreamParams = Omit<ResponseCreateParamsBase, 'stream'> & {
    stream?: true;
};
export type ResponseStreamByIdParams = {
    /**
     * The ID of the response to stream.
     */
    response_id: string;
    /**
     * If provided, the stream will start after the event with the given sequence number.
     */
    starting_after?: number;
    /**
     * Configuration options for a text response from the model. Can be plain text or
     * structured JSON data. Learn more:
     *
     * - [Text inputs and outputs](https://platform.openai.com/docs/guides/text)
     * - [Structured Outputs](https://platform.openai.com/docs/guides/structured-outputs)
     */
    text?: ResponseTextConfig;
    /**
     * An array of tools the model may call while generating a response. When continuing a stream, provide
     * the same tools as the original request.
     */
    tools?: ParseableToolsParams;
};
type ResponseEvents = BaseEvents & Omit<{
    [K in ResponseStreamEvent['type']]: (event: Extract<ResponseStreamEvent, {
        type: K;
    }>) => void;
}, 'response.output_text.delta' | 'response.function_call_arguments.delta'> & {
    event: (event: ResponseStreamEvent) => void;
    'response.output_text.delta': (event: ResponseTextDeltaEvent) => void;
    'response.function_call_arguments.delta': (event: ResponseFunctionCallArgumentsDeltaEvent) => void;
};
export type ResponseStreamingParams = Omit<ResponseCreateParamsBase, 'stream'> & {
    stream?: true;
};
export declare class ResponseStream<ParsedT = null> extends EventStream<ResponseEvents> implements AsyncIterable<ResponseStreamEvent> {
    #private;
    constructor(params: ResponseStreamingParams | null);
    static createResponse<ParsedT>(client: OpenAI, params: ResponseStreamParams, options?: RequestOptions): ResponseStream<ParsedT>;
    protected _createOrRetrieveResponse(client: OpenAI, params: ResponseStreamParams, options?: RequestOptions): Promise<ParsedResponse<ParsedT>>;
    [Symbol.asyncIterator](this: ResponseStream<ParsedT>): AsyncIterator<ResponseStreamEvent>;
    /**
     * @returns a promise that resolves with the final Response, or rejects
     * if an error occurred or the stream ended prematurely without producing a Response.
     */
    finalResponse(): Promise<ParsedResponse<ParsedT>>;
}
export {};
//# sourceMappingURL=ResponseStream.d.mts.map
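
`ResponseStreamByIdParams` above is what makes resuming an existing stream possible: a `response_id` plus an optional `starting_after` sequence number. A sketch under assumptions: the `openai` client and import subpath are configured as usual, the response ID and sequence number are placeholders, and `on()` comes from the `EventStream` base class used internally above.

import OpenAI from "openai";
import { ResponseStream } from "openai/lib/responses/ResponseStream";

const client = new OpenAI(); // reads OPENAI_API_KEY from the environment

// Resume a previously created response, replaying only events after sequence 41.
const stream = ResponseStream.createResponse(client, {
    response_id: "resp_123", // placeholder ID
    starting_after: 41,
});

stream.on("response.output_text.delta", (event) => {
    process.stdout.write(event.delta); // event.snapshot holds the text so far
});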
node_modules/openai/lib/responses/ResponseStream.d.mts.map (generated, vendored, new file, 1 line)
@@ -0,0 +1 @@
{"version":3,"file":"ResponseStream.d.mts","sourceRoot":"","sources":["../../src/lib/responses/ResponseStream.ts"],"names":[],"mappings":"OAAO,EACL,kBAAkB,EAClB,KAAK,cAAc,EAEnB,KAAK,wBAAwB,EAE7B,KAAK,mBAAmB,EACzB;OACM,EAAE,cAAc,EAAE;OAElB,MAAM;OACN,EAAE,KAAK,UAAU,EAAE,WAAW,EAAE;OAChC,EAAE,KAAK,uCAAuC,EAAE,KAAK,sBAAsB,EAAE;OAC7E,EAAsB,oBAAoB,EAAE;AAGnD,MAAM,MAAM,oBAAoB,GAAG,6BAA6B,GAAG,wBAAwB,CAAC;AAE5F,MAAM,MAAM,6BAA6B,GAAG,IAAI,CAAC,wBAAwB,EAAE,QAAQ,CAAC,GAAG;IACrF,MAAM,CAAC,EAAE,IAAI,CAAC;CACf,CAAC;AAEF,MAAM,MAAM,wBAAwB,GAAG;IACrC;;OAEG;IACH,WAAW,EAAE,MAAM,CAAC;IACpB;;OAEG;IACH,cAAc,CAAC,EAAE,MAAM,CAAC;IACxB;;;;;;OAMG;IACH,IAAI,CAAC,EAAE,kBAAkB,CAAC;IAE1B;;;OAGG;IACH,KAAK,CAAC,EAAE,oBAAoB,CAAC;CAC9B,CAAC;AAEF,KAAK,cAAc,GAAG,UAAU,GAC9B,IAAI,CACF;KACG,CAAC,IAAI,mBAAmB,CAAC,MAAM,CAAC,GAAG,CAAC,KAAK,EAAE,OAAO,CAAC,mBAAmB,EAAE;QAAE,IAAI,EAAE,CAAC,CAAA;KAAE,CAAC,KAAK,IAAI;CAC/F,EACD,4BAA4B,GAAG,wCAAwC,CACxE,GAAG;IACF,KAAK,EAAE,CAAC,KAAK,EAAE,mBAAmB,KAAK,IAAI,CAAC;IAC5C,4BAA4B,EAAE,CAAC,KAAK,EAAE,sBAAsB,KAAK,IAAI,CAAC;IACtE,wCAAwC,EAAE,CAAC,KAAK,EAAE,uCAAuC,KAAK,IAAI,CAAC;CACpG,CAAC;AAEJ,MAAM,MAAM,uBAAuB,GAAG,IAAI,CAAC,wBAAwB,EAAE,QAAQ,CAAC,GAAG;IAC/E,MAAM,CAAC,EAAE,IAAI,CAAC;CACf,CAAC;AAEF,qBAAa,cAAc,CAAC,OAAO,GAAG,IAAI,CACxC,SAAQ,WAAW,CAAC,cAAc,CAClC,YAAW,aAAa,CAAC,mBAAmB,CAAC;;gBAMjC,MAAM,EAAE,uBAAuB,GAAG,IAAI;IAKlD,MAAM,CAAC,cAAc,CAAC,OAAO,EAC3B,MAAM,EAAE,MAAM,EACd,MAAM,EAAE,oBAAoB,EAC5B,OAAO,CAAC,EAAE,cAAc,GACvB,cAAc,CAAC,OAAO,CAAC;cAoFV,yBAAyB,CACvC,MAAM,EAAE,MAAM,EACd,MAAM,EAAE,oBAAoB,EAC5B,OAAO,CAAC,EAAE,cAAc,GACvB,OAAO,CAAC,cAAc,CAAC,OAAO,CAAC,CAAC;IAiGnC,CAAC,MAAM,CAAC,aAAa,CAAC,CAAC,IAAI,EAAE,cAAc,CAAC,OAAO,CAAC,GAAG,aAAa,CAAC,mBAAmB,CAAC;IA6DzF;;;OAGG;IACG,aAAa,IAAI,OAAO,CAAC,cAAc,CAAC,OAAO,CAAC,CAAC;CAMxD"}
node_modules/openai/lib/responses/ResponseStream.d.ts (generated, vendored, new file, 59 lines)
@@ -0,0 +1,59 @@
import { ResponseTextConfig, type ParsedResponse, type ResponseCreateParamsBase, type ResponseStreamEvent } from "../../resources/responses/responses.js";
import { RequestOptions } from "../../internal/request-options.js";
import OpenAI from "../../index.js";
import { type BaseEvents, EventStream } from "../EventStream.js";
import { type ResponseFunctionCallArgumentsDeltaEvent, type ResponseTextDeltaEvent } from "./EventTypes.js";
import { ParseableToolsParams } from "../ResponsesParser.js";
export type ResponseStreamParams = ResponseCreateAndStreamParams | ResponseStreamByIdParams;
export type ResponseCreateAndStreamParams = Omit<ResponseCreateParamsBase, 'stream'> & {
    stream?: true;
};
export type ResponseStreamByIdParams = {
    /**
     * The ID of the response to stream.
     */
    response_id: string;
    /**
     * If provided, the stream will start after the event with the given sequence number.
     */
    starting_after?: number;
    /**
     * Configuration options for a text response from the model. Can be plain text or
     * structured JSON data. Learn more:
     *
     * - [Text inputs and outputs](https://platform.openai.com/docs/guides/text)
     * - [Structured Outputs](https://platform.openai.com/docs/guides/structured-outputs)
     */
    text?: ResponseTextConfig;
    /**
     * An array of tools the model may call while generating a response. When continuing a stream, provide
     * the same tools as the original request.
     */
    tools?: ParseableToolsParams;
};
type ResponseEvents = BaseEvents & Omit<{
    [K in ResponseStreamEvent['type']]: (event: Extract<ResponseStreamEvent, {
        type: K;
    }>) => void;
}, 'response.output_text.delta' | 'response.function_call_arguments.delta'> & {
    event: (event: ResponseStreamEvent) => void;
    'response.output_text.delta': (event: ResponseTextDeltaEvent) => void;
    'response.function_call_arguments.delta': (event: ResponseFunctionCallArgumentsDeltaEvent) => void;
};
export type ResponseStreamingParams = Omit<ResponseCreateParamsBase, 'stream'> & {
    stream?: true;
};
export declare class ResponseStream<ParsedT = null> extends EventStream<ResponseEvents> implements AsyncIterable<ResponseStreamEvent> {
    #private;
    constructor(params: ResponseStreamingParams | null);
    static createResponse<ParsedT>(client: OpenAI, params: ResponseStreamParams, options?: RequestOptions): ResponseStream<ParsedT>;
    protected _createOrRetrieveResponse(client: OpenAI, params: ResponseStreamParams, options?: RequestOptions): Promise<ParsedResponse<ParsedT>>;
    [Symbol.asyncIterator](this: ResponseStream<ParsedT>): AsyncIterator<ResponseStreamEvent>;
    /**
     * @returns a promise that resolves with the final Response, or rejects
     * if an error occurred or the stream ended prematurely without producing a Response.
     */
    finalResponse(): Promise<ParsedResponse<ParsedT>>;
}
export {};
//# sourceMappingURL=ResponseStream.d.ts.map
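
The declaration also covers the create-and-stream path: `ResponseStreamParams` accepts ordinary `ResponseCreateParamsBase` (minus `stream`), and `finalResponse()` resolves with the parsed result. A sketch, assuming a configured client, a placeholder model name, and that `output_text` is the SDK's aggregated-text convenience field.

import OpenAI from "openai";
import { ResponseStream } from "openai/lib/responses/ResponseStream";

async function main(): Promise<void> {
    const client = new OpenAI();
    const stream = ResponseStream.createResponse(client, {
        model: "gpt-4.1-mini", // placeholder model name
        input: "Say hello in German",
    });
    // Resolves once the stream has produced its final Response, or rejects on error.
    const response = await stream.finalResponse();
    console.log(response.output_text);
}

main().catch(console.error);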
node_modules/openai/lib/responses/ResponseStream.d.ts.map (generated, vendored, new file, 1 line)
@@ -0,0 +1 @@
{"version":3,"file":"ResponseStream.d.ts","sourceRoot":"","sources":["../../src/lib/responses/ResponseStream.ts"],"names":[],"mappings":"OAAO,EACL,kBAAkB,EAClB,KAAK,cAAc,EAEnB,KAAK,wBAAwB,EAE7B,KAAK,mBAAmB,EACzB;OACM,EAAE,cAAc,EAAE;OAElB,MAAM;OACN,EAAE,KAAK,UAAU,EAAE,WAAW,EAAE;OAChC,EAAE,KAAK,uCAAuC,EAAE,KAAK,sBAAsB,EAAE;OAC7E,EAAsB,oBAAoB,EAAE;AAGnD,MAAM,MAAM,oBAAoB,GAAG,6BAA6B,GAAG,wBAAwB,CAAC;AAE5F,MAAM,MAAM,6BAA6B,GAAG,IAAI,CAAC,wBAAwB,EAAE,QAAQ,CAAC,GAAG;IACrF,MAAM,CAAC,EAAE,IAAI,CAAC;CACf,CAAC;AAEF,MAAM,MAAM,wBAAwB,GAAG;IACrC;;OAEG;IACH,WAAW,EAAE,MAAM,CAAC;IACpB;;OAEG;IACH,cAAc,CAAC,EAAE,MAAM,CAAC;IACxB;;;;;;OAMG;IACH,IAAI,CAAC,EAAE,kBAAkB,CAAC;IAE1B;;;OAGG;IACH,KAAK,CAAC,EAAE,oBAAoB,CAAC;CAC9B,CAAC;AAEF,KAAK,cAAc,GAAG,UAAU,GAC9B,IAAI,CACF;KACG,CAAC,IAAI,mBAAmB,CAAC,MAAM,CAAC,GAAG,CAAC,KAAK,EAAE,OAAO,CAAC,mBAAmB,EAAE;QAAE,IAAI,EAAE,CAAC,CAAA;KAAE,CAAC,KAAK,IAAI;CAC/F,EACD,4BAA4B,GAAG,wCAAwC,CACxE,GAAG;IACF,KAAK,EAAE,CAAC,KAAK,EAAE,mBAAmB,KAAK,IAAI,CAAC;IAC5C,4BAA4B,EAAE,CAAC,KAAK,EAAE,sBAAsB,KAAK,IAAI,CAAC;IACtE,wCAAwC,EAAE,CAAC,KAAK,EAAE,uCAAuC,KAAK,IAAI,CAAC;CACpG,CAAC;AAEJ,MAAM,MAAM,uBAAuB,GAAG,IAAI,CAAC,wBAAwB,EAAE,QAAQ,CAAC,GAAG;IAC/E,MAAM,CAAC,EAAE,IAAI,CAAC;CACf,CAAC;AAEF,qBAAa,cAAc,CAAC,OAAO,GAAG,IAAI,CACxC,SAAQ,WAAW,CAAC,cAAc,CAClC,YAAW,aAAa,CAAC,mBAAmB,CAAC;;gBAMjC,MAAM,EAAE,uBAAuB,GAAG,IAAI;IAKlD,MAAM,CAAC,cAAc,CAAC,OAAO,EAC3B,MAAM,EAAE,MAAM,EACd,MAAM,EAAE,oBAAoB,EAC5B,OAAO,CAAC,EAAE,cAAc,GACvB,cAAc,CAAC,OAAO,CAAC;cAoFV,yBAAyB,CACvC,MAAM,EAAE,MAAM,EACd,MAAM,EAAE,oBAAoB,EAC5B,OAAO,CAAC,EAAE,cAAc,GACvB,OAAO,CAAC,cAAc,CAAC,OAAO,CAAC,CAAC;IAiGnC,CAAC,MAAM,CAAC,aAAa,CAAC,CAAC,IAAI,EAAE,cAAc,CAAC,OAAO,CAAC,GAAG,aAAa,CAAC,mBAAmB,CAAC;IA6DzF;;;OAGG;IACG,aAAa,IAAI,OAAO,CAAC,cAAc,CAAC,OAAO,CAAC,CAAC;CAMxD"}
node_modules/openai/lib/responses/ResponseStream.js (generated, vendored, new file, 240 lines)
@@ -0,0 +1,240 @@
"use strict";
var _ResponseStream_instances, _ResponseStream_params, _ResponseStream_currentResponseSnapshot, _ResponseStream_finalResponse, _ResponseStream_beginRequest, _ResponseStream_addEvent, _ResponseStream_endRequest, _ResponseStream_accumulateResponse;
Object.defineProperty(exports, "__esModule", { value: true });
exports.ResponseStream = void 0;
const tslib_1 = require("../../internal/tslib.js");
const error_1 = require("../../error.js");
const EventStream_1 = require("../EventStream.js");
const ResponsesParser_1 = require("../ResponsesParser.js");
class ResponseStream extends EventStream_1.EventStream {
    constructor(params) {
        super();
        _ResponseStream_instances.add(this);
        _ResponseStream_params.set(this, void 0);
        _ResponseStream_currentResponseSnapshot.set(this, void 0);
        _ResponseStream_finalResponse.set(this, void 0);
        tslib_1.__classPrivateFieldSet(this, _ResponseStream_params, params, "f");
    }
    static createResponse(client, params, options) {
        const runner = new ResponseStream(params);
        runner._run(() => runner._createOrRetrieveResponse(client, params, {
            ...options,
            headers: { ...options?.headers, 'X-Stainless-Helper-Method': 'stream' },
        }));
        return runner;
    }
    async _createOrRetrieveResponse(client, params, options) {
        const signal = options?.signal;
        if (signal) {
            if (signal.aborted)
                this.controller.abort();
            signal.addEventListener('abort', () => this.controller.abort());
        }
        tslib_1.__classPrivateFieldGet(this, _ResponseStream_instances, "m", _ResponseStream_beginRequest).call(this);
        let stream;
        let starting_after = null;
        if ('response_id' in params) {
            stream = await client.responses.retrieve(params.response_id, { stream: true }, { ...options, signal: this.controller.signal, stream: true });
            starting_after = params.starting_after ?? null;
        }
        else {
            stream = await client.responses.create({ ...params, stream: true }, { ...options, signal: this.controller.signal });
        }
        this._connected();
        for await (const event of stream) {
            tslib_1.__classPrivateFieldGet(this, _ResponseStream_instances, "m", _ResponseStream_addEvent).call(this, event, starting_after);
        }
        if (stream.controller.signal?.aborted) {
            throw new error_1.APIUserAbortError();
        }
        return tslib_1.__classPrivateFieldGet(this, _ResponseStream_instances, "m", _ResponseStream_endRequest).call(this);
    }
    [(_ResponseStream_params = new WeakMap(), _ResponseStream_currentResponseSnapshot = new WeakMap(), _ResponseStream_finalResponse = new WeakMap(), _ResponseStream_instances = new WeakSet(), _ResponseStream_beginRequest = function _ResponseStream_beginRequest() {
        if (this.ended)
            return;
        tslib_1.__classPrivateFieldSet(this, _ResponseStream_currentResponseSnapshot, undefined, "f");
    }, _ResponseStream_addEvent = function _ResponseStream_addEvent(event, starting_after) {
        if (this.ended)
            return;
        const maybeEmit = (name, event) => {
            if (starting_after == null || event.sequence_number > starting_after) {
                this._emit(name, event);
            }
        };
        const response = tslib_1.__classPrivateFieldGet(this, _ResponseStream_instances, "m", _ResponseStream_accumulateResponse).call(this, event);
        maybeEmit('event', event);
        switch (event.type) {
            case 'response.output_text.delta': {
                const output = response.output[event.output_index];
                if (!output) {
                    throw new error_1.OpenAIError(`missing output at index ${event.output_index}`);
                }
                if (output.type === 'message') {
                    const content = output.content[event.content_index];
                    if (!content) {
                        throw new error_1.OpenAIError(`missing content at index ${event.content_index}`);
                    }
                    if (content.type !== 'output_text') {
                        throw new error_1.OpenAIError(`expected content to be 'output_text', got ${content.type}`);
                    }
                    maybeEmit('response.output_text.delta', {
                        ...event,
                        snapshot: content.text,
                    });
                }
                break;
            }
            case 'response.function_call_arguments.delta': {
                const output = response.output[event.output_index];
                if (!output) {
                    throw new error_1.OpenAIError(`missing output at index ${event.output_index}`);
                }
                if (output.type === 'function_call') {
                    maybeEmit('response.function_call_arguments.delta', {
                        ...event,
                        snapshot: output.arguments,
                    });
                }
                break;
            }
            default:
                maybeEmit(event.type, event);
                break;
        }
    }, _ResponseStream_endRequest = function _ResponseStream_endRequest() {
        if (this.ended) {
            throw new error_1.OpenAIError(`stream has ended, this shouldn't happen`);
        }
        const snapshot = tslib_1.__classPrivateFieldGet(this, _ResponseStream_currentResponseSnapshot, "f");
        if (!snapshot) {
            throw new error_1.OpenAIError(`request ended without sending any events`);
        }
        tslib_1.__classPrivateFieldSet(this, _ResponseStream_currentResponseSnapshot, undefined, "f");
        const parsedResponse = finalizeResponse(snapshot, tslib_1.__classPrivateFieldGet(this, _ResponseStream_params, "f"));
        tslib_1.__classPrivateFieldSet(this, _ResponseStream_finalResponse, parsedResponse, "f");
        return parsedResponse;
    }, _ResponseStream_accumulateResponse = function _ResponseStream_accumulateResponse(event) {
        let snapshot = tslib_1.__classPrivateFieldGet(this, _ResponseStream_currentResponseSnapshot, "f");
        if (!snapshot) {
            if (event.type !== 'response.created') {
                throw new error_1.OpenAIError(`When snapshot hasn't been set yet, expected 'response.created' event, got ${event.type}`);
            }
            snapshot = tslib_1.__classPrivateFieldSet(this, _ResponseStream_currentResponseSnapshot, event.response, "f");
            return snapshot;
        }
        switch (event.type) {
            case 'response.output_item.added': {
                snapshot.output.push(event.item);
                break;
            }
            case 'response.content_part.added': {
                const output = snapshot.output[event.output_index];
                if (!output) {
                    throw new error_1.OpenAIError(`missing output at index ${event.output_index}`);
                }
                if (output.type === 'message') {
                    output.content.push(event.part);
                }
                break;
            }
            case 'response.output_text.delta': {
                const output = snapshot.output[event.output_index];
                if (!output) {
                    throw new error_1.OpenAIError(`missing output at index ${event.output_index}`);
                }
                if (output.type === 'message') {
                    const content = output.content[event.content_index];
                    if (!content) {
                        throw new error_1.OpenAIError(`missing content at index ${event.content_index}`);
                    }
                    if (content.type !== 'output_text') {
                        throw new error_1.OpenAIError(`expected content to be 'output_text', got ${content.type}`);
                    }
                    content.text += event.delta;
                }
                break;
            }
            case 'response.function_call_arguments.delta': {
                const output = snapshot.output[event.output_index];
                if (!output) {
                    throw new error_1.OpenAIError(`missing output at index ${event.output_index}`);
                }
                if (output.type === 'function_call') {
                    output.arguments += event.delta;
                }
                break;
            }
            case 'response.completed': {
                tslib_1.__classPrivateFieldSet(this, _ResponseStream_currentResponseSnapshot, event.response, "f");
                break;
            }
        }
        return snapshot;
    }, Symbol.asyncIterator)]() {
        const pushQueue = [];
        const readQueue = [];
        let done = false;
        this.on('event', (event) => {
            const reader = readQueue.shift();
            if (reader) {
                reader.resolve(event);
            }
            else {
                pushQueue.push(event);
            }
        });
        this.on('end', () => {
            done = true;
            for (const reader of readQueue) {
                reader.resolve(undefined);
            }
            readQueue.length = 0;
        });
        this.on('abort', (err) => {
            done = true;
            for (const reader of readQueue) {
                reader.reject(err);
            }
            readQueue.length = 0;
        });
        this.on('error', (err) => {
            done = true;
            for (const reader of readQueue) {
                reader.reject(err);
            }
            readQueue.length = 0;
        });
        return {
            next: async () => {
                if (!pushQueue.length) {
                    if (done) {
                        return { value: undefined, done: true };
                    }
                    return new Promise((resolve, reject) => readQueue.push({ resolve, reject })).then((event) => (event ? { value: event, done: false } : { value: undefined, done: true }));
                }
                const event = pushQueue.shift();
                return { value: event, done: false };
            },
            return: async () => {
                this.abort();
                return { value: undefined, done: true };
            },
        };
    }
    /**
     * @returns a promise that resolves with the final Response, or rejects
     * if an error occurred or the stream ended prematurely without producing a Response.
     */
    async finalResponse() {
        await this.done();
        const response = tslib_1.__classPrivateFieldGet(this, _ResponseStream_finalResponse, "f");
        if (!response)
            throw new error_1.OpenAIError('stream ended without producing a ChatCompletion');
        return response;
    }
}
exports.ResponseStream = ResponseStream;
function finalizeResponse(snapshot, params) {
    return (0, ResponsesParser_1.maybeParseResponse)(snapshot, params);
}
//# sourceMappingURL=ResponseStream.js.map
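
Because the class above wires `Symbol.asyncIterator` to its internal event queue, the stream can also be consumed with `for await`, receiving every `ResponseStreamEvent` in order. A sketch under the same assumptions as the earlier ones (client setup, import subpath, placeholder parameters).

import OpenAI from "openai";
import { ResponseStream } from "openai/lib/responses/ResponseStream";

async function logEventTypes(): Promise<void> {
    const client = new OpenAI();
    const stream = ResponseStream.createResponse(client, {
        model: "gpt-4.1-mini", // placeholder
        input: "Stream a short answer",
    });
    for await (const event of stream) {
        // Delta events arrive interleaved with lifecycle events such as
        // 'response.created' and 'response.completed'.
        console.log(event.type);
    }
}

logEventTypes().catch(console.error);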
node_modules/openai/lib/responses/ResponseStream.js.map (generated, vendored, new file, 1 line)
File diff suppressed because one or more lines are too long
node_modules/openai/lib/responses/ResponseStream.mjs (generated, vendored, new file, 236 lines)
@@ -0,0 +1,236 @@
var _ResponseStream_instances, _ResponseStream_params, _ResponseStream_currentResponseSnapshot, _ResponseStream_finalResponse, _ResponseStream_beginRequest, _ResponseStream_addEvent, _ResponseStream_endRequest, _ResponseStream_accumulateResponse;
import { __classPrivateFieldGet, __classPrivateFieldSet } from "../../internal/tslib.mjs";
import { APIUserAbortError, OpenAIError } from "../../error.mjs";
import { EventStream } from "../EventStream.mjs";
import { maybeParseResponse } from "../ResponsesParser.mjs";
export class ResponseStream extends EventStream {
    constructor(params) {
        super();
        _ResponseStream_instances.add(this);
        _ResponseStream_params.set(this, void 0);
        _ResponseStream_currentResponseSnapshot.set(this, void 0);
        _ResponseStream_finalResponse.set(this, void 0);
        __classPrivateFieldSet(this, _ResponseStream_params, params, "f");
    }
    static createResponse(client, params, options) {
        const runner = new ResponseStream(params);
        runner._run(() => runner._createOrRetrieveResponse(client, params, {
            ...options,
            headers: { ...options?.headers, 'X-Stainless-Helper-Method': 'stream' },
        }));
        return runner;
    }
    async _createOrRetrieveResponse(client, params, options) {
        const signal = options?.signal;
        if (signal) {
            if (signal.aborted)
                this.controller.abort();
            signal.addEventListener('abort', () => this.controller.abort());
        }
        __classPrivateFieldGet(this, _ResponseStream_instances, "m", _ResponseStream_beginRequest).call(this);
        let stream;
        let starting_after = null;
        if ('response_id' in params) {
            stream = await client.responses.retrieve(params.response_id, { stream: true }, { ...options, signal: this.controller.signal, stream: true });
            starting_after = params.starting_after ?? null;
        }
        else {
            stream = await client.responses.create({ ...params, stream: true }, { ...options, signal: this.controller.signal });
        }
        this._connected();
        for await (const event of stream) {
            __classPrivateFieldGet(this, _ResponseStream_instances, "m", _ResponseStream_addEvent).call(this, event, starting_after);
        }
        if (stream.controller.signal?.aborted) {
            throw new APIUserAbortError();
        }
        return __classPrivateFieldGet(this, _ResponseStream_instances, "m", _ResponseStream_endRequest).call(this);
    }
    [(_ResponseStream_params = new WeakMap(), _ResponseStream_currentResponseSnapshot = new WeakMap(), _ResponseStream_finalResponse = new WeakMap(), _ResponseStream_instances = new WeakSet(), _ResponseStream_beginRequest = function _ResponseStream_beginRequest() {
        if (this.ended)
            return;
        __classPrivateFieldSet(this, _ResponseStream_currentResponseSnapshot, undefined, "f");
    }, _ResponseStream_addEvent = function _ResponseStream_addEvent(event, starting_after) {
        if (this.ended)
            return;
        const maybeEmit = (name, event) => {
            if (starting_after == null || event.sequence_number > starting_after) {
                this._emit(name, event);
            }
        };
        const response = __classPrivateFieldGet(this, _ResponseStream_instances, "m", _ResponseStream_accumulateResponse).call(this, event);
        maybeEmit('event', event);
        switch (event.type) {
            case 'response.output_text.delta': {
                const output = response.output[event.output_index];
                if (!output) {
                    throw new OpenAIError(`missing output at index ${event.output_index}`);
                }
                if (output.type === 'message') {
                    const content = output.content[event.content_index];
                    if (!content) {
                        throw new OpenAIError(`missing content at index ${event.content_index}`);
                    }
                    if (content.type !== 'output_text') {
                        throw new OpenAIError(`expected content to be 'output_text', got ${content.type}`);
                    }
                    maybeEmit('response.output_text.delta', {
                        ...event,
                        snapshot: content.text,
                    });
                }
                break;
            }
            case 'response.function_call_arguments.delta': {
                const output = response.output[event.output_index];
                if (!output) {
                    throw new OpenAIError(`missing output at index ${event.output_index}`);
                }
                if (output.type === 'function_call') {
                    maybeEmit('response.function_call_arguments.delta', {
                        ...event,
                        snapshot: output.arguments,
                    });
                }
                break;
            }
            default:
                maybeEmit(event.type, event);
                break;
        }
    }, _ResponseStream_endRequest = function _ResponseStream_endRequest() {
        if (this.ended) {
            throw new OpenAIError(`stream has ended, this shouldn't happen`);
        }
        const snapshot = __classPrivateFieldGet(this, _ResponseStream_currentResponseSnapshot, "f");
        if (!snapshot) {
            throw new OpenAIError(`request ended without sending any events`);
        }
        __classPrivateFieldSet(this, _ResponseStream_currentResponseSnapshot, undefined, "f");
        const parsedResponse = finalizeResponse(snapshot, __classPrivateFieldGet(this, _ResponseStream_params, "f"));
        __classPrivateFieldSet(this, _ResponseStream_finalResponse, parsedResponse, "f");
        return parsedResponse;
    }, _ResponseStream_accumulateResponse = function _ResponseStream_accumulateResponse(event) {
        let snapshot = __classPrivateFieldGet(this, _ResponseStream_currentResponseSnapshot, "f");
        if (!snapshot) {
            if (event.type !== 'response.created') {
                throw new OpenAIError(`When snapshot hasn't been set yet, expected 'response.created' event, got ${event.type}`);
            }
            snapshot = __classPrivateFieldSet(this, _ResponseStream_currentResponseSnapshot, event.response, "f");
            return snapshot;
        }
        switch (event.type) {
            case 'response.output_item.added': {
                snapshot.output.push(event.item);
                break;
            }
            case 'response.content_part.added': {
                const output = snapshot.output[event.output_index];
                if (!output) {
                    throw new OpenAIError(`missing output at index ${event.output_index}`);
                }
                if (output.type === 'message') {
                    output.content.push(event.part);
                }
                break;
            }
            case 'response.output_text.delta': {
                const output = snapshot.output[event.output_index];
                if (!output) {
                    throw new OpenAIError(`missing output at index ${event.output_index}`);
                }
                if (output.type === 'message') {
                    const content = output.content[event.content_index];
                    if (!content) {
                        throw new OpenAIError(`missing content at index ${event.content_index}`);
                    }
                    if (content.type !== 'output_text') {
                        throw new OpenAIError(`expected content to be 'output_text', got ${content.type}`);
                    }
                    content.text += event.delta;
                }
                break;
            }
            case 'response.function_call_arguments.delta': {
                const output = snapshot.output[event.output_index];
                if (!output) {
                    throw new OpenAIError(`missing output at index ${event.output_index}`);
                }
                if (output.type === 'function_call') {
                    output.arguments += event.delta;
                }
                break;
            }
            case 'response.completed': {
                __classPrivateFieldSet(this, _ResponseStream_currentResponseSnapshot, event.response, "f");
                break;
            }
        }
        return snapshot;
    }, Symbol.asyncIterator)]() {
        const pushQueue = [];
        const readQueue = [];
        let done = false;
        this.on('event', (event) => {
            const reader = readQueue.shift();
            if (reader) {
                reader.resolve(event);
            }
            else {
                pushQueue.push(event);
            }
        });
        this.on('end', () => {
            done = true;
            for (const reader of readQueue) {
                reader.resolve(undefined);
            }
            readQueue.length = 0;
        });
        this.on('abort', (err) => {
            done = true;
            for (const reader of readQueue) {
                reader.reject(err);
            }
            readQueue.length = 0;
        });
        this.on('error', (err) => {
            done = true;
            for (const reader of readQueue) {
                reader.reject(err);
            }
            readQueue.length = 0;
        });
        return {
            next: async () => {
                if (!pushQueue.length) {
                    if (done) {
                        return { value: undefined, done: true };
                    }
                    return new Promise((resolve, reject) => readQueue.push({ resolve, reject })).then((event) => (event ? { value: event, done: false } : { value: undefined, done: true }));
                }
                const event = pushQueue.shift();
                return { value: event, done: false };
            },
            return: async () => {
                this.abort();
                return { value: undefined, done: true };
            },
        };
    }
    /**
     * @returns a promise that resolves with the final Response, or rejects
     * if an error occurred or the stream ended prematurely without producing a Response.
     */
    async finalResponse() {
        await this.done();
        const response = __classPrivateFieldGet(this, _ResponseStream_finalResponse, "f");
        if (!response)
            throw new OpenAIError('stream ended without producing a ChatCompletion');
        return response;
    }
}
function finalizeResponse(snapshot, params) {
    return maybeParseResponse(snapshot, params);
}
//# sourceMappingURL=ResponseStream.mjs.map
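
The ESM build repeats the same accumulation logic: `_ResponseStream_accumulateResponse` folds each delta into a running snapshot keyed by output and content index. A standalone simplification of that idea follows; it illustrates the technique rather than reusing the SDK's own data structures, and the import subpath is an assumption.

import type { ResponseStreamEvent } from "openai/resources/responses/responses";

// Fold text deltas into per-(output_index, content_index) snapshots, mirroring
// how the stream helper above rebuilds `content.text` chunk by chunk.
function accumulateText(events: Iterable<ResponseStreamEvent>): Map<string, string> {
    const snapshots = new Map<string, string>();
    for (const event of events) {
        if (event.type === "response.output_text.delta") {
            const key = `${event.output_index}:${event.content_index}`;
            snapshots.set(key, (snapshots.get(key) ?? "") + event.delta);
        }
    }
    return snapshots;
}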
node_modules/openai/lib/responses/ResponseStream.mjs.map (generated, vendored, new file, 1 line)
File diff suppressed because one or more lines are too long