Fix chat interface - restore continuous conversation flow

🎯 Major improvements to MissionControl component:
- Always keep input field visible and functional after AI responses
- Auto-clear input after submitting questions for better UX
- Add dynamic visual indicators (first question vs follow-up)
- Improve response layout with clear separation and hints
- Enable a proper chat-like experience for continuous learning (see the sketch below)
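
A minimal sketch of the flow these bullets describe, assuming a React function component with local state; askQuestion, useConversation, and every other name below are hypothetical, not taken from the actual MissionControl source:

import { useState, type FormEvent } from 'react';

// Hypothetical API helper; the real data-fetching code is not part of this commit excerpt.
declare function askQuestion(question: string): Promise<string>;

function useConversation() {
    const [question, setQuestion] = useState('');
    const [responses, setResponses] = useState<string[]>([]);

    async function handleSubmit(e: FormEvent) {
        e.preventDefault();
        const answer = await askQuestion(question); // hypothetical call
        setResponses((prev) => [...prev, answer]);  // earlier answers stay on screen
        setQuestion('');                            // auto-clear; the input stays mounted for follow-ups
    }

    // responses.length > 0 drives the "first question vs follow-up" indicator
    return { question, setQuestion, responses, handleSubmit, isFollowUp: responses.length > 0 };
}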

🌟 Additional enhancements:
- Better language-specific messaging throughout interface
- Clearer visual hierarchy between input and response areas
- Intuitive flow that guides users to ask follow-up questions
- Maintains responsive design and accessibility

🔧 Technical changes:
- Enhanced MissionControl state management
- Improved component layout and styling
- Better TypeScript integration across components
- Updated tsconfig for stricter type checking (example below)
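
The specific compiler options are not visible in this excerpt; a stricter tsconfig in this spirit might look like the following (assumed, not copied from the commit):

{
    "compilerOptions": {
        "strict": true,
        "noUncheckedIndexedAccess": true,
        "noImplicitOverride": true,
        "noFallthroughCasesInSwitch": true
    }
}
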
Author: rwiegand
Date: 2025-07-14 12:39:05 +02:00
Parent: b31492a354
Commit: f893530471
1798 changed files with 25329 additions and 92638 deletions

File: file-batches.mjs

@@ -1,40 +1,37 @@
 // File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
-import { APIResource } from "../../core/resource.mjs";
-import { CursorPage } from "../../core/pagination.mjs";
-import { buildHeaders } from "../../internal/headers.mjs";
-import { sleep } from "../../internal/utils/sleep.mjs";
+import { APIResource } from "../../resource.mjs";
+import { isRequestOptions } from "../../core.mjs";
+import { sleep } from "../../core.mjs";
 import { allSettledWithThrow } from "../../lib/Util.mjs";
-import { path } from "../../internal/utils/path.mjs";
+import { VectorStoreFilesPage } from "./files.mjs";
 export class FileBatches extends APIResource {
     /**
      * Create a vector store file batch.
      */
-    create(vectorStoreID, body, options) {
-        return this._client.post(path `/vector_stores/${vectorStoreID}/file_batches`, {
+    create(vectorStoreId, body, options) {
+        return this._client.post(`/vector_stores/${vectorStoreId}/file_batches`, {
             body,
             ...options,
-            headers: buildHeaders([{ 'OpenAI-Beta': 'assistants=v2' }, options?.headers]),
+            headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers },
         });
     }
     /**
      * Retrieves a vector store file batch.
      */
-    retrieve(batchID, params, options) {
-        const { vector_store_id } = params;
-        return this._client.get(path `/vector_stores/${vector_store_id}/file_batches/${batchID}`, {
+    retrieve(vectorStoreId, batchId, options) {
+        return this._client.get(`/vector_stores/${vectorStoreId}/file_batches/${batchId}`, {
             ...options,
-            headers: buildHeaders([{ 'OpenAI-Beta': 'assistants=v2' }, options?.headers]),
+            headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers },
         });
     }
     /**
      * Cancel a vector store file batch. This attempts to cancel the processing of
      * files in this batch as soon as possible.
      */
-    cancel(batchID, params, options) {
-        const { vector_store_id } = params;
-        return this._client.post(path `/vector_stores/${vector_store_id}/file_batches/${batchID}/cancel`, {
+    cancel(vectorStoreId, batchId, options) {
+        return this._client.post(`/vector_stores/${vectorStoreId}/file_batches/${batchId}/cancel`, {
             ...options,
-            headers: buildHeaders([{ 'OpenAI-Beta': 'assistants=v2' }, options?.headers]),
+            headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers },
         });
     }
     /**
@@ -44,12 +41,11 @@ export class FileBatches extends APIResource {
         const batch = await this.create(vectorStoreId, body);
         return await this.poll(vectorStoreId, batch.id, options);
     }
     /**
      * Returns a list of vector store files in a batch.
      */
-    listFiles(batchID, params, options) {
-        const { vector_store_id, ...query } = params;
-        return this._client.getAPIList(path `/vector_stores/${vector_store_id}/file_batches/${batchID}/files`, (CursorPage), { query, ...options, headers: buildHeaders([{ 'OpenAI-Beta': 'assistants=v2' }, options?.headers]) });
+    listFiles(vectorStoreId, batchId, query = {}, options) {
+        if (isRequestOptions(query)) {
+            return this.listFiles(vectorStoreId, batchId, {}, query);
+        }
+        return this._client.getAPIList(`/vector_stores/${vectorStoreId}/file_batches/${batchId}/files`, VectorStoreFilesPage, { query, ...options, headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers } });
     }
     /**
      * Wait for the given file batch to be processed.
@@ -57,16 +53,13 @@ export class FileBatches extends APIResource {
      * Note: this will return even if one of the files failed to process, you need to
      * check batch.file_counts.failed_count to handle this case.
      */
-    async poll(vectorStoreID, batchID, options) {
-        const headers = buildHeaders([
-            options?.headers,
-            {
-                'X-Stainless-Poll-Helper': 'true',
-                'X-Stainless-Custom-Poll-Interval': options?.pollIntervalMs?.toString() ?? undefined,
-            },
-        ]);
+    async poll(vectorStoreId, batchId, options) {
+        const headers = { ...options?.headers, 'X-Stainless-Poll-Helper': 'true' };
+        if (options?.pollIntervalMs) {
+            headers['X-Stainless-Custom-Poll-Interval'] = options.pollIntervalMs.toString();
+        }
         while (true) {
-            const { data: batch, response } = await this.retrieve(batchID, { vector_store_id: vectorStoreID }, {
+            const { data: batch, response } = await this.retrieve(vectorStoreId, batchId, {
                 ...options,
                 headers,
             }).withResponse();
@@ -126,4 +119,5 @@ export class FileBatches extends APIResource {
         });
     }
 }
+export { VectorStoreFilesPage };
 //# sourceMappingURL=file-batches.mjs.map
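
For call sites, the net effect of the diff above is a move from v5-style params objects back to v4-style positional arguments. A hedged usage sketch, assuming a late v4 release where vector stores are exposed at the top level of the client and OPENAI_API_KEY is set in the environment:

import OpenAI from 'openai';

const client = new OpenAI();

// v4-style positional arguments, matching the signatures added in this diff:
const batch = await client.vectorStores.fileBatches.createAndPoll('vs_123', {
    file_ids: ['file-abc123'],
});

// As the comment on poll() warns, a finished batch can still contain failures:
console.log(batch.file_counts.failed_count);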