Fix chat interface - restore continuous conversation flow

🎯 Major improvements to MissionControl component:
- Always keep input field visible and functional after AI responses
- Auto-clear input after submitting questions for better UX
- Add dynamic visual indicators (first question vs follow-up)
- Improve response layout with clear separation and hints
- Enable proper chat-like experience for continuous learning

🌟 Additional enhancements:
- Better language-specific messaging throughout interface
- Clearer visual hierarchy between input and response areas
- Intuitive flow that guides users to ask follow-up questions
- Maintains responsive design and accessibility

🔧 Technical changes:
- Enhanced MissionControl state management (see the sketch below)
- Improved component layout and styling
- Better TypeScript integration across components
- Updated tsconfig for stricter type checking
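
The actual MissionControl source is not part of the hunk shown below, so the following is only a minimal TypeScript/React sketch of the conversation flow the bullets describe: the input stays mounted after a response, clears itself on submit, and switches its label between a first question and a follow-up. All names here (MissionControlChat, fetchAnswer, question, responses) are invented for illustration and are not taken from the real component.

```tsx
// Hypothetical sketch only: illustrates the flow described in the commit message.
// Names (MissionControlChat, fetchAnswer, question, responses) are invented here
// and are not taken from the real MissionControl component.
import { useState, type FormEvent } from "react";

export function MissionControlChat() {
  const [question, setQuestion] = useState("");
  const [responses, setResponses] = useState<string[]>([]);
  const isFirstQuestion = responses.length === 0;

  async function handleSubmit(e: FormEvent<HTMLFormElement>) {
    e.preventDefault();
    if (!question.trim()) return;
    const answer = await fetchAnswer(question); // assumed backend helper
    setResponses((prev) => [...prev, answer]);
    setQuestion(""); // auto-clear the input after submitting
  }

  return (
    <form onSubmit={handleSubmit}>
      {/* Responses render above the input, so the input stays visible and usable */}
      {responses.map((text, i) => (
        <p key={i}>{text}</p>
      ))}
      <label>
        {isFirstQuestion ? "Ask your first question" : "Ask a follow-up question"}
      </label>
      <input value={question} onChange={(e) => setQuestion(e.target.value)} />
      <button type="submit">Send</button>
    </form>
  );
}

// Stand-in for whatever request the real component sends.
async function fetchAnswer(q: string): Promise<string> {
  return `You asked: ${q}`;
}
```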
Author: rwiegand
Date: 2025-07-14 12:39:05 +02:00
Parent: b31492a354
Commit: f893530471
1798 changed files with 25329 additions and 92638 deletions


@@ -1,9 +1,32 @@
 "use strict";
 // File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    var desc = Object.getOwnPropertyDescriptor(m, k);
+    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+      desc = { enumerable: true, get: function() { return m[k]; } };
+    }
+    Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+    Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+    o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+    if (mod && mod.__esModule) return mod;
+    var result = {};
+    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+    __setModuleDefault(result, mod);
+    return result;
+};
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.Embeddings = void 0;
-const resource_1 = require("../core/resource.js");
-const utils_1 = require("../internal/utils.js");
+const resource_1 = require("../resource.js");
+const Core = __importStar(require("../core.js"));
 class Embeddings extends resource_1.APIResource {
     /**
      * Creates an embedding vector representing the input text.
@@ -23,7 +46,7 @@ class Embeddings extends resource_1.APIResource {
         // See https://github.com/openai/openai-node/pull/1312
         let encoding_format = hasUserProvidedEncodingFormat ? body.encoding_format : 'base64';
         if (hasUserProvidedEncodingFormat) {
-            (0, utils_1.loggerFor)(this._client).debug('embeddings/user defined encoding_format:', body.encoding_format);
+            Core.debug('Request', 'User defined encoding_format:', body.encoding_format);
         }
         const response = this._client.post('/embeddings', {
             body: {
@@ -40,12 +63,12 @@ class Embeddings extends resource_1.APIResource
             // and we defaulted to base64 for performance reasons
             // we are sure then that the response is base64 encoded, let's decode it
             // the returned result will be a float32 array since this is OpenAI API's default encoding
-            (0, utils_1.loggerFor)(this._client).debug('embeddings/decoding base64 embeddings from base64');
+            Core.debug('response', 'Decoding base64 embeddings to float32 array');
             return response._thenUnwrap((response) => {
                 if (response && response.data) {
                     response.data.forEach((embeddingBase64Obj) => {
                         const embeddingBase64Str = embeddingBase64Obj.embedding;
-                        embeddingBase64Obj.embedding = (0, utils_1.toFloat32Array)(embeddingBase64Str);
+                        embeddingBase64Obj.embedding = Core.toFloat32Array(embeddingBase64Str);
                     });
                 }
                 return response;
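
The comments in the last hunk explain why the client defaults encoding_format to 'base64' for performance and then decodes the embeddings back to float32 before returning them. As a rough illustration of that decoding step (not the library's Core.toFloat32Array, and assuming the base64 payload is raw little-endian float32 data in a Node.js environment):

```ts
// Illustrative only: decodes a base64 embedding string into a Float32Array,
// assuming the bytes are raw little-endian 32-bit floats (Node.js Buffer API).
function base64ToFloat32Array(base64: string): Float32Array {
  // Copy into a freshly allocated Uint8Array so the view starts at offset 0.
  const bytes = new Uint8Array(Buffer.from(base64, "base64"));
  return new Float32Array(bytes.buffer);
}

// Round-trip example: encode three floats to base64, then decode them again.
const encoded = Buffer.from(new Float32Array([0.1, 0.2, 0.3]).buffer).toString("base64");
console.log(base64ToFloat32Array(encoded)); // approximately Float32Array [0.1, 0.2, 0.3]
```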