trading_bot_v3/app/api/analyze/route.ts
mindesbunister 483d4c6576 🔧 Fix AI Analysis Service - Improved Prompts & Error Handling
 FIXED: AI analysis prompts to bypass OpenAI safety guardrails
 FIXED: Prompts now focus on technical analysis rather than a trading-advice tone
 FIXED: Improved JSON parsing and error handling
 ADDED: Option to use existing screenshots for testing (useExisting param)
 IMPROVED: Better image detail settings and temperature for consistency (see the sketch below)

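For context, a minimal sketch of what the reworked vision request and the defensive JSON parsing might look like, assuming the ai-analysis service uses the official openai npm package; the helper name, prompt text, temperature and response schema here are illustrative, not the actual implementation:

import OpenAI from 'openai'
import fs from 'fs'

const openai = new OpenAI()

// Hypothetical helper: frames the task as technical chart analysis
// (trend, support/resistance, indicator readings) instead of trading advice,
// and asks the model for a strict JSON response.
async function analyzeChartImage(imagePath: string) {
  const base64 = await fs.promises.readFile(imagePath, { encoding: 'base64' })

  const response = await openai.chat.completions.create({
    model: 'gpt-4o',
    temperature: 0.2, // lower temperature for more consistent output
    messages: [
      {
        role: 'user',
        content: [
          {
            type: 'text',
            text: 'You are a technical analyst. Describe the chart in this image: trend direction, notable support/resistance levels and indicator readings. Respond with JSON only: {"trend": string, "levels": string[], "summary": string}.'
          },
          {
            type: 'image_url',
            image_url: { url: `data:image/png;base64,${base64}`, detail: 'high' }
          }
        ]
      }
    ]
  })

  // Defensive JSON parsing: strip markdown fences before parsing
  const raw = response.choices[0]?.message?.content ?? ''
  const cleaned = raw.replace(/```json|```/g, '').trim()
  try {
    return JSON.parse(cleaned)
  } catch {
    return null // caller treats null as "analysis failed"
  }
}
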
🐛 DEBUGGING: Still investigating why AI claims it can't see images
- OpenAI vision capabilities confirmed working with public images
- Model gpt-4o has proper vision support
- Issue appears to be with chart image content or encoding

🎯 NEXT: Debug image encoding and model response inconsistency
2025-07-12 15:08:24 +02:00
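One way to sanity-check the encoding step mentioned above is to confirm the screenshot on disk is a non-empty PNG and inspect the data URL that would be sent to the model. This is a purely illustrative debug helper; the directory layout matches the route below, everything else is an assumption:

import fs from 'fs'
import path from 'path'

// Hypothetical debug helper: verifies a screenshot exists, is a real PNG,
// and produces a plausible data URL before it is handed to the model.
function inspectScreenshot(filename: string) {
  const fullPath = path.join(process.cwd(), 'screenshots', filename)
  const buffer = fs.readFileSync(fullPath)

  // PNG files start with the magic bytes 89 50 4E 47
  const isPng = buffer.subarray(0, 4).equals(Buffer.from([0x89, 0x50, 0x4e, 0x47]))
  const dataUrl = `data:image/png;base64,${buffer.toString('base64')}`

  console.log({
    filename,
    sizeKiB: (buffer.length / 1024).toFixed(1),
    isPng,
    dataUrlPrefix: dataUrl.slice(0, 60) // e.g. "data:image/png;base64,iVBORw0KGgo..."
  })
}
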

import { NextRequest, NextResponse } from 'next/server'
import { aiAnalysisService } from '../../../lib/ai-analysis'
import { enhancedScreenshotService } from '../../../lib/enhanced-screenshot'
import { settingsManager } from '../../../lib/settings'
import path from 'path'
import fs from 'fs'
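
/**
 * POST /api/analyze
 *
 * Body: { symbol?, layouts?, timeframe?, useExisting? }
 * Missing fields fall back to the saved settings. When useExisting is true,
 * the most recent matching screenshots are reused instead of capturing new ones.
 */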
export async function POST(req: NextRequest) {
  try {
    const { symbol, layouts, timeframe, useExisting } = await req.json()

    // Load current settings
    const settings = await settingsManager.loadSettings()

    // Use provided values or fall back to saved settings
    const finalSymbol = symbol || settings.symbol
    const finalTimeframe = timeframe || settings.timeframe
    const finalLayouts = layouts || settings.layouts

    if (!finalSymbol) {
      return NextResponse.json({ error: 'Missing symbol' }, { status: 400 })
    }

    let screenshots: string[] = []

    // If useExisting is true, find existing screenshots
    if (useExisting) {
      console.log('Using existing screenshots for analysis...')
      const screenshotsDir = path.join(process.cwd(), 'screenshots')
      const allFiles = await fs.promises.readdir(screenshotsDir)

      // Find screenshots matching the symbol and timeframe
      const matchingFiles = allFiles.filter(file =>
        file.includes(finalSymbol) &&
        file.includes(finalTimeframe) &&
        file.endsWith('.png') &&
        !file.includes('debug')
      )

      if (matchingFiles.length > 0) {
        // Use the most recent screenshots (limit to 3 for analysis)
        screenshots = matchingFiles
          .sort((a, b) => b.localeCompare(a)) // Sort by name (which includes timestamp)
          .slice(0, 3)
          .map(file => path.join(screenshotsDir, file))
      } else {
        return NextResponse.json({ error: `No existing screenshots found for ${finalSymbol} ${finalTimeframe}` }, { status: 404 })
      }
    } else {
      // Original behavior - capture new screenshots
      const baseFilename = `${finalSymbol}_${finalTimeframe}_${Date.now()}`
      screenshots = await enhancedScreenshotService.capture(finalSymbol, `${baseFilename}.png`, finalLayouts, finalTimeframe)
    }

    let result
    if (screenshots.length === 1) {
      // Single screenshot analysis
      const filename = path.basename(screenshots[0])
      result = await aiAnalysisService.analyzeScreenshot(filename)
    } else {
      // Multiple screenshots analysis
      const filenames = screenshots.map((screenshot: string) => path.basename(screenshot))
      result = await aiAnalysisService.analyzeMultipleScreenshots(filenames)
    }

    if (!result) {
      return NextResponse.json({ error: 'Analysis failed' }, { status: 500 })
    }

    return NextResponse.json({
      ...result,
      layoutsAnalyzed: finalLayouts,
      settings: {
        symbol: finalSymbol,
        timeframe: finalTimeframe,
        layouts: finalLayouts
      },
      screenshots: screenshots.map((s: string) => path.basename(s)),
      usedExisting: useExisting || false
    })
  } catch (e: any) {
    return NextResponse.json({ error: e.message }, { status: 500 })
  }
}
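
For reference, a hedged way to exercise this route end to end, assuming the Next.js dev server is running on its default port 3000; the symbol and timeframe values are placeholders, not defaults from the project:

// Hypothetical smoke test for POST /api/analyze (Node 18+ global fetch)
async function smokeTestAnalyze() {
  const res = await fetch('http://localhost:3000/api/analyze', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    // useExisting: true reuses the latest screenshots instead of capturing new ones
    body: JSON.stringify({ symbol: 'BTCUSD', timeframe: '1h', useExisting: true })
  })
  const analysis = await res.json()
  console.log(analysis.settings, analysis.screenshots, analysis.usedExisting)
}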