Features:
- FastAPI backend with stocks, news, signals, watchlist, and analytics endpoints
- React frontend with a TailwindCSS dark-mode trading dashboard
- Celery workers for news fetching, sentiment analysis, and pattern detection
- TimescaleDB schema for time-series stock data
- Docker Compose setup for all services
- OpenAI integration for sentiment analysis
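The analytics endpoints shown in the file below are registered on a standalone APIRouter, so the backend has to mount that router on the FastAPI app. A minimal wiring sketch (the app.api.analytics module path and the /api/analytics prefix are assumptions, not taken from the file) could look like:

from fastapi import FastAPI
from app.api import analytics  # assumed location of the router defined below

app = FastAPI()
app.include_router(analytics.router, prefix="/api/analytics", tags=["analytics"])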
141 lines · 4.0 KiB · Python
"""
|
|
Analytics API Endpoints
|
|
"""
|
|
|
|
from typing import Optional
|
|
from datetime import datetime, timedelta
|
|
from fastapi import APIRouter, Depends, Query
|
|
from sqlalchemy.ext.asyncio import AsyncSession
|
|
from sqlalchemy import select, func
|
|
|
|
from app.core.database import get_db
|
|
from app.models.stock import Stock
|
|
from app.models.news import NewsArticle
|
|
from app.models.signal import BuySignal
|
|
from app.models.panic import PanicEvent
|
|
|
|
router = APIRouter()
|
|
|
|
|
|
@router.get("/dashboard")
|
|
async def get_dashboard_stats(db: AsyncSession = Depends(get_db)):
|
|
"""Get overview statistics for the dashboard."""
|
|
now = datetime.utcnow()
|
|
last_24h = now - timedelta(hours=24)
|
|
last_7d = now - timedelta(days=7)
|
|
|
|
# Count stocks
|
|
stocks_count = await db.execute(
|
|
select(func.count(Stock.id)).where(Stock.is_active == True)
|
|
)
|
|
|
|
# Count news (last 24h)
|
|
news_count = await db.execute(
|
|
select(func.count(NewsArticle.id))
|
|
.where(NewsArticle.published_at >= last_24h)
|
|
)
|
|
|
|
# Count active signals
|
|
signals_count = await db.execute(
|
|
select(func.count(BuySignal.id))
|
|
.where(BuySignal.status == "active")
|
|
)
|
|
|
|
# Average sentiment (last 24h)
|
|
avg_sentiment = await db.execute(
|
|
select(func.avg(NewsArticle.sentiment_score))
|
|
.where(NewsArticle.published_at >= last_24h)
|
|
.where(NewsArticle.sentiment_score.isnot(None))
|
|
)
|
|
|
|
return {
|
|
"stocks_tracked": stocks_count.scalar() or 0,
|
|
"news_last_24h": news_count.scalar() or 0,
|
|
"active_signals": signals_count.scalar() or 0,
|
|
"avg_sentiment_24h": round(avg_sentiment.scalar() or 0, 2),
|
|
"timestamp": now.isoformat(),
|
|
}
|
|
|
|
|
|
@router.get("/sentiment/trend")
|
|
async def get_sentiment_trend(
|
|
db: AsyncSession = Depends(get_db),
|
|
days: int = Query(7, ge=1, le=30, description="Number of days"),
|
|
):
|
|
"""Get sentiment trend over time."""
|
|
# This would aggregate sentiment by day
|
|
# Placeholder for now
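    #
    # A possible implementation (a sketch, assuming the NewsArticle columns
    # already used in this module and a PostgreSQL/TimescaleDB backend where
    # date_trunc is available) could bucket average sentiment_score by day:
    #
    #     since = datetime.utcnow() - timedelta(days=days)
    #     day = func.date_trunc("day", NewsArticle.published_at).label("day")
    #     result = await db.execute(
    #         select(day, func.avg(NewsArticle.sentiment_score))
    #         .where(NewsArticle.published_at >= since)
    #         .where(NewsArticle.sentiment_score.isnot(None))
    #         .group_by(day)
    #         .order_by(day)
    #     )
    #     trend = [
    #         {"date": d.date().isoformat(), "avg_sentiment": round(float(s), 2)}
    #         for d, s in result.all()
    #     ]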
    return {
        "days": days,
        "trend": [],
        "message": "Sentiment trend data will be populated by the workers",
    }


@router.get("/sector/panic")
async def get_sector_panic_levels(db: AsyncSession = Depends(get_db)):
    """Get current panic levels by sector."""
    # This would calculate average sentiment by sector
    # Placeholder for now
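    #
    # A possible implementation (a sketch; Stock.sector and NewsArticle.stock_id
    # are assumed columns, not confirmed by this file) could join recent articles
    # to their stocks and average sentiment per sector:
    #
    #     last_24h = datetime.utcnow() - timedelta(hours=24)
    #     result = await db.execute(
    #         select(Stock.sector, func.avg(NewsArticle.sentiment_score))
    #         .join(NewsArticle, NewsArticle.stock_id == Stock.id)
    #         .where(NewsArticle.published_at >= last_24h)
    #         .where(NewsArticle.sentiment_score.isnot(None))
    #         .group_by(Stock.sector)
    #     )
    #     sectors = [
    #         {"sector": sector, "avg_sentiment": round(float(avg), 2)}
    #         for sector, avg in result.all()
    #     ]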
    return {
        "sectors": [],
        "message": "Sector panic data will be populated by the workers",
    }


@router.get("/patterns/top")
async def get_top_patterns(
    db: AsyncSession = Depends(get_db),
    limit: int = Query(10, ge=1, le=50),
):
    """Get top historical patterns with best recovery rates."""
    # This would query historical_patterns table
    # Placeholder for now
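    #
    # A possible implementation (a sketch; a HistoricalPattern model mapped to
    # the historical_patterns table, with a recovery_rate column and the module
    # path shown here, is assumed and not defined in this file) could order
    # patterns by recovery rate:
    #
    #     from app.models.pattern import HistoricalPattern  # assumed path
    #
    #     result = await db.execute(
    #         select(HistoricalPattern)
    #         .order_by(HistoricalPattern.recovery_rate.desc())
    #         .limit(limit)
    #     )
    #     patterns = result.scalars().all()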
    return {
        "patterns": [],
        "message": "Pattern data will be populated by the pattern matcher",
    }


@router.get("/panic-events/recent")
async def get_recent_panic_events(
    db: AsyncSession = Depends(get_db),
    days: int = Query(30, ge=1, le=90, description="Number of days"),
    limit: int = Query(20, ge=1, le=50),
):
    """Get recent panic events."""
    since = datetime.utcnow() - timedelta(days=days)

    query = (
        select(PanicEvent)
        .where(PanicEvent.start_time >= since)
        .order_by(PanicEvent.start_time.desc())
        .limit(limit)
    )
    result = await db.execute(query)
    events = result.scalars().all()
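
    # Note: the PanicEvent ORM objects below are returned as-is, with no
    # response_model declared, so serialization relies on FastAPI's default
    # encoding; in practice a Pydantic response model (or an explicit
    # conversion to dicts) may be needed for a clean JSON payload.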
    return {
        "days": days,
        "count": len(events),
        "events": events,
    }


@router.get("/performance")
async def get_signal_performance(
    db: AsyncSession = Depends(get_db),
    days: int = Query(90, ge=30, le=365, description="Number of days to analyze"),
):
    """Get performance metrics for past signals."""
    # This would analyze triggered signals and their outcomes
    # Placeholder for now
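    #
    # A possible implementation (a sketch; the created_at, triggered_at, and
    # realized_return columns on BuySignal are assumed here, not confirmed by
    # this file) could summarize signals raised within the window:
    #
    #     since = datetime.utcnow() - timedelta(days=days)
    #     result = await db.execute(
    #         select(BuySignal).where(BuySignal.created_at >= since)
    #     )
    #     signals = result.scalars().all()
    #     triggered = [s for s in signals if s.triggered_at is not None]
    #     returns = [
    #         s.realized_return for s in triggered if s.realized_return is not None
    #     ]
    #     avg_return = round(sum(returns) / len(returns), 2) if returns else 0
    #     win_rate = (
    #         round(100 * sum(1 for r in returns if r > 0) / len(returns), 1)
    #         if returns else 0
    #     )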
    return {
        "days": days,
        "total_signals": 0,
        "triggered_signals": 0,
        "avg_return": 0,
        "win_rate": 0,
        "message": "Performance data requires historical signal outcomes",
    }