commit 074787f067f21e2679c8d80bd5cd1cd59e6f8529 Author: mindesbunister Date: Thu Jan 8 14:15:51 2026 +0100 Initial project structure: MarketScanner - Fear-to-Fortune Trading Intelligence Features: - FastAPI backend with stocks, news, signals, watchlist, analytics endpoints - React frontend with TailwindCSS dark mode trading dashboard - Celery workers for news fetching, sentiment analysis, pattern detection - TimescaleDB schema for time-series stock data - Docker Compose setup for all services - OpenAI integration for sentiment analysis diff --git a/.env.example b/.env.example new file mode 100644 index 0000000..5ba98f5 --- /dev/null +++ b/.env.example @@ -0,0 +1,108 @@ +# MarketScanner Environment Configuration +# Copy this file to .env and fill in your values + +# ============================================================================= +# DATABASE +# ============================================================================= +POSTGRES_HOST=db +POSTGRES_PORT=5432 +POSTGRES_DB=marketscanner +POSTGRES_USER=marketscanner +POSTGRES_PASSWORD=CHANGE_ME_STRONG_PASSWORD + +# ============================================================================= +# REDIS +# ============================================================================= +REDIS_HOST=redis +REDIS_PORT=6379 +REDIS_PASSWORD=CHANGE_ME_REDIS_PASSWORD + +# ============================================================================= +# RABBITMQ +# ============================================================================= +RABBITMQ_HOST=rabbitmq +RABBITMQ_PORT=5672 +RABBITMQ_USER=marketscanner +RABBITMQ_PASSWORD=CHANGE_ME_RABBITMQ_PASSWORD + +# ============================================================================= +# API KEYS - STOCK DATA +# ============================================================================= +# Alpha Vantage (free tier: 5 calls/min, 500 calls/day) +# Get yours at: https://www.alphavantage.co/support/#api-key +ALPHA_VANTAGE_API_KEY=your_alpha_vantage_key + +# Polygon.io (optional, for real-time data) +# Get yours at: https://polygon.io/ +POLYGON_API_KEY=your_polygon_key + +# Yahoo Finance (no key needed, but rate limited) +YAHOO_FINANCE_ENABLED=true + +# ============================================================================= +# API KEYS - NEWS +# ============================================================================= +# NewsAPI.org (free tier: 100 requests/day) +# Get yours at: https://newsapi.org/register +NEWS_API_KEY=your_newsapi_key + +# Finnhub (free tier available) +# Get yours at: https://finnhub.io/ +FINNHUB_API_KEY=your_finnhub_key + +# ============================================================================= +# API KEYS - AI/NLP +# ============================================================================= +# OpenAI (for sentiment analysis) +# Get yours at: https://platform.openai.com/api-keys +OPENAI_API_KEY=your_openai_key +OPENAI_MODEL=gpt-4o-mini + +# Alternative: Use local LLM (Ollama) +USE_LOCAL_LLM=false +OLLAMA_HOST=http://ollama:11434 +OLLAMA_MODEL=llama3.2 + +# ============================================================================= +# APPLICATION SETTINGS +# ============================================================================= +# Backend +BACKEND_HOST=0.0.0.0 +BACKEND_PORT=8000 +SECRET_KEY=CHANGE_ME_GENERATE_A_RANDOM_STRING +DEBUG=false + +# Frontend +FRONTEND_PORT=3000 +VITE_API_URL=http://localhost:8000 + +# ============================================================================= +# SCANNING SETTINGS +# 
============================================================================= +# How often to fetch news (in seconds) +NEWS_SCAN_INTERVAL=300 + +# How often to fetch stock prices (in seconds) +STOCK_PRICE_INTERVAL=60 + +# Maximum stocks to track simultaneously +MAX_TRACKED_STOCKS=500 + +# Panic threshold (-100 to 0, lower = more panic) +PANIC_THRESHOLD=-50 + +# ============================================================================= +# ALERT SETTINGS +# ============================================================================= +# Telegram alerts (optional) +TELEGRAM_BOT_TOKEN=your_telegram_bot_token +TELEGRAM_CHAT_ID=your_telegram_chat_id + +# Discord webhook (optional) +DISCORD_WEBHOOK_URL=your_discord_webhook_url + +# ============================================================================= +# DEVELOPMENT +# ============================================================================= +# Set to true for hot reload and debug logs +DEV_MODE=false diff --git a/.github/copilot-instructions.md b/.github/copilot-instructions.md new file mode 100644 index 0000000..feee148 --- /dev/null +++ b/.github/copilot-instructions.md @@ -0,0 +1,36 @@ +# Copilot Instructions for MarketScanner + +## Project Overview +MarketScanner is a "fear-to-fortune" trading intelligence system that identifies buying opportunities by analyzing how stocks historically respond to panic-inducing news. + +## Tech Stack +- **Backend**: Python 3.12 + FastAPI + Celery + PostgreSQL/TimescaleDB + Redis +- **Frontend**: React 18 + TypeScript + TailwindCSS + Vite +- **Infrastructure**: Docker + Docker Compose + +## Key Concepts +1. **Panic Detection**: Monitor news sentiment and price drops to identify panic events +2. **Pattern Matching**: Match current panic against historical recovery patterns +3. **Confidence Scoring**: Calculate buy signal confidence based on historical success rates +4. **Real-time Monitoring**: Continuous news and price data fetching via Celery workers + +## Code Style +- Python: Follow PEP 8, use type hints, async/await for database operations +- TypeScript: Strict mode, functional components with hooks +- Use structured logging (structlog) +- Prefer composition over inheritance + +## Database +- TimescaleDB hypertables for time-series data (stock_prices) +- UUID primary keys +- Soft deletes where appropriate + +## API Design +- RESTful endpoints under `/api/v1/` +- Pydantic schemas for validation +- Pagination with skip/limit + +## Testing +- pytest for Python +- Mock external APIs in tests +- Test critical business logic (pattern matching, confidence scoring) diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..d195bb3 --- /dev/null +++ b/.gitignore @@ -0,0 +1,68 @@ +# Environment +.env +.env.local +.env.*.local + +# Python +__pycache__/ +*.py[cod] +*$py.class +*.so +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +*.egg-info/ +.installed.cfg +*.egg +.pytest_cache/ +.coverage +htmlcov/ +.mypy_cache/ + +# Node.js +node_modules/ +npm-debug.log* +yarn-debug.log* +yarn-error.log* +.pnpm-debug.log* + +# Build outputs +frontend/dist/ +backend/logs/ + +# IDE +.idea/ +.vscode/ +*.swp +*.swo +*~ + +# OS +.DS_Store +Thumbs.db + +# Docker +docker-compose.override.yml + +# Logs +*.log +logs/ + +# Database +*.db +*.sqlite + +# Secrets (never commit these!) 
+*.pem +*.key +secrets/ diff --git a/README.md b/README.md new file mode 100644 index 0000000..72c21c3 --- /dev/null +++ b/README.md @@ -0,0 +1,129 @@ +# πŸ“ˆ MarketScanner + +> **"Buy when there's blood in the streets, even if the blood is your own."** β€” Baron Rothschild + +A fear-to-fortune trading intelligence system that identifies buying opportunities by analyzing how stocks historically respond to panic-inducing news. + +## 🎯 What This Does + +1. **Monitors News** - Real-time scanning of financial news, social media, SEC filings +2. **Tracks Sentiment** - NLP-powered sentiment analysis on every piece of news +3. **Correlates with Price** - Links news events to actual stock movements +4. **Finds Patterns** - "Last time $TICKER had this type of news, it dropped X% then recovered Y% in Z months" +5. **Signals Opportunities** - Shows you when panic creates buying opportunities based on historical behavior + +## πŸ—οΈ Architecture + +``` +β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” +β”‚ MarketScanner β”‚ +β”œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€ +β”‚ News Scrapers β†’ Sentiment Analysis β†’ Pattern Matching β”‚ +β”‚ ↓ ↓ ↓ β”‚ +β”‚ TimescaleDB (Time-series data storage) β”‚ +β”‚ ↓ β”‚ +β”‚ FastAPI Backend β†’ React Dashboard β”‚ +β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ +``` + +## πŸ› οΈ Tech Stack + +- **Backend**: Python 3.12 + FastAPI +- **Frontend**: React 18 + TypeScript + TailwindCSS +- **Database**: PostgreSQL + TimescaleDB +- **Cache**: Redis +- **Queue**: Celery + RabbitMQ +- **NLP**: OpenAI API / Local LLM +- **Charts**: TradingView Lightweight Charts +- **Containerization**: Docker + Docker Compose + +## πŸš€ Quick Start + +```bash +# Clone the repository +git clone git@gitea.egonetix.de:root/marketscanner.git +cd marketscanner + +# Copy environment template +cp .env.example .env + +# Edit .env with your API keys +nano .env + +# Start all services +docker-compose up -d + +# Access the dashboard +open http://localhost:3000 +``` + +## πŸ“ Project Structure + +``` +marketscanner/ +β”œβ”€β”€ backend/ # FastAPI backend +β”‚ β”œβ”€β”€ app/ +β”‚ β”‚ β”œβ”€β”€ api/ # API routes +β”‚ β”‚ β”œβ”€β”€ core/ # Config, security +β”‚ β”‚ β”œβ”€β”€ models/ # Database models +β”‚ β”‚ β”œβ”€β”€ schemas/ # Pydantic schemas +β”‚ β”‚ β”œβ”€β”€ services/ # Business logic +β”‚ β”‚ β”‚ β”œβ”€β”€ news/ # News scrapers +β”‚ β”‚ β”‚ β”œβ”€β”€ sentiment/ # NLP analysis +β”‚ β”‚ β”‚ β”œβ”€β”€ stocks/ # Stock data fetchers +β”‚ β”‚ β”‚ └── patterns/ # Pattern matching +β”‚ β”‚ └── workers/ # Celery tasks +β”‚ β”œβ”€β”€ tests/ +β”‚ └── requirements.txt +β”œβ”€β”€ frontend/ # React dashboard +β”‚ β”œβ”€β”€ src/ +β”‚ β”‚ β”œβ”€β”€ components/ +β”‚ β”‚ β”œβ”€β”€ pages/ +β”‚ β”‚ β”œβ”€β”€ hooks/ +β”‚ β”‚ └── services/ +β”‚ └── package.json +β”œβ”€β”€ docker/ # Docker configs +β”œβ”€β”€ docker-compose.yml +β”œβ”€β”€ .env.example +└── README.md +``` + +## πŸ”‘ Required API Keys + +| Service | Purpose | Cost | +|---------|---------|------| +| Alpha Vantage | Stock data | Free tier available | +| News API | News aggregation | Free tier available | +| OpenAI | Sentiment analysis | Pay per use | +| Polygon.io | 
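Real-time data (optional) | Paid |
+
+## 🧠 Confidence Scoring (Sketch)
+
+Buy signals carry a 0–1 confidence score derived from historical success rates.
+The real logic will live in `backend/app/services/patterns/`; the snippet below
+is only an illustrative sketch with made-up weighting, not the shipped code:
+
+```python
+def confidence_score(recovery_returns: list[float], current_drawdown: float) -> float:
+    """Historical win rate, damped when the current drawdown is unusually deep."""
+    if not recovery_returns:
+        return 0.0
+    win_rate = sum(1 for r in recovery_returns if r > 0) / len(recovery_returns)
+    depth_penalty = min(abs(current_drawdown) / 100, 0.5)  # deeper drop, less certainty
+    return round(win_rate * (1 - depth_penalty), 4)
+```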
+
+## πŸ“Š Features
+
+### Dashboard
+- Real-time panic score monitoring
+- Sector heat maps
+- Historical pattern overlays
+- Buy signal alerts with confidence scores
+
+### Panic Detection
+- Sentiment scoring (-100 to +100)
+- Volume spike detection
+- Price velocity measurement
+- Fear index correlation
+
+### Pattern Matching
+- Company-specific recovery patterns
+- Sector-wide panic analysis
+- Event-type categorization (scandal, earnings miss, macro events)
+
+## ⚠️ Disclaimer
+
+This software is for educational and research purposes only. It is not financial advice. Trading stocks involves risk of loss. Past performance does not guarantee future results. Always do your own research and consult with a qualified financial advisor.
+
+## πŸ“œ License
+
+MIT License - See [LICENSE](LICENSE) for details.
+
+---
+
+*"The time to buy is when there's blood in the streets."* πŸ©ΈπŸ“ˆ
diff --git a/backend/Dockerfile b/backend/Dockerfile
new file mode 100644
index 0000000..da06f4b
--- /dev/null
+++ b/backend/Dockerfile
@@ -0,0 +1,26 @@
+FROM python:3.12-slim
+
+WORKDIR /app
+
+# Install system dependencies
+RUN apt-get update && apt-get install -y \
+    gcc \
+    libpq-dev \
+    curl \
+    && rm -rf /var/lib/apt/lists/*
+
+# Install Python dependencies
+COPY requirements.txt .
+RUN pip install --no-cache-dir -r requirements.txt
+
+# Copy application code
+COPY . .
+
+# Create logs directory
+RUN mkdir -p /app/logs
+
+# Expose port
+EXPOSE 8000
+
+# Run the application (--reload is for development; drop it for a production image)
+CMD ["uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "8000", "--reload"]
diff --git a/backend/app/api/__init__.py b/backend/app/api/__init__.py
new file mode 100644
index 0000000..b46c7b7
--- /dev/null
+++ b/backend/app/api/__init__.py
@@ -0,0 +1,16 @@
+"""
+API Router - Main entry point for all API routes
+"""
+
+from fastapi import APIRouter
+
+from app.api.endpoints import stocks, news, signals, watchlist, analytics
+
+router = APIRouter()
+
+# Include all endpoint routers
+router.include_router(stocks.router, prefix="/stocks", tags=["Stocks"])
+router.include_router(news.router, prefix="/news", tags=["News"])
+router.include_router(signals.router, prefix="/signals", tags=["Buy Signals"])
+router.include_router(watchlist.router, prefix="/watchlist", tags=["Watchlist"])
+router.include_router(analytics.router, prefix="/analytics", tags=["Analytics"])
diff --git a/backend/app/api/endpoints/__init__.py b/backend/app/api/endpoints/__init__.py
new file mode 100644
index 0000000..7246130
--- /dev/null
+++ b/backend/app/api/endpoints/__init__.py
@@ -0,0 +1 @@
+"""API endpoints module."""
diff --git a/backend/app/api/endpoints/analytics.py b/backend/app/api/endpoints/analytics.py
new file mode 100644
index 0000000..a259b54
--- /dev/null
+++ b/backend/app/api/endpoints/analytics.py
@@ -0,0 +1,139 @@
+"""
+Analytics API Endpoints
+"""
+
+from datetime import datetime, timedelta
+from fastapi import APIRouter, Depends, Query
+from sqlalchemy.ext.asyncio import AsyncSession
+from sqlalchemy import select, func
+
+from app.core.database import get_db
+from app.models.stock import Stock
+from app.models.news import NewsArticle
+from app.models.signal import BuySignal
+from app.models.panic import PanicEvent
+
+router = APIRouter()
+
+
+@router.get("/dashboard")
+async def get_dashboard_stats(db: AsyncSession = Depends(get_db)):
+    """Get overview statistics for the dashboard."""
+    now = datetime.utcnow()
+    last_24h = now - timedelta(hours=24)
+    last_7d = now - 
timedelta(days=7) + + # Count stocks + stocks_count = await db.execute( + select(func.count(Stock.id)).where(Stock.is_active == True) + ) + + # Count news (last 24h) + news_count = await db.execute( + select(func.count(NewsArticle.id)) + .where(NewsArticle.published_at >= last_24h) + ) + + # Count active signals + signals_count = await db.execute( + select(func.count(BuySignal.id)) + .where(BuySignal.status == "active") + ) + + # Average sentiment (last 24h) + avg_sentiment = await db.execute( + select(func.avg(NewsArticle.sentiment_score)) + .where(NewsArticle.published_at >= last_24h) + .where(NewsArticle.sentiment_score.isnot(None)) + ) + + return { + "stocks_tracked": stocks_count.scalar() or 0, + "news_last_24h": news_count.scalar() or 0, + "active_signals": signals_count.scalar() or 0, + "avg_sentiment_24h": round(avg_sentiment.scalar() or 0, 2), + "timestamp": now.isoformat(), + } + + +@router.get("/sentiment/trend") +async def get_sentiment_trend( + db: AsyncSession = Depends(get_db), + days: int = Query(7, ge=1, le=30, description="Number of days"), +): + """Get sentiment trend over time.""" + # This would aggregate sentiment by day + # Placeholder for now + return { + "days": days, + "trend": [], + "message": "Sentiment trend data will be populated by the workers", + } + + +@router.get("/sector/panic") +async def get_sector_panic_levels(db: AsyncSession = Depends(get_db)): + """Get current panic levels by sector.""" + # This would calculate average sentiment by sector + # Placeholder for now + return { + "sectors": [], + "message": "Sector panic data will be populated by the workers", + } + + +@router.get("/patterns/top") +async def get_top_patterns( + db: AsyncSession = Depends(get_db), + limit: int = Query(10, ge=1, le=50), +): + """Get top historical patterns with best recovery rates.""" + # This would query historical_patterns table + # Placeholder for now + return { + "patterns": [], + "message": "Pattern data will be populated by the pattern matcher", + } + + +@router.get("/panic-events/recent") +async def get_recent_panic_events( + db: AsyncSession = Depends(get_db), + days: int = Query(30, ge=1, le=90, description="Number of days"), + limit: int = Query(20, ge=1, le=50), +): + """Get recent panic events.""" + since = datetime.utcnow() - timedelta(days=days) + + query = ( + select(PanicEvent) + .where(PanicEvent.start_time >= since) + .order_by(PanicEvent.start_time.desc()) + .limit(limit) + ) + result = await db.execute(query) + events = result.scalars().all() + + return { + "days": days, + "count": len(events), + "events": events, + } + + +@router.get("/performance") +async def get_signal_performance( + db: AsyncSession = Depends(get_db), + days: int = Query(90, ge=30, le=365, description="Number of days to analyze"), +): + """Get performance metrics for past signals.""" + # This would analyze triggered signals and their outcomes + # Placeholder for now + return { + "days": days, + "total_signals": 0, + "triggered_signals": 0, + "avg_return": 0, + "win_rate": 0, + "message": "Performance data requires historical signal outcomes", + } diff --git a/backend/app/api/endpoints/news.py b/backend/app/api/endpoints/news.py new file mode 100644 index 0000000..986b545 --- /dev/null +++ b/backend/app/api/endpoints/news.py @@ -0,0 +1,122 @@ +""" +News API Endpoints +""" + +from typing import List, Optional +from datetime import datetime, timedelta +from uuid import UUID +from fastapi import APIRouter, Depends, HTTPException, Query +from sqlalchemy.ext.asyncio import AsyncSession 
+from sqlalchemy import select, desc + +from app.core.database import get_db +from app.models.news import NewsArticle +from app.schemas.news import NewsResponse, NewsWithSentiment + +router = APIRouter() + + +@router.get("/", response_model=List[NewsResponse]) +async def list_news( + db: AsyncSession = Depends(get_db), + source: Optional[str] = Query(None, description="Filter by source"), + sentiment: Optional[str] = Query(None, description="Filter by sentiment: positive, negative, neutral"), + hours: int = Query(24, ge=1, le=168, description="News from last N hours"), + skip: int = Query(0, ge=0), + limit: int = Query(50, ge=1, le=100), +): + """List recent news articles.""" + since = datetime.utcnow() - timedelta(hours=hours) + + query = select(NewsArticle).where(NewsArticle.published_at >= since) + + if source: + query = query.where(NewsArticle.source == source) + if sentiment: + query = query.where(NewsArticle.sentiment_label == sentiment) + + query = query.order_by(desc(NewsArticle.published_at)).offset(skip).limit(limit) + result = await db.execute(query) + return result.scalars().all() + + +@router.get("/sources") +async def list_sources(db: AsyncSession = Depends(get_db)): + """Get list of all news sources.""" + query = select(NewsArticle.source).distinct() + result = await db.execute(query) + sources = [row[0] for row in result.fetchall() if row[0]] + return {"sources": sorted(sources)} + + +@router.get("/stock/{symbol}", response_model=List[NewsWithSentiment]) +async def get_news_for_stock( + symbol: str, + db: AsyncSession = Depends(get_db), + hours: int = Query(72, ge=1, le=720, description="News from last N hours"), + skip: int = Query(0, ge=0), + limit: int = Query(50, ge=1, le=100), +): + """Get news articles mentioning a specific stock.""" + # This would use the news_stock_mentions join table + # For now, we search in title/content + since = datetime.utcnow() - timedelta(hours=hours) + search_term = f"%{symbol.upper()}%" + + query = ( + select(NewsArticle) + .where(NewsArticle.published_at >= since) + .where( + (NewsArticle.title.ilike(search_term)) | + (NewsArticle.content.ilike(search_term)) + ) + .order_by(desc(NewsArticle.published_at)) + .offset(skip) + .limit(limit) + ) + result = await db.execute(query) + return result.scalars().all() + + +@router.get("/panic") +async def get_panic_news( + db: AsyncSession = Depends(get_db), + threshold: float = Query(-50.0, ge=-100, le=0, description="Sentiment threshold"), + hours: int = Query(24, ge=1, le=168, description="News from last N hours"), + limit: int = Query(20, ge=1, le=50), +): + """Get the most panic-inducing news articles.""" + since = datetime.utcnow() - timedelta(hours=hours) + + query = ( + select(NewsArticle) + .where(NewsArticle.published_at >= since) + .where(NewsArticle.sentiment_score <= threshold) + .order_by(NewsArticle.sentiment_score.asc()) + .limit(limit) + ) + result = await db.execute(query) + articles = result.scalars().all() + + return { + "threshold": threshold, + "hours": hours, + "count": len(articles), + "articles": articles, + } + + +@router.get("/{article_id}", response_model=NewsWithSentiment) +async def get_article( + article_id: UUID, + db: AsyncSession = Depends(get_db), +): + """Get a specific news article with full details.""" + query = select(NewsArticle).where(NewsArticle.id == article_id) + result = await db.execute(query) + article = result.scalar_one_or_none() + + if not article: + raise HTTPException(status_code=404, detail="Article not found") + + return article diff --git 
a/backend/app/api/endpoints/signals.py b/backend/app/api/endpoints/signals.py new file mode 100644 index 0000000..d6df398 --- /dev/null +++ b/backend/app/api/endpoints/signals.py @@ -0,0 +1,123 @@ +""" +Buy Signals API Endpoints +""" + +from typing import List, Optional +from datetime import datetime, timedelta +from uuid import UUID +from fastapi import APIRouter, Depends, HTTPException, Query +from sqlalchemy.ext.asyncio import AsyncSession +from sqlalchemy import select, desc + +from app.core.database import get_db +from app.models.signal import BuySignal +from app.schemas.signal import SignalResponse, SignalWithDetails + +router = APIRouter() + + +@router.get("/", response_model=List[SignalResponse]) +async def list_signals( + db: AsyncSession = Depends(get_db), + status: Optional[str] = Query("active", description="Signal status: active, triggered, expired"), + min_confidence: float = Query(0.5, ge=0, le=1, description="Minimum confidence score"), + skip: int = Query(0, ge=0), + limit: int = Query(50, ge=1, le=100), +): + """List buy signals ordered by confidence.""" + query = select(BuySignal).where(BuySignal.confidence_score >= min_confidence) + + if status: + query = query.where(BuySignal.status == status) + + query = query.order_by(desc(BuySignal.confidence_score)).offset(skip).limit(limit) + result = await db.execute(query) + return result.scalars().all() + + +@router.get("/top") +async def get_top_signals( + db: AsyncSession = Depends(get_db), + limit: int = Query(10, ge=1, le=20), +): + """Get top buy signals by confidence score.""" + query = ( + select(BuySignal) + .where(BuySignal.status == "active") + .order_by(desc(BuySignal.confidence_score)) + .limit(limit) + ) + result = await db.execute(query) + signals = result.scalars().all() + + return { + "count": len(signals), + "signals": signals, + } + + +@router.get("/stock/{symbol}", response_model=List[SignalResponse]) +async def get_signals_for_stock( + symbol: str, + db: AsyncSession = Depends(get_db), + include_historical: bool = Query(False, description="Include past signals"), +): + """Get buy signals for a specific stock.""" + # We need to join with stocks table + # For now, placeholder + return [] + + +@router.get("/{signal_id}", response_model=SignalWithDetails) +async def get_signal( + signal_id: UUID, + db: AsyncSession = Depends(get_db), +): + """Get detailed information about a specific signal.""" + query = select(BuySignal).where(BuySignal.id == signal_id) + result = await db.execute(query) + signal = result.scalar_one_or_none() + + if not signal: + raise HTTPException(status_code=404, detail="Signal not found") + + return signal + + +@router.post("/{signal_id}/trigger") +async def trigger_signal( + signal_id: UUID, + db: AsyncSession = Depends(get_db), +): + """Mark a signal as triggered (you bought the stock).""" + query = select(BuySignal).where(BuySignal.id == signal_id) + result = await db.execute(query) + signal = result.scalar_one_or_none() + + if not signal: + raise HTTPException(status_code=404, detail="Signal not found") + + signal.status = "triggered" + signal.triggered_at = datetime.utcnow() + await db.commit() + + return {"message": "Signal marked as triggered", "signal_id": signal_id} + + +@router.post("/{signal_id}/dismiss") +async def dismiss_signal( + signal_id: UUID, + db: AsyncSession = Depends(get_db), +): + """Dismiss a signal (not interested).""" + query = select(BuySignal).where(BuySignal.id == signal_id) + result = await db.execute(query) + signal = result.scalar_one_or_none() + + if not 
signal: + raise HTTPException(status_code=404, detail="Signal not found") + + signal.status = "cancelled" + await db.commit() + + return {"message": "Signal dismissed", "signal_id": signal_id} diff --git a/backend/app/api/endpoints/stocks.py b/backend/app/api/endpoints/stocks.py new file mode 100644 index 0000000..01507eb --- /dev/null +++ b/backend/app/api/endpoints/stocks.py @@ -0,0 +1,131 @@ +""" +Stocks API Endpoints +""" + +from typing import List, Optional +from uuid import UUID +from fastapi import APIRouter, Depends, HTTPException, Query +from sqlalchemy.ext.asyncio import AsyncSession +from sqlalchemy import select, func + +from app.core.database import get_db +from app.models.stock import Stock +from app.schemas.stock import StockResponse, StockCreate, StockWithPrice + +router = APIRouter() + + +@router.get("/", response_model=List[StockResponse]) +async def list_stocks( + db: AsyncSession = Depends(get_db), + sector: Optional[str] = Query(None, description="Filter by sector"), + industry: Optional[str] = Query(None, description="Filter by industry"), + search: Optional[str] = Query(None, description="Search by symbol or name"), + skip: int = Query(0, ge=0), + limit: int = Query(50, ge=1, le=100), +): + """List all tracked stocks with optional filters.""" + query = select(Stock).where(Stock.is_active == True) + + if sector: + query = query.where(Stock.sector == sector) + if industry: + query = query.where(Stock.industry == industry) + if search: + search_term = f"%{search}%" + query = query.where( + (Stock.symbol.ilike(search_term)) | + (Stock.name.ilike(search_term)) + ) + + query = query.offset(skip).limit(limit).order_by(Stock.symbol) + result = await db.execute(query) + return result.scalars().all() + + +@router.get("/sectors") +async def list_sectors(db: AsyncSession = Depends(get_db)): + """Get list of all unique sectors.""" + query = select(Stock.sector).distinct().where(Stock.is_active == True) + result = await db.execute(query) + sectors = [row[0] for row in result.fetchall() if row[0]] + return {"sectors": sorted(sectors)} + + +@router.get("/industries") +async def list_industries( + db: AsyncSession = Depends(get_db), + sector: Optional[str] = Query(None, description="Filter by sector"), +): + """Get list of all unique industries.""" + query = select(Stock.industry).distinct().where(Stock.is_active == True) + if sector: + query = query.where(Stock.sector == sector) + result = await db.execute(query) + industries = [row[0] for row in result.fetchall() if row[0]] + return {"industries": sorted(industries)} + + +@router.get("/{symbol}", response_model=StockWithPrice) +async def get_stock( + symbol: str, + db: AsyncSession = Depends(get_db), +): + """Get detailed stock information including latest price.""" + query = select(Stock).where(Stock.symbol == symbol.upper()) + result = await db.execute(query) + stock = result.scalar_one_or_none() + + if not stock: + raise HTTPException(status_code=404, detail=f"Stock {symbol} not found") + + # TODO: Add latest price from stock_prices table + return stock + + +@router.post("/", response_model=StockResponse) +async def add_stock( + stock: StockCreate, + db: AsyncSession = Depends(get_db), +): + """Add a new stock to track.""" + # Check if already exists + existing = await db.execute( + select(Stock).where(Stock.symbol == stock.symbol.upper()) + ) + if existing.scalar_one_or_none(): + raise HTTPException( + status_code=400, + detail=f"Stock {stock.symbol} already exists" + ) + + db_stock = Stock( + symbol=stock.symbol.upper(), + 
name=stock.name, + sector=stock.sector, + industry=stock.industry, + exchange=stock.exchange, + country=stock.country, + ) + db.add(db_stock) + await db.commit() + await db.refresh(db_stock) + return db_stock + + +@router.delete("/{symbol}") +async def remove_stock( + symbol: str, + db: AsyncSession = Depends(get_db), +): + """Remove a stock from tracking (soft delete).""" + query = select(Stock).where(Stock.symbol == symbol.upper()) + result = await db.execute(query) + stock = result.scalar_one_or_none() + + if not stock: + raise HTTPException(status_code=404, detail=f"Stock {symbol} not found") + + stock.is_active = False + await db.commit() + return {"message": f"Stock {symbol} removed from tracking"} diff --git a/backend/app/api/endpoints/watchlist.py b/backend/app/api/endpoints/watchlist.py new file mode 100644 index 0000000..2c94fa8 --- /dev/null +++ b/backend/app/api/endpoints/watchlist.py @@ -0,0 +1,141 @@ +""" +Watchlist API Endpoints +""" + +from typing import List, Optional +from uuid import UUID +from fastapi import APIRouter, Depends, HTTPException, Query +from sqlalchemy.ext.asyncio import AsyncSession +from sqlalchemy import select + +from app.core.database import get_db +from app.models.watchlist import Watchlist +from app.models.stock import Stock +from app.schemas.watchlist import WatchlistResponse, WatchlistCreate, WatchlistUpdate + +router = APIRouter() + + +@router.get("/", response_model=List[WatchlistResponse]) +async def list_watchlist( + db: AsyncSession = Depends(get_db), + priority: Optional[int] = Query(None, ge=1, le=3, description="Filter by priority"), +): + """Get all items in watchlist.""" + query = select(Watchlist).where(Watchlist.is_active == True) + + if priority: + query = query.where(Watchlist.priority == priority) + + query = query.order_by(Watchlist.priority) + result = await db.execute(query) + return result.scalars().all() + + +@router.post("/", response_model=WatchlistResponse) +async def add_to_watchlist( + item: WatchlistCreate, + db: AsyncSession = Depends(get_db), +): + """Add a stock to the watchlist.""" + # Find the stock + stock_query = select(Stock).where(Stock.symbol == item.symbol.upper()) + stock_result = await db.execute(stock_query) + stock = stock_result.scalar_one_or_none() + + if not stock: + raise HTTPException(status_code=404, detail=f"Stock {item.symbol} not found") + + # Check if already in watchlist + existing = await db.execute( + select(Watchlist).where(Watchlist.stock_id == stock.id) + ) + if existing.scalar_one_or_none(): + raise HTTPException( + status_code=400, + detail=f"Stock {item.symbol} is already in watchlist" + ) + + watchlist_item = Watchlist( + stock_id=stock.id, + panic_alert_threshold=item.panic_alert_threshold, + price_alert_low=item.price_alert_low, + price_alert_high=item.price_alert_high, + priority=item.priority, + notes=item.notes, + ) + db.add(watchlist_item) + await db.commit() + await db.refresh(watchlist_item) + return watchlist_item + + +@router.put("/{watchlist_id}", response_model=WatchlistResponse) +async def update_watchlist_item( + watchlist_id: UUID, + update: WatchlistUpdate, + db: AsyncSession = Depends(get_db), +): + """Update a watchlist item.""" + query = select(Watchlist).where(Watchlist.id == watchlist_id) + result = await db.execute(query) + item = result.scalar_one_or_none() + + if not item: + raise HTTPException(status_code=404, detail="Watchlist item not found") + + update_data = update.model_dump(exclude_unset=True) + for key, value in update_data.items(): + setattr(item, key, 
value) + + await db.commit() + await db.refresh(item) + return item + + +@router.delete("/{watchlist_id}") +async def remove_from_watchlist( + watchlist_id: UUID, + db: AsyncSession = Depends(get_db), +): + """Remove a stock from the watchlist.""" + query = select(Watchlist).where(Watchlist.id == watchlist_id) + result = await db.execute(query) + item = result.scalar_one_or_none() + + if not item: + raise HTTPException(status_code=404, detail="Watchlist item not found") + + await db.delete(item) + await db.commit() + return {"message": "Removed from watchlist"} + + +@router.delete("/symbol/{symbol}") +async def remove_symbol_from_watchlist( + symbol: str, + db: AsyncSession = Depends(get_db), +): + """Remove a stock from watchlist by symbol.""" + # Find the stock + stock_query = select(Stock).where(Stock.symbol == symbol.upper()) + stock_result = await db.execute(stock_query) + stock = stock_result.scalar_one_or_none() + + if not stock: + raise HTTPException(status_code=404, detail=f"Stock {symbol} not found") + + # Find and remove watchlist item + watchlist_query = select(Watchlist).where(Watchlist.stock_id == stock.id) + watchlist_result = await db.execute(watchlist_query) + item = watchlist_result.scalar_one_or_none() + + if not item: + raise HTTPException( + status_code=404, + detail=f"Stock {symbol} is not in watchlist" + ) + + await db.delete(item) + await db.commit() + return {"message": f"Removed {symbol} from watchlist"} diff --git a/backend/app/core/__init__.py b/backend/app/core/__init__.py new file mode 100644 index 0000000..19e0bfb --- /dev/null +++ b/backend/app/core/__init__.py @@ -0,0 +1,6 @@ +"""Core module exports.""" + +from app.core.config import settings +from app.core.database import get_db, Base, AsyncSessionLocal + +__all__ = ["settings", "get_db", "Base", "AsyncSessionLocal"] diff --git a/backend/app/core/config.py b/backend/app/core/config.py new file mode 100644 index 0000000..b1f3c5e --- /dev/null +++ b/backend/app/core/config.py @@ -0,0 +1,97 @@ +""" +Application Configuration +""" + +from typing import List +from pydantic_settings import BaseSettings +from functools import lru_cache + + +class Settings(BaseSettings): + """Application settings loaded from environment variables.""" + + # Application + VERSION: str = "0.1.0" + DEBUG: bool = False + SECRET_KEY: str = "change-me-in-production" + + # Server + BACKEND_HOST: str = "0.0.0.0" + BACKEND_PORT: int = 8000 + CORS_ORIGINS: List[str] = ["http://localhost:3000", "http://localhost:5173"] + + # Database + POSTGRES_HOST: str = "localhost" + POSTGRES_PORT: int = 5432 + POSTGRES_DB: str = "marketscanner" + POSTGRES_USER: str = "marketscanner" + POSTGRES_PASSWORD: str = "changeme" + + @property + def DATABASE_URL(self) -> str: + return f"postgresql+asyncpg://{self.POSTGRES_USER}:{self.POSTGRES_PASSWORD}@{self.POSTGRES_HOST}:{self.POSTGRES_PORT}/{self.POSTGRES_DB}" + + @property + def DATABASE_URL_SYNC(self) -> str: + return f"postgresql://{self.POSTGRES_USER}:{self.POSTGRES_PASSWORD}@{self.POSTGRES_HOST}:{self.POSTGRES_PORT}/{self.POSTGRES_DB}" + + # Redis + REDIS_HOST: str = "localhost" + REDIS_PORT: int = 6379 + REDIS_PASSWORD: str = "" + + @property + def REDIS_URL(self) -> str: + if self.REDIS_PASSWORD: + return f"redis://:{self.REDIS_PASSWORD}@{self.REDIS_HOST}:{self.REDIS_PORT}/0" + return f"redis://{self.REDIS_HOST}:{self.REDIS_PORT}/0" + + # RabbitMQ + RABBITMQ_HOST: str = "localhost" + RABBITMQ_PORT: int = 5672 + RABBITMQ_USER: str = "guest" + RABBITMQ_PASSWORD: str = "guest" + + @property + def 
RABBITMQ_URL(self) -> str: + return f"amqp://{self.RABBITMQ_USER}:{self.RABBITMQ_PASSWORD}@{self.RABBITMQ_HOST}:{self.RABBITMQ_PORT}//" + + # API Keys - Stock Data + ALPHA_VANTAGE_API_KEY: str = "" + POLYGON_API_KEY: str = "" + YAHOO_FINANCE_ENABLED: bool = True + FINNHUB_API_KEY: str = "" + + # API Keys - News + NEWS_API_KEY: str = "" + + # API Keys - AI + OPENAI_API_KEY: str = "" + OPENAI_MODEL: str = "gpt-4o-mini" + USE_LOCAL_LLM: bool = False + OLLAMA_HOST: str = "http://localhost:11434" + OLLAMA_MODEL: str = "llama3.2" + + # Scanning Settings + NEWS_SCAN_INTERVAL: int = 300 # seconds + STOCK_PRICE_INTERVAL: int = 60 # seconds + MAX_TRACKED_STOCKS: int = 500 + PANIC_THRESHOLD: float = -50.0 + + # Alerts + TELEGRAM_BOT_TOKEN: str = "" + TELEGRAM_CHAT_ID: str = "" + DISCORD_WEBHOOK_URL: str = "" + + class Config: + env_file = ".env" + case_sensitive = True + + +@lru_cache() +def get_settings() -> Settings: + """Get cached settings instance.""" + return Settings() + + +settings = get_settings() diff --git a/backend/app/core/database.py b/backend/app/core/database.py new file mode 100644 index 0000000..a17054a --- /dev/null +++ b/backend/app/core/database.py @@ -0,0 +1,79 @@ +""" +Database Configuration and Session Management +""" + +from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine, async_sessionmaker +from sqlalchemy.orm import declarative_base +from sqlalchemy import create_engine +from sqlalchemy.orm import sessionmaker + +from app.core.config import settings + +# Async engine for FastAPI +async_engine = create_async_engine( + settings.DATABASE_URL, + echo=settings.DEBUG, + pool_pre_ping=True, + pool_size=10, + max_overflow=20, +) + +# Async session factory +AsyncSessionLocal = async_sessionmaker( + async_engine, + class_=AsyncSession, + expire_on_commit=False, + autocommit=False, + autoflush=False, +) + +# Sync engine for Celery workers +sync_engine = create_engine( + settings.DATABASE_URL_SYNC, + echo=settings.DEBUG, + pool_pre_ping=True, + pool_size=5, + max_overflow=10, +) + +# Sync session factory +SyncSessionLocal = sessionmaker( + sync_engine, + autocommit=False, + autoflush=False, +) + +# Base class for models +Base = declarative_base() + + +async def init_db(): + """Initialize database (create tables if needed).""" + # Tables are created by init.sql, but we can add migrations here + pass + + +async def get_db() -> AsyncSession: + """Dependency for getting async database session.""" + async with AsyncSessionLocal() as session: + try: + yield session + await session.commit() + except Exception: + await session.rollback() + raise + finally: + await session.close() + + +def get_sync_db(): + """Get sync database session for Celery workers.""" + db = SyncSessionLocal() + try: + yield db + db.commit() + except Exception: + db.rollback() + raise + finally: + db.close() diff --git a/backend/app/main.py b/backend/app/main.py new file mode 100644 index 0000000..da71cc7 --- /dev/null +++ b/backend/app/main.py @@ -0,0 +1,114 @@ +""" +MarketScanner - Fear-to-Fortune Trading Intelligence +Main FastAPI Application +""" + +from contextlib import asynccontextmanager +from fastapi import FastAPI +from fastapi.middleware.cors import CORSMiddleware +from fastapi.responses import JSONResponse +import structlog + +from app.core.config import settings +from app.core.database import init_db +from app.api import router as api_router + +# Configure structured logging +structlog.configure( + processors=[ + structlog.stdlib.filter_by_level, + structlog.processors.TimeStamper(fmt="iso"), 
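+        # JSONRenderer stays last: the earlier processors enrich the event dict,
+        # and the renderer then serializes it (one JSON object per log line).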
+ structlog.processors.JSONRenderer() + ], + wrapper_class=structlog.stdlib.BoundLogger, + context_class=dict, + logger_factory=structlog.stdlib.LoggerFactory(), +) + +logger = structlog.get_logger() + + +@asynccontextmanager +async def lifespan(app: FastAPI): + """Application lifespan events.""" + # Startup + logger.info("Starting MarketScanner API", version=settings.VERSION) + await init_db() + logger.info("Database initialized") + + yield + + # Shutdown + logger.info("Shutting down MarketScanner API") + + +# Create FastAPI application +app = FastAPI( + title="MarketScanner API", + description=""" + πŸš€ **MarketScanner** - Fear-to-Fortune Trading Intelligence + + A system that identifies buying opportunities by analyzing how stocks + historically respond to panic-inducing news. + + ## Features + + * **News Monitoring** - Real-time scanning of financial news + * **Sentiment Analysis** - NLP-powered sentiment scoring + * **Panic Detection** - Identify market fear events + * **Pattern Matching** - Historical recovery patterns + * **Buy Signals** - Confidence-scored opportunities + + *"Buy when there's blood in the streets."* β€” Baron Rothschild + """, + version=settings.VERSION, + docs_url="/docs", + redoc_url="/redoc", + lifespan=lifespan, +) + +# CORS middleware +app.add_middleware( + CORSMiddleware, + allow_origins=settings.CORS_ORIGINS, + allow_credentials=True, + allow_methods=["*"], + allow_headers=["*"], +) + +# Include API routes +app.include_router(api_router, prefix="/api/v1") + + +@app.get("/", tags=["Root"]) +async def root(): + """Root endpoint with API info.""" + return { + "name": "MarketScanner API", + "version": settings.VERSION, + "description": "Fear-to-Fortune Trading Intelligence", + "docs": "/docs", + "health": "/health", + } + + +@app.get("/health", tags=["Health"]) +async def health_check(): + """Health check endpoint for Docker/Kubernetes.""" + return JSONResponse( + status_code=200, + content={ + "status": "healthy", + "version": settings.VERSION, + } + ) + + +if __name__ == "__main__": + import uvicorn + uvicorn.run( + "app.main:app", + host=settings.BACKEND_HOST, + port=settings.BACKEND_PORT, + reload=settings.DEBUG, + ) diff --git a/backend/app/models/__init__.py b/backend/app/models/__init__.py new file mode 100644 index 0000000..28d4402 --- /dev/null +++ b/backend/app/models/__init__.py @@ -0,0 +1,9 @@ +"""Database models.""" + +from app.models.stock import Stock +from app.models.news import NewsArticle +from app.models.signal import BuySignal +from app.models.panic import PanicEvent +from app.models.watchlist import Watchlist + +__all__ = ["Stock", "NewsArticle", "BuySignal", "PanicEvent", "Watchlist"] diff --git a/backend/app/models/news.py b/backend/app/models/news.py new file mode 100644 index 0000000..4eab3d1 --- /dev/null +++ b/backend/app/models/news.py @@ -0,0 +1,34 @@ +"""News article model.""" + +from sqlalchemy import Column, String, Text, Boolean, DateTime, Numeric +from sqlalchemy.dialects.postgresql import UUID +from sqlalchemy.sql import func +import uuid + +from app.core.database import Base + + +class NewsArticle(Base): + """News article table model.""" + + __tablename__ = "news_articles" + + id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4) + title = Column(Text, nullable=False) + content = Column(Text) + summary = Column(Text) + url = Column(Text, unique=True, nullable=False) + source = Column(String(100), nullable=False, index=True) + author = Column(String(255)) + published_at = Column(DateTime(timezone=True), 
nullable=False, index=True) + fetched_at = Column(DateTime(timezone=True), server_default=func.now()) + image_url = Column(Text) + + # Sentiment analysis results + sentiment_score = Column(Numeric(5, 2), index=True) # -100 to +100 + sentiment_label = Column(String(20)) # negative, neutral, positive + sentiment_confidence = Column(Numeric(5, 4)) + + # Processing status + is_processed = Column(Boolean, default=False) + processing_error = Column(Text) diff --git a/backend/app/models/panic.py b/backend/app/models/panic.py new file mode 100644 index 0000000..fa81b98 --- /dev/null +++ b/backend/app/models/panic.py @@ -0,0 +1,48 @@ +"""Panic event model.""" + +from sqlalchemy import Column, String, Text, Boolean, DateTime, Numeric, Integer, ForeignKey +from sqlalchemy.dialects.postgresql import UUID +from sqlalchemy.sql import func +import uuid + +from app.core.database import Base + + +class PanicEvent(Base): + """Panic event table model.""" + + __tablename__ = "panic_events" + + id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4) + stock_id = Column(UUID(as_uuid=True), ForeignKey("stocks.id", ondelete="CASCADE"), nullable=False, index=True) + + # Event timing + start_time = Column(DateTime(timezone=True), nullable=False, index=True) + peak_time = Column(DateTime(timezone=True)) + end_time = Column(DateTime(timezone=True)) + + # Price impact + price_at_start = Column(Numeric(15, 4), nullable=False) + price_at_peak_panic = Column(Numeric(15, 4)) + price_at_end = Column(Numeric(15, 4)) + max_drawdown_percent = Column(Numeric(8, 4), index=True) + + # Sentiment + avg_sentiment_score = Column(Numeric(5, 2)) + min_sentiment_score = Column(Numeric(5, 2)) + news_volume = Column(Integer) + + # Recovery metrics + recovery_time_days = Column(Integer) + recovery_percent = Column(Numeric(8, 4)) + + # Classification + event_type = Column(String(100), index=True) # earnings_miss, scandal, lawsuit, macro, etc. 
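+    # Classification is free-form for now; these two fields bucket panic events
+    # into comparable historical cohorts for the pattern matcher.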
+ event_category = Column(String(50)) # company_specific, sector_wide, market_wide + + # Analysis + is_complete = Column(Boolean, default=False) + notes = Column(Text) + + created_at = Column(DateTime(timezone=True), server_default=func.now()) + updated_at = Column(DateTime(timezone=True), server_default=func.now(), onupdate=func.now()) diff --git a/backend/app/models/signal.py b/backend/app/models/signal.py new file mode 100644 index 0000000..34a8785 --- /dev/null +++ b/backend/app/models/signal.py @@ -0,0 +1,46 @@ +"""Buy signal model.""" + +from sqlalchemy import Column, String, DateTime, Numeric, Integer, ForeignKey +from sqlalchemy.dialects.postgresql import UUID +from sqlalchemy.sql import func +import uuid + +from app.core.database import Base + + +class BuySignal(Base): + """Buy signal table model.""" + + __tablename__ = "buy_signals" + + id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4) + stock_id = Column(UUID(as_uuid=True), ForeignKey("stocks.id", ondelete="CASCADE"), nullable=False, index=True) + panic_event_id = Column(UUID(as_uuid=True), ForeignKey("panic_events.id", ondelete="SET NULL")) + + # Signal details + signal_time = Column(DateTime(timezone=True), nullable=False, server_default=func.now(), index=True) + signal_price = Column(Numeric(15, 4), nullable=False) + + # Confidence scoring + confidence_score = Column(Numeric(5, 4), nullable=False, index=True) # 0 to 1 + + # Based on pattern + pattern_id = Column(UUID(as_uuid=True), ForeignKey("historical_patterns.id", ondelete="SET NULL")) + expected_recovery_percent = Column(Numeric(8, 4)) + expected_recovery_days = Column(Integer) + + # Current metrics + current_drawdown_percent = Column(Numeric(8, 4)) + current_sentiment_score = Column(Numeric(5, 2)) + + # Signal status + status = Column(String(20), default="active", index=True) # active, triggered, expired, cancelled + triggered_at = Column(DateTime(timezone=True)) + + # Outcome tracking + outcome_price = Column(Numeric(15, 4)) + outcome_percent = Column(Numeric(8, 4)) + outcome_days = Column(Integer) + + created_at = Column(DateTime(timezone=True), server_default=func.now()) + updated_at = Column(DateTime(timezone=True), server_default=func.now(), onupdate=func.now()) diff --git a/backend/app/models/stock.py b/backend/app/models/stock.py new file mode 100644 index 0000000..535ff54 --- /dev/null +++ b/backend/app/models/stock.py @@ -0,0 +1,26 @@ +"""Stock model.""" + +from sqlalchemy import Column, String, BigInteger, Boolean, DateTime +from sqlalchemy.dialects.postgresql import UUID +from sqlalchemy.sql import func +import uuid + +from app.core.database import Base + + +class Stock(Base): + """Stock table model.""" + + __tablename__ = "stocks" + + id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4) + symbol = Column(String(20), unique=True, nullable=False, index=True) + name = Column(String(255), nullable=False) + sector = Column(String(100), index=True) + industry = Column(String(100), index=True) + market_cap = Column(BigInteger) + exchange = Column(String(50)) + country = Column(String(100), default="USA") + is_active = Column(Boolean, default=True) + created_at = Column(DateTime(timezone=True), server_default=func.now()) + updated_at = Column(DateTime(timezone=True), server_default=func.now(), onupdate=func.now()) diff --git a/backend/app/models/watchlist.py b/backend/app/models/watchlist.py new file mode 100644 index 0000000..adf560d --- /dev/null +++ b/backend/app/models/watchlist.py @@ -0,0 +1,30 @@ +"""Watchlist model.""" + 
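+# One watchlist row per stock: stock_id is declared unique below, and the API
+# answers duplicate adds with 400 "already in watchlist".
+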
+from sqlalchemy import Column, String, Text, Boolean, DateTime, Numeric, Integer, ForeignKey +from sqlalchemy.dialects.postgresql import UUID +from sqlalchemy.sql import func +import uuid + +from app.core.database import Base + + +class Watchlist(Base): + """Watchlist table model.""" + + __tablename__ = "watchlist" + + id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4) + stock_id = Column(UUID(as_uuid=True), ForeignKey("stocks.id", ondelete="CASCADE"), nullable=False, unique=True) + + # Alert thresholds + panic_alert_threshold = Column(Numeric(5, 2), default=-50) + price_alert_low = Column(Numeric(15, 4)) + price_alert_high = Column(Numeric(15, 4)) + + # Preferences + priority = Column(Integer, default=1, index=True) # 1 = high, 2 = medium, 3 = low + notes = Column(Text) + + is_active = Column(Boolean, default=True) + created_at = Column(DateTime(timezone=True), server_default=func.now()) + updated_at = Column(DateTime(timezone=True), server_default=func.now(), onupdate=func.now()) diff --git a/backend/app/schemas/__init__.py b/backend/app/schemas/__init__.py new file mode 100644 index 0000000..b4b9124 --- /dev/null +++ b/backend/app/schemas/__init__.py @@ -0,0 +1,13 @@ +"""Pydantic schemas.""" + +from app.schemas.stock import StockCreate, StockResponse, StockWithPrice +from app.schemas.news import NewsCreate, NewsResponse, NewsWithSentiment +from app.schemas.signal import SignalResponse, SignalWithDetails +from app.schemas.watchlist import WatchlistCreate, WatchlistResponse, WatchlistUpdate + +__all__ = [ + "StockCreate", "StockResponse", "StockWithPrice", + "NewsCreate", "NewsResponse", "NewsWithSentiment", + "SignalResponse", "SignalWithDetails", + "WatchlistCreate", "WatchlistResponse", "WatchlistUpdate", +] diff --git a/backend/app/schemas/news.py b/backend/app/schemas/news.py new file mode 100644 index 0000000..29abc9b --- /dev/null +++ b/backend/app/schemas/news.py @@ -0,0 +1,43 @@ +"""News schemas.""" + +from typing import Optional +from datetime import datetime +from uuid import UUID +from pydantic import BaseModel, Field + + +class NewsBase(BaseModel): + """Base news schema.""" + title: str + url: str + source: str + published_at: datetime + + +class NewsCreate(NewsBase): + """Schema for creating a news article.""" + content: Optional[str] = None + summary: Optional[str] = None + author: Optional[str] = None + image_url: Optional[str] = None + + +class NewsResponse(NewsBase): + """Schema for news response.""" + id: UUID + summary: Optional[str] = None + author: Optional[str] = None + fetched_at: datetime + + class Config: + from_attributes = True + + +class NewsWithSentiment(NewsResponse): + """News response with sentiment analysis.""" + content: Optional[str] = None + image_url: Optional[str] = None + sentiment_score: Optional[float] = None + sentiment_label: Optional[str] = None + sentiment_confidence: Optional[float] = None + is_processed: bool = False diff --git a/backend/app/schemas/signal.py b/backend/app/schemas/signal.py new file mode 100644 index 0000000..39c0515 --- /dev/null +++ b/backend/app/schemas/signal.py @@ -0,0 +1,39 @@ +"""Signal schemas.""" + +from typing import Optional +from datetime import datetime +from uuid import UUID +from pydantic import BaseModel + + +class SignalBase(BaseModel): + """Base signal schema.""" + stock_id: UUID + signal_price: float + confidence_score: float + + +class SignalResponse(SignalBase): + """Schema for signal response.""" + id: UUID + signal_time: datetime + status: str + current_drawdown_percent: 
Optional[float] = None + current_sentiment_score: Optional[float] = None + expected_recovery_percent: Optional[float] = None + expected_recovery_days: Optional[int] = None + created_at: datetime + + class Config: + from_attributes = True + + +class SignalWithDetails(SignalResponse): + """Signal response with full details.""" + panic_event_id: Optional[UUID] = None + pattern_id: Optional[UUID] = None + triggered_at: Optional[datetime] = None + outcome_price: Optional[float] = None + outcome_percent: Optional[float] = None + outcome_days: Optional[int] = None + updated_at: datetime diff --git a/backend/app/schemas/stock.py b/backend/app/schemas/stock.py new file mode 100644 index 0000000..1e5f246 --- /dev/null +++ b/backend/app/schemas/stock.py @@ -0,0 +1,41 @@ +"""Stock schemas.""" + +from typing import Optional +from datetime import datetime +from uuid import UUID +from pydantic import BaseModel, Field + + +class StockBase(BaseModel): + """Base stock schema.""" + symbol: str = Field(..., min_length=1, max_length=20) + name: str = Field(..., min_length=1, max_length=255) + sector: Optional[str] = None + industry: Optional[str] = None + exchange: Optional[str] = None + country: str = "USA" + + +class StockCreate(StockBase): + """Schema for creating a stock.""" + pass + + +class StockResponse(StockBase): + """Schema for stock response.""" + id: UUID + market_cap: Optional[int] = None + is_active: bool + created_at: datetime + updated_at: datetime + + class Config: + from_attributes = True + + +class StockWithPrice(StockResponse): + """Stock response with latest price data.""" + latest_price: Optional[float] = None + price_change_24h: Optional[float] = None + price_change_percent_24h: Optional[float] = None + volume_24h: Optional[int] = None diff --git a/backend/app/schemas/watchlist.py b/backend/app/schemas/watchlist.py new file mode 100644 index 0000000..3b12cf6 --- /dev/null +++ b/backend/app/schemas/watchlist.py @@ -0,0 +1,42 @@ +"""Watchlist schemas.""" + +from typing import Optional +from datetime import datetime +from uuid import UUID +from pydantic import BaseModel, Field + + +class WatchlistBase(BaseModel): + """Base watchlist schema.""" + panic_alert_threshold: float = -50.0 + price_alert_low: Optional[float] = None + price_alert_high: Optional[float] = None + priority: int = Field(1, ge=1, le=3) + notes: Optional[str] = None + + +class WatchlistCreate(WatchlistBase): + """Schema for creating a watchlist item.""" + symbol: str + + +class WatchlistUpdate(BaseModel): + """Schema for updating a watchlist item.""" + panic_alert_threshold: Optional[float] = None + price_alert_low: Optional[float] = None + price_alert_high: Optional[float] = None + priority: Optional[int] = Field(None, ge=1, le=3) + notes: Optional[str] = None + is_active: Optional[bool] = None + + +class WatchlistResponse(WatchlistBase): + """Schema for watchlist response.""" + id: UUID + stock_id: UUID + is_active: bool + created_at: datetime + updated_at: datetime + + class Config: + from_attributes = True diff --git a/backend/app/workers/__init__.py b/backend/app/workers/__init__.py new file mode 100644 index 0000000..49af1b0 --- /dev/null +++ b/backend/app/workers/__init__.py @@ -0,0 +1,5 @@ +"""Celery workers module.""" + +from app.workers.celery_app import celery_app + +__all__ = ["celery_app"] diff --git a/backend/app/workers/celery_app.py b/backend/app/workers/celery_app.py new file mode 100644 index 0000000..80109e7 --- /dev/null +++ b/backend/app/workers/celery_app.py @@ -0,0 +1,69 @@ +""" +Celery Application 
Configuration +""" + +from celery import Celery +from celery.schedules import crontab + +from app.core.config import settings + +# Create Celery app +celery_app = Celery( + "marketscanner", + broker=settings.RABBITMQ_URL, + backend=settings.REDIS_URL, + include=[ + "app.workers.tasks.news_tasks", + "app.workers.tasks.stock_tasks", + "app.workers.tasks.sentiment_tasks", + "app.workers.tasks.pattern_tasks", + "app.workers.tasks.alert_tasks", + ], +) + +# Celery configuration +celery_app.conf.update( + task_serializer="json", + accept_content=["json"], + result_serializer="json", + timezone="UTC", + enable_utc=True, + task_track_started=True, + task_time_limit=300, # 5 minutes + worker_prefetch_multiplier=1, + worker_concurrency=4, +) + +# Beat schedule (periodic tasks) +celery_app.conf.beat_schedule = { + # Fetch news every 5 minutes + "fetch-news-every-5-minutes": { + "task": "app.workers.tasks.news_tasks.fetch_all_news", + "schedule": settings.NEWS_SCAN_INTERVAL, + }, + # Update stock prices every minute + "update-prices-every-minute": { + "task": "app.workers.tasks.stock_tasks.update_stock_prices", + "schedule": settings.STOCK_PRICE_INTERVAL, + }, + # Process unanalyzed news every 2 minutes + "analyze-sentiment-every-2-minutes": { + "task": "app.workers.tasks.sentiment_tasks.process_unanalyzed_news", + "schedule": 120, + }, + # Detect panic events every 5 minutes + "detect-panic-every-5-minutes": { + "task": "app.workers.tasks.pattern_tasks.detect_panic_events", + "schedule": 300, + }, + # Generate signals every 10 minutes + "generate-signals-every-10-minutes": { + "task": "app.workers.tasks.pattern_tasks.generate_buy_signals", + "schedule": 600, + }, + # Clean old data daily at midnight + "cleanup-daily": { + "task": "app.workers.tasks.news_tasks.cleanup_old_news", + "schedule": crontab(hour=0, minute=0), + }, +} diff --git a/backend/app/workers/tasks/__init__.py b/backend/app/workers/tasks/__init__.py new file mode 100644 index 0000000..231b317 --- /dev/null +++ b/backend/app/workers/tasks/__init__.py @@ -0,0 +1 @@ +"""Worker tasks module.""" diff --git a/backend/app/workers/tasks/alert_tasks.py b/backend/app/workers/tasks/alert_tasks.py new file mode 100644 index 0000000..4331249 --- /dev/null +++ b/backend/app/workers/tasks/alert_tasks.py @@ -0,0 +1,153 @@ +""" +Alert notification tasks +""" + +import httpx +import structlog + +from app.workers.celery_app import celery_app +from app.core.config import settings + +logger = structlog.get_logger() + + +@celery_app.task(name="app.workers.tasks.alert_tasks.send_telegram_alert") +def send_telegram_alert(message: str): + """Send alert via Telegram.""" + if not settings.TELEGRAM_BOT_TOKEN or not settings.TELEGRAM_CHAT_ID: + logger.warning("Telegram not configured") + return {"sent": False, "reason": "not_configured"} + + try: + url = f"https://api.telegram.org/bot{settings.TELEGRAM_BOT_TOKEN}/sendMessage" + payload = { + "chat_id": settings.TELEGRAM_CHAT_ID, + "text": message, + "parse_mode": "Markdown", + } + + with httpx.Client() as client: + response = client.post(url, json=payload) + response.raise_for_status() + + logger.info("Telegram alert sent") + return {"sent": True} + + except Exception as e: + logger.error("Failed to send Telegram alert", error=str(e)) + return {"sent": False, "error": str(e)} + + +@celery_app.task(name="app.workers.tasks.alert_tasks.send_discord_alert") +def send_discord_alert(message: str, embed: dict = None): + """Send alert via Discord webhook.""" + if not settings.DISCORD_WEBHOOK_URL: + 
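logger.warning("Discord not configured")
+        return {"sent": False, "reason": "not_configured"}
+
+    # Discord webhooks accept {"content": str, "embeds": [dict, ...]};
+    # an embed renders as a rich card in the channel.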
diff --git a/backend/app/workers/tasks/alert_tasks.py b/backend/app/workers/tasks/alert_tasks.py
new file mode 100644
index 0000000..4331249
--- /dev/null
+++ b/backend/app/workers/tasks/alert_tasks.py
@@ -0,0 +1,153 @@
+"""
+Alert notification tasks
+"""
+
+from typing import Optional
+
+import httpx
+import structlog
+
+from app.workers.celery_app import celery_app
+from app.core.config import settings
+
+logger = structlog.get_logger()
+
+
+@celery_app.task(name="app.workers.tasks.alert_tasks.send_telegram_alert")
+def send_telegram_alert(message: str):
+    """Send alert via Telegram."""
+    if not settings.TELEGRAM_BOT_TOKEN or not settings.TELEGRAM_CHAT_ID:
+        logger.warning("Telegram not configured")
+        return {"sent": False, "reason": "not_configured"}
+
+    try:
+        url = f"https://api.telegram.org/bot{settings.TELEGRAM_BOT_TOKEN}/sendMessage"
+        payload = {
+            "chat_id": settings.TELEGRAM_CHAT_ID,
+            "text": message,
+            "parse_mode": "Markdown",
+        }
+
+        with httpx.Client() as client:
+            response = client.post(url, json=payload)
+            response.raise_for_status()
+
+        logger.info("Telegram alert sent")
+        return {"sent": True}
+
+    except Exception as e:
+        logger.error("Failed to send Telegram alert", error=str(e))
+        return {"sent": False, "error": str(e)}
+
+
+@celery_app.task(name="app.workers.tasks.alert_tasks.send_discord_alert")
+def send_discord_alert(message: str, embed: Optional[dict] = None):
+    """Send alert via Discord webhook."""
+    if not settings.DISCORD_WEBHOOK_URL:
+        logger.warning("Discord not configured")
+        return {"sent": False, "reason": "not_configured"}
+
+    try:
+        payload = {"content": message}
+        if embed:
+            payload["embeds"] = [embed]
+
+        with httpx.Client() as client:
+            response = client.post(settings.DISCORD_WEBHOOK_URL, json=payload)
+            response.raise_for_status()
+
+        logger.info("Discord alert sent")
+        return {"sent": True}
+
+    except Exception as e:
+        logger.error("Failed to send Discord alert", error=str(e))
+        return {"sent": False, "error": str(e)}
+
+
+@celery_app.task(name="app.workers.tasks.alert_tasks.send_buy_signal_alert")
+def send_buy_signal_alert(signal_data: dict):
+    """Send formatted buy signal alert to all configured channels."""
+    logger.info("Sending buy signal alert", symbol=signal_data.get("symbol"))
+
+    # Format message
+    symbol = signal_data.get("symbol", "UNKNOWN")
+    confidence = signal_data.get("confidence", 0) * 100
+    current_price = signal_data.get("price", 0)
+    drawdown = signal_data.get("drawdown", 0)
+    expected_recovery = signal_data.get("expected_recovery", 0)
+
+    message = f"""
+🚨 *BUY SIGNAL: ${symbol}* 🚨
+
+📊 *Confidence:* {confidence:.1f}%
+💰 *Current Price:* ${current_price:.2f}
+📉 *Drawdown:* {drawdown:.1f}%
+📈 *Expected Recovery:* {expected_recovery:.1f}%
+
+_"Buy when there's blood in the streets"_
+    """.strip()
+
+    results = {
+        "telegram": None,
+        "discord": None,
+    }
+
+    # Call the alert tasks in-process: blocking on .delay(...).get() inside a
+    # running task raises RuntimeError in Celery by default and risks deadlock.
+    if settings.TELEGRAM_BOT_TOKEN:
+        results["telegram"] = send_telegram_alert(message)
+
+    # Send to Discord with embed
+    if settings.DISCORD_WEBHOOK_URL:
+        embed = {
+            "title": f"🚨 BUY SIGNAL: ${symbol}",
+            "color": 0x00ff00,  # Green
+            "fields": [
+                {"name": "Confidence", "value": f"{confidence:.1f}%", "inline": True},
+                {"name": "Price", "value": f"${current_price:.2f}", "inline": True},
+                {"name": "Drawdown", "value": f"{drawdown:.1f}%", "inline": True},
+                {"name": "Expected Recovery", "value": f"{expected_recovery:.1f}%", "inline": True},
+            ],
+            "footer": {"text": "MarketScanner • Buy the Fear"},
+        }
+        results["discord"] = send_discord_alert("", embed)
+
+    return results
+
+
+@celery_app.task(name="app.workers.tasks.alert_tasks.send_panic_alert")
+def send_panic_alert(panic_data: dict):
+    """Send formatted panic detection alert."""
+    logger.info("Sending panic alert", symbol=panic_data.get("symbol"))
+
+    symbol = panic_data.get("symbol", "UNKNOWN")
+    sentiment = panic_data.get("sentiment", 0)
+    price_drop = panic_data.get("price_drop", 0)
+    news_count = panic_data.get("news_count", 0)
+
+    message = f"""
+🔴 *PANIC DETECTED: ${symbol}* 🔴
+
+😱 *Sentiment Score:* {sentiment:.1f}
+📉 *Price Drop:* {price_drop:.1f}%
+📰 *News Volume:* {news_count} articles
+
+⏳ Monitoring for buying opportunity...
+    """.strip()
+
+    results = {}
+
+    # Same in-process calls as above (no .get() inside a task).
+    if settings.TELEGRAM_BOT_TOKEN:
+        results["telegram"] = send_telegram_alert(message)
+
+    if settings.DISCORD_WEBHOOK_URL:
+        embed = {
+            "title": f"🔴 PANIC DETECTED: ${symbol}",
+            "color": 0xff0000,  # Red
+            "fields": [
+                {"name": "Sentiment", "value": f"{sentiment:.1f}", "inline": True},
+                {"name": "Price Drop", "value": f"{price_drop:.1f}%", "inline": True},
+                {"name": "News Volume", "value": f"{news_count} articles", "inline": True},
+            ],
+            "footer": {"text": "MarketScanner • Watching for opportunity"},
+        }
+        results["discord"] = send_discord_alert("", embed)
+
+    return results
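
The alert senders above swallow transient HTTP failures after a single attempt. Since Celery supports task-level retries out of the box, a retrying variant is a natural hardening step. A hedged sketch using Celery's built-in mechanism; the task name is hypothetical and not part of this commit:

    @celery_app.task(bind=True, max_retries=3,
                     name="app.workers.tasks.alert_tasks.send_telegram_alert_with_retry")
    def send_telegram_alert_with_retry(self, message: str):
        # send_telegram_alert catches its own exceptions and reports via the
        # returned dict, so retry on the failure flag rather than an exception.
        result = send_telegram_alert(message)
        if not result.get("sent"):
            # Exponential backoff: 1s, 2s, 4s between attempts.
            raise self.retry(countdown=2 ** self.request.retries)
        return result
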
diff --git a/backend/app/workers/tasks/news_tasks.py b/backend/app/workers/tasks/news_tasks.py
new file mode 100644
index 0000000..40b1f58
--- /dev/null
+++ b/backend/app/workers/tasks/news_tasks.py
@@ -0,0 +1,108 @@
+"""
+News fetching tasks
+"""
+
+from datetime import datetime, timedelta, timezone
+
+import feedparser
+import structlog
+
+from app.workers.celery_app import celery_app
+from app.core.database import get_sync_db
+from app.core.config import settings
+
+logger = structlog.get_logger()
+
+# RSS feeds to monitor
+NEWS_FEEDS = [
+    # General financial news
+    {"name": "Yahoo Finance", "url": "https://finance.yahoo.com/news/rssindex"},
+    {"name": "Reuters Business", "url": "https://www.reutersagency.com/feed/?best-topics=business-finance&post_type=best"},
+    {"name": "CNBC", "url": "https://www.cnbc.com/id/100003114/device/rss/rss.html"},
+    {"name": "MarketWatch", "url": "https://feeds.marketwatch.com/marketwatch/topstories/"},
+    {"name": "Seeking Alpha", "url": "https://seekingalpha.com/market_currents.xml"},
+    {"name": "Bloomberg", "url": "https://www.bloomberg.com/feed/podcast/etf-report.xml"},
+
+    # Tech
+    {"name": "TechCrunch", "url": "https://techcrunch.com/feed/"},
+
+    # Crypto (because why not)
+    {"name": "CoinDesk", "url": "https://www.coindesk.com/arc/outboundfeeds/rss/"},
+]
+
+
+@celery_app.task(name="app.workers.tasks.news_tasks.fetch_all_news")
+def fetch_all_news():
+    """Fetch news from all configured sources."""
+    logger.info("Starting news fetch from all sources")
+    total_fetched = 0
+
+    for feed_config in NEWS_FEEDS:
+        try:
+            count = fetch_from_feed(feed_config["name"], feed_config["url"])
+            total_fetched += count
+        except Exception as e:
+            logger.error(
+                "Failed to fetch from feed",
+                feed=feed_config["name"],
+                error=str(e)
+            )
+
+    logger.info("News fetch complete", total_articles=total_fetched)
+    return {"fetched": total_fetched}
+
+
+@celery_app.task(name="app.workers.tasks.news_tasks.fetch_from_feed")
+def fetch_from_feed(source_name: str, feed_url: str) -> int:
+    """Fetch news from a single RSS feed."""
+    logger.info("Fetching from feed", source=source_name)
+
+    try:
+        feed = feedparser.parse(feed_url)
+        articles_saved = 0
+
+        for entry in feed.entries[:50]:  # Limit to the 50 most recent
+            try:
+                # Extract data
+                title = entry.get("title", "")
+                url = entry.get("link", "")
+                summary = entry.get("summary", "")
+                author = entry.get("author", "")
+
+                # Parse published date (feedparser returns a UTC struct_time)
+                published = entry.get("published_parsed") or entry.get("updated_parsed")
+                if published:
+                    published_at = datetime(*published[:6], tzinfo=timezone.utc)
+                else:
+                    published_at = datetime.now(timezone.utc)
+
+                # Save to database (skip if exists)
+                # This is a placeholder - actual implementation would use the db session
+                articles_saved += 1
+
+            except Exception as e:
+                logger.warning(
+                    "Failed to process article",
+                    title=entry.get("title", "unknown"),
+                    error=str(e)
+                )
+
+        logger.info("Feed processed", source=source_name, articles=articles_saved)
+        return articles_saved
+
+    except Exception as e:
+        logger.error("Failed to parse feed", source=source_name, error=str(e))
+        return 0
+
+
+@celery_app.task(name="app.workers.tasks.news_tasks.cleanup_old_news")
+def cleanup_old_news(days: int = 90):
+    """Remove news articles older than the specified number of days."""
+    logger.info("Starting news cleanup", days_to_keep=days)
+
+    cutoff = datetime.now(timezone.utc) - timedelta(days=days)
+
+    # Placeholder - actual implementation would delete from database
+    deleted_count = 0
+
+    logger.info("News cleanup complete", deleted=deleted_count)
+    return {"deleted": deleted_count}
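
The "save to database" placeholder in fetch_from_feed can lean on the UNIQUE constraint on news_articles.url from docker/db/init.sql to stay idempotent across repeated fetches. A hedged sketch, assuming get_sync_db() (imported above) yields a synchronous SQLAlchemy session; the helper function is illustrative:

    from sqlalchemy import text

    def save_article(db, source_name, title, url, summary, author, published_at):
        # ON CONFLICT on the unique url column silently skips re-fetched articles.
        db.execute(
            text(
                "INSERT INTO news_articles "
                "(title, summary, url, source, author, published_at) "
                "VALUES (:title, :summary, :url, :source, :author, :published_at) "
                "ON CONFLICT (url) DO NOTHING"
            ),
            {
                "title": title, "summary": summary, "url": url,
                "source": source_name, "author": author, "published_at": published_at,
            },
        )
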
diff --git a/backend/app/workers/tasks/pattern_tasks.py b/backend/app/workers/tasks/pattern_tasks.py
new file mode 100644
index 0000000..9e5a67d
--- /dev/null
+++ b/backend/app/workers/tasks/pattern_tasks.py
@@ -0,0 +1,142 @@
+"""
+Pattern detection and buy signal generation tasks
+"""
+
+from datetime import datetime, timedelta
+from typing import Optional
+
+import structlog
+
+from app.workers.celery_app import celery_app
+from app.core.config import settings
+
+logger = structlog.get_logger()
+
+
+@celery_app.task(name="app.workers.tasks.pattern_tasks.detect_panic_events")
+def detect_panic_events():
+    """Detect new panic events based on sentiment and price drops."""
+    logger.info("Starting panic event detection")
+
+    # Detection criteria:
+    # 1. Sentiment score drops below threshold
+    # 2. Price drops significantly (>5% in 24h)
+    # 3. News volume spikes
+
+    # Placeholder - actual implementation would:
+    # - Query recent news sentiment by stock
+    # - Check price movements
+    # - Create panic_events records
+
+    detected_count = 0
+
+    logger.info("Panic detection complete", detected=detected_count)
+    return {"detected": detected_count}
+
+
+@celery_app.task(name="app.workers.tasks.pattern_tasks.generate_buy_signals")
+def generate_buy_signals():
+    """Generate buy signals based on historical patterns."""
+    logger.info("Starting buy signal generation")
+
+    # Signal generation criteria:
+    # 1. An active panic event exists
+    # 2. Similar historical events had good recovery
+    # 3. Price is near or past the typical bottom
+    # 4. Volume indicates capitulation
+
+    # Placeholder - actual implementation would:
+    # - Find stocks with active panic events
+    # - Match against historical patterns
+    # - Calculate confidence scores
+    # - Create buy_signals records
+
+    signals_count = 0
+
+    logger.info("Signal generation complete", signals=signals_count)
+    return {"generated": signals_count}
+
+
+@celery_app.task(name="app.workers.tasks.pattern_tasks.analyze_historical_pattern")
+def analyze_historical_pattern(stock_id: str, event_type: str):
+    """Analyze historical patterns for a specific stock and event type."""
+    logger.info("Analyzing historical pattern", stock_id=stock_id, event_type=event_type)
+
+    # Would query past panic events for this stock and calculate statistics:
+    # - Average/median drawdown
+    # - Average/median recovery time
+    # - Average/median recovery percentage
+    # - Success rate (how often it recovered)
+
+    return {
+        "stock_id": stock_id,
+        "event_type": event_type,
+        "pattern": None,  # Would contain pattern data
+    }
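
analyze_historical_pattern and rebuild_patterns (below) describe the aggregation but leave it unimplemented. One detail worth reconciling: calculate_confidence_score reads historical_pattern["avg_drawdown"], while the historical_patterns table names the column avg_drawdown_percent. A hedged sketch of the statistics pass, assuming completed panic_events rows arrive as plain dicts (the query itself is omitted):

    from statistics import mean, median

    def build_pattern(events: list[dict]) -> dict:
        """Aggregate completed panic events for one (stock, event_type) pair."""
        drawdowns = [e["max_drawdown_percent"] for e in events]
        days = [e["recovery_time_days"] for e in events]
        recoveries = [e["recovery_percent"] for e in events]
        recovered = sum(1 for r in recoveries if r and r > 0)
        return {
            "avg_drawdown_percent": mean(drawdowns),
            "median_drawdown_percent": median(drawdowns),
            "avg_recovery_days": round(mean(days)),
            "median_recovery_days": round(median(days)),
            "avg_recovery_percent": mean(recoveries),
            "median_recovery_percent": median(recoveries),
            "event_count": len(events),
            "pattern_confidence": recovered / len(events),  # success rate
        }

The keys mirror the historical_patterns columns in docker/db/init.sql.
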
+
+
+@celery_app.task(name="app.workers.tasks.pattern_tasks.calculate_confidence_score")
+def calculate_confidence_score(
+    stock_id: str,
+    current_drawdown: float,
+    current_sentiment: float,
+    historical_pattern: dict,
+) -> float:
+    """Calculate confidence score for a potential buy signal."""
+
+    # Factors:
+    # 1. How close the current drawdown is to the historical average
+    # 2. How negative sentiment is (capitulation indicator)
+    # 3. Pattern reliability (sample size, consistency)
+    # 4. Market conditions (sector performance, overall market)
+
+    score = 0.5  # Base score
+
+    # Adjust based on drawdown match
+    if historical_pattern and historical_pattern.get("avg_drawdown"):
+        avg_drawdown = historical_pattern["avg_drawdown"]
+        drawdown_ratio = current_drawdown / avg_drawdown
+        if 0.8 <= drawdown_ratio <= 1.2:
+            score += 0.2  # Close to the historical average
+
+    # Adjust based on sentiment (more panic = higher score)
+    if current_sentiment < settings.PANIC_THRESHOLD:
+        panic_intensity = abs(current_sentiment - settings.PANIC_THRESHOLD) / 50
+        score += min(panic_intensity * 0.2, 0.2)
+
+    # Adjust based on pattern reliability
+    if historical_pattern and historical_pattern.get("event_count", 0) >= 3:
+        score += 0.1  # Multiple historical examples
+
+    return min(max(score, 0), 1)  # Clamp to 0-1
+
+
+@celery_app.task(name="app.workers.tasks.pattern_tasks.update_panic_event_status")
+def update_panic_event_status():
+    """Update panic events - check whether they have ended/recovered."""
+    logger.info("Updating panic event statuses")
+
+    # Check active (incomplete) panic events and mark them complete if:
+    # - Price has recovered to pre-panic levels
+    # - Sentiment has normalized
+    # - Enough time has passed
+
+    updated_count = 0
+
+    logger.info("Panic status update complete", updated=updated_count)
+    return {"updated": updated_count}
+
+
+@celery_app.task(name="app.workers.tasks.pattern_tasks.rebuild_patterns")
+def rebuild_patterns(stock_id: Optional[str] = None):
+    """Rebuild historical patterns from panic events."""
+    logger.info("Rebuilding patterns", stock_id=stock_id or "all")
+
+    # Aggregate all completed panic events, group by stock and event type,
+    # and recalculate the pattern statistics.
+
+    patterns_count = 0
+
+    logger.info("Pattern rebuild complete", patterns=patterns_count)
+    return {"rebuilt": patterns_count}
diff --git a/backend/app/workers/tasks/sentiment_tasks.py b/backend/app/workers/tasks/sentiment_tasks.py
new file mode 100644
index 0000000..bd8c3ce
--- /dev/null
+++ b/backend/app/workers/tasks/sentiment_tasks.py
@@ -0,0 +1,137 @@
+"""
+Sentiment analysis tasks
+"""
+
+import json
+from typing import Optional
+
+import structlog
+from openai import OpenAI
+
+from app.workers.celery_app import celery_app
+from app.core.config import settings
+
+logger = structlog.get_logger()
+
+
+def get_openai_client() -> Optional[OpenAI]:
+    """Get OpenAI client if configured."""
+    if settings.OPENAI_API_KEY:
+        return OpenAI(api_key=settings.OPENAI_API_KEY)
+    return None
+
+
+@celery_app.task(name="app.workers.tasks.sentiment_tasks.process_unanalyzed_news")
+def process_unanalyzed_news():
+    """Process all news articles that haven't been sentiment analyzed."""
+    logger.info("Starting sentiment analysis batch")
+
+    # Placeholder - would query the database for unprocessed articles
+    processed_count = 0
+
+    logger.info("Sentiment analysis complete", processed=processed_count)
+    return {"processed": processed_count}
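
Because the project convention is Pydantic validation everywhere, the raw model output parsed in analyze_sentiment (below) can be tightened into a schema so malformed LLM responses drop into the existing keyword fallback. A hedged sketch; SentimentResult is an illustrative name, not part of this diff:

    from pydantic import BaseModel, Field

    class SentimentResult(BaseModel):
        score: float = Field(..., ge=-100, le=100)
        label: str
        confidence: float = Field(..., ge=0, le=1)
        stocks: list[str] = []
        summary: str = ""

    # Inside analyze_sentiment, instead of returning the raw parsed dict:
    #     result = SentimentResult.model_validate_json(
    #         response.choices[0].message.content
    #     ).model_dump()
    # A pydantic.ValidationError then lands in the except branch,
    # which already falls back to keyword matching.
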
+
+
+@celery_app.task(name="app.workers.tasks.sentiment_tasks.analyze_sentiment")
+def analyze_sentiment(article_id: str, title: str, content: str):
+    """Analyze sentiment of a single article using OpenAI."""
+    logger.info("Analyzing sentiment", article_id=article_id)
+
+    client = get_openai_client()
+    if not client:
+        logger.warning("OpenAI not configured, using fallback")
+        return fallback_sentiment_analysis(title, content)
+
+    try:
+        # Prepare text (limit length)
+        text = f"Title: {title}\n\nContent: {content[:2000]}"
+
+        response = client.chat.completions.create(
+            model=settings.OPENAI_MODEL,
+            messages=[
+                {
+                    "role": "system",
+                    "content": """You are a financial sentiment analyzer. Analyze the given news article and respond with a JSON object containing:
+                    - score: a number from -100 (extremely negative/panic) to +100 (extremely positive/euphoric)
+                    - label: one of "negative", "neutral", or "positive"
+                    - confidence: a number from 0 to 1 indicating confidence in the analysis
+                    - stocks: list of stock symbols mentioned (if any)
+                    - summary: one-sentence summary of the sentiment
+
+                    Focus on:
+                    - Financial impact
+                    - Market reaction implications
+                    - Panic/fear indicators
+                    - Earnings/guidance implications
+                    """
+                },
+                {
+                    "role": "user",
+                    "content": text
+                }
+            ],
+            response_format={"type": "json_object"},
+            temperature=0.3,
+        )
+
+        # Parse the JSON payload so callers always get a dict,
+        # matching the fallback's return type.
+        result = json.loads(response.choices[0].message.content)
+        logger.info("Sentiment analyzed", article_id=article_id, result=result)
+        return result
+
+    except Exception as e:
+        logger.error("Sentiment analysis failed", article_id=article_id, error=str(e))
+        return fallback_sentiment_analysis(title, content)
+
+
+def fallback_sentiment_analysis(title: str, content: str) -> dict:
+    """Simple keyword-based sentiment analysis as fallback."""
+    text = f"{title} {content}".lower()
+
+    negative_words = [
+        "crash", "plunge", "collapse", "scandal", "fraud", "lawsuit",
+        "investigation", "bankruptcy", "layoffs", "miss", "decline",
+        "warning", "downgrade", "sell", "bear", "crisis", "fear",
+        "panic", "loss", "debt", "default", "recession"
+    ]
+
+    positive_words = [
+        "surge", "rally", "growth", "profit", "beat", "upgrade",
+        "buy", "bull", "record", "breakout", "opportunity",
+        "dividend", "expansion", "innovation", "deal", "acquisition"
+    ]
+
+    neg_count = sum(1 for word in negative_words if word in text)
+    pos_count = sum(1 for word in positive_words if word in text)
+
+    total = neg_count + pos_count
+    if total == 0:
+        score = 0
+        label = "neutral"
+    else:
+        score = ((pos_count - neg_count) / total) * 100
+        if score < -20:
+            label = "negative"
+        elif score > 20:
+            label = "positive"
+        else:
+            label = "neutral"
+
+    return {
+        "score": round(score, 2),
+        "label": label,
+        "confidence": min(0.3 + (total * 0.1), 0.7),  # Low confidence for fallback
+        "stocks": [],
+        "summary": "Analyzed using keyword matching (fallback)",
+    }
+
+
+@celery_app.task(name="app.workers.tasks.sentiment_tasks.batch_analyze")
+def batch_analyze(article_ids: list):
+    """Analyze multiple articles in batch."""
+    logger.info("Starting batch analysis", count=len(article_ids))
+
+    results = []
+    for article_id in article_ids:
+        # Would fetch the article from the database and analyze it
+        results.append({"article_id": article_id, "status": "pending"})
+
+    return {"analyzed": len(results), "results": results}
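
The stock tasks that follow fetch quotes but nothing persists them to the stock_prices hypertable yet. A hedged sketch of the write path, assuming get_sync_db() (the synchronous session helper imported in news_tasks.py) yields a SQLAlchemy session:

    from datetime import datetime, timezone

    from sqlalchemy import text

    from app.core.database import get_sync_db

    def save_price(stock_id: str, close: float, volume: int) -> None:
        # The (time, stock_id) composite primary key makes a repeated tick
        # at the same timestamp an upsert rather than an error.
        with get_sync_db() as db:
            db.execute(
                text(
                    "INSERT INTO stock_prices (time, stock_id, close, volume) "
                    "VALUES (:time, :stock_id, :close, :volume) "
                    "ON CONFLICT (time, stock_id) DO UPDATE SET "
                    "close = EXCLUDED.close, volume = EXCLUDED.volume"
                ),
                {
                    "time": datetime.now(timezone.utc),
                    "stock_id": stock_id,
                    "close": close,
                    "volume": volume,
                },
            )
            db.commit()
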
diff --git a/backend/app/workers/tasks/stock_tasks.py b/backend/app/workers/tasks/stock_tasks.py
new file mode 100644
index 0000000..17381c5
--- /dev/null
+++ b/backend/app/workers/tasks/stock_tasks.py
@@ -0,0 +1,102 @@
+"""
+Stock data fetching tasks
+"""
+
+import yfinance as yf
+import structlog
+
+from app.workers.celery_app import celery_app
+from app.core.config import settings
+
+logger = structlog.get_logger()
+
+
+@celery_app.task(name="app.workers.tasks.stock_tasks.update_stock_prices")
+def update_stock_prices():
+    """Update prices for all tracked stocks."""
+    logger.info("Starting stock price update")
+
+    # Placeholder - would get active stocks from the database;
+    # for now this just demonstrates the concept.
+
+    updated_count = 0
+
+    logger.info("Stock prices updated", count=updated_count)
+    return {"updated": updated_count}
+
+
+@celery_app.task(name="app.workers.tasks.stock_tasks.fetch_stock_price")
+def fetch_stock_price(symbol: str):
+    """Fetch the current price for a single stock."""
+    logger.info("Fetching price", symbol=symbol)
+
+    try:
+        ticker = yf.Ticker(symbol)
+        info = ticker.info
+
+        return {
+            "symbol": symbol,
+            "price": info.get("currentPrice") or info.get("regularMarketPrice"),
+            "previous_close": info.get("previousClose"),
+            "volume": info.get("volume"),
+            "market_cap": info.get("marketCap"),
+        }
+    except Exception as e:
+        logger.error("Failed to fetch price", symbol=symbol, error=str(e))
+        return None
+
+
+@celery_app.task(name="app.workers.tasks.stock_tasks.fetch_historical_data")
+def fetch_historical_data(symbol: str, period: str = "10y"):
+    """Fetch historical price data for a stock."""
+    logger.info("Fetching historical data", symbol=symbol, period=period)
+
+    try:
+        ticker = yf.Ticker(symbol)
+        hist = ticker.history(period=period)
+
+        # Convert to a list of dicts for storage
+        records = []
+        for idx, row in hist.iterrows():
+            records.append({
+                "time": idx.isoformat(),
+                "open": row["Open"],
+                "high": row["High"],
+                "low": row["Low"],
+                "close": row["Close"],
+                "volume": row["Volume"],
+            })
+
+        logger.info(
+            "Historical data fetched",
+            symbol=symbol,
+            records=len(records)
+        )
+        return {"symbol": symbol, "records": len(records)}
+
+    except Exception as e:
+        logger.error("Failed to fetch historical data", symbol=symbol, error=str(e))
+        return None
+
+
+@celery_app.task(name="app.workers.tasks.stock_tasks.update_stock_info")
+def update_stock_info(symbol: str):
+    """Update stock metadata (sector, industry, market cap, etc.)."""
+    logger.info("Updating stock info", symbol=symbol)
+
+    try:
+        ticker = yf.Ticker(symbol)
+        info = ticker.info
+
+        return {
+            "symbol": symbol,
+            "name": info.get("longName") or info.get("shortName"),
+            "sector": info.get("sector"),
+            "industry": info.get("industry"),
+            "market_cap": info.get("marketCap"),
+            "exchange": info.get("exchange"),
+            "country": info.get("country"),
+        }
+    except Exception as e:
+        logger.error("Failed to update stock info", symbol=symbol, error=str(e))
+        return None
diff --git a/backend/requirements.txt b/backend/requirements.txt
new file mode 100644
index 0000000..0627a69
--- /dev/null
+++ b/backend/requirements.txt
@@ -0,0 +1,70 @@
+# FastAPI & Web
+fastapi==0.109.2
+uvicorn[standard]==0.27.1
+python-multipart==0.0.9
+python-jose[cryptography]==3.3.0
+passlib[bcrypt]==1.7.4
+httpx==0.26.0
+aiohttp==3.9.3
+websockets==12.0
+
+# Database
+sqlalchemy==2.0.25
+asyncpg==0.29.0
+psycopg2-binary==2.9.9
+alembic==1.13.1
+
+# Redis & Caching
+redis==5.0.1
+# NOTE: redis>=4.2 ships redis.asyncio; the abandoned aioredis package
+# fails to import on Python 3.11+, so it is intentionally not listed.
+
+# Celery & Task Queue
+celery==5.3.6
+flower==2.0.1
+
+# Data Processing
+pandas==2.2.0
+numpy==1.26.3
+scipy==1.12.0
+
+# Stock Data
+yfinance==0.2.36
+alpha-vantage==2.3.1
+finnhub-python==2.4.19
+
+# News & Web Scraping
+feedparser==6.0.11
+newspaper3k==0.2.8
+beautifulsoup4==4.12.3
+lxml==5.1.0
+
+# NLP & Sentiment
+openai==1.12.0
+tiktoken==0.5.2
+nltk==3.8.1
+textblob==0.17.1
+transformers==4.37.2
+torch==2.2.0
+
+# Validation & Serialization
+pydantic==2.6.1
+pydantic-settings==2.1.0
+
+# Utilities
+python-dotenv==1.0.1
+structlog==24.1.0
+tenacity==8.2.3
+python-dateutil==2.8.2
+pytz==2024.1
+
+# Testing
+pytest==8.0.0
+pytest-asyncio==0.23.4
+pytest-cov==4.1.0
+# httpx is already pinned above
+
+# Development
+black==24.1.1
+isort==5.13.2
+flake8==7.0.0
+mypy==1.8.0
diff --git a/docker-compose.yml b/docker-compose.yml
new file mode 100644
index 0000000..44ed1df
--- /dev/null
+++ b/docker-compose.yml
@@ -0,0 +1,222 @@
+version: '3.9'
+
+services:
+  # =============================================================================
+  # DATABASE - TimescaleDB (PostgreSQL with time-series superpowers)
+  # =============================================================================
+  db:
+    image: timescale/timescaledb:latest-pg16
+    container_name: marketscanner-db
+    restart: unless-stopped
+    environment:
+      POSTGRES_DB: ${POSTGRES_DB:-marketscanner}
+      POSTGRES_USER: ${POSTGRES_USER:-marketscanner}
+      POSTGRES_PASSWORD: ${POSTGRES_PASSWORD:?POSTGRES_PASSWORD is required}
+    volumes:
+      - postgres_data:/var/lib/postgresql/data
+      - ./docker/db/init.sql:/docker-entrypoint-initdb.d/init.sql:ro
+    ports:
+      - "5432:5432"
+    healthcheck:
+      test: ["CMD-SHELL", "pg_isready -U ${POSTGRES_USER:-marketscanner}"]
+      interval: 10s
+      timeout: 5s
+      retries: 5
+    networks:
+      - marketscanner-network
+
+  # =============================================================================
+  # CACHE - Redis
+  # =============================================================================
+  redis:
+    image: redis:7-alpine
+    container_name: marketscanner-redis
+    restart: unless-stopped
+    command: redis-server --requirepass ${REDIS_PASSWORD:-changeme}
+    volumes:
+      - redis_data:/data
+    ports:
+      - "6379:6379"
+    healthcheck:
+      # Authenticate; an unauthenticated ping only returns NOAUTH and proves nothing
+      test: ["CMD-SHELL", "redis-cli -a \"${REDIS_PASSWORD:-changeme}\" ping | grep -q PONG"]
+      interval: 10s
+      timeout: 5s
+      retries: 5
+    networks:
+      - marketscanner-network
+
+  # =============================================================================
+  # MESSAGE QUEUE - RabbitMQ
+  # =============================================================================
+  rabbitmq:
+    image: rabbitmq:3-management-alpine
+    container_name: marketscanner-rabbitmq
+    restart: unless-stopped
+    environment:
+      RABBITMQ_DEFAULT_USER: ${RABBITMQ_USER:-marketscanner}
+      RABBITMQ_DEFAULT_PASS: ${RABBITMQ_PASSWORD:-changeme}
+    volumes:
+      - rabbitmq_data:/var/lib/rabbitmq
+    ports:
+      - "5672:5672"
+      - "15672:15672"  # Management UI
+    healthcheck:
+      test: ["CMD", "rabbitmq-diagnostics", "check_running"]
+      interval: 30s
+      timeout: 10s
+      retries: 5
+    networks:
+      - marketscanner-network
+
+  # =============================================================================
+  # BACKEND - FastAPI
+  # =============================================================================
+  backend:
+    build:
+      context: ./backend
+      dockerfile: Dockerfile
+    container_name: marketscanner-backend
+    restart: unless-stopped
+    env_file:
+      - .env
+    environment:
+      - POSTGRES_HOST=db
+      - REDIS_HOST=redis
+      - RABBITMQ_HOST=rabbitmq
+    volumes:
+      - ./backend:/app
+      - backend_logs:/app/logs
+    ports:
+      - "${BACKEND_PORT:-8000}:8000"
+    depends_on:
+      db:
+        condition: service_healthy
+      redis:
+        condition: service_healthy
+      rabbitmq:
+        condition: service_healthy
+    healthcheck:
+      test: ["CMD", "curl", "-f", "http://localhost:8000/health"]
+      interval: 30s
+      timeout: 10s
+      retries: 3
+    networks:
+      - marketscanner-network
+
+  # =============================================================================
+  # CELERY WORKER - Background Tasks
+  # =============================================================================
+  celery-worker:
+    build:
+      context: ./backend
+      dockerfile: Dockerfile
+    container_name: marketscanner-celery-worker
+    restart: unless-stopped
+    command: celery -A app.workers.celery_app worker --loglevel=info --concurrency=4
+    env_file:
+      - .env
+    environment:
+      - POSTGRES_HOST=db
+      - REDIS_HOST=redis
+      - RABBITMQ_HOST=rabbitmq
+    volumes:
+      - ./backend:/app
+      - backend_logs:/app/logs
+    depends_on:
+      db:
+        condition: service_healthy
+      redis:
+        condition: service_healthy
+      rabbitmq:
+        condition: service_healthy
+      backend:
+        condition: service_healthy
+    networks:
+      - marketscanner-network
+
+  # =============================================================================
+  # CELERY BEAT - Scheduled Tasks
+  # =============================================================================
+  celery-beat:
+    build:
+      context: ./backend
+      dockerfile: Dockerfile
+    container_name: marketscanner-celery-beat
+    restart: unless-stopped
+    command: celery -A app.workers.celery_app beat --loglevel=info
+    env_file:
+      - .env
+    environment:
+      - POSTGRES_HOST=db
+      - REDIS_HOST=redis
+      - RABBITMQ_HOST=rabbitmq
+    volumes:
+      - ./backend:/app
+      - backend_logs:/app/logs
+    depends_on:
+      db:
+        condition: service_healthy
+      redis:
+        condition: service_healthy
+      rabbitmq:
+        condition: service_healthy
+      backend:
+        condition: service_healthy
+    networks:
+      - marketscanner-network
+
+  # =============================================================================
+  # FRONTEND - React Dashboard
+  # =============================================================================
+  frontend:
+    build:
+      context: ./frontend
+      dockerfile: Dockerfile
+      args:
+        - VITE_API_URL=${VITE_API_URL:-http://localhost:8000}
+    container_name: marketscanner-frontend
+    restart: unless-stopped
+    ports:
+      - "${FRONTEND_PORT:-3000}:80"
+    depends_on:
+      - backend
+    networks:
+      - marketscanner-network
+
+  # =============================================================================
+  # OPTIONAL: Ollama for local LLM (if USE_LOCAL_LLM=true)
+  # =============================================================================
+  # ollama:
+  #   image: ollama/ollama:latest
+  #   container_name: marketscanner-ollama
+  #   restart: unless-stopped
+  #   volumes:
+  #     - ollama_data:/root/.ollama
+  #   ports:
+  #     - "11434:11434"
+  #   deploy:
+  #     resources:
+  #       reservations:
+  #         devices:
+  #           - driver: nvidia
+  #             count: all
+  #             capabilities: [gpu]
+  #   networks:
+  #     - marketscanner-network
+
+# =============================================================================
+# NETWORKS
+# =============================================================================
+networks:
+  marketscanner-network:
+    driver: bridge
+
+# =============================================================================
+# VOLUMES
+# =============================================================================
+volumes:
+  postgres_data:
+  redis_data:
+  rabbitmq_data:
+  backend_logs:
+  # ollama_data:
diff --git a/docker/db/init.sql b/docker/db/init.sql
new file mode 100644
index 0000000..4eef985
--- /dev/null
+++ b/docker/db/init.sql
@@ -0,0 +1,343 @@
+-- MarketScanner Database Initialization
+-- TimescaleDB + PostgreSQL
+
+-- Enable TimescaleDB extension
+CREATE EXTENSION IF NOT EXISTS timescaledb;
+
+-- Enable UUID extension
+CREATE EXTENSION IF NOT EXISTS "uuid-ossp";
+
+-- Enable pg_trgm for text search
+CREATE EXTENSION IF NOT EXISTS pg_trgm;
+
+-- =============================================================================
+-- STOCKS TABLE
+-- =============================================================================
+CREATE TABLE IF NOT EXISTS stocks (
+    id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
+    symbol VARCHAR(20) NOT NULL UNIQUE,
+    name VARCHAR(255) NOT NULL,
+    sector VARCHAR(100),
+    industry VARCHAR(100),
+    market_cap BIGINT,
+    exchange VARCHAR(50),
+    country VARCHAR(100) DEFAULT 'USA',
+    is_active BOOLEAN DEFAULT true,
+    created_at TIMESTAMPTZ DEFAULT NOW(),
+    updated_at TIMESTAMPTZ DEFAULT NOW()
+);
+
+CREATE INDEX idx_stocks_symbol ON stocks(symbol);
+CREATE INDEX idx_stocks_sector ON stocks(sector);
+CREATE INDEX idx_stocks_industry ON stocks(industry);
+
+-- =============================================================================
+-- STOCK PRICES (Time-series)
+-- =============================================================================
+CREATE TABLE IF NOT EXISTS stock_prices (
+    time TIMESTAMPTZ NOT NULL,
+    stock_id UUID NOT NULL REFERENCES stocks(id) ON DELETE CASCADE,
+    open DECIMAL(15, 4),
+    high DECIMAL(15, 4),
+    low DECIMAL(15, 4),
+    close DECIMAL(15, 4) NOT NULL,
+    volume BIGINT,
+    adjusted_close DECIMAL(15, 4),
+    PRIMARY KEY (time, stock_id)
+);
+
+-- Convert to hypertable for time-series optimization
+SELECT create_hypertable('stock_prices', 'time', if_not_exists => TRUE);
+
+-- Create indexes
+CREATE INDEX idx_stock_prices_stock_id ON stock_prices(stock_id, time DESC);
+
+-- =============================================================================
+-- NEWS ARTICLES
+-- =============================================================================
+CREATE TABLE IF NOT EXISTS news_articles (
+    id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
+    title TEXT NOT NULL,
+    content TEXT,
+    summary TEXT,
+    url TEXT UNIQUE NOT NULL,
+    source VARCHAR(100) NOT NULL,
+    author VARCHAR(255),
+    published_at TIMESTAMPTZ NOT NULL,
+    fetched_at TIMESTAMPTZ DEFAULT NOW(),
+    image_url TEXT,
+
+    -- Sentiment analysis results
+    sentiment_score DECIMAL(5, 2),       -- -100 to +100
+    sentiment_label VARCHAR(20),         -- negative, neutral, positive
+    sentiment_confidence DECIMAL(5, 4),
+
+    -- Processing status
+    is_processed BOOLEAN DEFAULT false,
+    processing_error TEXT
+);
+
+CREATE INDEX idx_news_published_at ON news_articles(published_at DESC);
+CREATE INDEX idx_news_source ON news_articles(source);
+CREATE INDEX idx_news_sentiment ON news_articles(sentiment_score);
+CREATE INDEX idx_news_title_trgm ON news_articles USING gin(title gin_trgm_ops);
+
+-- =============================================================================
+-- NEWS-STOCK ASSOCIATIONS
+-- =============================================================================
+CREATE TABLE IF NOT EXISTS news_stock_mentions (
+    id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
+    news_id UUID NOT NULL REFERENCES news_articles(id) ON DELETE CASCADE,
+    stock_id UUID NOT NULL REFERENCES stocks(id) ON DELETE CASCADE,
+    mention_type VARCHAR(50),            -- direct, indirect, sector
+    relevance_score DECIMAL(5, 4),
+    created_at TIMESTAMPTZ DEFAULT NOW(),
+    UNIQUE(news_id, stock_id)
+);
+
+CREATE INDEX idx_mentions_news ON news_stock_mentions(news_id);
+CREATE INDEX idx_mentions_stock ON news_stock_mentions(stock_id);
+
+-- =============================================================================
+-- PANIC EVENTS
+-- =============================================================================
+CREATE TABLE IF NOT EXISTS panic_events (
+    id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
+    stock_id UUID NOT NULL REFERENCES stocks(id) ON DELETE CASCADE,
+
+    -- Event timing
+    start_time TIMESTAMPTZ NOT NULL,
+    peak_time TIMESTAMPTZ,
+    end_time TIMESTAMPTZ,
+
+    -- Price impact
+    price_at_start DECIMAL(15, 4) NOT NULL,
+    price_at_peak_panic DECIMAL(15, 4),
+    price_at_end DECIMAL(15, 4),
+    max_drawdown_percent DECIMAL(8, 4),
+
+    -- Sentiment
+    avg_sentiment_score DECIMAL(5, 2),
+    min_sentiment_score DECIMAL(5, 2),
+    news_volume INTEGER,
+
+    -- Recovery metrics
+    recovery_time_days INTEGER,
+    recovery_percent DECIMAL(8, 4),
+
+    -- Classification
+    event_type VARCHAR(100),             -- earnings_miss, scandal, lawsuit, macro, etc.
+    event_category VARCHAR(50),          -- company_specific, sector_wide, market_wide
+
+    -- Analysis
+    is_complete BOOLEAN DEFAULT false,
+    notes TEXT,
+    created_at TIMESTAMPTZ DEFAULT NOW(),
+    updated_at TIMESTAMPTZ DEFAULT NOW()
+);
+
+CREATE INDEX idx_panic_stock ON panic_events(stock_id);
+CREATE INDEX idx_panic_time ON panic_events(start_time DESC);
+CREATE INDEX idx_panic_type ON panic_events(event_type);
+CREATE INDEX idx_panic_drawdown ON panic_events(max_drawdown_percent);
+
+-- =============================================================================
+-- HISTORICAL PATTERNS
+-- =============================================================================
+CREATE TABLE IF NOT EXISTS historical_patterns (
+    id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
+    stock_id UUID NOT NULL REFERENCES stocks(id) ON DELETE CASCADE,
+
+    -- Pattern definition
+    pattern_type VARCHAR(100) NOT NULL,  -- earnings_miss_recovery, scandal_recovery, etc.
+
+    -- Statistics (aggregated from multiple panic events)
+    avg_drawdown_percent DECIMAL(8, 4),
+    avg_recovery_days INTEGER,
+    avg_recovery_percent DECIMAL(8, 4),
+
+    median_drawdown_percent DECIMAL(8, 4),
+    median_recovery_days INTEGER,
+    median_recovery_percent DECIMAL(8, 4),
+
+    -- Sample size
+    event_count INTEGER NOT NULL,
+
+    -- Confidence
+    pattern_confidence DECIMAL(5, 4),
+
+    -- Time range
+    first_event_date DATE,
+    last_event_date DATE,
+
+    created_at TIMESTAMPTZ DEFAULT NOW(),
+    updated_at TIMESTAMPTZ DEFAULT NOW(),
+
+    UNIQUE(stock_id, pattern_type)
+);
+
+CREATE INDEX idx_patterns_stock ON historical_patterns(stock_id);
+CREATE INDEX idx_patterns_type ON historical_patterns(pattern_type);
+
+-- =============================================================================
+-- BUY SIGNALS
+-- =============================================================================
+CREATE TABLE IF NOT EXISTS buy_signals (
+    id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
+    stock_id UUID NOT NULL REFERENCES stocks(id) ON DELETE CASCADE,
+    panic_event_id UUID REFERENCES panic_events(id) ON DELETE SET NULL,
+
+    -- Signal details
+    signal_time TIMESTAMPTZ NOT NULL DEFAULT NOW(),
+    signal_price DECIMAL(15, 4) NOT NULL,
+
+    -- Confidence scoring
+    confidence_score DECIMAL(5, 4) NOT NULL,  -- 0 to 1
+
+    -- Based on pattern
+    pattern_id UUID REFERENCES historical_patterns(id) ON DELETE SET NULL,
+    expected_recovery_percent DECIMAL(8, 4),
+    expected_recovery_days INTEGER,
+
+    -- Current metrics
+    current_drawdown_percent DECIMAL(8, 4),
+    current_sentiment_score DECIMAL(5, 2),
+
+    -- Signal status
+    status VARCHAR(20) DEFAULT 'active',      -- active, triggered, expired, cancelled
+    triggered_at TIMESTAMPTZ,
+
+    -- Outcome tracking
+    outcome_price DECIMAL(15, 4),
+    outcome_percent DECIMAL(8, 4),
+    outcome_days INTEGER,
+
+    created_at TIMESTAMPTZ DEFAULT NOW(),
+    updated_at TIMESTAMPTZ DEFAULT NOW()
+);
+
+CREATE INDEX idx_signals_stock ON buy_signals(stock_id);
+CREATE INDEX idx_signals_time ON buy_signals(signal_time DESC);
+CREATE INDEX idx_signals_confidence ON buy_signals(confidence_score DESC);
+CREATE INDEX idx_signals_status ON buy_signals(status);
+
+-- =============================================================================
+-- WATCHLIST
+-- =============================================================================
+CREATE TABLE IF NOT EXISTS watchlist (
+    id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
+    stock_id UUID NOT NULL REFERENCES stocks(id) ON DELETE CASCADE,
+
+    -- Alert thresholds
+    panic_alert_threshold DECIMAL(5, 2) DEFAULT -50,
+    price_alert_low DECIMAL(15, 4),
+    price_alert_high DECIMAL(15, 4),
+
+    -- Preferences
+    priority INTEGER DEFAULT 1,          -- 1 = high, 2 = medium, 3 = low
+    notes TEXT,
+
+    is_active BOOLEAN DEFAULT true,
+    created_at TIMESTAMPTZ DEFAULT NOW(),
+    updated_at TIMESTAMPTZ DEFAULT NOW(),
+
+    UNIQUE(stock_id)
+);
+
+CREATE INDEX idx_watchlist_priority ON watchlist(priority, is_active);
+
+-- =============================================================================
+-- ALERT HISTORY
+-- =============================================================================
+CREATE TABLE IF NOT EXISTS alert_history (
+    id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
+    stock_id UUID NOT NULL REFERENCES stocks(id) ON DELETE CASCADE,
+    signal_id UUID REFERENCES buy_signals(id) ON DELETE SET NULL,
+
+    alert_type VARCHAR(50) NOT NULL,     -- panic_detected, buy_signal, price_target, etc.
+    alert_message TEXT NOT NULL,
+
+    -- Delivery status
+    sent_telegram BOOLEAN DEFAULT false,
+    sent_discord BOOLEAN DEFAULT false,
+    sent_email BOOLEAN DEFAULT false,
+
+    created_at TIMESTAMPTZ DEFAULT NOW()
+);
+
+CREATE INDEX idx_alerts_time ON alert_history(created_at DESC);
+CREATE INDEX idx_alerts_stock ON alert_history(stock_id);
+
+-- =============================================================================
+-- FUNCTIONS
+-- =============================================================================
+
+-- Function to update the updated_at timestamp
+CREATE OR REPLACE FUNCTION update_updated_at_column()
+RETURNS TRIGGER AS $$
+BEGIN
+    NEW.updated_at = NOW();
+    RETURN NEW;
+END;
+$$ language 'plpgsql';
+
+-- Apply triggers
+CREATE TRIGGER update_stocks_updated_at BEFORE UPDATE ON stocks FOR EACH ROW EXECUTE FUNCTION update_updated_at_column();
+CREATE TRIGGER update_panic_events_updated_at BEFORE UPDATE ON panic_events FOR EACH ROW EXECUTE FUNCTION update_updated_at_column();
+CREATE TRIGGER update_patterns_updated_at BEFORE UPDATE ON historical_patterns FOR EACH ROW EXECUTE FUNCTION update_updated_at_column();
+CREATE TRIGGER update_signals_updated_at BEFORE UPDATE ON buy_signals FOR EACH ROW EXECUTE FUNCTION update_updated_at_column();
+CREATE TRIGGER update_watchlist_updated_at BEFORE UPDATE ON watchlist FOR EACH ROW EXECUTE FUNCTION update_updated_at_column();
+
+-- =============================================================================
+-- SEED DATA - Popular stocks to get started
+-- =============================================================================
+INSERT INTO stocks (symbol, name, sector, industry, exchange) VALUES
+    ('AAPL', 'Apple Inc.', 'Technology', 'Consumer Electronics', 'NASDAQ'),
+    ('MSFT', 'Microsoft Corporation', 'Technology', 'Software', 'NASDAQ'),
+    ('GOOGL', 'Alphabet Inc.', 'Technology', 'Internet Services', 'NASDAQ'),
+    ('AMZN', 'Amazon.com Inc.', 'Consumer Discretionary', 'E-Commerce', 'NASDAQ'),
+    ('NVDA', 'NVIDIA Corporation', 'Technology', 'Semiconductors', 'NASDAQ'),
+    ('META', 'Meta Platforms Inc.', 'Technology', 'Social Media', 'NASDAQ'),
+    ('TSLA', 'Tesla Inc.', 'Consumer Discretionary', 'Electric Vehicles', 'NASDAQ'),
+    ('JPM', 'JPMorgan Chase & Co.', 'Financials', 'Banking', 'NYSE'),
+    ('V', 'Visa Inc.', 'Financials', 'Payments', 'NYSE'),
+    ('JNJ', 'Johnson & Johnson', 'Healthcare', 'Pharmaceuticals', 'NYSE'),
+    ('WMT', 'Walmart Inc.', 'Consumer Staples', 'Retail', 'NYSE'),
+    ('XOM', 'Exxon Mobil Corporation', 'Energy', 'Oil & Gas', 'NYSE'),
+    ('BA', 'Boeing Company', 'Industrials', 'Aerospace', 'NYSE'),
+    ('DIS', 'Walt Disney Company', 'Communication Services', 'Entertainment', 'NYSE'),
+    ('NFLX', 'Netflix Inc.', 'Communication Services', 'Streaming', 'NASDAQ'),
+    ('AMD', 'Advanced Micro Devices', 'Technology', 'Semiconductors', 'NASDAQ'),
+    ('INTC', 'Intel Corporation', 'Technology', 'Semiconductors', 'NASDAQ'),
+    ('CRM', 'Salesforce Inc.', 'Technology', 'Software', 'NYSE'),
+    ('ORCL', 'Oracle Corporation', 'Technology', 'Software', 'NYSE'),
+    ('PYPL', 'PayPal Holdings Inc.', 'Financials', 'Payments', 'NASDAQ'),
+    -- Defense & Weapons
+    ('LMT', 'Lockheed Martin Corporation', 'Industrials', 'Defense', 'NYSE'),
+    ('RTX', 'RTX Corporation', 'Industrials', 'Defense', 'NYSE'),
+    ('NOC', 'Northrop Grumman Corporation', 'Industrials', 'Defense', 'NYSE'),
+    ('GD', 'General Dynamics Corporation', 'Industrials', 'Defense', 'NYSE'),
+    -- Mining
+    ('NEM', 'Newmont Corporation', 'Materials', 'Gold Mining', 'NYSE'),
+    ('FCX', 'Freeport-McMoRan Inc.', 'Materials', 'Copper Mining', 'NYSE'),
+    ('RIO', 'Rio Tinto Group', 'Materials', 'Diversified Mining', 'NYSE'),
+    ('BHP', 'BHP Group Limited', 'Materials', 'Diversified Mining', 'NYSE'),
+    -- Food & Agriculture
+    ('ADM', 'Archer-Daniels-Midland Company', 'Consumer Staples', 'Agriculture', 'NYSE'),
+    ('BG', 'Bunge Limited', 'Consumer Staples', 'Agriculture', 'NYSE'),
+    ('MDLZ', 'Mondelez International', 'Consumer Staples', 'Food', 'NASDAQ'),
+    ('KO', 'Coca-Cola Company', 'Consumer Staples', 'Beverages', 'NYSE'),
+    ('PEP', 'PepsiCo Inc.', 'Consumer Staples', 'Beverages', 'NASDAQ'),
+    -- Oil & Gas
+    ('CVX', 'Chevron Corporation', 'Energy', 'Oil & Gas', 'NYSE'),
+    ('COP', 'ConocoPhillips', 'Energy', 'Oil & Gas', 'NYSE'),
+    ('SLB', 'Schlumberger Limited', 'Energy', 'Oil Services', 'NYSE'),
+    -- Pharma & Biotech
+    ('PFE', 'Pfizer Inc.', 'Healthcare', 'Pharmaceuticals', 'NYSE'),
+    ('MRK', 'Merck & Co. Inc.', 'Healthcare', 'Pharmaceuticals', 'NYSE'),
+    ('ABBV', 'AbbVie Inc.', 'Healthcare', 'Pharmaceuticals', 'NYSE'),
+    ('BMY', 'Bristol-Myers Squibb', 'Healthcare', 'Pharmaceuticals', 'NYSE')
+ON CONFLICT (symbol) DO NOTHING;
diff --git a/frontend/Dockerfile b/frontend/Dockerfile
new file mode 100644
index 0000000..a46d1f4
--- /dev/null
+++ b/frontend/Dockerfile
@@ -0,0 +1,34 @@
+# Build stage
+FROM node:20-alpine AS builder
+
+WORKDIR /app
+
+# Copy package files
+COPY package*.json ./
+
+# Install dependencies
+RUN npm ci
+
+# Copy source code
+COPY . .
+
+# Build arguments
+ARG VITE_API_URL=http://localhost:8000
+ENV VITE_API_URL=$VITE_API_URL
+
+# Build the app
+RUN npm run build
+
+# Production stage
+FROM nginx:alpine
+
+# Copy built assets
+COPY --from=builder /app/dist /usr/share/nginx/html
+
+# Copy nginx config
+COPY nginx.conf /etc/nginx/conf.d/default.conf
+
+# Expose port
+EXPOSE 80
+
+CMD ["nginx", "-g", "daemon off;"]
diff --git a/frontend/index.html b/frontend/index.html
new file mode 100644
index 0000000..f17d5f1
--- /dev/null
+++ b/frontend/index.html
@@ -0,0 +1,14 @@
+<!doctype html>
+<html lang="en">
+  <head>
+    <meta charset="UTF-8" />
+    <link rel="icon" type="image/svg+xml" href="/vite.svg" />
+    <meta name="viewport" content="width=device-width, initial-scale=1.0" />
+    <title>MarketScanner | Buy the Fear</title>
+  </head>
+  <body>
+    <div id="root"></div>
+    <script type="module" src="/src/main.tsx"></script>
+  </body>
+</html>
diff --git a/frontend/nginx.conf b/frontend/nginx.conf
new file mode 100644
index 0000000..8696975
--- /dev/null
+++ b/frontend/nginx.conf
@@ -0,0 +1,35 @@
+server {
+    listen 80;
+    server_name localhost;
+    root /usr/share/nginx/html;
+    index index.html;
+
+    # Gzip compression
+    gzip on;
+    gzip_vary on;
+    gzip_min_length 1024;
+    gzip_types text/plain text/css application/json application/javascript text/xml application/xml;
+
+    # Handle SPA routing
+    location / {
+        try_files $uri $uri/ /index.html;
+    }
+
+    # Cache static assets
+    location ~* \.(js|css|png|jpg|jpeg|gif|ico|svg|woff|woff2)$ {
+        expires 1y;
+        add_header Cache-Control "public, immutable";
+    }
+
+    # API proxy (optional, for same-origin requests)
+    location /api/ {
+        proxy_pass http://backend:8000/api/;
+        proxy_http_version 1.1;
+        proxy_set_header Upgrade $http_upgrade;
+        proxy_set_header Connection 'upgrade';
+        proxy_set_header Host $host;
+        proxy_set_header X-Real-IP $remote_addr;
+        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
+        proxy_cache_bypass $http_upgrade;
+    }
+}
diff --git a/frontend/package.json b/frontend/package.json
new file mode 100644
index 0000000..89ff2e3
--- /dev/null
+++ b/frontend/package.json
@@ -0,0 +1,43 @@
+{
+  "name": "marketscanner-frontend",
+  "private": true,
+  "version": "0.1.0",
+  "type": "module",
+  "scripts": {
+    "dev": "vite",
+    "build": "tsc && vite build",
+    "lint": "eslint . --ext ts,tsx --report-unused-disable-directives --max-warnings 0",
+    "preview": "vite preview"
+  },
+  "dependencies": {
+    "@headlessui/react": "^1.7.18",
+    "@heroicons/react": "^2.1.1",
+    "@tanstack/react-query": "^5.17.19",
+    "axios": "^1.6.7",
+    "clsx": "^2.1.0",
+    "date-fns": "^3.3.1",
+    "framer-motion": "^11.0.3",
+    "lightweight-charts": "^4.1.1",
+    "react": "^18.2.0",
+    "react-dom": "^18.2.0",
+    "react-hot-toast": "^2.4.1",
+    "react-router-dom": "^6.21.3",
+    "recharts": "^2.12.0",
+    "zustand": "^4.5.0"
+  },
+  "devDependencies": {
+    "@types/react": "^18.2.48",
+    "@types/react-dom": "^18.2.18",
+    "@typescript-eslint/eslint-plugin": "^6.19.1",
+    "@typescript-eslint/parser": "^6.19.1",
+    "@vitejs/plugin-react": "^4.2.1",
+    "autoprefixer": "^10.4.17",
+    "eslint": "^8.56.0",
+    "eslint-plugin-react-hooks": "^4.6.0",
+    "eslint-plugin-react-refresh": "^0.4.5",
+    "postcss": "^8.4.33",
+    "tailwindcss": "^3.4.1",
+    "typescript": "^5.3.3",
+    "vite": "^5.0.12"
+  }
+}
diff --git a/frontend/postcss.config.js b/frontend/postcss.config.js
new file mode 100644
index 0000000..2e7af2b
--- /dev/null
+++ b/frontend/postcss.config.js
@@ -0,0 +1,6 @@
+export default {
+  plugins: {
+    tailwindcss: {},
+    autoprefixer: {},
+  },
+}
diff --git a/frontend/src/App.tsx b/frontend/src/App.tsx
new file mode 100644
index 0000000..f05c3a3
--- /dev/null
+++ b/frontend/src/App.tsx
@@ -0,0 +1,25 @@
+import { Routes, Route } from 'react-router-dom'
+import Layout from './components/Layout'
+import Dashboard from './pages/Dashboard'
+import Signals from './pages/Signals'
+import Stocks from './pages/Stocks'
+import News from './pages/News'
+import Watchlist from './pages/Watchlist'
+import Analytics from './pages/Analytics'
+
+function App() {
+  return (
+    <Layout>
+      <Routes>
+        <Route path="/" element={<Dashboard />} />
+        <Route path="/signals" element={<Signals />} />
+        <Route path="/stocks" element={<Stocks />} />
+        <Route path="/news" element={<News />} />
+        <Route path="/watchlist" element={<Watchlist />} />
+        <Route path="/analytics" element={<Analytics />} />
+      </Routes>
+    </Layout>
+  )
+}
+
+export default App
diff --git a/frontend/src/components/Layout.tsx b/frontend/src/components/Layout.tsx
new file mode 100644
index 0000000..1c532e5
--- /dev/null
+++ b/frontend/src/components/Layout.tsx
@@ -0,0 +1,172 @@
+import { ReactNode, useState } from 'react'
+import { Link, useLocation } from 'react-router-dom'
+import { motion } from 'framer-motion'
+import {
+  HomeIcon,
+  BellAlertIcon,
+  ChartBarIcon,
+  NewspaperIcon,
+  StarIcon,
+  ChartPieIcon,
+  Bars3Icon,
+  XMarkIcon,
+} from '@heroicons/react/24/outline'
+import clsx from 'clsx'
+
+interface LayoutProps {
+  children: ReactNode
+}
+
+const navigation = [
+  { name: 'Dashboard', href: '/', icon: HomeIcon },
+  { name: 'Buy Signals', href: '/signals', icon: BellAlertIcon },
+  { name: 'Stocks', href: '/stocks', icon: ChartBarIcon },
+  { name: 'News', href: '/news', icon: NewspaperIcon },
+  { name: 'Watchlist', href: '/watchlist', icon: StarIcon },
+  { name: 'Analytics', href: '/analytics', icon: ChartPieIcon },
+]
+
+export default function Layout({ children }: LayoutProps) {
+  const location = useLocation()
+  const [sidebarOpen, setSidebarOpen] = useState(false)
+
+  return (
+ {/* Mobile sidebar backdrop */} + {sidebarOpen && ( +
setSidebarOpen(false)} + /> + )} + + {/* Sidebar */} + + + {/* Main content */} +
+ {/* Top bar */} +
+
+ + +
+ {/* Search */} +
+ +
+
+ +
+ {/* Market status */} +
+
+ Market Open +
+ + {/* Time */} +
+ {new Date().toLocaleTimeString()} +
+
+
+
+ + {/* Page content */} +
+ + {children} + +
+
+
+ ) +} diff --git a/frontend/src/index.css b/frontend/src/index.css new file mode 100644 index 0000000..bdeadfe --- /dev/null +++ b/frontend/src/index.css @@ -0,0 +1,88 @@ +@tailwind base; +@tailwind components; +@tailwind utilities; + +/* Custom scrollbar for dark mode */ +::-webkit-scrollbar { + width: 8px; + height: 8px; +} + +::-webkit-scrollbar-track { + background: #1a1a2e; +} + +::-webkit-scrollbar-thumb { + background: #4b5563; + border-radius: 4px; +} + +::-webkit-scrollbar-thumb:hover { + background: #6b7280; +} + +/* Custom styles */ +@layer components { + .card { + @apply bg-gray-900/50 backdrop-blur-sm border border-gray-800 rounded-xl; + } + + .card-hover { + @apply card transition-all duration-300 hover:border-gray-700 hover:shadow-lg hover:shadow-green-500/5; + } + + .btn-primary { + @apply px-4 py-2 bg-green-600 hover:bg-green-500 text-white font-medium rounded-lg transition-colors; + } + + .btn-secondary { + @apply px-4 py-2 bg-gray-700 hover:bg-gray-600 text-white font-medium rounded-lg transition-colors; + } + + .btn-danger { + @apply px-4 py-2 bg-red-600 hover:bg-red-500 text-white font-medium rounded-lg transition-colors; + } + + .input { + @apply w-full px-4 py-2 bg-gray-800 border border-gray-700 rounded-lg focus:outline-none focus:border-green-500 text-white placeholder-gray-500; + } + + .badge { + @apply px-2 py-0.5 text-xs font-medium rounded-full; + } + + .badge-success { + @apply badge bg-green-500/20 text-green-400 border border-green-500/30; + } + + .badge-danger { + @apply badge bg-red-500/20 text-red-400 border border-red-500/30; + } + + .badge-warning { + @apply badge bg-yellow-500/20 text-yellow-400 border border-yellow-500/30; + } + + .badge-info { + @apply badge bg-blue-500/20 text-blue-400 border border-blue-500/30; + } + + .gradient-text { + @apply bg-gradient-to-r from-green-400 via-emerald-400 to-teal-400 bg-clip-text text-transparent; + } +} + +/* Animations */ +@keyframes fadeIn { + from { opacity: 0; transform: translateY(10px); } + to { opacity: 1; transform: translateY(0); } +} + +.animate-fade-in { + animation: fadeIn 0.3s ease-out; +} + +/* Number styling */ +.font-mono { + font-variant-numeric: tabular-nums; +} diff --git a/frontend/src/main.tsx b/frontend/src/main.tsx new file mode 100644 index 0000000..0857165 --- /dev/null +++ b/frontend/src/main.tsx @@ -0,0 +1,33 @@ +import React from 'react' +import ReactDOM from 'react-dom/client' +import { QueryClient, QueryClientProvider } from '@tanstack/react-query' +import { BrowserRouter } from 'react-router-dom' +import { Toaster } from 'react-hot-toast' +import App from './App' +import './index.css' + +const queryClient = new QueryClient({ + defaultOptions: { + queries: { + staleTime: 30000, // 30 seconds + retry: 2, + }, + }, +}) + +ReactDOM.createRoot(document.getElementById('root')!).render( + + + + + + + + , +) diff --git a/frontend/src/pages/Analytics.tsx b/frontend/src/pages/Analytics.tsx new file mode 100644 index 0000000..206400a --- /dev/null +++ b/frontend/src/pages/Analytics.tsx @@ -0,0 +1,231 @@ +import { motion } from 'framer-motion' +import { + ChartPieIcon, + ArrowTrendingUpIcon, + ClockIcon, + CheckCircleIcon, +} from '@heroicons/react/24/outline' +import { + LineChart, + Line, + XAxis, + YAxis, + CartesianGrid, + Tooltip, + ResponsiveContainer, + AreaChart, + Area, +} from 'recharts' + +// Mock data +const performanceData = [ + { date: 'Jan', signals: 5, successful: 4, return: 12.5 }, + { date: 'Feb', signals: 8, successful: 6, return: 18.2 }, + { date: 'Mar', signals: 3, 
successful: 2, return: 8.5 }, + { date: 'Apr', signals: 6, successful: 5, return: 15.3 }, + { date: 'May', signals: 4, successful: 4, return: 22.1 }, + { date: 'Jun', signals: 7, successful: 5, return: 14.8 }, +] + +const sentimentTrend = [ + { date: 'Mon', sentiment: -15 }, + { date: 'Tue', sentiment: -25 }, + { date: 'Wed', sentiment: -45 }, + { date: 'Thu', sentiment: -38 }, + { date: 'Fri', sentiment: -52 }, + { date: 'Sat', sentiment: -48 }, + { date: 'Sun', sentiment: -35 }, +] + +const topPatterns = [ + { type: 'Earnings Miss Recovery', avgRecovery: 28.5, avgDays: 45, successRate: 82 }, + { type: 'Scandal/PR Crisis', avgRecovery: 35.2, avgDays: 60, successRate: 75 }, + { type: 'Product Recall', avgRecovery: 22.8, avgDays: 35, successRate: 78 }, + { type: 'Sector Rotation', avgRecovery: 18.5, avgDays: 25, successRate: 85 }, + { type: 'Market Correction', avgRecovery: 42.3, avgDays: 90, successRate: 72 }, +] + +const stats = [ + { label: 'Total Signals', value: '33', icon: ChartPieIcon }, + { label: 'Success Rate', value: '79%', icon: CheckCircleIcon }, + { label: 'Avg Return', value: '+15.2%', icon: ArrowTrendingUpIcon }, + { label: 'Avg Hold Time', value: '42 days', icon: ClockIcon }, +] + +export default function Analytics() { + return ( +
+
+

Analytics

+

Performance metrics and pattern analysis

+
+ + {/* Stats Grid */} +
+ {stats.map((stat, index) => ( + +
+
+

{stat.label}

+

{stat.value}

+
+ +
+
+ ))} +
+ +
+ {/* Signal Performance Chart */} + +

Signal Performance

+ + + + + + + + + + + + + + + +
+ + {/* Sentiment Trend */} + +

Market Sentiment (7 Days)

+ + + + + + + + {/* Zero line */} + 0} + stroke="#6b7280" + strokeDasharray="5 5" + dot={false} + /> + + +
+
+ + {/* Top Patterns */} + +

Top Historical Patterns

+
+ + + + + + + + + + + {topPatterns.map((pattern, index) => ( + + + + + + + ))} + +
Pattern TypeAvg RecoveryAvg DaysSuccess Rate
{pattern.type} + +{pattern.avgRecovery}% + + {pattern.avgDays} + +
+
+
+
+ {pattern.successRate}% +
+
+
+
+ + {/* Insight */} + +

πŸ’‘ Key Insight

+

+ Based on historical data, Earnings Miss Recovery patterns have the highest + success rate (82%), while Market Correction events offer the highest + average return (+42.3%) but require longer hold times. Consider your risk tolerance + and time horizon when acting on signals. +

+
+
+ ) +} diff --git a/frontend/src/pages/Dashboard.tsx b/frontend/src/pages/Dashboard.tsx new file mode 100644 index 0000000..8afbd82 --- /dev/null +++ b/frontend/src/pages/Dashboard.tsx @@ -0,0 +1,242 @@ +import { motion } from 'framer-motion' +import { + ArrowTrendingUpIcon, + ArrowTrendingDownIcon, + BellAlertIcon, + NewspaperIcon, + ChartBarIcon, + ExclamationTriangleIcon, +} from '@heroicons/react/24/outline' + +// Mock data - will be replaced with API calls +const stats = [ + { name: 'Active Signals', value: '3', change: '+2', trend: 'up', icon: BellAlertIcon }, + { name: 'Stocks Tracked', value: '47', change: '+5', trend: 'up', icon: ChartBarIcon }, + { name: 'News Today', value: '156', change: '+23', trend: 'up', icon: NewspaperIcon }, + { name: 'Panic Events', value: '2', change: '+1', trend: 'down', icon: ExclamationTriangleIcon }, +] + +const topSignals = [ + { symbol: 'NVDA', confidence: 87, price: 485.23, drawdown: -18.5, expectedRecovery: 35 }, + { symbol: 'META', confidence: 76, price: 345.67, drawdown: -12.3, expectedRecovery: 25 }, + { symbol: 'TSLA', confidence: 71, price: 178.90, drawdown: -25.8, expectedRecovery: 45 }, +] + +const recentNews = [ + { title: 'NVIDIA faces supply chain concerns amid AI boom', sentiment: -45, time: '2h ago' }, + { title: 'Meta announces layoffs in Reality Labs division', sentiment: -62, time: '4h ago' }, + { title: 'Fed signals potential rate cuts in 2024', sentiment: 35, time: '5h ago' }, + { title: 'Tesla recalls 2M vehicles over autopilot issues', sentiment: -78, time: '6h ago' }, +] + +const sectorHeatmap = [ + { sector: 'Technology', sentiment: -25, change: -5.2 }, + { sector: 'Healthcare', sentiment: 15, change: 2.1 }, + { sector: 'Energy', sentiment: -45, change: -8.3 }, + { sector: 'Financials', sentiment: 5, change: 0.8 }, + { sector: 'Consumer', sentiment: -35, change: -4.5 }, + { sector: 'Defense', sentiment: 55, change: 12.3 }, +] + +export default function Dashboard() { + return ( +
+ {/* Header */} +
+

Dashboard

+

Fear-to-Fortune Trading Intelligence

+
+ + {/* Stats Grid */} +
+ {stats.map((stat, index) => ( + +
+
+

{stat.name}

+

{stat.value}

+
+
+ +
+
+
+ {stat.trend === 'up' ? ( + + ) : ( + + )} + + {stat.change} + + from yesterday +
+
+ ))} +
+ +
+ {/* Top Buy Signals */} + +
+

+ + Top Buy Signals +

+ + View all β†’ + +
+ +
+ {topSignals.map((signal, index) => ( + +
+
+ {signal.symbol.slice(0, 2)} +
+
+

${signal.symbol}

+

${signal.price.toFixed(2)}

+
+
+ +
+
+

Drawdown

+

{signal.drawdown}%

+
+
+

Expected

+

+{signal.expectedRecovery}%

+
+
+

Confidence

+
+
+
+
+ {signal.confidence}% +
+
+
+ + ))} +
+
+ + {/* Sector Heatmap */} + +

Sector Sentiment

+
+ {sectorHeatmap.map((sector) => ( +
0 + ? `rgba(34, 197, 94, ${Math.abs(sector.sentiment) / 200})` + : `rgba(239, 68, 68, ${Math.abs(sector.sentiment) / 200})`, + }} + > + {sector.sector} +
+ 0 ? 'text-green-400' : 'text-red-400'}> + {sector.change > 0 ? '+' : ''}{sector.change}% + + + ({sector.sentiment}) + +
+
+ ))} +
+
+
+ + {/* Recent Panic News */} + +
+

+ + Recent Panic News +

+ + View all β†’ + +
+ +
+ {recentNews.map((news, index) => ( + +
+

{news.title}

+ 30 + ? 'badge-success' + : 'badge-warning' + }`} + > + {news.sentiment} + +
+

{news.time}

+
+ ))} +
+
+ + {/* Quote */} + +
+ "Buy when there's blood in the streets, even if the blood is your own." +
+ β€” Baron Rothschild +
+
+ ) +} diff --git a/frontend/src/pages/News.tsx b/frontend/src/pages/News.tsx new file mode 100644 index 0000000..f13f594 --- /dev/null +++ b/frontend/src/pages/News.tsx @@ -0,0 +1,184 @@ +import { useState } from 'react' +import { motion } from 'framer-motion' +import { NewspaperIcon, FunnelIcon } from '@heroicons/react/24/outline' + +// Mock data +const news = [ + { + id: '1', + title: 'NVIDIA faces supply chain concerns as AI demand continues to surge', + source: 'Reuters', + sentiment: -45, + stocks: ['NVDA', 'AMD'], + time: '2 hours ago', + summary: 'NVIDIA is struggling to meet AI chip demand as supply chain issues persist...', + }, + { + id: '2', + title: 'Tesla recalls 2 million vehicles over autopilot safety issues', + source: 'Bloomberg', + sentiment: -78, + stocks: ['TSLA'], + time: '4 hours ago', + summary: 'The recall affects nearly all Tesla vehicles sold in the US...', + }, + { + id: '3', + title: 'Meta announces 10,000 layoffs in Reality Labs division', + source: 'CNBC', + sentiment: -62, + stocks: ['META'], + time: '5 hours ago', + summary: 'Meta is cutting costs as metaverse investments continue to drain resources...', + }, + { + id: '4', + title: 'Federal Reserve signals potential rate cuts in 2024', + source: 'Wall Street Journal', + sentiment: 45, + stocks: ['SPY', 'QQQ'], + time: '6 hours ago', + summary: 'Fed officials indicate inflation has cooled enough to consider easing...', + }, + { + id: '5', + title: 'Lockheed Martin secures $20B defense contract', + source: 'Defense News', + sentiment: 72, + stocks: ['LMT', 'RTX'], + time: '8 hours ago', + summary: 'The Pentagon awards major contract for next-generation fighter jets...', + }, + { + id: '6', + title: 'Apple iPhone sales decline in China amid competition', + source: 'Financial Times', + sentiment: -35, + stocks: ['AAPL'], + time: '10 hours ago', + summary: 'Huawei and other local brands continue to gain market share...', + }, +] + +const sentimentFilters = ['All', 'Panic', 'Negative', 'Neutral', 'Positive'] + +export default function News() { + const [selectedFilter, setSelectedFilter] = useState('All') + + const filteredNews = news.filter(item => { + if (selectedFilter === 'All') return true + if (selectedFilter === 'Panic') return item.sentiment <= -50 + if (selectedFilter === 'Negative') return item.sentiment < -20 && item.sentiment > -50 + if (selectedFilter === 'Neutral') return item.sentiment >= -20 && item.sentiment <= 20 + if (selectedFilter === 'Positive') return item.sentiment > 20 + return true + }) + + const getSentimentColor = (sentiment: number) => { + if (sentiment <= -50) return 'text-red-500' + if (sentiment < -20) return 'text-orange-400' + if (sentiment <= 20) return 'text-gray-400' + return 'text-green-400' + } + + const getSentimentBadge = (sentiment: number) => { + if (sentiment <= -50) return 'badge-danger' + if (sentiment < -20) return 'badge-warning' + if (sentiment <= 20) return 'badge-info' + return 'badge-success' + } + + return ( +
+    <div className="space-y-6">
+      {/* header markup reconstructed; tags and classes are assumed */}
+      <div className="flex items-center justify-between">
+        <div>
+          <h1 className="flex items-center gap-2 text-2xl font-bold">
+            <NewspaperIcon className="h-7 w-7" />
+            News Feed
+          </h1>
+          <p className="text-gray-400">
+            Real-time financial news with sentiment analysis
+          </p>
+        </div>
+      </div>
+      {/* Filters */}
+      <div className="flex items-center gap-2">
+        <FunnelIcon className="h-5 w-5 text-gray-400" />
+        {sentimentFilters.map((filter) => (
+          <button
+            key={filter}
+            onClick={() => setSelectedFilter(filter)}
+            className={`btn ${selectedFilter === filter ? 'btn-primary' : 'btn-secondary'}`}
+          >
+            {filter}
+          </button>
+        ))}
+      </div>
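The filter above buckets scores with non-obvious boundaries: exactly -50 is Panic (not Negative), and the Neutral band is the closed interval [-20, 20]. A sketch of boundary tests pinning this down; the project ships no frontend test runner yet, so Vitest here is an assumption:

```ts
// Boundary tests for the sentiment buckets used by filteredNews above.
import { describe, expect, it } from 'vitest'

const bucket = (s: number) =>
  s <= -50 ? 'Panic' : s < -20 ? 'Negative' : s <= 20 ? 'Neutral' : 'Positive'

describe('sentiment buckets', () => {
  it('classifies boundary values consistently', () => {
    expect(bucket(-50)).toBe('Panic')    // -50 belongs to Panic, not Negative
    expect(bucket(-49)).toBe('Negative')
    expect(bucket(-20)).toBe('Neutral')  // [-20, 20] is Neutral
    expect(bucket(20)).toBe('Neutral')
    expect(bucket(21)).toBe('Positive')
  })
})
```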
+      {/* News Cards */}
+      <div className="space-y-4">
+        {filteredNews.map((item, index) => (
+          <motion.div
+            key={item.id}
+            initial={{ opacity: 0, y: 10 }}
+            animate={{ opacity: 1, y: 0 }}
+            transition={{ delay: index * 0.05 }}
+            className="card flex items-start justify-between gap-4"
+          >
+            {/* card markup reconstructed; tags, classes and motion props are assumed */}
+            <div>
+              <div className="flex items-center gap-2 text-sm text-gray-400">
+                <span>{item.source}</span>
+                <span>•</span>
+                <span>{item.time}</span>
+                <span className={`badge ${getSentimentBadge(item.sentiment)}`}>
+                  {item.sentiment <= -50 ? '🔴 Panic' : item.sentiment > 20 ? '🟢 Positive' : 'Neutral'}
+                </span>
+              </div>
+              <h3 className="mt-1 font-semibold">{item.title}</h3>
+              <p className="mt-1 text-sm text-gray-400">{item.summary}</p>
+              <div className="mt-2 flex gap-2">
+                {item.stocks.map((stock) => (
+                  <span key={stock} className="badge badge-info">${stock}</span>
+                ))}
+              </div>
+            </div>
+            <div className="text-right">
+              <div className={`text-2xl font-bold ${getSentimentColor(item.sentiment)}`}>
+                {item.sentiment}
+              </div>
+              <div className="text-xs text-gray-400">Sentiment</div>
+            </div>
+          </motion.div>
+        ))}
+      </div>
+      {filteredNews.length === 0 && (
+        <div className="card py-12 text-center">
+          <NewspaperIcon className="mx-auto h-10 w-10 text-gray-500" />
+          <h3 className="mt-2 font-semibold">No News Found</h3>
+          <p className="text-gray-400">
+            No news articles match your current filters.
+          </p>
+        </div>
+      )}
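The page renders a hard-coded `news` array. A hedged sketch of swapping it for the `newsApi` client defined in `services/api.ts` later in this diff; the response shape is an assumption, since the backend schema is not in this hunk:

```ts
import { useEffect, useState } from 'react'
import { newsApi } from '../services/api'

// Shape assumed to mirror the mock array above; adjust once the actual
// Pydantic schema is confirmed.
interface NewsItem {
  id: string
  title: string
  source: string
  sentiment: number
  stocks: string[]
  time: string
  summary: string
}

// Hypothetical hook: fetches panic-flagged news via GET /api/v1/news/panic.
export function usePanicNews(threshold = -50, hours = 24) {
  const [items, setItems] = useState<NewsItem[]>([])

  useEffect(() => {
    newsApi
      .getPanicNews({ threshold, hours, limit: 50 })
      .then((res) => setItems(res.data))
      .catch(console.error) // keep the empty list on failure
  }, [threshold, hours])

  return items
}
```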
+ ) +} diff --git a/frontend/src/pages/Signals.tsx b/frontend/src/pages/Signals.tsx new file mode 100644 index 0000000..ef73e86 --- /dev/null +++ b/frontend/src/pages/Signals.tsx @@ -0,0 +1,185 @@ +import { motion } from 'framer-motion' +import { BellAlertIcon, CheckIcon, XMarkIcon } from '@heroicons/react/24/outline' + +// Mock data +const signals = [ + { + id: '1', + symbol: 'NVDA', + name: 'NVIDIA Corporation', + confidence: 87, + price: 485.23, + drawdown: -18.5, + expectedRecovery: 35, + expectedDays: 45, + sentiment: -65, + status: 'active', + createdAt: '2024-01-15T10:30:00Z', + reason: 'Panic selling due to supply chain concerns. Historical pattern shows 85% recovery rate within 60 days.', + }, + { + id: '2', + symbol: 'META', + name: 'Meta Platforms Inc.', + confidence: 76, + price: 345.67, + drawdown: -12.3, + expectedRecovery: 25, + expectedDays: 30, + sentiment: -48, + status: 'active', + createdAt: '2024-01-15T09:15:00Z', + reason: 'Reality Labs layoff announcement. Similar events historically recovered within 45 days.', + }, + { + id: '3', + symbol: 'TSLA', + name: 'Tesla Inc.', + confidence: 71, + price: 178.90, + drawdown: -25.8, + expectedRecovery: 45, + expectedDays: 60, + sentiment: -72, + status: 'active', + createdAt: '2024-01-15T08:00:00Z', + reason: 'Autopilot recall news. Tesla has recovered from similar negative news 78% of the time.', + }, +] + +export default function Signals() { + return ( +
+    <div className="space-y-6">
+      {/* header markup reconstructed; tags and classes are assumed */}
+      <div className="flex items-center justify-between">
+        <div>
+          <h1 className="text-2xl font-bold">Buy Signals</h1>
+          <p className="text-gray-400">
+            Opportunities identified by panic pattern matching
+          </p>
+        </div>
+        {/* icon-only element assumed: no text survived this part of the hunk */}
+        <BellAlertIcon className="h-8 w-8 animate-pulse-slow text-green-400" />
+      </div>
+      {/* Signal Cards */}
+      <div className="space-y-4">
+        {signals.map((signal, index) => (
+          <motion.div
+            key={signal.id}
+            initial={{ opacity: 0, y: 10 }}
+            animate={{ opacity: 1, y: 0 }}
+            transition={{ delay: index * 0.1 }}
+            className="card grid gap-6 lg:grid-cols-[auto_1fr_auto]"
+          >
+            {/* card markup reconstructed; tags, classes and motion props are assumed */}
+            {/* Left: Stock Info */}
+            <div>
+              <div className="flex items-center gap-2">
+                <h3 className="text-xl font-bold">${signal.symbol}</h3>
+                <span className="badge badge-success">Active</span>
+              </div>
+              <p className="text-gray-400">{signal.name}</p>
+              <p className="mt-1 text-xs text-gray-500">
+                Signal generated {new Date(signal.createdAt).toLocaleString()}
+              </p>
+            </div>
+            {/* Middle: Stats */}
+            <div className="grid grid-cols-4 gap-4 text-center">
+              <div>
+                <div className="text-xs text-gray-400">Price</div>
+                <div className="font-semibold">${signal.price.toFixed(2)}</div>
+              </div>
+              <div>
+                <div className="text-xs text-gray-400">Drawdown</div>
+                <div className="font-semibold text-red-400">{signal.drawdown}%</div>
+              </div>
+              <div>
+                <div className="text-xs text-gray-400">Expected</div>
+                <div className="font-semibold text-green-400">+{signal.expectedRecovery}%</div>
+              </div>
+              <div>
+                <div className="text-xs text-gray-400">Timeframe</div>
+                <div className="font-semibold">{signal.expectedDays}d</div>
+              </div>
+            </div>
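For orientation, here is how the four stats on a card relate arithmetically, using the NVDA mock values. The semantics are assumptions rather than confirmed backend contract: drawdown is read as the drop from the pre-panic high, expectedRecovery as a gain from the current price.

```ts
// Worked example with the first mock signal above.
const price = 485.23
const drawdown = -18.5       // % below the pre-panic high (assumed)
const expectedRecovery = 35  // % gain from current price (assumed)

const prePanicHigh = price / (1 + drawdown / 100)
const targetPrice = price * (1 + expectedRecovery / 100)

console.log(prePanicHigh.toFixed(2)) // "595.37"
console.log(targetPrice.toFixed(2))  // "655.06", within ~45 expected days
```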
+            {/* Right: Confidence & Actions */}
+            <div className="flex flex-col items-center gap-3">
+              {/* progress-ring geometry assumed; only the percentage label survived the hunk */}
+              <div className="relative h-20 w-20">
+                <svg className="h-20 w-20 -rotate-90" viewBox="0 0 80 80">
+                  <circle cx="40" cy="40" r="34" fill="none" stroke="currentColor"
+                          strokeWidth="6" className="text-gray-700" />
+                  <circle cx="40" cy="40" r="34" fill="none" stroke="currentColor"
+                          strokeWidth="6" className="text-green-500"
+                          strokeDasharray={2 * Math.PI * 34}
+                          strokeDashoffset={2 * Math.PI * 34 * (1 - signal.confidence / 100)} />
+                </svg>
+                <div className="absolute inset-0 flex items-center justify-center font-bold">
+                  {signal.confidence}%
+                </div>
+              </div>
+              <div className="text-xs text-gray-400">Confidence</div>
+              <div className="flex gap-2">
+                <button className="btn btn-success" title="Trigger">
+                  <CheckIcon className="h-5 w-5" />
+                </button>
+                <button className="btn btn-secondary" title="Dismiss">
+                  <XMarkIcon className="h-5 w-5" />
+                </button>
+              </div>
+            </div>
+            {/* Reason */}
+            <div className="border-t border-gray-800 pt-3 lg:col-span-3">
+              <p className="text-sm text-gray-300">
+                <span className="font-semibold">Analysis: </span>
+                {signal.reason}
+              </p>
+            </div>
+          </motion.div>
+        ))}
+      </div>
+      {/* Empty State */}
+      {signals.length === 0 && (
+        <div className="card py-12 text-center">
+          <BellAlertIcon className="mx-auto h-10 w-10 text-gray-500" />
+          <h3 className="mt-2 font-semibold">No Active Signals</h3>
+          <p className="text-gray-400">
+            When panic creates opportunities, they'll appear here.
+          </p>
+        </div>
+      )}
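The Check and X buttons above are not wired to anything yet. Since `services/api.ts` later in this diff already exposes trigger and dismiss endpoints, the handlers could look like this sketch; error handling and state refresh are deliberately left out:

```ts
import { signalsApi } from '../services/api'

// Hypothetical handlers for the Check / X buttons above.
export async function onTrigger(id: string) {
  await signalsApi.trigger(id) // POST /api/v1/signals/{id}/trigger
}

export async function onDismiss(id: string) {
  await signalsApi.dismiss(id) // POST /api/v1/signals/{id}/dismiss
}
```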
+ ) +} diff --git a/frontend/src/pages/Stocks.tsx b/frontend/src/pages/Stocks.tsx new file mode 100644 index 0000000..45c52b9 --- /dev/null +++ b/frontend/src/pages/Stocks.tsx @@ -0,0 +1,136 @@ +import { useState } from 'react' +import { motion } from 'framer-motion' +import { MagnifyingGlassIcon, PlusIcon } from '@heroicons/react/24/outline' + +// Mock data +const stocks = [ + { symbol: 'AAPL', name: 'Apple Inc.', sector: 'Technology', price: 185.23, change: 1.25, sentiment: 15 }, + { symbol: 'MSFT', name: 'Microsoft Corporation', sector: 'Technology', price: 378.45, change: -0.85, sentiment: 8 }, + { symbol: 'NVDA', name: 'NVIDIA Corporation', sector: 'Technology', price: 485.23, change: -3.45, sentiment: -45 }, + { symbol: 'GOOGL', name: 'Alphabet Inc.', sector: 'Technology', price: 142.67, change: 0.55, sentiment: 5 }, + { symbol: 'META', name: 'Meta Platforms Inc.', sector: 'Technology', price: 345.67, change: -2.15, sentiment: -35 }, + { symbol: 'TSLA', name: 'Tesla Inc.', sector: 'Consumer Discretionary', price: 178.90, change: -5.25, sentiment: -65 }, + { symbol: 'LMT', name: 'Lockheed Martin', sector: 'Defense', price: 458.32, change: 4.55, sentiment: 55 }, + { symbol: 'RTX', name: 'RTX Corporation', sector: 'Defense', price: 92.45, change: 2.15, sentiment: 42 }, + { symbol: 'XOM', name: 'Exxon Mobil', sector: 'Energy', price: 102.34, change: -1.85, sentiment: -25 }, + { symbol: 'JPM', name: 'JPMorgan Chase', sector: 'Financials', price: 172.56, change: 0.95, sentiment: 12 }, +] + +const sectors = ['All', 'Technology', 'Defense', 'Energy', 'Financials', 'Healthcare', 'Consumer'] + +export default function Stocks() { + const [search, setSearch] = useState('') + const [selectedSector, setSelectedSector] = useState('All') + + const filteredStocks = stocks.filter(stock => { + const matchesSearch = stock.symbol.toLowerCase().includes(search.toLowerCase()) || + stock.name.toLowerCase().includes(search.toLowerCase()) + const matchesSector = selectedSector === 'All' || stock.sector === selectedSector + return matchesSearch && matchesSector + }) + + return ( +
+    <div className="space-y-6">
+      {/* header markup reconstructed; tags and classes are assumed */}
+      <div className="flex items-center justify-between">
+        <div>
+          <h1 className="text-2xl font-bold">Stocks</h1>
+          <p className="text-gray-400">
+            Monitor and track stocks across all sectors
+          </p>
+        </div>
+        {/* icon-only button assumed: no label text survived the hunk */}
+        <button className="btn btn-primary" title="Add stock">
+          <PlusIcon className="h-5 w-5" />
+        </button>
+      </div>
+      {/* Filters */}
+      <div className="flex flex-wrap items-center gap-2">
+        <div className="relative">
+          <MagnifyingGlassIcon className="absolute left-3 top-1/2 h-5 w-5 -translate-y-1/2 text-gray-400" />
+          <input
+            type="text"
+            value={search}
+            onChange={(e) => setSearch(e.target.value)}
+            className="input pl-10"
+          />
+        </div>
+        {sectors.map((sector) => (
+          <button
+            key={sector}
+            onClick={() => setSelectedSector(sector)}
+            className={`btn ${selectedSector === sector ? 'btn-primary' : 'btn-secondary'}`}
+          >
+            {sector}
+          </button>
+        ))}
+      </div>
+      {/* Stocks Table */}
+      <div className="card overflow-x-auto">
+        {/* table markup reconstructed; tags and classes are assumed */}
+        <table className="w-full text-left text-sm">
+          <thead>
+            <tr className="text-gray-400">
+              <th className="py-2">Symbol</th>
+              <th>Name</th>
+              <th>Sector</th>
+              <th>Price</th>
+              <th>Change</th>
+              <th>Sentiment</th>
+              <th>Actions</th>
+            </tr>
+          </thead>
+          <tbody>
+            {filteredStocks.map((stock, index) => (
+              <motion.tr
+                key={stock.symbol}
+                initial={{ opacity: 0 }}
+                animate={{ opacity: 1 }}
+                transition={{ delay: index * 0.03 }}
+                className="border-t border-gray-800"
+              >
+                <td className="py-3 font-semibold">${stock.symbol}</td>
+                <td>{stock.name}</td>
+                <td>{stock.sector}</td>
+                <td>${stock.price.toFixed(2)}</td>
+                <td className={stock.change >= 0 ? 'text-green-400' : 'text-red-400'}>
+                  {stock.change >= 0 ? '+' : ''}{stock.change.toFixed(2)}%
+                </td>
+                <td>
+                  <div className="flex items-center gap-2">
+                    <div className="h-2 w-24 overflow-hidden rounded bg-gray-800">
+                      <div
+                        className={`h-2 rounded ${stock.sentiment >= 0 ? 'bg-green-500' : 'bg-red-500'}`}
+                        style={{
+                          // halve the magnitude so a ±100 score fills half the
+                          // track from center; the surviving expression used the
+                          // raw magnitude, which overflows for |sentiment| > 50
+                          width: `${Math.abs(stock.sentiment) / 2}%`,
+                          marginLeft: stock.sentiment >= 0
+                            ? '50%'
+                            : `${50 - Math.abs(stock.sentiment) / 2}%`,
+                        }}
+                      />
+                    </div>
+                    <span className={stock.sentiment >= 0 ? 'text-green-400' : 'text-red-400'}>
+                      {stock.sentiment}
+                    </span>
+                  </div>
+                </td>
+                <td>
+                  <button className="btn btn-secondary" title="Add to watchlist">
+                    <PlusIcon className="h-4 w-4" />
+                  </button>
+                </td>
+              </motion.tr>
+            ))}
+          </tbody>
+        </table>
+      </div>
+    </div>
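The centered sentiment bar is the fiddly part of that table, so here is the geometry spelled out. `barGeometry` is a hypothetical helper for illustration only; it encodes the halved-magnitude mapping used above:

```ts
// Maps sentiment in [-100, 100] to a segment of a 0–100% track anchored
// at the 50% center line; a ±100 score fills half the track.
function barGeometry(sentiment: number): { widthPct: number; leftPct: number } {
  const widthPct = Math.abs(sentiment) / 2
  const leftPct = sentiment >= 0 ? 50 : 50 - widthPct
  return { widthPct, leftPct }
}

console.log(barGeometry(55))  // { widthPct: 27.5, leftPct: 50 }   -> extends right
console.log(barGeometry(-65)) // { widthPct: 32.5, leftPct: 17.5 } -> extends left
```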
+ ) +} diff --git a/frontend/src/pages/Watchlist.tsx b/frontend/src/pages/Watchlist.tsx new file mode 100644 index 0000000..37ed42b --- /dev/null +++ b/frontend/src/pages/Watchlist.tsx @@ -0,0 +1,193 @@ +import { motion } from 'framer-motion' +import { StarIcon, TrashIcon, BellIcon } from '@heroicons/react/24/outline' +import { StarIcon as StarIconSolid } from '@heroicons/react/24/solid' + +// Mock data +const watchlist = [ + { + id: '1', + symbol: 'NVDA', + name: 'NVIDIA Corporation', + price: 485.23, + change: -3.45, + sentiment: -45, + alertThreshold: -50, + priority: 1, + notes: 'Watching for panic below $450', + }, + { + id: '2', + symbol: 'TSLA', + name: 'Tesla Inc.', + price: 178.90, + change: -5.25, + sentiment: -65, + alertThreshold: -60, + priority: 1, + notes: 'Autopilot recall - monitoring sentiment', + }, + { + id: '3', + symbol: 'META', + name: 'Meta Platforms Inc.', + price: 345.67, + change: -2.15, + sentiment: -35, + alertThreshold: -50, + priority: 2, + notes: 'Reality Labs concerns', + }, + { + id: '4', + symbol: 'BA', + name: 'Boeing Company', + price: 215.34, + change: -1.85, + sentiment: -28, + alertThreshold: -40, + priority: 2, + notes: 'Safety issues being monitored', + }, + { + id: '5', + symbol: 'LMT', + name: 'Lockheed Martin', + price: 458.32, + change: 4.55, + sentiment: 55, + alertThreshold: -30, + priority: 3, + notes: 'Defense sector strength', + }, +] + +export default function Watchlist() { + return ( +
+    <div className="space-y-6">
+      {/* header markup reconstructed; tags and classes are assumed */}
+      <div className="flex items-center justify-between">
+        <div>
+          <h1 className="text-2xl font-bold">Watchlist</h1>
+          <p className="text-gray-400">
+            Stocks you're monitoring for opportunities
+          </p>
+        </div>
+      </div>
+      {/* Priority Legend */}
+      <div className="flex items-center gap-4 text-sm text-gray-400">
+        {/* dot colors assumed; only the labels survived the hunk */}
+        <span>Priority:</span>
+        <span className="flex items-center gap-1">
+          <span className="h-2 w-2 rounded-full bg-red-500" />
+          High
+        </span>
+        <span className="flex items-center gap-1">
+          <span className="h-2 w-2 rounded-full bg-yellow-500" />
+          Medium
+        </span>
+        <span className="flex items-center gap-1">
+          <span className="h-2 w-2 rounded-full bg-gray-500" />
+          Low
+        </span>
+      </div>
+      {/* Watchlist Cards */}
+      <div className="space-y-4">
+        {watchlist.map((item, index) => (
+          <motion.div
+            key={item.id}
+            initial={{ opacity: 0, y: 10 }}
+            animate={{ opacity: 1, y: 0 }}
+            transition={{ delay: index * 0.05 }}
+            className="card"
+          >
+            {/* card markup reconstructed; tags, classes and motion props are assumed */}
+            <div className="flex items-start justify-between">
+              <div className="flex items-center gap-3">
+                <div>
+                  <h3 className="text-lg font-bold">${item.symbol}</h3>
+                  <p className="text-sm text-gray-400">{item.name}</p>
+                </div>
+                {item.priority === 1
+                  ? <StarIconSolid className="h-5 w-5 text-yellow-400" />
+                  : <StarIcon className="h-5 w-5 text-gray-500" />}
+              </div>
+              <div className="flex gap-2">
+                <button className="btn btn-secondary" title="Configure alerts">
+                  <BellIcon className="h-5 w-5" />
+                </button>
+                <button className="btn btn-secondary" title="Remove from watchlist">
+                  <TrashIcon className="h-5 w-5" />
+                </button>
+              </div>
+            </div>
+            <div className="mt-4 grid grid-cols-3 gap-4 text-center">
+              <div>
+                <div className="text-xs text-gray-400">Price</div>
+                <div className="font-semibold">${item.price.toFixed(2)}</div>
+              </div>
+              <div>
+                <div className="text-xs text-gray-400">Change</div>
+                <div className={`font-semibold ${item.change >= 0 ? 'text-green-400' : 'text-red-400'}`}>
+                  {item.change >= 0 ? '+' : ''}{item.change.toFixed(2)}%
+                </div>
+              </div>
+              <div>
+                <div className="text-xs text-gray-400">Sentiment</div>
+                <div className={`font-semibold ${item.sentiment >= 0 ? 'text-green-400' : 'text-red-400'}`}>
+                  {item.sentiment}
+                </div>
+              </div>
+            </div>
+            {/* Alert Threshold */}
+            <div className="mt-4">
+              <div className="flex items-center justify-between text-xs text-gray-400">
+                <span>Panic Alert Threshold</span>
+                <span>{item.alertThreshold}</span>
+              </div>
+              {/* bar geometry assumed: fill tracks the current sentiment magnitude */}
+              <div className="mt-1 h-2 rounded bg-gray-800">
+                <div
+                  className={`h-2 rounded ${item.sentiment <= item.alertThreshold ? 'bg-red-500' : 'bg-yellow-500'}`}
+                  style={{ width: `${Math.min(100, Math.abs(item.sentiment))}%` }}
+                />
+              </div>
+              {item.sentiment <= item.alertThreshold && (
+                <p className="mt-2 text-sm text-red-400">
+                  ⚠️ Below threshold - Watch for opportunity!
+                </p>
+              )}
+            </div>
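The comparison above is the core of the alerting idea: sentiment goes negative under panic, so "below threshold" means more panicked than the configured floor. Spelled out against the mock data (the helper name is hypothetical):

```ts
// True when a stock's sentiment has fallen to or below its panic threshold.
function isPanicAlert(sentiment: number, alertThreshold: number): boolean {
  return sentiment <= alertThreshold
}

console.log(isPanicAlert(-65, -60)) // true  -> the TSLA card shows the warning
console.log(isPanicAlert(-45, -50)) // false -> the NVDA card does not
```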
+            {/* Notes */}
+            {item.notes && (
+              <p className="mt-3 text-sm text-gray-400">
+                <span className="font-semibold">Notes: </span>
+                {item.notes}
+              </p>
+            )}
+          </motion.div>
+        ))}
+      </div>
+      {watchlist.length === 0 && (
+        <div className="card py-12 text-center">
+          <StarIcon className="mx-auto h-10 w-10 text-gray-500" />
+          <h3 className="mt-2 font-semibold">Your Watchlist is Empty</h3>
+          <p className="text-gray-400">
+            Add stocks you want to monitor for panic-buying opportunities.
+          </p>
+        </div>
+      )}
+ ) +} diff --git a/frontend/src/services/api.ts b/frontend/src/services/api.ts new file mode 100644 index 0000000..7da354d --- /dev/null +++ b/frontend/src/services/api.ts @@ -0,0 +1,79 @@ +import axios from 'axios' + +const API_URL = import.meta.env.VITE_API_URL || 'http://localhost:8000' + +export const api = axios.create({ + baseURL: `${API_URL}/api/v1`, + headers: { + 'Content-Type': 'application/json', + }, +}) + +// Stocks +export const stocksApi = { + list: (params?: { sector?: string; search?: string; skip?: number; limit?: number }) => + api.get('/stocks/', { params }), + get: (symbol: string) => api.get(`/stocks/${symbol}`), + create: (data: { symbol: string; name: string; sector?: string; industry?: string }) => + api.post('/stocks/', data), + delete: (symbol: string) => api.delete(`/stocks/${symbol}`), + getSectors: () => api.get('/stocks/sectors'), + getIndustries: (sector?: string) => api.get('/stocks/industries', { params: { sector } }), +} + +// News +export const newsApi = { + list: (params?: { source?: string; sentiment?: string; hours?: number; skip?: number; limit?: number }) => + api.get('/news/', { params }), + getForStock: (symbol: string, params?: { hours?: number }) => + api.get(`/news/stock/${symbol}`, { params }), + getPanicNews: (params?: { threshold?: number; hours?: number; limit?: number }) => + api.get('/news/panic', { params }), + get: (id: string) => api.get(`/news/${id}`), +} + +// Signals +export const signalsApi = { + list: (params?: { status?: string; min_confidence?: number; skip?: number; limit?: number }) => + api.get('/signals/', { params }), + getTop: (limit?: number) => api.get('/signals/top', { params: { limit } }), + get: (id: string) => api.get(`/signals/${id}`), + trigger: (id: string) => api.post(`/signals/${id}/trigger`), + dismiss: (id: string) => api.post(`/signals/${id}/dismiss`), +} + +// Watchlist +export const watchlistApi = { + list: (priority?: number) => api.get('/watchlist/', { params: { priority } }), + add: (data: { + symbol: string + panic_alert_threshold?: number + price_alert_low?: number + price_alert_high?: number + priority?: number + notes?: string + }) => api.post('/watchlist/', data), + update: (id: string, data: Partial<{ + panic_alert_threshold: number + price_alert_low: number + price_alert_high: number + priority: number + notes: string + is_active: boolean + }>) => api.put(`/watchlist/${id}`, data), + remove: (id: string) => api.delete(`/watchlist/${id}`), + removeBySymbol: (symbol: string) => api.delete(`/watchlist/symbol/${symbol}`), +} + +// Analytics +export const analyticsApi = { + getDashboard: () => api.get('/analytics/dashboard'), + getSentimentTrend: (days?: number) => api.get('/analytics/sentiment/trend', { params: { days } }), + getSectorPanic: () => api.get('/analytics/sector/panic'), + getTopPatterns: (limit?: number) => api.get('/analytics/patterns/top', { params: { limit } }), + getRecentPanicEvents: (days?: number, limit?: number) => + api.get('/analytics/panic-events/recent', { params: { days, limit } }), + getPerformance: (days?: number) => api.get('/analytics/performance', { params: { days } }), +} + +export default api diff --git a/frontend/tailwind.config.js b/frontend/tailwind.config.js new file mode 100644 index 0000000..5479cb5 --- /dev/null +++ b/frontend/tailwind.config.js @@ -0,0 +1,54 @@ +/** @type {import('tailwindcss').Config} */ +export default { + content: [ + "./index.html", + "./src/**/*.{js,ts,jsx,tsx}", + ], + darkMode: 'class', + theme: { + extend: { + colors: { + // Custom 
trading colors + panic: { + 50: '#fef2f2', + 100: '#fee2e2', + 200: '#fecaca', + 300: '#fca5a5', + 400: '#f87171', + 500: '#ef4444', + 600: '#dc2626', + 700: '#b91c1c', + 800: '#991b1b', + 900: '#7f1d1d', + }, + greed: { + 50: '#f0fdf4', + 100: '#dcfce7', + 200: '#bbf7d0', + 300: '#86efac', + 400: '#4ade80', + 500: '#22c55e', + 600: '#16a34a', + 700: '#15803d', + 800: '#166534', + 900: '#14532d', + }, + neutral: { + 850: '#1a1a2e', + 950: '#0f0f1a', + }, + }, + animation: { + 'pulse-slow': 'pulse 3s cubic-bezier(0.4, 0, 0.6, 1) infinite', + 'glow': 'glow 2s ease-in-out infinite alternate', + }, + keyframes: { + glow: { + '0%': { boxShadow: '0 0 5px rgb(34 197 94 / 0.5), 0 0 10px rgb(34 197 94 / 0.3)' }, + '100%': { boxShadow: '0 0 10px rgb(34 197 94 / 0.8), 0 0 20px rgb(34 197 94 / 0.5)' }, + }, + }, + }, + }, + plugins: [], +} diff --git a/frontend/tsconfig.json b/frontend/tsconfig.json new file mode 100644 index 0000000..2a27e96 --- /dev/null +++ b/frontend/tsconfig.json @@ -0,0 +1,24 @@ +{ + "compilerOptions": { + "target": "ES2020", + "useDefineForClassFields": true, + "lib": ["ES2020", "DOM", "DOM.Iterable"], + "module": "ESNext", + "skipLibCheck": true, + "moduleResolution": "bundler", + "allowImportingTsExtensions": true, + "resolveJsonModule": true, + "isolatedModules": true, + "noEmit": true, + "jsx": "react-jsx", + "strict": true, + "noUnusedLocals": true, + "noUnusedParameters": true, + "noFallthroughCasesInSwitch": true, + "paths": { + "@/*": ["./src/*"] + } + }, + "include": ["src"], + "references": [{ "path": "./tsconfig.node.json" }] +} diff --git a/frontend/tsconfig.node.json b/frontend/tsconfig.node.json new file mode 100644 index 0000000..42872c5 --- /dev/null +++ b/frontend/tsconfig.node.json @@ -0,0 +1,10 @@ +{ + "compilerOptions": { + "composite": true, + "skipLibCheck": true, + "module": "ESNext", + "moduleResolution": "bundler", + "allowSyntheticDefaultImports": true + }, + "include": ["vite.config.ts"] +} diff --git a/frontend/vite.config.ts b/frontend/vite.config.ts new file mode 100644 index 0000000..2e4477b --- /dev/null +++ b/frontend/vite.config.ts @@ -0,0 +1,16 @@ +import { defineConfig } from 'vite' +import react from '@vitejs/plugin-react' + +// https://vitejs.dev/config/ +export default defineConfig({ + plugins: [react()], + server: { + port: 5173, + proxy: { + '/api': { + target: 'http://localhost:8000', + changeOrigin: true, + }, + }, + }, +})
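One interaction between the last two files is worth flagging: `services/api.ts` defaults its baseURL to the absolute `http://localhost:8000`, so in development the axios client talks to the backend directly and the `/api` proxy configured in `vite.config.ts` never sees those requests. Setting `VITE_API_URL` to an empty string does not help either, because `||` treats `''` as unset. If the proxy is meant to handle dev traffic, one possible adjustment (a sketch, not the committed behavior) is:

```ts
import axios from 'axios'

// Sketch: prefer a relative baseURL in dev so requests flow through the
// Vite proxy; `??` (unlike `||`) respects an explicitly empty VITE_API_URL.
const API_URL =
  import.meta.env.VITE_API_URL ?? (import.meta.env.DEV ? '' : 'http://localhost:8000')

export const api = axios.create({
  baseURL: `${API_URL}/api/v1`,
  headers: { 'Content-Type': 'application/json' },
})
```

With this variant, a dev request to `api.get('/stocks/')` hits `/api/v1/stocks/` on the Vite server at port 5173 and is forwarded to `http://localhost:8000`, avoiding CORS configuration on the backend.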