marketscanner/backend/app/api/endpoints/news.py

"""
News API Endpoints
"""
from typing import List, Optional
from datetime import datetime, timedelta
from uuid import UUID

from fastapi import APIRouter, Depends, HTTPException, Query
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select, desc

from app.core.database import get_db
from app.models.news import NewsArticle
from app.schemas.news import NewsResponse, NewsWithSentiment

router = APIRouter()
@router.get("/", response_model=List[NewsResponse])
async def list_news(
db: AsyncSession = Depends(get_db),
source: Optional[str] = Query(None, description="Filter by source"),
sentiment: Optional[str] = Query(None, description="Filter by sentiment: positive, negative, neutral"),
hours: int = Query(24, ge=1, le=168, description="News from last N hours"),
skip: int = Query(0, ge=0),
limit: int = Query(50, ge=1, le=100),
):
"""List recent news articles."""
since = datetime.utcnow() - timedelta(hours=hours)
query = select(NewsArticle).where(NewsArticle.published_at >= since)
if source:
query = query.where(NewsArticle.source == source)
if sentiment:
query = query.where(NewsArticle.sentiment_label == sentiment)
query = query.order_by(desc(NewsArticle.published_at)).offset(skip).limit(limit)
result = await db.execute(query)
return result.scalars().all()
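
# Example request for the listing endpoint (illustrative filter values, and
# assuming the main application mounts this router under a "/news" prefix,
# which is not shown in this file):
#   GET /news/?source=Reuters&sentiment=negative&hours=48&limit=20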
@router.get("/sources")
async def list_sources(db: AsyncSession = Depends(get_db)):
"""Get list of all news sources."""
query = select(NewsArticle.source).distinct()
result = await db.execute(query)
sources = [row[0] for row in result.fetchall() if row[0]]
return {"sources": sorted(sources)}
@router.get("/stock/{symbol}", response_model=List[NewsWithSentiment])
async def get_news_for_stock(
symbol: str,
db: AsyncSession = Depends(get_db),
hours: int = Query(72, ge=1, le=720, description="News from last N hours"),
skip: int = Query(0, ge=0),
limit: int = Query(50, ge=1, le=100),
):
"""Get news articles mentioning a specific stock."""
# This would use the news_stock_mentions join table
# For now, we search in title/content
since = datetime.utcnow() - timedelta(hours=hours)
search_term = f"%{symbol.upper()}%"
query = (
select(NewsArticle)
.where(NewsArticle.published_at >= since)
.where(
(NewsArticle.title.ilike(search_term)) |
(NewsArticle.content.ilike(search_term))
)
.order_by(desc(NewsArticle.published_at))
.offset(skip)
.limit(limit)
)
result = await db.execute(query)
return result.scalars().all()
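
# Sketch of the join-table variant referenced in the comment above. It assumes
# a NewsStockMention model with article_id and symbol columns; neither the
# model nor its import exists in this file, so the code is kept commented out
# as an illustration only.
#
# from app.models.news import NewsStockMention  # hypothetical model
#
# query = (
#     select(NewsArticle)
#     .join(NewsStockMention, NewsStockMention.article_id == NewsArticle.id)
#     .where(NewsStockMention.symbol == symbol.upper())
#     .where(NewsArticle.published_at >= since)
#     .order_by(desc(NewsArticle.published_at))
#     .offset(skip)
#     .limit(limit)
# )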
@router.get("/panic")
async def get_panic_news(
db: AsyncSession = Depends(get_db),
threshold: float = Query(-50.0, ge=-100, le=0, description="Sentiment threshold"),
hours: int = Query(24, ge=1, le=168, description="News from last N hours"),
limit: int = Query(20, ge=1, le=50),
):
"""Get the most panic-inducing news articles."""
since = datetime.utcnow() - timedelta(hours=hours)
query = (
select(NewsArticle)
.where(NewsArticle.published_at >= since)
.where(NewsArticle.sentiment_score <= threshold)
.order_by(NewsArticle.sentiment_score.asc())
.limit(limit)
)
result = await db.execute(query)
articles = result.scalars().all()
return {
"threshold": threshold,
"hours": hours,
"count": len(articles),
"articles": articles,
}
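
# Example response shape (the article fields depend on the NewsArticle model
# and are not shown in this file):
#   {"threshold": -50.0, "hours": 24, "count": 2, "articles": [...]}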
@router.get("/{article_id}", response_model=NewsWithSentiment)
async def get_article(
article_id: UUID,
db: AsyncSession = Depends(get_db),
):
"""Get a specific news article with full details."""
query = select(NewsArticle).where(NewsArticle.id == article_id)
result = await db.execute(query)
article = result.scalar_one_or_none()
if not article:
raise HTTPException(status_code=404, detail="Article not found")
return article