Initial project structure: MarketScanner - Fear-to-Fortune Trading Intelligence

Features:
- FastAPI backend with stocks, news, signals, watchlist, and analytics endpoints
- React frontend with a TailwindCSS dark-mode trading dashboard
- Celery workers for news fetching, sentiment analysis, and pattern detection
- TimescaleDB schema for time-series stock data
- Docker Compose setup for all services
- OpenAI integration for sentiment analysis
commit 074787f067
Author: mindesbunister
Date:   2026-01-08 14:15:51 +01:00

58 changed files with 4864 additions and 0 deletions

app/workers/__init__.py

@@ -0,0 +1,5 @@
"""Celery workers module."""
from app.workers.celery_app import celery_app
__all__ = ["celery_app"]

app/workers/celery_app.py

@@ -0,0 +1,69 @@
"""
Celery Application Configuration
"""
from celery import Celery
from celery.schedules import crontab
from app.core.config import settings

# Create Celery app
celery_app = Celery(
    "marketscanner",
    broker=settings.RABBITMQ_URL,
    backend=settings.REDIS_URL,
    include=[
        "app.workers.tasks.news_tasks",
        "app.workers.tasks.stock_tasks",
        "app.workers.tasks.sentiment_tasks",
        "app.workers.tasks.pattern_tasks",
        "app.workers.tasks.alert_tasks",
    ],
)

# Celery configuration
celery_app.conf.update(
    task_serializer="json",
    accept_content=["json"],
    result_serializer="json",
    timezone="UTC",
    enable_utc=True,
    task_track_started=True,
    task_time_limit=300,  # 5 minutes
    worker_prefetch_multiplier=1,
    worker_concurrency=4,
)

# Beat schedule (periodic tasks)
celery_app.conf.beat_schedule = {
    # Fetch news every 5 minutes
    "fetch-news-every-5-minutes": {
        "task": "app.workers.tasks.news_tasks.fetch_all_news",
        "schedule": settings.NEWS_SCAN_INTERVAL,
    },
    # Update stock prices every minute
    "update-prices-every-minute": {
        "task": "app.workers.tasks.stock_tasks.update_stock_prices",
        "schedule": settings.STOCK_PRICE_INTERVAL,
    },
    # Process unanalyzed news every 2 minutes
    "analyze-sentiment-every-2-minutes": {
        "task": "app.workers.tasks.sentiment_tasks.process_unanalyzed_news",
        "schedule": 120,
    },
    # Detect panic events every 5 minutes
    "detect-panic-every-5-minutes": {
        "task": "app.workers.tasks.pattern_tasks.detect_panic_events",
        "schedule": 300,
    },
    # Generate signals every 10 minutes
    "generate-signals-every-10-minutes": {
        "task": "app.workers.tasks.pattern_tasks.generate_buy_signals",
        "schedule": 600,
    },
    # Clean old data daily at midnight
    "cleanup-daily": {
        "task": "app.workers.tasks.news_tasks.cleanup_old_news",
        "schedule": crontab(hour=0, minute=0),
    },
}
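
# A minimal sketch for running this locally (standard Celery CLI; adjust
# options to match the project's Docker Compose setup):
#   celery -A app.workers.celery_app worker --loglevel=info
#   celery -A app.workers.celery_app beat --loglevel=info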

app/workers/tasks/__init__.py

@@ -0,0 +1 @@
"""Worker tasks module."""

app/workers/tasks/alert_tasks.py

@@ -0,0 +1,153 @@
"""
Alert notification tasks
"""
import httpx
import structlog
from app.workers.celery_app import celery_app
from app.core.config import settings
logger = structlog.get_logger()

@celery_app.task(name="app.workers.tasks.alert_tasks.send_telegram_alert")
def send_telegram_alert(message: str):
    """Send alert via Telegram."""
    if not settings.TELEGRAM_BOT_TOKEN or not settings.TELEGRAM_CHAT_ID:
        logger.warning("Telegram not configured")
        return {"sent": False, "reason": "not_configured"}
    try:
        url = f"https://api.telegram.org/bot{settings.TELEGRAM_BOT_TOKEN}/sendMessage"
        payload = {
            "chat_id": settings.TELEGRAM_CHAT_ID,
            "text": message,
            "parse_mode": "Markdown",
        }
        with httpx.Client() as client:
            response = client.post(url, json=payload)
            response.raise_for_status()
        logger.info("Telegram alert sent")
        return {"sent": True}
    except Exception as e:
        logger.error("Failed to send Telegram alert", error=str(e))
        return {"sent": False, "error": str(e)}

@celery_app.task(name="app.workers.tasks.alert_tasks.send_discord_alert")
def send_discord_alert(message: str, embed: dict | None = None):
    """Send alert via Discord webhook."""
    if not settings.DISCORD_WEBHOOK_URL:
        logger.warning("Discord not configured")
        return {"sent": False, "reason": "not_configured"}
    try:
        payload = {"content": message}
        if embed:
            payload["embeds"] = [embed]
        with httpx.Client() as client:
            response = client.post(settings.DISCORD_WEBHOOK_URL, json=payload)
            response.raise_for_status()
        logger.info("Discord alert sent")
        return {"sent": True}
    except Exception as e:
        logger.error("Failed to send Discord alert", error=str(e))
        return {"sent": False, "error": str(e)}

@celery_app.task(name="app.workers.tasks.alert_tasks.send_buy_signal_alert")
def send_buy_signal_alert(signal_data: dict):
    """Send formatted buy signal alert to all configured channels."""
    logger.info("Sending buy signal alert", symbol=signal_data.get("symbol"))

    # Format message
    symbol = signal_data.get("symbol", "UNKNOWN")
    confidence = signal_data.get("confidence", 0) * 100
    current_price = signal_data.get("price", 0)
    drawdown = signal_data.get("drawdown", 0)
    expected_recovery = signal_data.get("expected_recovery", 0)
    message = f"""
🚨 *BUY SIGNAL: ${symbol}* 🚨
📊 *Confidence:* {confidence:.1f}%
💰 *Current Price:* ${current_price:.2f}
📉 *Drawdown:* {drawdown:.1f}%
📈 *Expected Recovery:* {expected_recovery:.1f}%
_"Buy when there's blood in the streets"_
""".strip()

    results = {
        "telegram": None,
        "discord": None,
    }

    # Run the channel tasks in-process rather than via .delay(...).get():
    # blocking on a subtask result inside a task raises RuntimeError in Celery.
    # Send to Telegram
    if settings.TELEGRAM_BOT_TOKEN:
        results["telegram"] = send_telegram_alert(message)

    # Send to Discord with embed
    if settings.DISCORD_WEBHOOK_URL:
        embed = {
            "title": f"🚨 BUY SIGNAL: ${symbol}",
            "color": 0x00ff00,  # Green
            "fields": [
                {"name": "Confidence", "value": f"{confidence:.1f}%", "inline": True},
                {"name": "Price", "value": f"${current_price:.2f}", "inline": True},
                {"name": "Drawdown", "value": f"{drawdown:.1f}%", "inline": True},
                {"name": "Expected Recovery", "value": f"{expected_recovery:.1f}%", "inline": True},
            ],
            "footer": {"text": "MarketScanner • Buy the Fear"},
        }
        results["discord"] = send_discord_alert("", embed)

    return results

@celery_app.task(name="app.workers.tasks.alert_tasks.send_panic_alert")
def send_panic_alert(panic_data: dict):
    """Send formatted panic detection alert."""
    logger.info("Sending panic alert", symbol=panic_data.get("symbol"))

    symbol = panic_data.get("symbol", "UNKNOWN")
    sentiment = panic_data.get("sentiment", 0)
    price_drop = panic_data.get("price_drop", 0)
    news_count = panic_data.get("news_count", 0)
    message = f"""
🔴 *PANIC DETECTED: ${symbol}* 🔴
😱 *Sentiment Score:* {sentiment:.1f}
📉 *Price Drop:* {price_drop:.1f}%
📰 *News Volume:* {news_count} articles
⏳ Monitoring for buying opportunity...
""".strip()

    results = {}
    # As above, call the channel tasks in-process instead of .delay(...).get().
    if settings.TELEGRAM_BOT_TOKEN:
        results["telegram"] = send_telegram_alert(message)
    if settings.DISCORD_WEBHOOK_URL:
        embed = {
            "title": f"🔴 PANIC DETECTED: ${symbol}",
            "color": 0xff0000,  # Red
            "fields": [
                {"name": "Sentiment", "value": f"{sentiment:.1f}", "inline": True},
                {"name": "Price Drop", "value": f"{price_drop:.1f}%", "inline": True},
                {"name": "News Volume", "value": f"{news_count} articles", "inline": True},
            ],
            "footer": {"text": "MarketScanner • Watching for opportunity"},
        }
        results["discord"] = send_discord_alert("", embed)
    return results

app/workers/tasks/news_tasks.py

@@ -0,0 +1,108 @@
"""
News fetching tasks
"""
from datetime import datetime, timedelta
import feedparser
import structlog
from app.workers.celery_app import celery_app
from app.core.database import get_sync_db
from app.core.config import settings
logger = structlog.get_logger()

# RSS Feeds to monitor
NEWS_FEEDS = [
    # General Financial News
    {"name": "Yahoo Finance", "url": "https://finance.yahoo.com/news/rssindex"},
    {"name": "Reuters Business", "url": "https://www.reutersagency.com/feed/?best-topics=business-finance&post_type=best"},
    {"name": "CNBC", "url": "https://www.cnbc.com/id/100003114/device/rss/rss.html"},
    {"name": "MarketWatch", "url": "https://feeds.marketwatch.com/marketwatch/topstories/"},
    {"name": "Seeking Alpha", "url": "https://seekingalpha.com/market_currents.xml"},
    {"name": "Bloomberg", "url": "https://www.bloomberg.com/feed/podcast/etf-report.xml"},
    # Tech
    {"name": "TechCrunch", "url": "https://techcrunch.com/feed/"},
    # Crypto (because why not)
    {"name": "CoinDesk", "url": "https://www.coindesk.com/arc/outboundfeeds/rss/"},
]

@celery_app.task(name="app.workers.tasks.news_tasks.fetch_all_news")
def fetch_all_news():
    """Fetch news from all configured sources."""
    logger.info("Starting news fetch from all sources")
    total_fetched = 0
    for feed_config in NEWS_FEEDS:
        try:
            count = fetch_from_feed(feed_config["name"], feed_config["url"])
            total_fetched += count
        except Exception as e:
            logger.error(
                "Failed to fetch from feed",
                feed=feed_config["name"],
                error=str(e),
            )
    logger.info("News fetch complete", total_articles=total_fetched)
    return {"fetched": total_fetched}

@celery_app.task(name="app.workers.tasks.news_tasks.fetch_from_feed")
def fetch_from_feed(source_name: str, feed_url: str) -> int:
    """Fetch news from a single RSS feed."""
    logger.info("Fetching from feed", source=source_name)
    try:
        feed = feedparser.parse(feed_url)
        articles_saved = 0
        for entry in feed.entries[:50]:  # Limit to 50 most recent
            try:
                # Extract data
                title = entry.get("title", "")
                url = entry.get("link", "")
                summary = entry.get("summary", "")
                author = entry.get("author", "")
                # Parse published date
                published = entry.get("published_parsed") or entry.get("updated_parsed")
                if published:
                    published_at = datetime(*published[:6])
                else:
                    published_at = datetime.utcnow()
                # Save to database (skip if exists)
                # This is a placeholder - actual implementation would use the db session
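                # A minimal sketch of that save, assuming a hypothetical
                # NewsArticle model deduplicated by URL (names are illustrative,
                # not the final schema):
                #
                #   with get_sync_db() as db:
                #       if not db.query(NewsArticle).filter_by(url=url).first():
                #           db.add(NewsArticle(
                #               source=source_name, title=title, url=url,
                #               summary=summary, author=author,
                #               published_at=published_at,
                #           ))
                #           db.commit()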
                articles_saved += 1
            except Exception as e:
                logger.warning(
                    "Failed to process article",
                    title=entry.get("title", "unknown"),
                    error=str(e),
                )
        logger.info("Feed processed", source=source_name, articles=articles_saved)
        return articles_saved
    except Exception as e:
        logger.error("Failed to parse feed", source=source_name, error=str(e))
        return 0

@celery_app.task(name="app.workers.tasks.news_tasks.cleanup_old_news")
def cleanup_old_news(days: int = 90):
    """Remove news articles older than specified days."""
    logger.info("Starting news cleanup", days_to_keep=days)
    cutoff = datetime.utcnow() - timedelta(days=days)
    # Placeholder - actual implementation would delete from database
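    # A minimal sketch, again assuming a hypothetical NewsArticle model:
    #
    #   with get_sync_db() as db:
    #       deleted_count = db.query(NewsArticle).filter(
    #           NewsArticle.published_at < cutoff
    #       ).delete(synchronize_session=False)
    #       db.commit()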
    deleted_count = 0
    logger.info("News cleanup complete", deleted=deleted_count)
    return {"deleted": deleted_count}

app/workers/tasks/pattern_tasks.py

@@ -0,0 +1,142 @@
"""
Pattern detection and buy signal generation tasks
"""
from datetime import datetime, timedelta
import structlog
from app.workers.celery_app import celery_app
from app.core.config import settings
logger = structlog.get_logger()

@celery_app.task(name="app.workers.tasks.pattern_tasks.detect_panic_events")
def detect_panic_events():
    """Detect new panic events based on sentiment and price drops."""
    logger.info("Starting panic event detection")
    # Detection criteria:
    # 1. Sentiment score drops below threshold
    # 2. Price drops significantly (>5% in 24h)
    # 3. News volume spikes
    # Placeholder - actual implementation would:
    # - Query recent news sentiment by stock
    # - Check price movements
    # - Create panic_events records
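    # A rough sketch of that flow; Stock, PanicEvent, avg_sentiment_24h, and
    # price_change_pct_24h are illustrative assumptions, and db session plumbing
    # is omitted:
    #
    #   for stock in db.query(Stock).filter_by(is_active=True):
    #       sentiment = avg_sentiment_24h(db, stock.id)
    #       drop = price_change_pct_24h(db, stock.id)
    #       if sentiment < settings.PANIC_THRESHOLD and drop < -5:
    #           db.add(PanicEvent(stock_id=stock.id, sentiment=sentiment,
    #                             price_drop=drop))
    #   db.commit()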
    detected_count = 0
    logger.info("Panic detection complete", detected=detected_count)
    return {"detected": detected_count}

@celery_app.task(name="app.workers.tasks.pattern_tasks.generate_buy_signals")
def generate_buy_signals():
    """Generate buy signals based on historical patterns."""
    logger.info("Starting buy signal generation")
    # Signal generation criteria:
    # 1. Active panic event exists
    # 2. Similar historical events had good recovery
    # 3. Price is near or past typical bottom
    # 4. Volume indicates capitulation
    # Placeholder - actual implementation would:
    # - Find stocks with active panic events
    # - Match against historical patterns
    # - Calculate confidence scores
    # - Create buy_signals records
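    # Sketch of the pipeline; active_panic_events, find_pattern, BuySignal, and
    # SIGNAL_CONFIDENCE_THRESHOLD are illustrative assumptions:
    #
    #   for event in active_panic_events(db):
    #       pattern = find_pattern(db, event.stock_id, event.event_type)
    #       confidence = calculate_confidence_score(
    #           str(event.stock_id), event.drawdown, event.sentiment, pattern)
    #       if confidence >= settings.SIGNAL_CONFIDENCE_THRESHOLD:
    #           db.add(BuySignal(stock_id=event.stock_id, confidence=confidence))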
    signals_count = 0
    logger.info("Signal generation complete", signals=signals_count)
    return {"generated": signals_count}

@celery_app.task(name="app.workers.tasks.pattern_tasks.analyze_historical_pattern")
def analyze_historical_pattern(stock_id: str, event_type: str):
    """Analyze historical patterns for a specific stock and event type."""
    logger.info("Analyzing historical pattern", stock_id=stock_id, event_type=event_type)
    # Would query past panic events for this stock
    # Calculate statistics:
    # - Average/median drawdown
    # - Average/median recovery time
    # - Average/median recovery percentage
    # - Success rate (how often did it recover)
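    # Sketch of the aggregation over completed events (completed_panic_events
    # and the event fields are illustrative assumptions):
    #
    #   from statistics import mean, median
    #   events = completed_panic_events(db, stock_id, event_type)
    #   pattern = {
    #       "avg_drawdown": mean(e.max_drawdown for e in events),
    #       "median_recovery_days": median(e.recovery_days for e in events),
    #       "success_rate": sum(e.recovered for e in events) / len(events),
    #       "event_count": len(events),
    #   }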
    return {
        "stock_id": stock_id,
        "event_type": event_type,
        "pattern": None,  # Would contain pattern data
    }

@celery_app.task(name="app.workers.tasks.pattern_tasks.calculate_confidence_score")
def calculate_confidence_score(
    stock_id: str,
    current_drawdown: float,
    current_sentiment: float,
    historical_pattern: dict,
) -> float:
    """Calculate confidence score for a potential buy signal."""
    # Factors:
    # 1. How close is current drawdown to historical average
    # 2. How negative is sentiment (capitulation indicator)
    # 3. Pattern reliability (sample size, consistency)
    # 4. Market conditions (sector performance, overall market)
    score = 0.5  # Base score
    # Adjust based on drawdown match
    if historical_pattern and historical_pattern.get("avg_drawdown"):
        avg_drawdown = historical_pattern["avg_drawdown"]
        drawdown_ratio = current_drawdown / avg_drawdown
        if 0.8 <= drawdown_ratio <= 1.2:
            score += 0.2  # Close to historical average
    # Adjust based on sentiment (more panic = higher score)
    if current_sentiment < settings.PANIC_THRESHOLD:
        panic_intensity = abs(current_sentiment - settings.PANIC_THRESHOLD) / 50
        score += min(panic_intensity * 0.2, 0.2)
    # Adjust based on pattern reliability
    if historical_pattern and historical_pattern.get("event_count", 0) >= 3:
        score += 0.1  # Multiple historical examples
    return min(max(score, 0), 1)  # Clamp to 0-1
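
# Worked example (illustrative): a drawdown ratio near 1.0 (+0.2), sentiment far
# below the panic threshold (+0.2), and >= 3 prior events (+0.1) yields
# 0.5 + 0.2 + 0.2 + 0.1 = 1.0, which the final clamp leaves at 1.0.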

@celery_app.task(name="app.workers.tasks.pattern_tasks.update_panic_event_status")
def update_panic_event_status():
    """Update panic events - check if they've ended/recovered."""
    logger.info("Updating panic event statuses")
    # Check active (incomplete) panic events
    # Mark as complete if:
    # - Price has recovered to pre-panic levels
    # - Sentiment has normalized
    # - Enough time has passed
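    # Sketch of the status check (helper names are illustrative assumptions):
    #
    #   for event in active_panic_events(db):
    #       if latest_price(db, event.stock_id) >= event.pre_panic_price \
    #               or sentiment_normalized(db, event):
    #           event.status = "completed"
    #           event.ended_at = datetime.utcnow()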
    updated_count = 0
    logger.info("Panic status update complete", updated=updated_count)
    return {"updated": updated_count}

@celery_app.task(name="app.workers.tasks.pattern_tasks.rebuild_patterns")
def rebuild_patterns(stock_id: str | None = None):
    """Rebuild historical patterns from panic events."""
    logger.info("Rebuilding patterns", stock_id=stock_id or "all")
    # Aggregate all completed panic events
    # Group by stock and event type
    # Calculate pattern statistics
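    # Sketch (illustrative names): group completed events and upsert one pattern
    # row per (stock, event_type) pair:
    #
    #   events = completed_panic_events(db, stock_id)  # all stocks if stock_id is None
    #   for (sid, etype), group in group_by_stock_and_type(events):
    #       upsert_pattern(db, sid, etype, summarize(group))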
    patterns_count = 0
    logger.info("Pattern rebuild complete", patterns=patterns_count)
    return {"rebuilt": patterns_count}

app/workers/tasks/sentiment_tasks.py

@@ -0,0 +1,137 @@
"""
Sentiment analysis tasks
"""
import json
from typing import Optional
import structlog
from openai import OpenAI
from app.workers.celery_app import celery_app
from app.core.config import settings
logger = structlog.get_logger()

def get_openai_client() -> Optional[OpenAI]:
    """Get OpenAI client if configured."""
    if settings.OPENAI_API_KEY:
        return OpenAI(api_key=settings.OPENAI_API_KEY)
    return None

@celery_app.task(name="app.workers.tasks.sentiment_tasks.process_unanalyzed_news")
def process_unanalyzed_news():
    """Process all news articles that haven't been sentiment analyzed."""
    logger.info("Starting sentiment analysis batch")
    # Placeholder - would query database for unprocessed articles
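    # A minimal sketch, assuming a hypothetical NewsArticle model whose
    # sentiment_score stays NULL until analyzed (db session plumbing omitted):
    #
    #   pending = db.query(NewsArticle).filter(
    #       NewsArticle.sentiment_score.is_(None)
    #   ).limit(100)
    #   for article in pending:
    #       analyze_sentiment.delay(str(article.id), article.title, article.content)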
    processed_count = 0
    logger.info("Sentiment analysis complete", processed=processed_count)
    return {"processed": processed_count}

@celery_app.task(name="app.workers.tasks.sentiment_tasks.analyze_sentiment")
def analyze_sentiment(article_id: str, title: str, content: str):
    """Analyze sentiment of a single article using OpenAI."""
    logger.info("Analyzing sentiment", article_id=article_id)
    client = get_openai_client()
    if not client:
        logger.warning("OpenAI not configured, using fallback")
        return fallback_sentiment_analysis(title, content)
    try:
        # Prepare text (limit length)
        text = f"Title: {title}\n\nContent: {content[:2000]}"
        response = client.chat.completions.create(
            model=settings.OPENAI_MODEL,
            messages=[
                {
                    "role": "system",
                    "content": """You are a financial sentiment analyzer. Analyze the given news article and respond with a JSON object containing:
- score: a number from -100 (extremely negative/panic) to +100 (extremely positive/euphoric)
- label: one of "negative", "neutral", or "positive"
- confidence: a number from 0 to 1 indicating confidence in the analysis
- stocks: list of stock symbols mentioned (if any)
- summary: one-sentence summary of the sentiment
Focus on:
- Financial impact
- Market reaction implications
- Panic/fear indicators
- Earnings/guidance implications
""",
                },
                {
                    "role": "user",
                    "content": text,
                },
            ],
            response_format={"type": "json_object"},
            temperature=0.3,
        )
        # Parse the JSON payload so callers get a dict, matching the fallback's return type
        result = json.loads(response.choices[0].message.content)
        logger.info("Sentiment analyzed", article_id=article_id, result=result)
        return result
    except Exception as e:
        logger.error("Sentiment analysis failed", article_id=article_id, error=str(e))
        return fallback_sentiment_analysis(title, content)

def fallback_sentiment_analysis(title: str, content: str) -> dict:
    """Simple keyword-based sentiment analysis as fallback."""
    text = f"{title} {content}".lower()
    negative_words = [
        "crash", "plunge", "collapse", "scandal", "fraud", "lawsuit",
        "investigation", "bankruptcy", "layoffs", "miss", "decline",
        "warning", "downgrade", "sell", "bear", "crisis", "fear",
        "panic", "loss", "debt", "default", "recession",
    ]
    positive_words = [
        "surge", "rally", "growth", "profit", "beat", "upgrade",
        "buy", "bull", "record", "breakout", "opportunity",
        "dividend", "expansion", "innovation", "deal", "acquisition",
    ]
    neg_count = sum(1 for word in negative_words if word in text)
    pos_count = sum(1 for word in positive_words if word in text)
    total = neg_count + pos_count
    if total == 0:
        score = 0
        label = "neutral"
    else:
        score = ((pos_count - neg_count) / total) * 100
        if score < -20:
            label = "negative"
        elif score > 20:
            label = "positive"
        else:
            label = "neutral"
    return {
        "score": round(score, 2),
        "label": label,
        "confidence": min(0.3 + (total * 0.1), 0.7),  # Low confidence for fallback
        "stocks": [],
        "summary": "Analyzed using keyword matching (fallback)",
    }

@celery_app.task(name="app.workers.tasks.sentiment_tasks.batch_analyze")
def batch_analyze(article_ids: list):
    """Analyze multiple articles in batch."""
    logger.info("Starting batch analysis", count=len(article_ids))
    results = []
    for article_id in article_ids:
        # Would fetch article from database and analyze
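        # Illustrative sketch (NewsArticle is an assumed model, db session
        # plumbing omitted):
        #   article = db.get(NewsArticle, article_id)
        #   analyze_sentiment.delay(article_id, article.title, article.content)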
        results.append({"article_id": article_id, "status": "pending"})
    return {"analyzed": len(results), "results": results}

app/workers/tasks/stock_tasks.py

@@ -0,0 +1,102 @@
"""
Stock data fetching tasks
"""
import yfinance as yf
import structlog
from app.workers.celery_app import celery_app
from app.core.config import settings
logger = structlog.get_logger()

@celery_app.task(name="app.workers.tasks.stock_tasks.update_stock_prices")
def update_stock_prices():
    """Update prices for all tracked stocks."""
    logger.info("Starting stock price update")
    # Placeholder - would get active stocks from database
    # For now, just demonstrate the concept
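    # A minimal sketch, assuming a hypothetical Stock model with an is_active
    # flag (db session plumbing omitted):
    #
    #   symbols = [s.symbol for s in db.query(Stock).filter_by(is_active=True)]
    #   for symbol in symbols:
    #       fetch_stock_price.delay(symbol)  # fan out one task per symbol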
    updated_count = 0
    logger.info("Stock prices updated", count=updated_count)
    return {"updated": updated_count}

@celery_app.task(name="app.workers.tasks.stock_tasks.fetch_stock_price")
def fetch_stock_price(symbol: str):
    """Fetch current price for a single stock."""
    logger.info("Fetching price", symbol=symbol)
    try:
        ticker = yf.Ticker(symbol)
        info = ticker.info
        return {
            "symbol": symbol,
            "price": info.get("currentPrice") or info.get("regularMarketPrice"),
            "previous_close": info.get("previousClose"),
            "volume": info.get("volume"),
            "market_cap": info.get("marketCap"),
        }
    except Exception as e:
        logger.error("Failed to fetch price", symbol=symbol, error=str(e))
        return None

@celery_app.task(name="app.workers.tasks.stock_tasks.fetch_historical_data")
def fetch_historical_data(symbol: str, period: str = "10y"):
    """Fetch historical price data for a stock."""
    logger.info("Fetching historical data", symbol=symbol, period=period)
    try:
        ticker = yf.Ticker(symbol)
        hist = ticker.history(period=period)
        # Convert to list of dicts for storage
        records = []
        for idx, row in hist.iterrows():
            records.append({
                "time": idx.isoformat(),
                "open": row["Open"],
                "high": row["High"],
                "low": row["Low"],
                "close": row["Close"],
                "volume": row["Volume"],
            })
        logger.info(
            "Historical data fetched",
            symbol=symbol,
            records=len(records),
        )
        return {"symbol": symbol, "records": len(records)}
    except Exception as e:
        logger.error("Failed to fetch historical data", symbol=symbol, error=str(e))
        return None

@celery_app.task(name="app.workers.tasks.stock_tasks.update_stock_info")
def update_stock_info(symbol: str):
    """Update stock metadata (sector, industry, market cap, etc.)."""
    logger.info("Updating stock info", symbol=symbol)
    try:
        ticker = yf.Ticker(symbol)
        info = ticker.info
        return {
            "symbol": symbol,
            "name": info.get("longName") or info.get("shortName"),
            "sector": info.get("sector"),
            "industry": info.get("industry"),
            "market_cap": info.get("marketCap"),
            "exchange": info.get("exchange"),
            "country": info.get("country"),
        }
    except Exception as e:
        logger.error("Failed to update stock info", symbol=symbol, error=str(e))
        return None