Initial project structure: MarketScanner - Fear-to-Fortune Trading Intelligence
Features:
- FastAPI backend with stocks, news, signals, watchlist, analytics endpoints
- React frontend with TailwindCSS dark mode trading dashboard
- Celery workers for news fetching, sentiment analysis, pattern detection
- TimescaleDB schema for time-series stock data
- Docker Compose setup for all services
- OpenAI integration for sentiment analysis
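For orientation, a minimal sketch of how the core modules added below could be wired into the FastAPI backend described above. This is a hypothetical backend/app/main.py, not part of this commit; only the settings object comes from the files in this diff.

"""Hypothetical entry point sketch (not included in this commit)."""

from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware

from app.core.config import settings  # added by this commit

app = FastAPI(title="MarketScanner", version=settings.VERSION, debug=settings.DEBUG)

# Allow the React dev servers listed in CORS_ORIGINS to call the API
app.add_middleware(
    CORSMiddleware,
    allow_origins=settings.CORS_ORIGINS,
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)


@app.get("/health")
async def health() -> dict:
    """Simple liveness probe."""
    return {"status": "ok", "version": settings.VERSION}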
backend/app/core/__init__.py (new file)
@@ -0,0 +1,6 @@
"""Core module exports."""

from app.core.config import settings
from app.core.database import get_db, Base, AsyncSessionLocal

__all__ = ["settings", "get_db", "Base", "AsyncSessionLocal"]
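These re-exports let downstream code pull the common objects from the package root, e.g. from app.core import settings, get_db, instead of importing each submodule directly.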
backend/app/core/config.py (new file)
@@ -0,0 +1,97 @@
"""
Application Configuration
"""

from typing import List
from pydantic_settings import BaseSettings
from functools import lru_cache


class Settings(BaseSettings):
    """Application settings loaded from environment variables."""

    # Application
    VERSION: str = "0.1.0"
    DEBUG: bool = False
    SECRET_KEY: str = "change-me-in-production"

    # Server
    BACKEND_HOST: str = "0.0.0.0"
    BACKEND_PORT: int = 8000
    CORS_ORIGINS: List[str] = ["http://localhost:3000", "http://localhost:5173"]

    # Database
    POSTGRES_HOST: str = "localhost"
    POSTGRES_PORT: int = 5432
    POSTGRES_DB: str = "marketscanner"
    POSTGRES_USER: str = "marketscanner"
    POSTGRES_PASSWORD: str = "changeme"

    @property
    def DATABASE_URL(self) -> str:
        return f"postgresql+asyncpg://{self.POSTGRES_USER}:{self.POSTGRES_PASSWORD}@{self.POSTGRES_HOST}:{self.POSTGRES_PORT}/{self.POSTGRES_DB}"

    @property
    def DATABASE_URL_SYNC(self) -> str:
        return f"postgresql://{self.POSTGRES_USER}:{self.POSTGRES_PASSWORD}@{self.POSTGRES_HOST}:{self.POSTGRES_PORT}/{self.POSTGRES_DB}"

    # Redis
    REDIS_HOST: str = "localhost"
    REDIS_PORT: int = 6379
    REDIS_PASSWORD: str = ""

    @property
    def REDIS_URL(self) -> str:
        if self.REDIS_PASSWORD:
            return f"redis://:{self.REDIS_PASSWORD}@{self.REDIS_HOST}:{self.REDIS_PORT}/0"
        return f"redis://{self.REDIS_HOST}:{self.REDIS_PORT}/0"

    # RabbitMQ
    RABBITMQ_HOST: str = "localhost"
    RABBITMQ_PORT: int = 5672
    RABBITMQ_USER: str = "guest"
    RABBITMQ_PASSWORD: str = "guest"

    @property
    def RABBITMQ_URL(self) -> str:
        return f"amqp://{self.RABBITMQ_USER}:{self.RABBITMQ_PASSWORD}@{self.RABBITMQ_HOST}:{self.RABBITMQ_PORT}//"

    # API Keys - Stock Data
    ALPHA_VANTAGE_API_KEY: str = ""
    POLYGON_API_KEY: str = ""
    YAHOO_FINANCE_ENABLED: bool = True
    FINNHUB_API_KEY: str = ""

    # API Keys - News
    NEWS_API_KEY: str = ""

    # API Keys - AI
    OPENAI_API_KEY: str = ""
    OPENAI_MODEL: str = "gpt-4o-mini"
    USE_LOCAL_LLM: bool = False
    OLLAMA_HOST: str = "http://localhost:11434"
    OLLAMA_MODEL: str = "llama3.2"

    # Scanning Settings
    NEWS_SCAN_INTERVAL: int = 300  # seconds
    STOCK_PRICE_INTERVAL: int = 60  # seconds
    MAX_TRACKED_STOCKS: int = 500
    PANIC_THRESHOLD: float = -50.0

    # Alerts
    TELEGRAM_BOT_TOKEN: str = ""
    TELEGRAM_CHAT_ID: str = ""
    DISCORD_WEBHOOK_URL: str = ""

    class Config:
        env_file = ".env"
        case_sensitive = True


@lru_cache()
def get_settings() -> Settings:
    """Get cached settings instance."""
    return Settings()


settings = get_settings()
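As a quick check of how the composed URLs behave, here is a short usage sketch. It is a hypothetical snippet, not part of this commit; only get_settings, settings, and the property names come from config.py above.

from app.core.config import get_settings

settings = get_settings()  # cached by lru_cache, so repeated calls return the same instance

# The URL properties are assembled from the POSTGRES_*/REDIS_*/RABBITMQ_* fields;
# with the defaults above this prints:
#   postgresql+asyncpg://marketscanner:changeme@localhost:5432/marketscanner
print(settings.DATABASE_URL)
print(settings.REDIS_URL)      # redis://localhost:6379/0 (no password configured)
print(settings.RABBITMQ_URL)   # amqp://guest:guest@localhost:5672//

Values are overridden by environment variables or a .env file, with case-sensitive names matching the fields above.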
backend/app/core/database.py (new file)
@@ -0,0 +1,77 @@
"""
Database Configuration and Session Management
"""

from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine, async_sessionmaker
from sqlalchemy.orm import declarative_base
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker

from app.core.config import settings

# Async engine for FastAPI
async_engine = create_async_engine(
    settings.DATABASE_URL,
    echo=settings.DEBUG,
    pool_pre_ping=True,
    pool_size=10,
    max_overflow=20,
)

# Async session factory
AsyncSessionLocal = async_sessionmaker(
    async_engine,
    class_=AsyncSession,
    expire_on_commit=False,
    autoflush=False,
)

# Sync engine for Celery workers
sync_engine = create_engine(
    settings.DATABASE_URL_SYNC,
    echo=settings.DEBUG,
    pool_pre_ping=True,
    pool_size=5,
    max_overflow=10,
)

# Sync session factory
SyncSessionLocal = sessionmaker(
    sync_engine,
    autoflush=False,
)

# Base class for models
Base = declarative_base()


async def init_db():
    """Initialize database (create tables if needed)."""
    # Tables are created by init.sql, but we can add migrations here
    pass


async def get_db() -> AsyncSession:
    """Dependency for getting async database session."""
    async with AsyncSessionLocal() as session:
        try:
            yield session
            await session.commit()
        except Exception:
            await session.rollback()
            raise
        finally:
            await session.close()


def get_sync_db():
    """Get sync database session for Celery workers."""
    db = SyncSessionLocal()
    try:
        yield db
        db.commit()
    except Exception:
        db.rollback()
        raise
    finally:
        db.close()
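A sketch of how the two session helpers might be consumed, one from a FastAPI route and one from a Celery-style worker. This is a hypothetical snippet; the route path, query, and function names are illustrative and not part of this commit.

from contextlib import contextmanager

from fastapi import APIRouter, Depends
from sqlalchemy import text
from sqlalchemy.ext.asyncio import AsyncSession

from app.core.database import get_db, get_sync_db

router = APIRouter()


@router.get("/db-check")
async def db_check(db: AsyncSession = Depends(get_db)) -> dict:
    """Async path: get_db yields an AsyncSession and commits or rolls back around the request."""
    result = await db.execute(text("SELECT 1"))
    return {"db_ok": result.scalar_one() == 1}


def run_sync_check() -> int:
    """Sync path (e.g. inside a Celery task): get_sync_db is a generator, so wrap it as a context manager."""
    with contextmanager(get_sync_db)() as db:
        return db.execute(text("SELECT 1")).scalar_one()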