feat: V9 advanced parameter sweep with MA gap filter (810K configs)

Parameter space expansion:
- Original 15 params: 101K configurations
- NEW: MA gap filter (3 dimensions) = 18× expansion
- Total: ~810,000 configurations across 4 time profiles
- Chunk size: 1,000 configs/chunk = ~810 chunks

MA Gap Filter parameters:
- use_ma_gap: True/False (2 values)
- ma_gap_min_long: -5.0%, 0%, +5.0% (3 values)
- ma_gap_min_short: -5.0%, 0%, +5.0% (3 values)

Implementation:
- money_line_v9.py: Full v9 indicator with MA gap logic
- v9_advanced_worker.py: Chunk processor (1,000 configs)
- v9_advanced_coordinator.py: Work distributor (2 EPYC workers)
- run_v9_advanced_sweep.sh: Startup script (generates + launches)

Infrastructure:
- Uses existing EPYC cluster (64 cores total)
- Worker1: bd-epyc-02 (32 threads)
- Worker2: bd-host01 (32 threads via SSH hop)
- Expected runtime: 70-80 hours
- Database: SQLite (chunk tracking + results)

Goal: Find optimal MA gap thresholds for filtering false breakouts
during MA whipsaw zones while preserving trend entries.
This commit is contained in:
mindesbunister
2025-12-01 18:11:47 +01:00
parent 2993bc8895
commit 7e1fe1cc30
9 changed files with 2541 additions and 0 deletions

219
cluster/run_v9_advanced_sweep.sh Executable file
View File

@@ -0,0 +1,219 @@
#!/bin/bash
# V9 Advanced Parameter Sweep - 810K configs with MA gap filter exploration
# Uses existing cluster infrastructure with all dependencies already installed
#
# Pipeline: STEP 1 generates config chunk files, STEP 2 registers them in a
# SQLite tracking database, STEP 3 launches the distributed coordinator.
# NOTE(review): all paths below are relative — run this from the cluster
# working directory that contains data/, .venv/ and the coordinator script.

# Fail fast: abort on the first failing command so a broken setup step
# cannot silently launch a multi-day sweep on bad inputs.
set -e
echo "=========================================="
echo "V9 ADVANCED PARAMETER SWEEP"
echo "=========================================="
echo ""
echo "Configuration:"
echo " • Total configs: ~810,000 (18 parameters)"
echo " • New parameters: MA gap filter (3 dimensions)"
echo " • Chunk size: 1,000 configs/chunk = ~810 chunks"
echo " • Workers: 2 EPYCs"
echo " • Expected runtime: 70-80 hours"
echo ""
# Check if the 5m OHLCV data file exists before doing any work.
DATA_FILE="data/solusdt_5m.csv"
if [ ! -f "$DATA_FILE" ]; then
echo "❌ Error: Data file not found: $DATA_FILE"
echo "Please ensure OHLCV data is available"
exit 1
fi
# Activate the project virtual environment (assumed to exist at .venv/).
echo "Activating Python environment..."
source .venv/bin/activate
# STEP 1: generate the full parameter grid and write it out as JSON chunks.
echo ""
echo "=========================================="
echo "STEP 1: Generate Configurations"
echo "=========================================="
# Quoted heredoc delimiter ('PYTHON_CODE') prevents shell expansion inside
# the embedded Python source.
python3 << 'PYTHON_CODE'
import itertools
import json
from pathlib import Path

# 18-dimensional parameter space.
# Per-profile grids: ATR lookback length and band multiplier (the "daily"
# profile carries one extra value in each grid).
ATR_RANGES = {
    "minutes": [10, 12, 14],
    "hours": [8, 10, 12],
    "daily": [8, 10, 12, 14],
    "weekly": [5, 7, 9],
}
MULT_RANGES = {
    "minutes": [3.5, 3.8, 4.0],
    "hours": [3.2, 3.5, 3.8],
    "daily": [3.0, 3.2, 3.5, 3.8],
    "weekly": [2.8, 3.0, 3.2],
}
# Shared grids applied to every profile.
RSI_LONG_MIN = [30, 35, 40]
RSI_LONG_MAX = [65, 70, 75]
RSI_SHORT_MIN = [25, 30, 35]
RSI_SHORT_MAX = [65, 70, 75]
VOL_MAX = [3.0, 3.5, 4.0]
ENTRY_BUFFER = [0.15, 0.20, 0.25]
ADX_LENGTH = [14, 16, 18]
# NEW: MA gap filter parameters (2 x 3 x 3 = 18x expansion; the original
# comment said "8x", which contradicted the actual grid sizes).
USE_MA_GAP = [True, False]
MA_GAP_MIN_LONG = [-5.0, 0.0, 5.0]
MA_GAP_MIN_SHORT = [-5.0, 0.0, 5.0]

print("Generating parameter configurations...")
configs = []
for profile in ["minutes", "hours", "daily", "weekly"]:
    # itertools.product (previously imported but unused) replaces the
    # original 12-deep nested loop. Axis order matches the old nesting, and
    # product varies the rightmost axis fastest, so the emitted config
    # sequence is identical to the original.
    axes = (
        ATR_RANGES[profile],
        MULT_RANGES[profile],
        RSI_LONG_MIN,
        RSI_LONG_MAX,
        RSI_SHORT_MIN,
        RSI_SHORT_MAX,
        VOL_MAX,
        ENTRY_BUFFER,
        ADX_LENGTH,
        USE_MA_GAP,
        MA_GAP_MIN_LONG,
        MA_GAP_MIN_SHORT,
    )
    for (atr, mult, rsi_long_min, rsi_long_max,
         rsi_short_min, rsi_short_max, vol_max, entry_buffer,
         adx_len, use_ma_gap, gap_min_long, gap_min_short) in itertools.product(*axes):
        # Degenerate RSI bands (max <= min) can never match; skip them.
        if rsi_long_max <= rsi_long_min or rsi_short_max <= rsi_short_min:
            continue
        configs.append({
            "profile": profile,
            f"atr_{profile}": atr,
            f"mult_{profile}": mult,
            "rsi_long_min": rsi_long_min,
            "rsi_long_max": rsi_long_max,
            "rsi_short_min": rsi_short_min,
            "rsi_short_max": rsi_short_max,
            "vol_max": vol_max,
            "entry_buffer": entry_buffer,
            "adx_length": adx_len,
            # NEW parameters
            "use_ma_gap": use_ma_gap,
            "ma_gap_min_long": gap_min_long,
            "ma_gap_min_short": gap_min_short,
        })
print(f"✓ Generated {len(configs):,} configurations")

# Split into fixed-size chunk files; each worker processes one file at a
# time. The final chunk may hold fewer than chunk_size configs.
chunk_dir = Path("chunks")
chunk_dir.mkdir(exist_ok=True)
chunk_size = 1000
chunks = [configs[i:i + chunk_size] for i in range(0, len(configs), chunk_size)]
print(f"Creating {len(chunks)} chunk files...")
for i, chunk in enumerate(chunks):
    # Zero-padded index keeps lexicographic and numeric ordering aligned.
    chunk_file = chunk_dir / f"v9_advanced_chunk_{i:04d}.json"
    with open(chunk_file, 'w') as f:
        json.dump(chunk, f)
print(f"✓ Created {len(chunks)} chunk files in chunks/")
print(f" Total configs: {len(configs):,}")
print(f" Configs per chunk: {chunk_size}")
print(f" Enhancement: Added MA gap filter (2×3×3 = 18× multiplier)")
PYTHON_CODE
# STEP 2: (re)create the chunk/strategy tracking tables in exploration.db
# and register every chunk file produced by STEP 1.
echo ""
echo "=========================================="
echo "STEP 2: Setup Database"
echo "=========================================="
python3 << 'PYTHON_CODE'
import json
import sqlite3
from pathlib import Path

# SQLite bookkeeping for the sweep: one row per chunk (unit of work handed
# to a worker) plus one row per evaluated strategy result.
db_path = Path("exploration.db")
conn = sqlite3.connect(str(db_path))
cursor = conn.cursor()

# Drop existing v9_advanced tables if they exist so a re-run starts clean.
cursor.execute("DROP TABLE IF EXISTS v9_advanced_strategies")
cursor.execute("DROP TABLE IF EXISTS v9_advanced_chunks")

# Chunk tracking table. Timestamps are unix epoch seconds; status starts
# at 'pending' and is advanced by the coordinator/workers.
cursor.execute("""
CREATE TABLE v9_advanced_chunks (
    id TEXT PRIMARY KEY,
    start_combo INTEGER NOT NULL,
    end_combo INTEGER NOT NULL,
    total_combos INTEGER NOT NULL,
    status TEXT NOT NULL,
    assigned_worker TEXT,
    started_at INTEGER,
    completed_at INTEGER,
    created_at INTEGER DEFAULT (strftime('%s', 'now'))
)
""")

# Per-config backtest results, keyed back to the producing chunk.
cursor.execute("""
CREATE TABLE v9_advanced_strategies (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    chunk_id TEXT NOT NULL,
    params TEXT NOT NULL,
    pnl REAL NOT NULL,
    win_rate REAL NOT NULL,
    profit_factor REAL NOT NULL,
    max_drawdown REAL NOT NULL,
    total_trades INTEGER NOT NULL,
    created_at INTEGER DEFAULT (strftime('%s', 'now')),
    FOREIGN KEY (chunk_id) REFERENCES v9_advanced_chunks(id)
)
""")

# Register all chunks.
# FIX: the original inserted hardcoded (start_combo=0, end_combo=1000,
# total_combos=1000) for every chunk — wrong offsets for all but the first
# chunk and a wrong size for the final, usually-short chunk. Read each
# chunk file to record its real size and a running combo offset instead.
chunk_files = sorted(Path("chunks").glob("v9_advanced_chunk_*.json"))
offset = 0
for chunk_file in chunk_files:
    chunk_id = chunk_file.stem
    n_configs = len(json.loads(chunk_file.read_text()))
    cursor.execute("""
        INSERT INTO v9_advanced_chunks
        (id, start_combo, end_combo, total_combos, status)
        VALUES (?, ?, ?, ?, 'pending')
    """, (chunk_id, offset, offset + n_configs, n_configs))
    offset += n_configs

conn.commit()
print(f"✓ Database ready: exploration.db")
print(f" Registered {len(chunk_files)} chunks")
conn.close()
PYTHON_CODE
# STEP 3: start the long-running coordinator detached from this shell.
echo ""
echo "=========================================="
echo "STEP 3: Launch Distributed Coordinator"
echo "=========================================="
echo ""
echo "Starting coordinator in background..."
echo "Monitor progress at: http://localhost:3001/cluster"
echo ""
# Launch distributed coordinator.
# nohup + '&' detaches it so it survives this shell exiting; both stdout
# and stderr are redirected into the log file for later inspection.
nohup python3 distributed_coordinator.py \
--indicator-type v9_advanced \
--data-file "$DATA_FILE" \
--chunk-dir chunks \
> coordinator_v9_advanced.log 2>&1 &
# $! holds the PID of the just-backgrounded job; report it for monitoring.
COORD_PID=$!
echo "✓ Coordinator launched (PID: $COORD_PID)"
echo ""
echo "Log file: coordinator_v9_advanced.log"
echo "Monitor: tail -f coordinator_v9_advanced.log"
echo ""
echo "Sweep will run for ~70-80 hours (810K configs, 2 workers)"
echo "Check status: http://localhost:3001/cluster"