- ProxyJump (-J) doesn't work from Docker container
- Changed to nested SSH: hop -> target (see the sketch below)
- Proper command escaping for nested SSH
- Worker2 (srv-bd-host01) only accessible via worker1 (pve-nu-monitor01)
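A minimal sketch of the nested-SSH pattern described above (hostnames are the ones named in the note; the inner command is a placeholder):

    ssh pve-nu-monitor01 "ssh srv-bd-host01 'uptime'"

Quoting nests one level per hop: the local shell strips the outer double quotes, the hop's shell strips the inner single quotes, and the target runs the bare command.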
#!/bin/bash
# V9 Advanced Parameter Sweep - ~1.7M configs with MA gap filter exploration
# Uses existing cluster infrastructure with all dependencies already installed

set -e

echo "=========================================="
echo "V9 ADVANCED PARAMETER SWEEP"
echo "=========================================="
echo ""
echo "Configuration:"
echo " • Total configs: ~1.7M (18 parameters)"
echo " • New parameters: MA gap filter (3 dimensions)"
echo " • Chunk size: 1,000 configs/chunk = ~1,700 chunks"
echo " • Workers: 2 EPYCs"
echo " • Expected runtime: 140-160 hours"
echo ""

# Check if data file exists
DATA_FILE="data/solusdt_5m.csv"
if [ ! -f "$DATA_FILE" ]; then
    echo "❌ Error: Data file not found: $DATA_FILE"
    echo "Please ensure OHLCV data is available"
    exit 1
fi
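
# Quick size report (read-only; an illustrative addition - counts lines, so
# the figure includes any CSV header row):
echo "Using $DATA_FILE ($(wc -l < "$DATA_FILE") lines)"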

# Generate parameter configurations (no venv needed for config generation)
echo ""
echo "=========================================="
echo "STEP 1: Generate Configurations"
echo "=========================================="

python3 << 'PYTHON_CODE'
import itertools
import json
from pathlib import Path

# 18-dimensional parameter space
ATR_RANGES = {
    "minutes": [10, 12, 14],
    "hours": [8, 10, 12],
    "daily": [8, 10, 12, 14],
    "weekly": [5, 7, 9],
}

MULT_RANGES = {
    "minutes": [3.5, 3.8, 4.0],
    "hours": [3.2, 3.5, 3.8],
    "daily": [3.0, 3.2, 3.5, 3.8],
    "weekly": [2.8, 3.0, 3.2],
}

RSI_LONG_MIN = [30, 35, 40]
RSI_LONG_MAX = [65, 70, 75]
RSI_SHORT_MIN = [25, 30, 35]
RSI_SHORT_MAX = [65, 70, 75]
VOL_MAX = [3.0, 3.5, 4.0]
ENTRY_BUFFER = [0.15, 0.20, 0.25]
ADX_LENGTH = [14, 16, 18]

# NEW: MA gap filter parameters (2×3×3 = 18× expansion)
USE_MA_GAP = [True, False]
MA_GAP_MIN_LONG = [-5.0, 0.0, 5.0]
MA_GAP_MIN_SHORT = [-5.0, 0.0, 5.0]
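
# Expected grid size, worked out from the lists above:
#   (atr × mult) pairs: 3·3 + 3·3 + 4·4 + 3·3 = 43 across the four profiles
#   RSI bands: 9 long pairs × 9 short pairs (the max>min filter below never
#   rejects anything here, since every max exceeds every min)
#   remaining: 3 vol × 3 buffer × 3 ADX × (2·3·3 = 18) MA-gap combos
#   total: 43 × 81 × 27 × 18 = 1,692,738 ≈ 1.7M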

print("Generating parameter configurations...")
configs = []

for profile in ["minutes", "hours", "daily", "weekly"]:
    # itertools.product flattens the 12 nested loops; the rightmost iterable
    # varies fastest, so the output order matches the nested version.
    for (atr, mult,
         rsi_long_min, rsi_long_max,
         rsi_short_min, rsi_short_max,
         vol_max, entry_buffer, adx_len,
         use_ma_gap, gap_min_long, gap_min_short) in itertools.product(
            ATR_RANGES[profile], MULT_RANGES[profile],
            RSI_LONG_MIN, RSI_LONG_MAX,
            RSI_SHORT_MIN, RSI_SHORT_MAX,
            VOL_MAX, ENTRY_BUFFER, ADX_LENGTH,
            USE_MA_GAP, MA_GAP_MIN_LONG, MA_GAP_MIN_SHORT):
        # Keep only well-ordered RSI bands
        if rsi_long_max <= rsi_long_min or rsi_short_max <= rsi_short_min:
            continue
        configs.append({
            "profile": profile,
            f"atr_{profile}": atr,
            f"mult_{profile}": mult,
            "rsi_long_min": rsi_long_min,
            "rsi_long_max": rsi_long_max,
            "rsi_short_min": rsi_short_min,
            "rsi_short_max": rsi_short_max,
            "vol_max": vol_max,
            "entry_buffer": entry_buffer,
            "adx_length": adx_len,
            # NEW: MA gap filter parameters
            "use_ma_gap": use_ma_gap,
            "ma_gap_min_long": gap_min_long,
            "ma_gap_min_short": gap_min_short,
        })

print(f"✓ Generated {len(configs):,} configurations")

# Create chunks (1,000 configs per chunk)
chunk_dir = Path("chunks")
chunk_dir.mkdir(exist_ok=True)

chunk_size = 1000
chunks = [configs[i:i+chunk_size] for i in range(0, len(configs), chunk_size)]
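# Expected: 1,692,738 configs → 1,693 chunk files (1,692 full chunks of
# 1,000 plus a final chunk of 738).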

print(f"Creating {len(chunks)} chunk files...")
for i, chunk in enumerate(chunks):
    chunk_file = chunk_dir / f"v9_advanced_chunk_{i:04d}.json"
    with open(chunk_file, 'w') as f:
        json.dump(chunk, f)

print(f"✓ Created {len(chunks)} chunk files in chunks/")
print(f" Total configs: {len(configs):,}")
print(f" Configs per chunk: {chunk_size}")
print(" Enhancement: Added MA gap filter (2×3×3 = 18× multiplier)")
PYTHON_CODE
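
# Sanity check (read-only; an illustrative addition, not part of the original
# flow): confirm the chunk files actually landed on disk.
echo "Chunk files on disk: $(find chunks -name 'v9_advanced_chunk_*.json' | wc -l)"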

# Setup exploration database
echo ""
echo "=========================================="
echo "STEP 2: Setup Database"
echo "=========================================="

python3 << 'PYTHON_CODE'
import json
import sqlite3
from pathlib import Path

db_path = Path("exploration.db")
conn = sqlite3.connect(str(db_path))
cursor = conn.cursor()

# Drop existing v9_advanced tables if they exist
cursor.execute("DROP TABLE IF EXISTS v9_advanced_strategies")
cursor.execute("DROP TABLE IF EXISTS v9_advanced_chunks")

# Create chunks table
cursor.execute("""
    CREATE TABLE v9_advanced_chunks (
        id TEXT PRIMARY KEY,
        start_combo INTEGER NOT NULL,
        end_combo INTEGER NOT NULL,
        total_combos INTEGER NOT NULL,
        status TEXT NOT NULL,
        assigned_worker TEXT,
        started_at INTEGER,
        completed_at INTEGER,
        created_at INTEGER DEFAULT (strftime('%s', 'now'))
    )
""")

# Create strategies table
cursor.execute("""
    CREATE TABLE v9_advanced_strategies (
        id INTEGER PRIMARY KEY AUTOINCREMENT,
        chunk_id TEXT NOT NULL,
        params TEXT NOT NULL,
        pnl REAL NOT NULL,
        win_rate REAL NOT NULL,
        profit_factor REAL NOT NULL,
        max_drawdown REAL NOT NULL,
        total_trades INTEGER NOT NULL,
        created_at INTEGER DEFAULT (strftime('%s', 'now')),
        FOREIGN KEY (chunk_id) REFERENCES v9_advanced_chunks(id)
    )
""")

# Register all chunks
chunk_files = sorted(Path("chunks").glob("v9_advanced_chunk_*.json"))
for chunk_file in chunk_files:
    chunk_id = chunk_file.stem
    # Record each chunk's real config count; the last chunk holds fewer than
    # 1,000 when the total doesn't divide evenly.
    with open(chunk_file) as f:
        n_configs = len(json.load(f))
    cursor.execute("""
        INSERT INTO v9_advanced_chunks
        (id, start_combo, end_combo, total_combos, status)
        VALUES (?, 0, ?, ?, 'pending')
    """, (chunk_id, n_configs, n_configs))

conn.commit()
print("✓ Database ready: exploration.db")
print(f" Registered {len(chunk_files)} chunks")
conn.close()
PYTHON_CODE
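
# To watch chunk progress later, query the DB directly (read-only; assumes
# the sqlite3 CLI is installed):
#   sqlite3 exploration.db \
#     "SELECT status, COUNT(*) FROM v9_advanced_chunks GROUP BY status"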

echo ""
echo "=========================================="
echo "STEP 3: Launch Distributed Coordinator"
echo "=========================================="
echo ""
echo "Starting coordinator in background..."
echo "Monitor progress at: http://localhost:3001/cluster"
echo ""

# Launch v9 advanced coordinator (uses v9_advanced_chunks table)
nohup python3 v9_advanced_coordinator.py \
    > coordinator_v9_advanced.log 2>&1 &

COORD_PID=$!
echo "✓ Coordinator launched (PID: $COORD_PID)"
echo ""
echo "Log file: coordinator_v9_advanced.log"
echo "Monitor: tail -f coordinator_v9_advanced.log"
echo ""
echo "Sweep will run for ~140-160 hours (1.7M configs, 2 workers)"
echo "Check status: http://localhost:3001/cluster"