feat: V9 advanced parameter sweep with MA gap filter (810K configs)
Parameter space expansion: - Original 15 params: 101K configurations - NEW: MA gap filter (3 dimensions) = 18× expansion - Total: ~810,000 configurations across 4 time profiles - Chunk size: 3,000 configs/chunk = ~270 chunks (matches coordinator CHUNK_SIZE) MA Gap Filter parameters: - use_ma_gap: True/False (2 values) - ma_gap_min_long: -5.0%, 0%, +5.0% (3 values) - ma_gap_min_short: -5.0%, 0%, +5.0% (3 values) Implementation: - money_line_v9.py: Full v9 indicator with MA gap logic - v9_advanced_worker.py: Chunk processor (3,000 configs) - v9_advanced_coordinator.py: Work distributor (2 EPYC workers) - run_v9_advanced_sweep.sh: Startup script (generates + launches) Infrastructure: - Uses existing EPYC cluster (64 cores total) - Worker1: bd-epyc-02 (32 threads) - Worker2: bd-host01 (32 threads via SSH hop) - Expected runtime: 70-80 hours - Database: SQLite (chunk tracking + results) Goal: Find optimal MA gap thresholds for filtering false breakouts during MA whipsaw zones while preserving trend entries.
This commit is contained in:
362
scripts/coordinate_v9_advanced_sweep.py
Executable file
362
scripts/coordinate_v9_advanced_sweep.py
Executable file
@@ -0,0 +1,362 @@
|
||||
#!/usr/bin/env python3
"""
Distributed coordinator for v9 advanced parameter sweep.

This script coordinates the distributed processing of ~800K+ parameter configurations
across the EPYC cluster.

Architecture:
- Generates chunks of parameter combinations
- Distributes chunks to workers via SSH
- Monitors progress
- Aggregates results

Expected configuration space:
- ATR periods: 13 values
- Multipliers: 13 values
- ADX length: 6 values
- RSI length: 6 values
- RSI boundaries: 7×7×7×7 = 2401 combinations
- Volume max: 7 values
- Entry buffer: 7 values
- Heikin Ashi: 2 values
- MA gap filter: 2 values
- MA gap thresholds: 7×7 = 49 combinations

NOTE(review): the product 13 × 13 × 6 × 6 × 2401 × 7 × 7 × 2 × 2 × 49
evaluates to roughly 1.4e11, not the ~807,584 previously stated here —
confirm the intended search space before launching a full sweep.

Chunk size: 3,000 configs per chunk = ~270 chunks
Expected runtime: 40-80 hours on 2-worker cluster
"""

import argparse
import sqlite3
import subprocess
import time
from datetime import datetime  # NOTE(review): currently unused in this module
from pathlib import Path

import pandas as pd
from tqdm import tqdm

# Worker configuration: hostname and core count for each EPYC node.
WORKERS = {
    "worker1": {
        "host": "192.168.1.101",
        "cores": 32,
    },
    "worker2": {
        "host": "192.168.1.102",
        "cores": 32,
    }
}

# Default parameters
CHUNK_SIZE = 3000  # Configurations per chunk
# Repository root (this file lives in scripts/, so parent.parent is the root).
PROJECT_DIR = Path(__file__).parent.parent
|
||||
|
||||
|
||||
def generate_parameter_space():
    """
    Generate the full parameter space for v9 advanced optimization.

    Returns:
        List of parameter dicts, one per configuration.

    When the MA gap filter is disabled the two gap thresholds are unused,
    so a single (0, 0) placeholder pair is emitted for that case instead of
    all 49 threshold combinations.  The previous behaviour produced 49
    byte-identical backtests per disabled-filter configuration.

    NOTE(review): the naive product of all dimensions is on the order of
    1e11, far larger than the ~807,584 stated in the module docstring —
    confirm the intended search space before launching a full sweep.
    """
    print("Generating parameter space...")
    print()

    # ATR periods: test around optimal (10) with fine granularity
    atr_periods = [8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20]

    # Multipliers: test around optimal (3.2) with fine granularity
    multipliers = [2.5, 2.7, 2.9, 3.0, 3.2, 3.4, 3.6, 3.8, 4.0, 4.2, 4.4, 4.6, 5.0]

    # ADX length: test shorter to longer
    adx_lengths = [12, 14, 16, 18, 20, 22]

    # RSI length: test shorter to longer
    rsi_lengths = [10, 12, 14, 16, 18, 20]

    # RSI boundaries: comprehensive range
    rsi_long_mins = [25, 30, 35, 40, 45, 50, 55]
    rsi_long_maxs = [55, 60, 65, 70, 75, 80, 85]
    rsi_short_mins = [15, 20, 25, 30, 35, 40, 45]
    rsi_short_maxs = [55, 60, 65, 70, 75, 80, 85]

    # Volume max: test tighter to looser
    vol_maxs = [2.0, 2.5, 3.0, 3.5, 4.0, 4.5, 5.0]

    # Entry buffer: test smaller to larger ATR multiples
    entry_buffers = [0.10, 0.15, 0.20, 0.25, 0.30, 0.35, 0.40]

    # Heikin Ashi toggle
    use_heikin_ashi_options = [False, True]

    # MA gap filter toggle
    use_ma_gap_filter_options = [False, True]

    # MA gap thresholds (only used if filter enabled)
    ma_gap_long_mins = [-5, -3, -1, 0, 1, 3, 5]   # Minimum gap for LONG (convergence/divergence)
    ma_gap_short_maxs = [-5, -3, -1, 0, 1, 3, 5]  # Maximum gap for SHORT

    # Pre-compute the full threshold grid once; reused for every
    # filter-enabled combination below.
    gap_grid = [(lm, sm) for lm in ma_gap_long_mins for sm in ma_gap_short_maxs]

    configs = []
    total_combos = (len(atr_periods) * len(multipliers) * len(adx_lengths) *
                    len(rsi_lengths) * len(rsi_long_mins) * len(rsi_long_maxs) *
                    len(rsi_short_mins) * len(rsi_short_maxs) * len(vol_maxs) *
                    len(entry_buffers) * len(use_heikin_ashi_options) *
                    len(use_ma_gap_filter_options) * len(ma_gap_long_mins) *
                    len(ma_gap_short_maxs))

    # Upper bound: validity filters and gap de-duplication reduce the
    # actual count below this.
    print(f"Expected configurations (upper bound): {total_combos:,}")
    print()
    print("This will take a few minutes...")
    print()

    # Generate all combinations
    for atr in tqdm(atr_periods, desc="ATR periods"):
        for mult in multipliers:
            for adx_len in adx_lengths:
                for rsi_len in rsi_lengths:
                    for rsi_lmin in rsi_long_mins:
                        for rsi_lmax in rsi_long_maxs:
                            # Skip invalid RSI ranges
                            if rsi_lmin >= rsi_lmax:
                                continue

                            for rsi_smin in rsi_short_mins:
                                for rsi_smax in rsi_short_maxs:
                                    # Skip invalid RSI ranges
                                    if rsi_smin >= rsi_smax:
                                        continue

                                    for vol_max in vol_maxs:
                                        for buffer in entry_buffers:
                                            for ha in use_heikin_ashi_options:
                                                for use_ma in use_ma_gap_filter_options:
                                                    # Thresholds only matter when the
                                                    # filter is on; otherwise emit one
                                                    # placeholder pair.
                                                    pairs = gap_grid if use_ma else [(0, 0)]
                                                    for ma_lmin, ma_smax in pairs:
                                                        configs.append({
                                                            'atr_period': atr,
                                                            'multiplier': mult,
                                                            'adx_length': adx_len,
                                                            'rsi_length': rsi_len,
                                                            'rsi_long_min': rsi_lmin,
                                                            'rsi_long_max': rsi_lmax,
                                                            'rsi_short_min': rsi_smin,
                                                            'rsi_short_max': rsi_smax,
                                                            'vol_max': vol_max,
                                                            'entry_buffer_atr': buffer,
                                                            'use_heikin_ashi': ha,
                                                            'use_ma_gap_filter': use_ma,
                                                            'ma_gap_long_min': ma_lmin,
                                                            'ma_gap_short_max': ma_smax,
                                                        })

    print()
    print(f"Generated {len(configs):,} valid configurations")
    return configs
|
||||
|
||||
|
||||
def create_chunks(configs, chunk_size=CHUNK_SIZE):
    """Split the configuration list into CSV chunk files.

    Each slice of *chunk_size* configs is written to cluster/chunks/ as a
    zero-padded, numbered CSV, and described by a manifest dict carrying
    its id, file path, size, and initial 'pending' status.
    """
    print(f"Creating chunks of {chunk_size} configurations...")

    chunk_dir = PROJECT_DIR / "cluster" / "chunks"
    chunk_dir.mkdir(parents=True, exist_ok=True)

    manifest = []
    for chunk_id, start in enumerate(range(0, len(configs), chunk_size)):
        batch = configs[start:start + chunk_size]
        out_path = chunk_dir / f"v9_advanced_chunk_{chunk_id:04d}.csv"

        # Persist this slice as its own CSV.
        pd.DataFrame(batch).to_csv(out_path, index=False)

        manifest.append({
            'id': chunk_id,
            'file': str(out_path),
            'size': len(batch),
            'status': 'pending',
        })

    print(f"Created {len(manifest)} chunks")
    return manifest
|
||||
|
||||
|
||||
def create_database(chunks):
    """Create the SQLite tracking database and seed it with chunk rows.

    Args:
        chunks: list of dicts with 'id', 'file', 'size', 'status' keys.

    Returns:
        Path to the created database file.
    """
    db_path = PROJECT_DIR / "cluster" / "exploration_v9_advanced.db"

    print(f"Creating database: {db_path}")

    conn = sqlite3.connect(db_path)
    try:
        # Connection context manager commits on success and rolls back on
        # error, so a partial seed never persists.
        with conn:
            conn.execute('''
                CREATE TABLE IF NOT EXISTS chunks (
                    id INTEGER PRIMARY KEY,
                    file TEXT NOT NULL,
                    size INTEGER NOT NULL,
                    status TEXT NOT NULL,
                    worker TEXT,
                    started_at INTEGER,
                    completed_at INTEGER,
                    result_file TEXT
                )
            ''')

            # executemany batches all inserts instead of one cursor call
            # per chunk.
            conn.executemany(
                'INSERT INTO chunks (id, file, size, status) VALUES (?, ?, ?, ?)',
                [(c['id'], c['file'], c['size'], c['status']) for c in chunks],
            )
    finally:
        # Previously the connection leaked if any statement raised.
        conn.close()

    print(f"Database created with {len(chunks)} chunks")
    return db_path
|
||||
|
||||
|
||||
def assign_chunk_to_worker(db_path, worker_name):
    """Claim the next pending chunk for *worker_name*.

    Args:
        db_path: path to the tracking SQLite database.
        worker_name: label stored in the chunk's 'worker' column.

    Returns:
        {'id': ..., 'file': ...} for the claimed chunk, or None when no
        pending chunk could be claimed (none left, or a concurrent
        claimant won the race — callers may simply call again).
    """
    conn = sqlite3.connect(db_path)
    try:
        with conn:
            row = conn.execute(
                "SELECT id, file FROM chunks "
                "WHERE status = 'pending' ORDER BY id LIMIT 1"
            ).fetchone()
            if row is None:
                return None

            chunk_id, chunk_file = row

            # Re-check status in the WHERE clause: the original
            # SELECT-then-UPDATE was not atomic, so two coordinators could
            # assign the same chunk.  rowcount == 0 means we lost the race.
            cur = conn.execute(
                "UPDATE chunks "
                "SET status = 'running', worker = ?, started_at = ? "
                "WHERE id = ? AND status = 'pending'",
                (worker_name, int(time.time()), chunk_id),
            )
            if cur.rowcount == 0:
                return None
    finally:
        conn.close()

    return {'id': chunk_id, 'file': chunk_file}
|
||||
|
||||
|
||||
def mark_chunk_complete(db_path, chunk_id, result_file):
    """Record a chunk as completed, stamping finish time and result path."""
    conn = sqlite3.connect(db_path)
    cursor = conn.cursor()

    cursor.execute(
        "UPDATE chunks "
        "SET status = 'completed', completed_at = ?, result_file = ? "
        "WHERE id = ?",
        (int(time.time()), result_file, chunk_id),
    )

    conn.commit()
    conn.close()
|
||||
|
||||
|
||||
def start_worker_process(worker_name, worker_config, chunk_file, output_file):
    """Launch the chunk worker on a remote host via SSH.

    Args:
        worker_name: label used for log output only.
        worker_config: dict with at least a 'host' entry to SSH into.
        chunk_file: path (on the remote host) of the chunk CSV to process.
        output_file: path (on the remote host) for the result CSV.

    Returns:
        The subprocess.Popen handle so callers can poll or wait on the
        remote job.  (Previously the handle was discarded, leaving no way
        to observe completion or failure from the coordinator.)
    """
    host = worker_config['host']

    # One SSH invocation; the remote shell cd's into the project and runs
    # the worker script with the chunk/output paths.
    cmd = [
        'ssh', host,
        f'cd /root/traderv4 && '
        f'python3 scripts/distributed_v9_advanced_worker.py {chunk_file} {output_file}'
    ]

    print(f"Starting {worker_name} on chunk...")
    # NOTE(review): stdout/stderr are discarded, so remote failures are
    # invisible here — consider redirecting to a per-chunk log file.
    return subprocess.Popen(cmd, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
|
||||
|
||||
|
||||
def monitor_progress(db_path):
    """Snapshot chunk counts from the tracking database.

    Args:
        db_path: path to the tracking SQLite database.

    Returns:
        Dict with 'completed', 'running', 'pending', 'total', and
        'progress' (percent complete; 0.0 when the table is empty).
    """
    conn = sqlite3.connect(db_path)
    try:
        # One GROUP BY pass instead of three COUNT(*) queries.  The old
        # queries compared against double-quoted "completed"/"running",
        # which standard SQL treats as identifiers — it only worked via an
        # SQLite compatibility quirk; single-quoted literals are correct.
        counts = dict(conn.execute(
            "SELECT status, COUNT(*) FROM chunks GROUP BY status"
        ).fetchall())
    finally:
        conn.close()

    completed = counts.get('completed', 0)
    running = counts.get('running', 0)
    total = sum(counts.values())

    return {
        'completed': completed,
        'running': running,
        'pending': total - completed - running,
        'total': total,
        # Guard the empty-table case instead of dividing by zero.
        'progress': (completed / total * 100) if total else 0.0,
    }
|
||||
|
||||
|
||||
def main():
    """CLI entry point: generate configs, chunk them, and set up tracking.

    Worker dispatch is not implemented yet (see TODO at the end), so the
    script currently performs setup only; workers are started manually.
    """
    parser = argparse.ArgumentParser(description="Coordinate v9 advanced sweep")
    parser.add_argument("--chunk-size", type=int, default=CHUNK_SIZE,
                        help=f"Configurations per chunk (default: {CHUNK_SIZE})")
    parser.add_argument("--generate-only", action="store_true",
                        help="Only generate chunks without starting workers")

    args = parser.parse_args()

    print("=" * 80)
    print("v9 ADVANCED PARAMETER SWEEP COORDINATOR")
    print("=" * 80)
    print()
    print(f"Chunk size: {args.chunk_size} configurations")
    print(f"Workers: {len(WORKERS)}")
    print()

    # Generate parameter space
    configs = generate_parameter_space()

    # Create chunks
    chunks = create_chunks(configs, args.chunk_size)

    # Create database
    db_path = create_database(chunks)

    print()
    print("=" * 80)
    print("SETUP COMPLETE")
    print("=" * 80)
    print()
    print(f"Total configurations: {len(configs):,}")
    print(f"Chunks: {len(chunks)}")
    print(f"Database: {db_path}")
    print()

    if args.generate_only:
        # BUG FIX: the old message told users to pass "--no-generate-only",
        # a flag that does not exist; the correct instruction is to re-run
        # without --generate-only.
        print("Generation complete. Re-run without --generate-only to start workers.")
        return

    print("Starting distributed processing...")
    print()

    # TODO: Implement worker coordination loop
    # This would monitor chunks, assign to workers, track progress
    # For now, workers can be started manually
|
||||
|
||||
|
||||
# Script entry point.
if __name__ == "__main__":
    main()
|
||||
157
scripts/distributed_v9_advanced_worker.py
Executable file
157
scripts/distributed_v9_advanced_worker.py
Executable file
@@ -0,0 +1,157 @@
|
||||
#!/usr/bin/env python3
"""
Distributed v9 advanced parameter sweep for cluster execution.

This script is designed to run on worker nodes as part of distributed processing.
The coordinator will split the 800K+ configurations into chunks and distribute
them across the cluster.

Expected per-worker throughput: ~300-500 configs/hour
Total runtime: 40-80 hours on 2-worker cluster
"""

import argparse
import sys
from pathlib import Path

# Add project root to path so the backtester package is importable when
# this script is run standalone on a worker node.
project_root = Path(__file__).parent.parent
sys.path.insert(0, str(project_root))

import pandas as pd
from tqdm import tqdm

from backtester.data_loader import load_data
from backtester.indicators.money_line_v9 import MoneyLineV9Inputs, money_line_v9_signals
from backtester.simulator import simulate_money_line
||||
|
||||
|
||||
def test_config(params):
    """Backtest a single v9 parameter configuration.

    Args:
        params: dict of sweep parameters (keys listed in PARAM_KEYS below).

    Returns:
        Dict containing every sweep parameter plus performance metrics.
        On error the metrics are zeroed but all parameter columns are kept,
        so the results CSV retains a consistent schema.  (The old error
        path dropped most parameter columns, misaligning result rows.)
    """
    # NOTE(review): load_data is invoked once per configuration — the old
    # comment claimed it is cached; confirm that, otherwise hoist the load
    # out of the per-config path.
    df = load_data("solusdt_5m.csv")

    # Create inputs with parameters
    inputs = MoneyLineV9Inputs(
        # Basic optimized params (FIXED from previous sweep)
        confirm_bars=0,
        flip_threshold_percent=0.5,
        cooldown_bars=3,
        adx_min=21,
        long_pos_max=75,
        short_pos_min=20,
        vol_min=1.0,

        # ADVANCED OPTIMIZATION PARAMETERS:
        atr_period=params['atr_period'],
        multiplier=params['multiplier'],
        adx_length=params['adx_length'],
        rsi_length=params['rsi_length'],
        rsi_long_min=params['rsi_long_min'],
        rsi_long_max=params['rsi_long_max'],
        rsi_short_min=params['rsi_short_min'],
        rsi_short_max=params['rsi_short_max'],
        vol_max=params['vol_max'],
        entry_buffer_atr=params['entry_buffer_atr'],
        use_heikin_ashi=params['use_heikin_ashi'],
        use_ma_gap_filter=params['use_ma_gap_filter'],
        ma_gap_long_min=params['ma_gap_long_min'],
        ma_gap_short_max=params['ma_gap_short_max'],
    )

    # Build the parameter portion of the result once, so the success and
    # error rows share one column set.
    param_keys = (
        'atr_period', 'multiplier', 'adx_length', 'rsi_length',
        'rsi_long_min', 'rsi_long_max', 'rsi_short_min', 'rsi_short_max',
        'vol_max', 'entry_buffer_atr', 'use_heikin_ashi',
        'use_ma_gap_filter', 'ma_gap_long_min', 'ma_gap_short_max',
    )
    record = {key: params[key] for key in param_keys}

    try:
        # Generate signals
        signals = money_line_v9_signals(df, inputs)

        # Simulate trades
        results = simulate_money_line(df, signals)

        record.update(
            pnl=results['total_pnl'],
            win_rate=results['win_rate'],
            profit_factor=results['profit_factor'],
            max_drawdown=results['max_drawdown'],
            total_trades=results['total_trades'],
        )
    except Exception as e:
        # Best-effort: zero the metrics instead of aborting the whole
        # chunk, but keep the parameter columns.
        print(f"Error testing config: {e}")
        record.update(pnl=0, win_rate=0, profit_factor=0,
                      max_drawdown=0, total_trades=0)

    return record
|
||||
|
||||
|
||||
def process_chunk(chunk_file: str, output_file: str):
    """
    Process a chunk of parameter configurations.

    Args:
        chunk_file: CSV file with parameter combinations to test
        output_file: CSV file to save results
    """
    print(f"Loading chunk: {chunk_file}")
    chunk_df = pd.read_csv(chunk_file)
    print(f"Chunk size: {len(chunk_df)} configurations")
    print()

    # Backtest every configuration row, with a progress bar.
    rows = [
        test_config(row.to_dict())
        for _, row in tqdm(chunk_df.iterrows(), total=len(chunk_df),
                           desc="Testing configs")
    ]

    # Persist results
    results_df = pd.DataFrame(rows)
    results_df.to_csv(output_file, index=False)
    print(f"\nResults saved to: {output_file}")

    # Summary banner
    print()
    print("=" * 80)
    print("CHUNK COMPLETE")
    print("=" * 80)
    print()
    print(f"Configurations tested: {len(results_df)}")
    print(f"Best PnL: ${results_df['pnl'].max():.2f}")
    print(f"Mean PnL: ${results_df['pnl'].mean():.2f}")
    print(f"Configurations with trades: {(results_df['total_trades'] > 0).sum()}")
    print()
||||
|
||||
|
||||
def main():
    """Parse CLI arguments and process a single chunk file."""
    parser = argparse.ArgumentParser(description="Process v9 advanced sweep chunk")
    parser.add_argument("chunk_file", help="Input CSV file with parameter combinations")
    parser.add_argument("output_file", help="Output CSV file for results")
    args = parser.parse_args()

    banner = "=" * 80
    print(banner)
    print("v9 ADVANCED SWEEP - CHUNK PROCESSOR")
    print(banner)
    print()

    process_chunk(args.chunk_file, args.output_file)
|
||||
|
||||
|
||||
# Script entry point.
if __name__ == "__main__":
    main()
|
||||
299
scripts/run_advanced_v9_sweep.py
Normal file
299
scripts/run_advanced_v9_sweep.py
Normal file
@@ -0,0 +1,299 @@
|
||||
#!/usr/bin/env python3
"""
Advanced v9 Money Line parameter sweep - AGGRESSIVE optimization.

This explores ~100K-200K parameter combinations across:
- ATR profiles (period + multiplier variations)
- RSI boundaries (4 parameters)
- Volume max threshold
- Entry buffer size
- ADX length
- Source mode (Chart vs Heikin Ashi)
- MA gap filter (optional)

NOTE(review): the "~100K-200K" figure above does not match
generate_parameter_grid(), whose lists multiply out to 6,635,520
combinations — confirm the intended grid size before running.

Expected runtime: 40-80 hours on 2-worker cluster
Target: Beat baseline $194.43/1k (19.44% returns)
"""

import itertools
import multiprocessing as mp
import sys
from pathlib import Path

# Add project root to path so the backtester package is importable when
# this script is run standalone.
project_root = Path(__file__).parent.parent
sys.path.insert(0, str(project_root))

import pandas as pd
from tqdm import tqdm

from backtester.data_loader import load_data
from backtester.indicators.money_line_v9 import MoneyLineV9Inputs, money_line_v9_signals
from backtester.simulator import simulate_money_line
|
||||
def test_config(args):
    """Backtest one configuration.

    Args:
        args: (config_id, params) tuple as produced by
            generate_parameter_grid().

    Returns:
        Dict with config_id, every sweep parameter, and performance
        metrics.  On error the metrics are zeroed but all parameter
        columns are kept, so the results DataFrame has a stable schema.
        (The old error path returned only config_id + zeroed metrics,
        dropping the parameter columns for failed rows.)
    """
    config_id, params = args

    # Load data
    df = load_data("solusdt_5m.csv")

    # Create inputs with parameters
    inputs = MoneyLineV9Inputs(
        # Basic optimized params (FIXED from previous sweep)
        confirm_bars=0,
        flip_threshold_percent=0.5,
        cooldown_bars=3,
        adx_min=21,
        long_pos_max=75,
        short_pos_min=20,
        vol_min=1.0,

        # ADVANCED OPTIMIZATION PARAMETERS:
        atr_period=params['atr_period'],
        multiplier=params['multiplier'],
        adx_length=params['adx_length'],
        rsi_length=params['rsi_length'],
        rsi_long_min=params['rsi_long_min'],
        rsi_long_max=params['rsi_long_max'],
        rsi_short_min=params['rsi_short_min'],
        rsi_short_max=params['rsi_short_max'],
        vol_max=params['vol_max'],
        entry_buffer_atr=params['entry_buffer_atr'],
        use_heikin_ashi=params['use_heikin_ashi'],
        use_ma_gap_filter=params['use_ma_gap_filter'],
        ma_gap_long_min=params['ma_gap_long_min'],
        ma_gap_short_max=params['ma_gap_short_max'],
    )

    # Shared parameter portion of the result row (success and error).
    param_keys = (
        'atr_period', 'multiplier', 'adx_length', 'rsi_length',
        'rsi_long_min', 'rsi_long_max', 'rsi_short_min', 'rsi_short_max',
        'vol_max', 'entry_buffer_atr', 'use_heikin_ashi',
        'use_ma_gap_filter', 'ma_gap_long_min', 'ma_gap_short_max',
    )
    record = {'config_id': config_id}
    record.update({key: params[key] for key in param_keys})

    try:
        # Generate signals
        signals = money_line_v9_signals(df, inputs)

        # Simulate trades
        results = simulate_money_line(df, signals)

        record.update(
            pnl=results['total_pnl'],
            win_rate=results['win_rate'],
            profit_factor=results['profit_factor'],
            max_drawdown=results['max_drawdown'],
            total_trades=results['total_trades'],
        )
    except Exception as e:
        # Best-effort: zero the metrics instead of aborting the sweep.
        print(f"Error testing config {config_id}: {e}")
        record.update(pnl=0, win_rate=0, profit_factor=0,
                      max_drawdown=0, total_trades=0)

    return record
|
||||
|
||||
|
||||
def generate_parameter_grid():
    """
    Generate comprehensive parameter grid for advanced optimization.

    SEARCH SPACE:
    - ATR periods: 5 values (10, 12, 14, 16, 18)
    - Multipliers: 6 values (3.0, 3.2, 3.5, 3.8, 4.0, 4.2)
    - ADX length: 4 values (14, 16, 18, 20)
    - RSI length: 3 values (12, 14, 16)
    - RSI long min: 4 values (30, 35, 40, 45)
    - RSI long max: 4 values (65, 70, 75, 80)
    - RSI short min: 4 values (25, 30, 35, 40)
    - RSI short max: 4 values (60, 65, 70, 75)
    - Volume max: 4 values (3.0, 3.5, 4.0, 4.5)
    - Entry buffer: 3 values (0.15, 0.20, 0.25)
    - Source mode: 2 values (Chart, Heikin Ashi)
    - MA gap filter: 3 modes (disabled, longs_only, both)

    Total: 5×6×4×3 × (4×4×4×4) × 4×3×2×3 = 6,635,520 combinations.

    NOTE(review): the figure previously quoted here (829,440) does not
    match the product of these lists.  Also, the RSI validity checks
    below never exclude anything with these ranges (every long min ≤ 45
    is below every long max ≥ 65, and every short min ≤ 40 is below
    every short max ≥ 60), so all 6,635,520 combinations are emitted —
    confirm the intended grid size before running a sweep.

    Returns:
        List of (config_id, params_dict) tuples.
    """

    # ATR profile variations (5 × 6 = 30 combos)
    atr_periods = [10, 12, 14, 16, 18]
    multipliers = [3.0, 3.2, 3.5, 3.8, 4.0, 4.2]

    # ADX length variations (4 values)
    adx_lengths = [14, 16, 18, 20]

    # RSI length (3 values)
    rsi_lengths = [12, 14, 16]

    # RSI boundaries (4×4×4×4 = 256 combos)
    rsi_long_mins = [30, 35, 40, 45]
    rsi_long_maxs = [65, 70, 75, 80]
    rsi_short_mins = [25, 30, 35, 40]
    rsi_short_maxs = [60, 65, 70, 75]

    # Volume max (4 values)
    vol_maxs = [3.0, 3.5, 4.0, 4.5]

    # Entry buffer (3 values)
    entry_buffers = [0.15, 0.20, 0.25]

    # Source mode (2 values)
    use_heikin_ashis = [False, True]

    # MA gap filter modes (3 modes = 3 parameter sets)
    # Mode 1: Disabled
    # Mode 2: Longs only (require ma50 > ma200)
    # Mode 3: Both directions (bull/bear confirmation)
    ma_gap_configs = [
        (False, 0.0, 0.0),  # Disabled
        (True, 0.5, 0.0),   # Longs only: require 0.5% gap
        (True, 0.5, -0.5),  # Both: longs need +0.5%, shorts need -0.5%
    ]

    configs = []
    config_id = 0

    for atr_period, multiplier, adx_length, rsi_length, \
        rsi_long_min, rsi_long_max, rsi_short_min, rsi_short_max, \
        vol_max, entry_buffer, use_ha, ma_gap_config in \
        itertools.product(
            atr_periods, multipliers, adx_lengths, rsi_lengths,
            rsi_long_mins, rsi_long_maxs, rsi_short_mins, rsi_short_maxs,
            vol_maxs, entry_buffers, use_heikin_ashis, ma_gap_configs
        ):

        # Validity check: RSI min < max
        # (never triggers with the ranges above — see docstring NOTE)
        if rsi_long_min >= rsi_long_max:
            continue
        if rsi_short_min >= rsi_short_max:
            continue

        use_ma_gap, ma_gap_long_min, ma_gap_short_max = ma_gap_config

        configs.append((config_id, {
            'atr_period': atr_period,
            'multiplier': multiplier,
            'adx_length': adx_length,
            'rsi_length': rsi_length,
            'rsi_long_min': rsi_long_min,
            'rsi_long_max': rsi_long_max,
            'rsi_short_min': rsi_short_min,
            'rsi_short_max': rsi_short_max,
            'vol_max': vol_max,
            'entry_buffer_atr': entry_buffer,
            'use_heikin_ashi': use_ha,
            'use_ma_gap_filter': use_ma_gap,
            'ma_gap_long_min': ma_gap_long_min,
            'ma_gap_short_max': ma_gap_short_max,
        }))
        config_id += 1

    return configs
|
||||
|
||||
|
||||
def main():
    """Run the advanced parameter sweep end-to-end.

    Generates the grid, backtests every configuration with a local
    multiprocessing pool, writes full + top-1000 result CSVs, and prints
    a summary of the best configuration versus the baseline.
    """
    print("=" * 80)
    print("v9 ADVANCED PARAMETER SWEEP - AGGRESSIVE OPTIMIZATION")
    print("=" * 80)
    print()
    print("This will explore ~800K parameter combinations across:")
    print("  - ATR profiles (5 periods × 6 multipliers)")
    print("  - RSI boundaries (4×4×4×4 = 256 combinations)")
    print("  - Volume max (4 values)")
    print("  - Entry buffer (3 values)")
    print("  - ADX length (4 values)")
    print("  - RSI length (3 values)")
    print("  - Source mode (Chart vs Heikin Ashi)")
    print("  - MA gap filter (3 modes)")
    print()
    print("Expected runtime: 40-80 hours on 2-worker cluster")
    print("Target: Beat baseline $194.43/1k (19.44% returns)")
    print()

    # Generate parameter grid
    print("Generating parameter combinations...")
    configs = generate_parameter_grid()
    print(f"Total configurations: {len(configs):,}")
    print()

    # Determine number of workers
    n_workers = mp.cpu_count()
    print(f"Using {n_workers} CPU cores")
    print()

    # Run sweep; imap preserves input order while streaming results.
    print("Starting parameter sweep...")
    with mp.Pool(n_workers) as pool:
        results = list(tqdm(
            pool.imap(test_config, configs),
            total=len(configs),
            desc="Testing configs"
        ))

    # Convert to DataFrame
    results_df = pd.DataFrame(results)

    # Save full results
    output_file = "sweep_v9_advanced_full.csv"
    results_df.to_csv(output_file, index=False)
    print(f"Full results saved to: {output_file}")

    # Sort by PnL and save top 1000
    top_results = results_df.nlargest(1000, 'pnl')
    top_file = "sweep_v9_advanced_top1000.csv"
    top_results.to_csv(top_file, index=False)
    print(f"Top 1000 configurations saved to: {top_file}")

    # Print summary
    print()
    print("=" * 80)
    print("SWEEP COMPLETE")
    print("=" * 80)
    print()
    # (f-prefix removed from placeholder-free strings below)
    print("Best configuration:")
    best = top_results.iloc[0]
    print(f"  PnL: ${best['pnl']:.2f}")
    print(f"  Win Rate: {best['win_rate']:.1f}%")
    print(f"  Profit Factor: {best['profit_factor']:.2f}")
    print(f"  Max Drawdown: ${best['max_drawdown']:.2f}")
    print(f"  Total Trades: {best['total_trades']}")
    print()
    print("Parameters:")
    print(f"  ATR Period: {best['atr_period']}")
    print(f"  Multiplier: {best['multiplier']}")
    print(f"  ADX Length: {best['adx_length']}")
    print(f"  RSI Length: {best['rsi_length']}")
    print(f"  RSI Long: {best['rsi_long_min']}-{best['rsi_long_max']}")
    print(f"  RSI Short: {best['rsi_short_min']}-{best['rsi_short_max']}")
    print(f"  Volume Max: {best['vol_max']}")
    print(f"  Entry Buffer: {best['entry_buffer_atr']}")
    print(f"  Heikin Ashi: {best['use_heikin_ashi']}")
    print(f"  MA Gap Filter: {best['use_ma_gap_filter']}")
    if best['use_ma_gap_filter']:
        print(f"    Long Min: {best['ma_gap_long_min']:.1f}%")
        print(f"    Short Max: {best['ma_gap_short_max']:.1f}%")
    print()
    print("Baseline to beat: $194.43 (19.44%)")
    improvement = ((best['pnl'] - 194.43) / 194.43) * 100
    print(f"Improvement: {improvement:+.1f}%")
||||
|
||||
|
||||
# Script entry point.
if __name__ == "__main__":
    main()
|
||||
Reference in New Issue
Block a user