feat: Add v11 test sweep system (256 combinations) with office hours scheduling
Co-authored-by: mindesbunister <32161838+mindesbunister@users.noreply.github.com>
BIN
cluster/__pycache__/v11_test_coordinator.cpython-312.pyc
Normal file
Binary file not shown.
BIN
cluster/__pycache__/v11_test_worker.cpython-312.pyc
Normal file
Binary file not shown.
57
cluster/run_v11_test_sweep.sh
Executable file
@@ -0,0 +1,57 @@
#!/bin/bash
# V11 Test Parameter Sweep Launch Script
# Initializes database and starts coordinator for 256-combination test sweep

set -e  # Exit on error

echo "================================================================"
echo "V11 TEST PARAMETER SWEEP"
echo "================================================================"
echo "Combinations: 256 (2^8 parameters)"
echo "Chunks: 2 × 128 combinations"
echo "Worker 1: Always available (27 cores)"
echo "Worker 2: Office hours aware (27 cores, nights/weekends only)"
echo "Expected runtime: 6-25 minutes"
echo "================================================================"
echo ""

cd "$(dirname "$0")"
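
# NOTE: relative paths below (data/, exploration.db, the coordinator log)
# resolve against this script's directory (cluster/), not the repo root.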

# Check if data file exists
if [ ! -f "data/solusdt_5m.csv" ]; then
    echo "✗ Error: data/solusdt_5m.csv not found"
    echo "  Please ensure market data is available"
    exit 1
fi

echo "✓ Market data found"

# Check if coordinator script exists
if [ ! -f "v11_test_coordinator.py" ]; then
    echo "✗ Error: v11_test_coordinator.py not found"
    exit 1
fi

echo "✓ Coordinator script found"

# Launch coordinator in background
echo ""
echo "🚀 Starting coordinator..."
nohup python3 v11_test_coordinator.py > coordinator_v11_test.log 2>&1 &
COORDINATOR_PID=$!

echo "✓ Coordinator started (PID: $COORDINATOR_PID)"
echo ""
echo "================================================================"
echo "MONITORING"
echo "================================================================"
echo "Log file: tail -f coordinator_v11_test.log"
echo "Database: sqlite3 exploration.db"
echo "Results: cluster/v11_test_results/*.csv"
echo ""
echo "To check status:"
echo "  sqlite3 exploration.db \"SELECT * FROM v11_test_chunks\""
echo ""
echo "To stop sweep:"
echo "  kill $COORDINATOR_PID"
echo "================================================================"
437
cluster/v11_test_coordinator.py
Executable file
@@ -0,0 +1,437 @@
#!/usr/bin/env python3
"""
V11 Test Parameter Sweep Coordinator

Coordinates the 256-combination test sweep across 2 workers with smart scheduling.
Worker 2 respects office hours (disabled Mon-Fri 8am-6pm; nights/weekends OK).

Test sweep: 2 chunks × 128 combinations = 256 total
Expected runtime: 6-25 minutes depending on worker availability
"""

import os
import sqlite3
import subprocess
import time
import signal
import sys
from pathlib import Path
from datetime import datetime
import urllib.request
import json

# Worker configuration
WORKERS = {
    'worker1': {
        'host': 'root@10.10.254.106',
        'workspace': '/home/comprehensive_sweep',
        'cores': 27,
    },
    'worker2': {
        'host': 'root@10.20.254.100',
        'workspace': '/home/backtest_dual/backtest',
        'ssh_hop': 'root@10.10.254.106',
        'cores': 27,
        'time_restricted': True,
        'allowed_start_hour': 18,  # 6 PM
        'allowed_end_hour': 8,     # 8 AM
    }
}

DATA_FILE = 'data/solusdt_5m.csv'
DB_PATH = 'exploration.db'
CHUNK_SIZE = 128  # Each chunk processes 128 combinations

# Telegram configuration. The bot token is read from the environment instead
# of being hardcoded in source (export TELEGRAM_BOT_TOKEN before launching).
TELEGRAM_BOT_TOKEN = os.environ.get('TELEGRAM_BOT_TOKEN', '')
TELEGRAM_CHAT_ID = '579304651'


def send_telegram_message(message: str):
    """Send notification to Telegram"""
    try:
        url = f"https://api.telegram.org/bot{TELEGRAM_BOT_TOKEN}/sendMessage"
        data = {
            'chat_id': TELEGRAM_CHAT_ID,
            'text': message,
            'parse_mode': 'HTML'
        }

        req = urllib.request.Request(
            url,
            data=json.dumps(data).encode('utf-8'),
            headers={'Content-Type': 'application/json'}
        )

        with urllib.request.urlopen(req, timeout=10) as response:
            if response.status == 200:
                print("✓ Telegram notification sent")
            else:
                print(f"⚠️ Telegram notification failed: {response.status}")
    except Exception as e:
        print(f"⚠️ Error sending Telegram notification: {e}")


def is_worker2_available() -> bool:
    """Check if Worker 2 can run (respects office hours)"""
    now = datetime.now()

    # Weekend (Sat=5, Sun=6): available 24/7
    if now.weekday() >= 5:
        return True

    # Weekday: only 6 PM - 8 AM (avoid office hours 8am-6pm)
    hour = now.hour
    # Allowed if hour >= 18 (6 PM) OR hour < 8 (8 AM)
    return hour >= 18 or hour < 8
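
# Example (illustrative, assuming server-local time):
#   Sat 14:00 -> available (weekend)   Tue 07:59 -> available
#   Tue 09:00 -> blocked (office)      Tue 18:00 -> available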


def get_available_workers() -> list:
    """Return list of workers available right now"""
    workers = ['worker1']  # Always available
    if is_worker2_available():
        workers.append('worker2')
        print("✓ Worker 2 available (outside office hours)")
    else:
        print("⚠️ Worker 2 unavailable (office hours Mon-Fri 8am-6pm)")
    return workers


def init_database():
    """Initialize database tables for v11 test sweep"""
    conn = sqlite3.connect(DB_PATH)
    cursor = conn.cursor()

    # Drop existing test tables if present
    cursor.execute("DROP TABLE IF EXISTS v11_test_chunks")
    cursor.execute("DROP TABLE IF EXISTS v11_test_strategies")

    # Create chunks table
    cursor.execute("""
        CREATE TABLE v11_test_chunks (
            id TEXT PRIMARY KEY,
            start_combo INTEGER,
            end_combo INTEGER,
            total_combos INTEGER,
            status TEXT,
            assigned_worker TEXT,
            started_at INTEGER,
            completed_at INTEGER
        )
    """)

    # Create strategies table
    cursor.execute("""
        CREATE TABLE v11_test_strategies (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            chunk_id TEXT,
            params TEXT,
            pnl REAL,
            win_rate REAL,
            profit_factor REAL,
            max_drawdown REAL,
            total_trades INTEGER,
            FOREIGN KEY (chunk_id) REFERENCES v11_test_chunks(id)
        )
    """)
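
    # Note: nothing in this commit inserts rows into v11_test_strategies;
    # workers write their results to per-chunk CSVs (see v11_test_worker.py).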

    # Register 2 chunks (256 combinations total)
    chunks = [
        ('v11_test_chunk_0000', 0, 128, 128),
        ('v11_test_chunk_0001', 128, 256, 128),
    ]
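
    # Note: end_combo is exclusive. The coordinator passes only start_combo to
    # the worker, which derives its end as start + 128, so these rows must stay
    # consistent with CHUNK_SIZE and the worker's slice all_combos[start:end].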

    for chunk_id, start, end, total in chunks:
        cursor.execute(
            "INSERT INTO v11_test_chunks (id, start_combo, end_combo, total_combos, status) VALUES (?, ?, ?, ?, 'pending')",
            (chunk_id, start, end, total)
        )

    conn.commit()
    conn.close()
    print("✓ Database initialized with 2 chunks")


def get_pending_chunks() -> list:
    """Get list of pending chunks"""
    conn = sqlite3.connect(DB_PATH)
    cursor = conn.cursor()
    cursor.execute("SELECT id, start_combo FROM v11_test_chunks WHERE status='pending'")
    chunks = cursor.fetchall()
    conn.close()
    return chunks


def assign_chunk(chunk_id: str, worker_name: str):
    """Mark chunk as assigned to worker"""
    conn = sqlite3.connect(DB_PATH)
    cursor = conn.cursor()
    cursor.execute(
        "UPDATE v11_test_chunks SET status='running', assigned_worker=?, started_at=? WHERE id=?",
        (worker_name, int(time.time()), chunk_id)
    )
    conn.commit()
    conn.close()


def deploy_worker(worker_name: str, chunk_id: str, start_combo: int):
    """Deploy worker to EPYC server via SSH"""
    worker = WORKERS[worker_name]

    print(f"\n{'='*60}")
    print(f"Deploying {worker_name} for {chunk_id}")
    print(f"{'='*60}")

    workspace = worker['workspace']

    # Copy v11 test worker script
    print(f"📦 Copying v11_test_worker.py to {worker_name}...")

    if 'ssh_hop' in worker:
        # Worker 2: hop through Worker 1 (ProxyJump)
        scp_cmd = [
            'scp',
            '-o', 'StrictHostKeyChecking=no',
            '-o', f'ProxyJump={worker["ssh_hop"]}',
            'cluster/v11_test_worker.py',
            f'{worker["host"]}:{workspace}/'
        ]
    else:
        # Worker 1: direct connection
        scp_cmd = [
            'scp',
            '-o', 'StrictHostKeyChecking=no',
            'cluster/v11_test_worker.py',
            f'{worker["host"]}:{workspace}/'
        ]

    result = subprocess.run(scp_cmd, capture_output=True, text=True)
    if result.returncode != 0:
        print(f"✗ Failed to copy worker script: {result.stderr}")
        return False

    print("✓ Worker script deployed")

    # Copy v11 indicator module
    print(f"📦 Copying v11 indicator to {worker_name}...")

    if 'ssh_hop' in worker:
        scp_cmd = [
            'scp',
            '-o', 'StrictHostKeyChecking=no',
            '-o', f'ProxyJump={worker["ssh_hop"]}',
            'backtester/v11_moneyline_all_filters.py',
            f'{worker["host"]}:{workspace}/backtester/'
        ]
    else:
        scp_cmd = [
            'scp',
            '-o', 'StrictHostKeyChecking=no',
            'backtester/v11_moneyline_all_filters.py',
            f'{worker["host"]}:{workspace}/backtester/'
        ]

    result = subprocess.run(scp_cmd, capture_output=True, text=True)
    if result.returncode != 0:
        print(f"✗ Failed to copy indicator: {result.stderr}")
        return False

    print("✓ Indicator deployed")

    # Start worker
    print("🚀 Starting worker process...")

    worker_cmd = f"cd {workspace} && nohup python3 v11_test_worker.py {DATA_FILE} {chunk_id} {start_combo} > {chunk_id}_worker.log 2>&1 &"
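
    # The remote command backgrounds the worker with nohup, so this SSH call
    # returns as soon as the process is launched; completion is detected later
    # by polling for the chunk's results CSV (see check_chunk_completion).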

    if 'ssh_hop' in worker:
        ssh_cmd = [
            'ssh',
            '-o', 'StrictHostKeyChecking=no',
            '-o', f'ProxyJump={worker["ssh_hop"]}',
            worker['host'],
            worker_cmd
        ]
    else:
        ssh_cmd = [
            'ssh',
            '-o', 'StrictHostKeyChecking=no',
            worker['host'],
            worker_cmd
        ]

    result = subprocess.run(ssh_cmd, capture_output=True, text=True)
    if result.returncode != 0:
        print(f"✗ Failed to start worker: {result.stderr}")
        return False

    print(f"✓ Worker started on {worker_name}")
    return True


def check_chunk_completion(worker_name: str, chunk_id: str) -> bool:
    """Check if chunk has completed by looking for results CSV"""
    worker = WORKERS[worker_name]
    workspace = worker['workspace']

    check_cmd = f"test -f {workspace}/v11_test_results/{chunk_id}_results.csv && echo 'exists'"

    if 'ssh_hop' in worker:
        ssh_cmd = [
            'ssh',
            '-o', 'StrictHostKeyChecking=no',
            '-o', f'ProxyJump={worker["ssh_hop"]}',
            worker['host'],
            check_cmd
        ]
    else:
        ssh_cmd = [
            'ssh',
            '-o', 'StrictHostKeyChecking=no',
            worker['host'],
            check_cmd
        ]

    # Treat an SSH timeout as "not finished yet" rather than crashing the
    # monitor loop (subprocess.run raises TimeoutExpired on timeout).
    try:
        result = subprocess.run(ssh_cmd, capture_output=True, text=True, timeout=10)
    except subprocess.TimeoutExpired:
        return False
    return 'exists' in result.stdout


def mark_chunk_complete(chunk_id: str):
    """Mark chunk as completed in database"""
    conn = sqlite3.connect(DB_PATH)
    cursor = conn.cursor()
    cursor.execute(
        "UPDATE v11_test_chunks SET status='completed', completed_at=? WHERE id=?",
        (int(time.time()), chunk_id)
    )
    conn.commit()
    conn.close()


def signal_handler(sig, frame):
    """Handle termination signals"""
    message = (
        "⚠️ <b>V11 Test Sweep STOPPED</b>\n\n"
        "Coordinator received termination signal.\n"
        "Sweep stopped prematurely.\n\n"
        f"Time: {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}"
    )
    send_telegram_message(message)
    sys.exit(0)


def main():
    """Main coordinator loop"""
    signal.signal(signal.SIGINT, signal_handler)
    signal.signal(signal.SIGTERM, signal_handler)

    print("\n" + "="*60)
    print("V11 TEST PARAMETER SWEEP COORDINATOR")
    print("="*60)
    print("Total combinations: 256 (2^8)")
    print("Chunks: 2 × 128 combinations")
    print("Workers: 2 × 27 cores (85% CPU)")
    print("Expected runtime: 6-25 minutes")
    print("="*60 + "\n")

    # Initialize database
    print("📊 Initializing database...")
    init_database()

    # Send start notification
    available_workers = get_available_workers()
    start_msg = (
        "🚀 <b>V11 Test Sweep STARTED</b>\n\n"
        "Combinations: 256 (2^8)\n"
        "Chunks: 2 × 128 combos\n"
        f"Workers: {len(available_workers)} available\n"
        "- Worker 1: Always on (27 cores)\n"
    )
    if 'worker2' in available_workers:
        start_msg += "- Worker 2: Active (27 cores)\n"
    else:
        start_msg += "- Worker 2: Office hours (waiting for 6 PM)\n"
    start_msg += f"\nStart: {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}"

    send_telegram_message(start_msg)

    # Deploy workers to available chunks
    start_time = time.time()
    active_chunks = {}  # chunk_id -> worker_name

    pending_chunks = get_pending_chunks()

    for worker_name in available_workers:
        if pending_chunks:
            chunk_id, start_combo = pending_chunks.pop(0)
            print(f"\n📍 Assigning {chunk_id} to {worker_name}")
            assign_chunk(chunk_id, worker_name)

            if deploy_worker(worker_name, chunk_id, start_combo):
                active_chunks[chunk_id] = worker_name
                print(f"✓ {chunk_id} active on {worker_name}")
            else:
                print(f"✗ Failed to deploy {chunk_id} on {worker_name}")

    # Monitor progress
    print("\n" + "="*60)
    print("MONITORING SWEEP PROGRESS")
    print("="*60 + "\n")

    while active_chunks:
        time.sleep(30)  # Check every 30 seconds

        completed_this_round = []

        for chunk_id, worker_name in active_chunks.items():
            if check_chunk_completion(worker_name, chunk_id):
                print(f"✓ {chunk_id} COMPLETED on {worker_name}")
                mark_chunk_complete(chunk_id)
                completed_this_round.append(chunk_id)

        # Remove completed chunks
        for chunk_id in completed_this_round:
            del active_chunks[chunk_id]

        # Try to assign pending chunks to freed workers
        if completed_this_round and pending_chunks:
            available_workers = get_available_workers()

            for worker_name in available_workers:
                if worker_name not in active_chunks.values() and pending_chunks:
                    chunk_id, start_combo = pending_chunks.pop(0)
                    print(f"\n📍 Assigning {chunk_id} to {worker_name}")
                    assign_chunk(chunk_id, worker_name)

                    if deploy_worker(worker_name, chunk_id, start_combo):
                        active_chunks[chunk_id] = worker_name
                        print(f"✓ {chunk_id} active on {worker_name}")

    # All chunks complete
    duration = time.time() - start_time
    duration_min = duration / 60

    print("\n" + "="*60)
    print("V11 TEST SWEEP COMPLETE!")
    print("="*60)
    print(f"Duration: {duration_min:.1f} minutes")
    print("Chunks: 2/2 completed")
    print("Strategies: 256 tested")
    print("="*60 + "\n")

    # Send completion notification
    complete_msg = (
        "✅ <b>V11 Test Sweep COMPLETE</b>\n\n"
        f"Duration: {duration_min:.1f} minutes\n"
        "Chunks: 2/2 completed\n"
        "Strategies: 256 tested\n\n"
        "Check results:\n"
        "- cluster/v11_test_results/\n"
        "- sqlite3 exploration.db\n\n"
        f"Completed: {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}"
    )
    send_telegram_message(complete_msg)


if __name__ == '__main__':
    main()
298
cluster/v11_test_worker.py
Executable file
@@ -0,0 +1,298 @@
#!/usr/bin/env python3
"""
V11 Test Parameter Sweep Worker

Processes chunks of v11 test parameter configurations (256 combinations total).
Uses 27 cores (85% CPU) for multiprocessing.

Test parameter grid (2 values each = 2^8 = 256 combinations):
- flip_threshold: 0.5, 0.6
- adx_min: 18, 21
- long_pos_max: 75, 80
- short_pos_min: 20, 25
- vol_min: 0.8, 1.0
- entry_buffer_atr: 0.15, 0.20
- rsi_long_min: 35, 40
- rsi_short_max: 65, 70
"""

import sys
import csv
import pandas as pd
from pathlib import Path
from typing import Dict, List, Any
from multiprocessing import Pool
import functools
import itertools

# Add backtester to path
sys.path.insert(0, str(Path(__file__).parent.parent))

from backtester.v11_moneyline_all_filters import (
    money_line_v11_signals,
    MoneyLineV11Inputs
)
# Imported but not used: backtest_config below runs a simplified inline
# TP/SL simulation instead of the full simulator.
from backtester.simulator import simulate_money_line

# CPU limit: 85% of 32 threads ≈ 27 cores
MAX_WORKERS = 27

# Test parameter grid (256 combinations)
PARAMETER_GRID = {
    'flip_threshold': [0.5, 0.6],
    'adx_min': [18, 21],
    'long_pos_max': [75, 80],
    'short_pos_min': [20, 25],
    'vol_min': [0.8, 1.0],
    'entry_buffer_atr': [0.15, 0.20],
    'rsi_long_min': [35, 40],
    'rsi_short_max': [65, 70],
}
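
# itertools.product enumerates this grid in a fixed, documented order (the
# rightmost key varies fastest) and dicts preserve insertion order, so the
# mapping from combination index to config is identical on every worker;
# slicing chunks by index in process_chunk is therefore safe across machines.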


def load_market_data(csv_file: str) -> pd.DataFrame:
    """Load OHLCV data from CSV"""
    df = pd.read_csv(csv_file)

    # Ensure required columns exist
    required = ['timestamp', 'open', 'high', 'low', 'close', 'volume']
    for col in required:
        if col not in df.columns:
            raise ValueError(f"Missing required column: {col}")

    # Convert timestamp if needed
    if df['timestamp'].dtype == 'object':
        df['timestamp'] = pd.to_datetime(df['timestamp'])

    df = df.set_index('timestamp')
    print(f"✓ Loaded {len(df):,} bars from {csv_file}")
    return df
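
# Note: the timestamp index set above is what lets backtest_config translate
# a signal's timestamp into a positional row via df.index.get_loc().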


def backtest_config(df: pd.DataFrame, config: Dict[str, Any]) -> Dict[str, Any]:
    """
    Run backtest for a single v11 test parameter configuration

    Returns dict with:
    - params: original config dict
    - pnl: total P&L ($)
    - trades: number of trades
    - win_rate: % winners
    - profit_factor: gross wins / gross losses
    - max_drawdown: max drawdown ($)
    """
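    # Scoring model (fixed $1000 notional): TP = +0.86% (+$8.60), SL = -1.29%
    # (-$12.90). Worked example: 10 wins and 5 losses give win_pnl = $86.00,
    # loss_pnl = $64.50, pnl = +$21.50, profit_factor ≈ 1.333.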
    try:
        # Create v11 inputs
        inputs = MoneyLineV11Inputs(
            flip_threshold=config['flip_threshold'],
            adx_min=config['adx_min'],
            long_pos_max=config['long_pos_max'],
            short_pos_min=config['short_pos_min'],
            vol_min=config['vol_min'],
            entry_buffer_atr=config['entry_buffer_atr'],
            rsi_long_min=config['rsi_long_min'],
            rsi_short_max=config['rsi_short_max'],
        )

        # Generate signals
        signals = money_line_v11_signals(df, inputs)

        if not signals:
            return {
                'params': config,
                'pnl': 0.0,
                'trades': 0,
                'win_rate': 0.0,
                'profit_factor': 0.0,
                'max_drawdown': 0.0,
            }

        # Simple backtesting: track equity curve
        equity = 1000.0  # Starting capital
        peak_equity = equity
        max_drawdown = 0.0
        wins = 0
        losses = 0
        win_pnl = 0.0
        loss_pnl = 0.0

        for signal in signals:
            # Simple trade simulation:
            # TP1 at +0.86%, SL at -1.29% (ATR-based defaults)
            entry = signal.entry_price

            # Look ahead in data to see if TP or SL hit
            signal_idx = df.index.get_loc(signal.timestamp)

            # Look ahead up to 100 bars
            max_bars = min(100, len(df) - signal_idx - 1)
            if max_bars <= 0:
                continue

            future_data = df.iloc[signal_idx+1:signal_idx+1+max_bars]

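            # NOTE: hits are checked over the whole lookahead window with TP
            # given priority, so a window that touches both levels is scored
            # optimistically as a win regardless of which was hit first.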
            if signal.direction == "long":
                tp_price = entry * 1.0086  # +0.86%
                sl_price = entry * 0.9871  # -1.29%

                # Check if TP or SL hit
                hit_tp = (future_data['high'] >= tp_price).any()
                hit_sl = (future_data['low'] <= sl_price).any()

                if hit_tp:
                    pnl = 1000.0 * 0.0086  # $8.60 on $1000 position
                    equity += pnl
                    wins += 1
                    win_pnl += pnl
                elif hit_sl:
                    pnl = -1000.0 * 0.0129  # -$12.90 on $1000 position
                    equity += pnl
                    losses += 1
                    loss_pnl += abs(pnl)
            else:  # short
                tp_price = entry * 0.9914  # -0.86%
                sl_price = entry * 1.0129  # +1.29%

                # Check if TP or SL hit
                hit_tp = (future_data['low'] <= tp_price).any()
                hit_sl = (future_data['high'] >= sl_price).any()

                if hit_tp:
                    pnl = 1000.0 * 0.0086  # $8.60 on $1000 position
                    equity += pnl
                    wins += 1
                    win_pnl += pnl
                elif hit_sl:
                    pnl = -1000.0 * 0.0129  # -$12.90 on $1000 position
                    equity += pnl
                    losses += 1
                    loss_pnl += abs(pnl)

            # Track drawdown
            peak_equity = max(peak_equity, equity)
            current_drawdown = peak_equity - equity
            max_drawdown = max(max_drawdown, current_drawdown)

        total_trades = wins + losses
        win_rate = wins / total_trades if total_trades > 0 else 0.0
        profit_factor = win_pnl / loss_pnl if loss_pnl > 0 else (float('inf') if win_pnl > 0 else 0.0)
        total_pnl = equity - 1000.0

        return {
            'params': config,
            'pnl': round(total_pnl, 2),
            'trades': total_trades,
            'win_rate': round(win_rate * 100, 1),
            'profit_factor': round(profit_factor, 3) if profit_factor != float('inf') else 999.0,
            'max_drawdown': round(max_drawdown, 2),
        }

    except Exception as e:
        print(f"✗ Error backtesting config: {e}")
        return {
            'params': config,
            'pnl': 0.0,
            'trades': 0,
            'win_rate': 0.0,
            'profit_factor': 0.0,
            'max_drawdown': 0.0,
        }


def generate_parameter_combinations() -> List[Dict[str, Any]]:
    """Generate all 256 parameter combinations"""
    keys = PARAMETER_GRID.keys()
    values = PARAMETER_GRID.values()

    combinations = []
    for combo in itertools.product(*values):
        config = dict(zip(keys, combo))
        combinations.append(config)

    return combinations


def process_chunk(data_file: str, chunk_id: str, start_idx: int, end_idx: int):
    """Process a chunk of parameter combinations"""
    print(f"\n{'='*60}")
    print(f"V11 Test Worker - {chunk_id}")
    print(f"Processing combinations {start_idx} to {end_idx-1}")
    print(f"{'='*60}\n")

    # Load market data
    df = load_market_data(data_file)

    # Generate all combinations
    all_combos = generate_parameter_combinations()
    print(f"✓ Generated {len(all_combos)} total combinations")

    # Get this chunk's combinations
    chunk_combos = all_combos[start_idx:end_idx]
    print(f"✓ Processing {len(chunk_combos)} combinations in this chunk\n")

    # Backtest with multiprocessing
    print(f"⚡ Starting {MAX_WORKERS}-core backtest...\n")

    with Pool(processes=MAX_WORKERS) as pool:
        backtest_func = functools.partial(backtest_config, df)
        results = pool.map(backtest_func, chunk_combos)

    print(f"\n✓ Completed {len(results)} backtests")

    # Write results to CSV
    output_dir = Path('v11_test_results')
    output_dir.mkdir(exist_ok=True)

    csv_file = output_dir / f"{chunk_id}_results.csv"

    with open(csv_file, 'w', newline='') as f:
        writer = csv.writer(f)

        # Header
        writer.writerow([
            'flip_threshold', 'adx_min', 'long_pos_max', 'short_pos_min',
            'vol_min', 'entry_buffer_atr', 'rsi_long_min', 'rsi_short_max',
            'pnl', 'win_rate', 'profit_factor', 'max_drawdown', 'total_trades'
        ])

        # Data rows
        for result in results:
            params = result['params']
            writer.writerow([
                params['flip_threshold'],
                params['adx_min'],
                params['long_pos_max'],
                params['short_pos_min'],
                params['vol_min'],
                params['entry_buffer_atr'],
                params['rsi_long_min'],
                params['rsi_short_max'],
                result['pnl'],
                result['win_rate'],
                result['profit_factor'],
                result['max_drawdown'],
                result['trades'],
            ])

    print(f"✓ Results saved to {csv_file}")

    # Show top 5 results
    sorted_results = sorted(results, key=lambda x: x['pnl'], reverse=True)
    print("\n🏆 Top 5 Results:")
    for i, r in enumerate(sorted_results[:5], 1):
        print(f"  {i}. PnL: ${r['pnl']:,.2f} | Trades: {r['trades']} | WR: {r['win_rate']}%")


if __name__ == '__main__':
    if len(sys.argv) != 4:
        print("Usage: python v11_test_worker.py <data_file> <chunk_id> <start_idx>")
        sys.exit(1)

    data_file = sys.argv[1]
    chunk_id = sys.argv[2]
    start_idx = int(sys.argv[3])

    # Calculate end index (128 combos per chunk)
    end_idx = start_idx + 128

    process_chunk(data_file, chunk_id, start_idx, end_idx)
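
# Example invocation (mirrors the coordinator's remote worker_cmd):
#   python3 v11_test_worker.py data/solusdt_5m.csv v11_test_chunk_0000 0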