refactor: move all remaining files to orig/ directory

Completed clean root directory structure:
- Root now contains only: .git, .env, docs/, orig/
- Moved all remaining files and directories to orig/:
  - Config files (.claude, .dockerignore, .drone.yml, etc.)
  - All .env variants (except active .env)
  - Git config (.gitconfig, .github, .gitignore, etc.)
  - Tool configs (.golangci.yml, .revive.toml, etc.)
  - Documentation (*.md files, @prompts)
  - Build files (Dockerfiles, Makefile, go.mod, go.sum)
  - Docker compose files
  - All source directories (scripts, tests, tools, etc.)
  - Runtime directories (logs, monitoring, reports)
  - Dependency files (node_modules, lib, cache)
  - Special files (--delete)

- Removed empty runtime directories (bin/, data/)

V2 structure is now clean:
- docs/planning/ - V2 planning documents
- orig/ - Complete V1 codebase preserved
- .env - Active environment config (not in git)

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude <noreply@anthropic.com>
This commit is contained in:
Administrator
2025-11-10 10:53:05 +01:00
parent 803de231ba
commit c54c569f30
718 changed files with 8304 additions and 8281 deletions

View File

@@ -1,352 +0,0 @@
#!/bin/bash
#############################################################################
# 24-Hour Production Validation Test
#
# Runs the MEV bot for a fixed duration with background monitoring and
# produces a markdown validation report for production-readiness review.
#
# Usage: ./scripts/24h-validation-test.sh
#############################################################################

# Abort on errors, unset variables, and failed pipeline stages.
set -euo pipefail

# ANSI color codes used for console output
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
NC='\033[0m' # reset

# Test configuration
TEST_DURATION_HOURS=24
LOG_DIR="logs/24h_validation_$(date +%Y%m%d_%H%M%S)"
PID_FILE="/tmp/mev-bot-24h-test.pid"
REPORT_FILE="${LOG_DIR}/validation_report.md"
METRICS_FILE="${LOG_DIR}/metrics.json"

# All artifacts (bot log, metrics JSON, report) live under one run directory.
mkdir -p "${LOG_DIR}"

# Prints the heavy horizontal rule used throughout the console output.
print_rule() {
    echo -e "${BLUE}═══════════════════════════════════════════════════════════════${NC}"
}

print_rule
echo -e "${BLUE} 24-Hour MEV Bot Production Validation Test${NC}"
print_rule
echo ""
echo -e "${GREEN}Test Start Time:${NC} $(date)"
echo -e "${GREEN}Test Duration:${NC} ${TEST_DURATION_HOURS} hours"
echo -e "${GREEN}Log Directory:${NC} ${LOG_DIR}"
echo ""
#############################################################################
# Pre-Flight Checks
#############################################################################
echo -e "${YELLOW}[1/7] Running Pre-Flight Checks...${NC}"

# Build the bot binary on demand if it is missing.
# Error diagnostics go to stderr so captured stdout stays clean.
if [ ! -f "./bin/mev-bot" ]; then
    echo -e "${RED}✗ Error: MEV bot binary not found${NC}" >&2
    echo -e "${YELLOW}Building binary...${NC}"
    make build
fi

# Required environment: RPC endpoint must be configured.
if [ -z "${ARBITRUM_RPC_ENDPOINT:-}" ]; then
    echo -e "${RED}✗ Error: ARBITRUM_RPC_ENDPOINT not set${NC}" >&2
    exit 1
fi

# Provider configuration must exist before the bot can start.
if [ ! -f "./config/providers_runtime.yaml" ]; then
    echo -e "${RED}✗ Error: Provider config not found${NC}" >&2
    exit 1
fi

# Smoke-test the binary. NOTE(review): this runs `--version` only and does
# not actually exercise the RPC endpoint — confirm whether a real RPC probe
# is wanted here.
echo -e "${YELLOW}Testing RPC connection...${NC}"
if ! timeout 10 ./bin/mev-bot --version &>/dev/null; then
    echo -e "${YELLOW}Warning: Could not verify bot version${NC}" >&2
fi

echo -e "${GREEN}✓ Pre-flight checks passed${NC}"
echo ""
#############################################################################
# Initialize Monitoring
#############################################################################
echo -e "${YELLOW}[2/7] Initializing Monitoring...${NC}"

# Write the background monitor script. The quoted delimiter ('MONITOR_EOF')
# means everything below is copied literally; expansions happen when
# monitor.sh itself runs, not here.
cat > "${LOG_DIR}/monitor.sh" << 'MONITOR_EOF'
#!/bin/bash
LOG_FILE="$1"
METRICS_FILE="$2"
while true; do
# Log-derived counters.
# NOTE: `grep -c` prints "0" AND exits non-zero when there are no matches,
# so the previous `|| echo "0"` fallback produced a two-line value ("0\n0").
# Capture plainly and default to 0 only when grep printed nothing at all
# (e.g. the log file does not exist yet).
OPPORTUNITIES=$(grep -c "ARBITRAGE OPPORTUNITY DETECTED" "$LOG_FILE" 2>/dev/null)
OPPORTUNITIES=${OPPORTUNITIES:-0}
PROFITABLE=$(grep "Net Profit:" "$LOG_FILE" 2>/dev/null | grep -v "negative" | wc -l)
EVENTS_PROCESSED=$(grep -c "Worker.*processing.*event" "$LOG_FILE" 2>/dev/null)
EVENTS_PROCESSED=${EVENTS_PROCESSED:-0}
ERRORS=$(grep -c "\[ERROR\]" "$LOG_FILE" 2>/dev/null)
ERRORS=${ERRORS:-0}
WARNINGS=$(grep -c "\[WARN\]" "$LOG_FILE" 2>/dev/null)
WARNINGS=${WARNINGS:-0}
# Cache metrics: use the most recent "Reserve cache metrics" log line.
CACHE_HITS=$(grep "Reserve cache metrics" "$LOG_FILE" 2>/dev/null | tail -1 | grep -oP 'hits=\K[0-9]+' || echo "0")
CACHE_MISSES=$(grep "Reserve cache metrics" "$LOG_FILE" 2>/dev/null | tail -1 | grep -oP 'misses=\K[0-9]+' || echo "0")
# Hit rate as a percentage (awk handles the float division).
if [ "$CACHE_HITS" -gt 0 ] || [ "$CACHE_MISSES" -gt 0 ]; then
TOTAL=$((CACHE_HITS + CACHE_MISSES))
HIT_RATE=$(awk "BEGIN {print ($CACHE_HITS / $TOTAL) * 100}")
else
HIT_RATE="0"
fi
# System metrics. Default to 0 when a tool's output format differs on this
# host, so the JSON snapshot below always stays syntactically valid.
CPU=$(top -bn1 | grep "Cpu(s)" | awk '{print $2}' | cut -d'%' -f1)
CPU=${CPU:-0}
MEM=$(free | grep Mem | awk '{print ($3/$2) * 100.0}')
MEM=${MEM:-0}
DISK=$(df -h . | tail -1 | awk '{print $5}' | sed 's/%//')
DISK=${DISK:-0}
# Emit the metrics snapshot (unquoted delimiter: expanded by monitor.sh).
cat > "$METRICS_FILE" << METRICS
{
  "timestamp": "$(date -Iseconds)",
  "uptime_seconds": $SECONDS,
  "opportunities_detected": $OPPORTUNITIES,
  "profitable_opportunities": $PROFITABLE,
  "events_processed": $EVENTS_PROCESSED,
  "errors": $ERRORS,
  "warnings": $WARNINGS,
  "cache": {
    "hits": $CACHE_HITS,
    "misses": $CACHE_MISSES,
    "hit_rate_percent": $HIT_RATE
  },
  "system": {
    "cpu_percent": $CPU,
    "memory_percent": $MEM,
    "disk_percent": $DISK
  }
}
METRICS
sleep 60
done
MONITOR_EOF

chmod +x "${LOG_DIR}/monitor.sh"
echo -e "${GREEN}✓ Monitoring initialized${NC}"
echo ""
#############################################################################
# Start MEV Bot
#############################################################################
echo -e "${YELLOW}[3/7] Starting MEV Bot...${NC}"
# Set environment variables for the test
export LOG_LEVEL="info"
export PROVIDER_CONFIG_PATH="$PWD/config/providers_runtime.yaml"
# Start the bot with timeout
# `timeout` bounds the run to the full test duration; `nohup` detaches the
# bot from the terminal so it survives hangups. stdout+stderr both go to
# the log file that monitor.sh later greps for metrics.
nohup timeout ${TEST_DURATION_HOURS}h ./bin/mev-bot start \
> "${LOG_DIR}/mev_bot.log" 2>&1 &
BOT_PID=$!
# Record the PID so external tooling can locate/stop this test run.
echo "$BOT_PID" > "$PID_FILE"
echo -e "${GREEN}✓ MEV Bot started (PID: $BOT_PID)${NC}"
echo ""
# Wait for bot to initialize
echo -e "${YELLOW}Waiting for bot initialization (30 seconds)...${NC}"
sleep 30
# Check if bot is still running
# `kill -0` sends no signal; it only tests that the process still exists.
if ! kill -0 "$BOT_PID" 2>/dev/null; then
echo -e "${RED}✗ Error: Bot failed to start${NC}"
echo -e "${YELLOW}Last 50 lines of log:${NC}"
tail -50 "${LOG_DIR}/mev_bot.log"
exit 1
fi
echo -e "${GREEN}✓ Bot initialized successfully${NC}"
echo ""
#############################################################################
# Start Monitoring
#############################################################################
echo -e "${YELLOW}[4/7] Starting Background Monitoring...${NC}"
# Launch the generated monitor script in the background; it greps the bot
# log and rewrites the metrics JSON once a minute.
"${LOG_DIR}/monitor.sh" "${LOG_DIR}/mev_bot.log" "$METRICS_FILE" &
MONITOR_PID=$!
echo -e "${GREEN}✓ Monitoring started (PID: $MONITOR_PID)${NC}"
echo ""
#############################################################################
# Real-Time Status Display
#############################################################################
echo -e "${YELLOW}[5/7] Monitoring Progress...${NC}"
echo -e "${BLUE}═══════════════════════════════════════════════════════════════${NC}"
echo ""
echo -e "${GREEN}Test is now running for ${TEST_DURATION_HOURS} hours${NC}"
echo -e "${YELLOW}Press Ctrl+C to stop early and generate report${NC}"
echo ""
echo -e "Log file: ${LOG_DIR}/mev_bot.log"
echo -e "Metrics file: ${METRICS_FILE}"
echo ""
echo -e "${BLUE}═══════════════════════════════════════════════════════════════${NC}"
echo ""
# Trap Ctrl+C to generate report
# The single-quoted trap body is evaluated at signal time, so referencing
# generate_report (defined just below) is safe once the script is running.
trap 'echo -e "\n${YELLOW}Stopping test early...${NC}"; kill $BOT_PID $MONITOR_PID 2>/dev/null; generate_report; exit 0' INT
# Function to generate report
#
# Builds the final markdown validation report from the latest metrics JSON
# snapshot and the bot log. Called on normal completion and from the INT trap.
# Reads globals: METRICS_FILE, REPORT_FILE, LOG_DIR, TEST_DURATION_HOURS,
# BOT_PID, and the color variables.
generate_report() {
    echo -e "\n${YELLOW}[6/7] Generating Validation Report...${NC}"

    # Latest metrics snapshot written by the monitor (may not exist yet).
    local metrics="{}"
    if [ -f "$METRICS_FILE" ]; then
        metrics=$(cat "$METRICS_FILE")
    fi

    # Extract a numeric field from the metrics JSON; prints "N/A" if absent.
    json_field() {
        echo "$metrics" | grep -oP "\"$1\":\\s*\\K[0-9.]+" || echo "N/A"
    }

    # Cache hit rate can be fractional (e.g. "82.5"); the original
    # `[ ... -ge 75 ]` integer test errors on such values and silently fell
    # through to the failure branch. Compare numerically with awk instead
    # ("N/A" coerces to 0 and fails the check, matching the old default).
    local hit_rate cache_status cache_check cache_advice
    hit_rate=$(json_field hit_rate_percent)
    if awk -v r="$hit_rate" 'BEGIN { exit !(r + 0 >= 75) }'; then
        cache_status="✓ PASS"
        cache_check="✓ PASS"
        cache_advice="Cache is performing within target range"
    else
        cache_status="⚠ BELOW TARGET"
        cache_check="⚠ CHECK"
        cache_advice="Consider tuning cache TTL and invalidation logic"
    fi

    # Uptime: the bot process must still be alive at report time to pass.
    local uptime_val uptime_status
    if kill -0 "$BOT_PID" 2>/dev/null; then
        uptime_val="100%"
        uptime_status="✓ PASS"
    else
        uptime_val="< 100%"
        uptime_status="✗ FAIL"
    fi

    # Generate markdown report. Log-excerpt greps run inside the heredoc's
    # command substitutions, so a grep with no matches cannot trip `set -e`.
    cat > "$REPORT_FILE" << REPORT_EOF
# 24-Hour Production Validation Test Report
**Test Date:** $(date)
**Duration:** ${TEST_DURATION_HOURS} hours
**Status:** COMPLETED
---
## Summary
\`\`\`json
$metrics
\`\`\`
---
## Key Metrics
### Opportunities
- **Total Detected:** $(json_field opportunities_detected)
- **Profitable:** $(json_field profitable_opportunities)
- **Events Processed:** $(json_field events_processed)
### Cache Performance
- **Hit Rate:** ${hit_rate}%
- **Target:** 75-85%
- **Status:** ${cache_status}
### System Health
- **CPU Usage:** $(json_field cpu_percent)%
- **Memory Usage:** $(json_field memory_percent)%
- **Errors:** $(json_field errors)
- **Warnings:** $(json_field warnings)
---
## Log Analysis
### Top Errors
\`\`\`
$(grep "\[ERROR\]" "${LOG_DIR}/mev_bot.log" | sort | uniq -c | sort -rn | head -10)
\`\`\`
### Top Warnings
\`\`\`
$(grep "\[WARN\]" "${LOG_DIR}/mev_bot.log" | sort | uniq -c | sort -rn | head -10)
\`\`\`
### Sample Opportunities
\`\`\`
$(grep "ARBITRAGE OPPORTUNITY DETECTED" "${LOG_DIR}/mev_bot.log" | head -5)
\`\`\`
---
## Validation Criteria
| Criterion | Target | Actual | Status |
|-----------|--------|--------|--------|
| Uptime | 100% | ${uptime_val} | ${uptime_status} |
| Cache Hit Rate | 75-85% | ${hit_rate}% | ${cache_check} |
| No Crashes | 0 | TBD | TBD |
| Error Rate | < 5% | TBD | TBD |
---
## Recommendations
1. **Cache Performance:** ${cache_advice}
2. **Opportunities:** Review profitable opportunities and analyze why others were rejected
3. **Errors:** Address top errors before production deployment
4. **System Resources:** Monitor CPU/memory usage trends for capacity planning
---
## Next Steps
- [ ] Review this report with the team
- [ ] Address any identified issues
- [ ] Run additional 24h test if needed
- [ ] Proceed to limited production deployment
---
**Generated:** $(date)
REPORT_EOF

    echo -e "${GREEN}✓ Report generated: $REPORT_FILE${NC}"
}
# Periodic status display: while the bot process is alive, print a snapshot
# from the metrics file every five minutes.

# show_metric FIELD PATTERN — pull one value out of the metrics JSON,
# falling back to "0" when the field is absent.
show_metric() {
    grep -oP "\"$1\":\\s*\\K$2" "$METRICS_FILE" || echo "0"
}

while kill -0 "$BOT_PID" 2>/dev/null; do
    sleep 300 # 5-minute reporting interval
    if [ -f "$METRICS_FILE" ]; then
        echo -e "${BLUE}[$(date '+%H:%M:%S')] Status Update:${NC}"
        echo -e " Opportunities: $(show_metric opportunities_detected '[0-9]+')"
        echo -e " Profitable: $(show_metric profitable_opportunities '[0-9]+')"
        echo -e " Events: $(show_metric events_processed '[0-9]+')"
        echo -e " Cache Hit Rate: $(show_metric hit_rate_percent '[0-9.]+')%"
        echo -e " CPU: $(show_metric cpu_percent '[0-9.]+')%"
        echo -e " Memory: $(show_metric memory_percent '[0-9.]+')%"
        echo ""
    fi
done
#############################################################################
# Test Complete
#############################################################################

# Tear down the background monitor; it may have already exited on its own.
kill "$MONITOR_PID" 2>/dev/null || true

# Produce the final markdown report.
generate_report

RULE="${BLUE}═══════════════════════════════════════════════════════════════${NC}"
echo ""
echo -e "$RULE"
echo -e "${GREEN}✓ 24-Hour Validation Test Complete${NC}"
echo -e "$RULE"
echo ""
echo -e "${GREEN}Test End Time:${NC} $(date)"
echo -e "${GREEN}Report Location:${NC} $REPORT_FILE"
echo -e "${GREEN}Logs Location:${NC} ${LOG_DIR}"
echo ""
echo -e "${YELLOW}[7/7] Next Steps:${NC}"
echo -e " 1. Review the validation report: cat $REPORT_FILE"
echo -e " 2. Analyze logs for errors: grep ERROR ${LOG_DIR}/mev_bot.log"
echo -e " 3. Check for profitable opportunities: grep 'Net Profit' ${LOG_DIR}/mev_bot.log"
echo -e " 4. Verify cache performance meets target (75-85% hit rate)"
echo ""
echo -e "${GREEN}Test completed successfully!${NC}"

View File

@@ -1,83 +0,0 @@
// SPDX-License-Identifier: MIT
pragma solidity ^0.8.0;
import "forge-std/Script.sol";
import "../contracts/balancer/FlashLoanReceiverSecure.sol";
/// @title Deploy FlashLoanReceiverSecure to Fork
/// @notice Deployment script for testing flash loan contract on forked Arbitrum
/// @dev Run with forge script; requires PRIVATE_KEY in the environment.
contract DeployFlashLoanSecure is Script {
// Balancer Vault on Arbitrum mainnet
address constant BALANCER_VAULT = 0xBA12222222228d8Ba445958a75a0704d566BF2C8;
// Arbitrum token addresses for testing
address constant WETH = 0x82aF49447D8a07e3bd95BD0d56f35241523fBab1;
address constant USDC = 0xaf88d065e77c8cC2239327C5EDb3A432268e5831;
address constant USDT = 0xFd086bC7CD5C481DCC9C85ebE478A1C0b69FCbb9;
address constant ARB = 0x912CE59144191C1204E64559FE8253a0e49E6548;
// Uniswap V3 Router
address constant UNISWAP_V3_ROUTER = 0xE592427A0AEce92De3Edee1F18E0157C05861564;
/// @notice Deploys FlashLoanReceiverSecure with the Balancer vault address
///         and logs the deployed contract's parameters, reference token and
///         router addresses, and suggested follow-up steps.
function run() external {
// Broadcast all subsequent state-changing calls from the deployer key.
uint256 deployerPrivateKey = vm.envUint("PRIVATE_KEY");
vm.startBroadcast(deployerPrivateKey);
console.log("========================================");
console.log("Deploying FlashLoanReceiverSecure");
console.log("========================================");
console.log("Deployer:", vm.addr(deployerPrivateKey));
console.log("Balancer Vault:", BALANCER_VAULT);
console.log("");
// Deploy FlashLoanReceiverSecure
FlashLoanReceiverSecure flashLoanReceiver = new FlashLoanReceiverSecure(
BALANCER_VAULT
);
// Echo the deployed contract's configuration for manual verification.
console.log("========================================");
console.log("Deployment Successful!");
console.log("========================================");
console.log("Contract Address:", address(flashLoanReceiver));
console.log("Owner:", flashLoanReceiver.owner());
console.log("Max Slippage:", flashLoanReceiver.MAX_SLIPPAGE_BPS(), "bps (0.5%)");
console.log("Max Path Length:", flashLoanReceiver.MAX_PATH_LENGTH());
console.log("");
console.log("========================================");
console.log("Test Token Addresses (Arbitrum)");
console.log("========================================");
console.log("WETH:", WETH);
console.log("USDC:", USDC);
console.log("USDT:", USDT);
console.log("ARB:", ARB);
console.log("");
console.log("========================================");
console.log("DEX Router Addresses");
console.log("========================================");
console.log("Uniswap V3:", UNISWAP_V3_ROUTER);
console.log("");
console.log("========================================");
console.log("Next Steps");
console.log("========================================");
console.log("1. Fund contract with test ETH for gas");
console.log("2. Test flash loan with small amount");
console.log("3. Verify slippage protection works");
console.log("4. Test reentrancy protection");
console.log("5. Execute real arbitrage path");
console.log("");
// Example cast invocation for a 1 WETH flash loan against the receiver.
console.log("Example: Flash loan 1 WETH");
console.log(" cast send", address(flashLoanReceiver));
console.log(" 'executeArbitrage(address[],uint256[],bytes)'");
console.log(" '[", WETH, "]'");
console.log(" '[1000000000000000000]' # 1 WETH");
console.log(" '<encoded-path>'");
console.log("");
vm.stopBroadcast();
}
}

View File

@@ -1,175 +0,0 @@
# MEV Bot Scripts Directory
This directory contains all operational, utility, and development scripts for the MEV Bot project.
## Core Scripts
### Build & Runtime
- **build.sh** - Universal Go build script with configurable options
- **run.sh** - Main MEV bot execution script with production environment loading
- **test.sh** - Basic test runner
### Log Management
- **log-manager.sh** ⭐ Production-grade log management system
- Real-time analysis and health monitoring
- Performance tracking with MEV metrics
- Corruption detection and alerting
- Background daemon and dashboard generation
- See: `./scripts/log-manager.sh --help`
## CI/CD & Quality Assurance
### Primary CI Pipeline
- **../harness/local-ci-pipeline.sh** - Comprehensive CI/CD pipeline
- **ci-precommit.sh** - Fast pre-commit validation (10-30s)
- **ci-quick.sh** - Quick CI pipeline (30-60s)
- **ci-dev.sh** - Development CI pipeline (1-2min)
- **ci-full.sh** - Full CI pipeline (3-5min)
- **ci-container.sh** - Containerized CI execution
- **ci-watch.sh** - Watch mode for continuous validation
### Testing
- **test-runner.sh** - Configurable test execution (levels: basic, unit, integration, comprehensive, audit)
- **run_audit_suite.sh** - Mathematical correctness audit
- **security-validation.sh** - Comprehensive security validation
- **quick-test.sh** - Quick fix validation (30s)
- **run-stress-tests.sh** - Stress testing
- **run-fork-tests.sh** - Blockchain fork testing
## Deployment & Production
### Contract Deployment
- **deploy-contracts.sh** - Deploy smart contracts to Arbitrum
- **verify-contracts.sh** - Verify contracts on Arbiscan
- **deploy-staging.sh** - Staging environment deployment
- **deploy-production.sh** - Full production deployment with Docker Compose
### Production Operations
- **production-start.sh** - Start production MEV bot
- **production-validation.sh** - Pre-deployment validation checks
- **pre-run-validation.sh** - Environment validation before startup
## Wallet Management
- **setup-keystore.sh** - Encrypt and securely store private keys (AES-256-CBC)
- **fund-bot-wallet.sh** - Fund MEV bot wallet using Foundry cast
- **check-wallet-balance.sh** - Check wallet balance on Arbitrum One
## Monitoring & Analysis
- **watch-live.sh** - Real-time MEV bot activity monitor
- **analyze.sh** - Comprehensive system analysis (tests, benchmarks, coverage, static analysis)
- **performance-profile.sh** - Performance profiling with pprof
## Development Utilities
### Environment Setup
- **setup-env.sh** - Environment variable setup
- **setup-dev.sh** - Development environment setup
- **fix-rpc-config.sh** - Fix RPC configuration issues
### Git Workflow
- **git-hooks-setup.sh** - Install git hooks
- **git-enhanced.sh** - Enhanced git workflow commands
- **git-local-server.sh** - Local git server simulation
### Data & Code Generation
- **fetch_arbiscan_tx.sh** - Fetch transaction data from Arbiscan
- **extract_multicall_fixture.sh** - Extract multicall fixtures for testing
- **refresh-mev-datasets.sh** - Update MEV research datasets
- **generate-bindings.sh** - Generate Go bindings for smart contracts
### Other Utilities
- **kill-bot.sh** - Stop running MEV bot processes
- **dependency-scan.sh** - Scan for dependency vulnerabilities
- **verify-organization.sh** - Verify project organization
- **24h-validation-test.sh** - 24-hour validation test
## Special Directories
### deprecated/
Contains scripts that have been superseded by better alternatives. See `deprecated/README.md` for migration guide.
**Replaced by log-manager.sh:**
- archive-logs.sh
- quick-archive.sh
- view-latest-archive.sh
- rotate-logs.sh
- setup-log-rotation.sh
### demos/
Contains demonstration and example scripts for testing purposes only. Not for production use.
## Quick Reference
### Development Workflow
```bash
# Setup
./scripts/setup-dev.sh
# Quick validation
./scripts/ci-precommit.sh
# Run tests
./scripts/test-runner.sh --level comprehensive --coverage
# Security check
./scripts/security-validation.sh
# Math audit
./scripts/run_audit_suite.sh
```
### Production Deployment
```bash
# Validate environment
./scripts/production-validation.sh
# Deploy contracts
./scripts/deploy-contracts.sh
# Setup wallet
./scripts/setup-keystore.sh
./scripts/check-wallet-balance.sh
# Deploy and start
./scripts/deploy-production.sh
./scripts/run.sh
```
### Monitoring
```bash
# Live activity monitor
./scripts/watch-live.sh
# Log management
./scripts/log-manager.sh analyze
./scripts/log-manager.sh health
./scripts/log-manager.sh dashboard
# Performance profiling
./scripts/performance-profile.sh
```
## Documentation
For detailed script analysis and recommendations, see:
- **docs/SCRIPT_ANALYSIS_REPORT.md** - Comprehensive script analysis
- **Makefile** - Build automation targets and workflows
## Contributing
When adding new scripts:
1. Make scripts executable: `chmod +x script-name.sh`
2. Add shebang: `#!/bin/bash` or `#!/usr/bin/env bash`
3. Use `set -e` for error handling
4. Add descriptive comments
5. Update this README
6. Add help text (use --help flag)
---
**Total Scripts:** 80+
**Active Scripts:** 50+
**Deprecated Scripts:** 5
**Demo Scripts:** 1

View File

@@ -1,110 +0,0 @@
#!/bin/bash
# Analyze blacklisted-but-valid pools: probe well-known ABI selectors via
# eth_call to classify each pool's DEX protocol, then summarize the
# blacklist's recorded failure reasons.

echo "Analyzing 171 Valid Failing Pools"
echo "=================================="
echo ""

BLACKLIST_FILE="logs/pool_blacklist.json"
RPC="https://arb1.arbitrum.io/rpc"

# First 30 pool addresses from the cleaned blacklist.
# (jq reads the file directly — no `cat` pipeline needed.)
POOLS=$(jq -r '.[].address' "$BLACKLIST_FILE" | head -30)

echo "Testing first 30 valid failing pools to identify exchange types..."
echo ""

UNISWAP_V3=0
UNISWAP_V2=0
OTHER=0
UNKNOWN=0

# call_selector POOL SELECTOR — eth_call the 4-byte selector against the
# pool and print the raw hex return data, or "error" on RPC failure.
call_selector() {
    curl -s -X POST "$RPC" \
        -H "Content-Type: application/json" \
        -d "{\"jsonrpc\":\"2.0\",\"method\":\"eth_call\",\"params\":[{\"to\":\"$1\",\"data\":\"$2\"},\"latest\"],\"id\":1}" \
        | jq -r '.result // "error"'
}

while IFS= read -r POOL; do
    [ -n "$POOL" ] || continue
    echo "Analyzing $POOL:"
    TOKEN0=$(call_selector "$POOL" "0x0dfe1681")   # token0()
    TOKEN1=$(call_selector "$POOL" "0xd21220a7")   # token1()
    FEE=$(call_selector "$POOL" "0xddca3f43")      # fee() — V3 only
    RESERVES=$(call_selector "$POOL" "0x0902f1ac") # getReserves() — V2 family
    SLOT0=$(call_selector "$POOL" "0x3850c7bd")    # slot0() — V3 only

    if [[ "$TOKEN0" == "0x"* ]] && [[ "$TOKEN1" == "0x"* ]]; then
        if [[ "$FEE" == "0x"* ]] && [[ "$SLOT0" == "0x"* ]]; then
            echo " ✅ UniswapV3 Pool (has token0, token1, fee, slot0)"
            UNISWAP_V3=$((UNISWAP_V3 + 1))
            # Decode the fee tier (uint24, hex-encoded). The redundant inner
            # re-check of $FEE was removed: this branch already guarantees
            # it starts with "0x".
            FEE_INT=$((16#${FEE:2}))
            echo " Fee tier: $FEE_INT ($(echo "scale=2; $FEE_INT/10000" | bc)%)"
            # Addresses are the low 20 bytes of the 32-byte return word.
            TOKEN0_ADDR="0x${TOKEN0: -40}"
            TOKEN1_ADDR="0x${TOKEN1: -40}"
            echo " Token0: $TOKEN0_ADDR"
            echo " Token1: $TOKEN1_ADDR"
        elif [[ "$RESERVES" == "0x"* ]] && [[ ${#RESERVES} -gt 66 ]]; then
            echo " ✅ UniswapV2/Sushiswap Pool (has token0, token1, getReserves)"
            UNISWAP_V2=$((UNISWAP_V2 + 1))
        else
            echo " ⚠️ Has token0/token1 but unknown type"
            UNKNOWN=$((UNKNOWN + 1))
        fi
    else
        echo " ❌ Not Uniswap - checking other protocols..."
        # Check for Balancer getPoolTokens()
        BALANCER=$(call_selector "$POOL" "0xf94d4668")
        # NOTE(review): an eth_call to a non-contract also returns "0x",
        # so this check can report false positives — confirm candidates
        # against the Balancer vault/registry.
        if [[ "$BALANCER" != "error" ]] && [[ "$BALANCER" == "0x"* ]]; then
            echo " ✅ Possibly Balancer Pool"
            OTHER=$((OTHER + 1))
        else
            echo " ❓ Unknown DEX type"
            UNKNOWN=$((UNKNOWN + 1))
        fi
    fi
    echo ""
done <<< "$POOLS"

echo "Summary of 30 Analyzed Pools"
echo "============================="
echo "UniswapV3: $UNISWAP_V3"
echo "UniswapV2/Sushi: $UNISWAP_V2"
echo "Other DEX: $OTHER"
echo "Unknown: $UNKNOWN"
echo ""
echo "Checking original error reasons from blacklist..."
echo "================================================="
# Single jq invocation (the original piped one jq into a second jq).
jq -r '[.[] | select(.is_blacklisted == true)]
       | group_by(.last_reason)
       | map({reason: .[0].last_reason, count: length})
       | .[] | "\(.reason): \(.count)"' "$BLACKLIST_FILE"

View File

@@ -1,51 +0,0 @@
#!/bin/bash
# MEV Bot Comprehensive Analysis Script
#
# Runs the full local quality pipeline: import formatting, dependency check,
# tests (verbose, race-detected, with coverage), benchmarks, and lint.
# Failures in individual steps do not stop the script (no `set -e`), so
# review the full output for errors.
echo "Running comprehensive analysis of the MEV bot system..."
# Ensure local Go cache directories are available for sandboxed environments
# (keeps all Go caches project-local instead of under $HOME).
PROJECT_ROOT="$(pwd)"
export GOCACHE="${GOCACHE:-${PROJECT_ROOT}/.gocache}"
export GOMODCACHE="${GOMODCACHE:-${PROJECT_ROOT}/.gomodcache}"
export GOPATH="${GOPATH:-${PROJECT_ROOT}/.gopath}"
export GOFLAGS="${GOFLAGS:--mod=vendor}"
export GOLANGCI_LINT_CACHE="${GOLANGCI_LINT_CACHE:-${PROJECT_ROOT}/.golangci-cache}"
mkdir -p "$GOCACHE" "$GOMODCACHE" "${GOPATH}/pkg/mod" "$GOLANGCI_LINT_CACHE"
# Ensure imports are correct (rewrites files in place)
echo "Checking Go imports"
goimports -w .
# Check go.mod for dependencies without modifying files
echo "Checking Go module dependencies..."
if ! go list ./cmd/... ./internal/... ./pkg/... > /dev/null; then
echo "Dependency graph check failed; please review module configuration."
fi
# Ensure log directory exists for tests that expect it
mkdir -p logs
# Run all tests
echo "Running all tests..."
go test ./pkg/... -v
# Run benchmarks if available
echo "Running benchmarks..."
go test -bench=. -benchmem ./pkg/uniswap/
# Check for any race conditions
echo "Checking for race conditions..."
go test -race ./pkg/...
# Check code coverage
echo "Checking code coverage..."
go test -coverprofile=coverage.out ./pkg/...
go tool cover -func=coverage.out
# Run static analysis
echo "Running static analysis..."
golangci-lint run ./cmd/... ./internal/... ./pkg/...
echo "Analysis complete. Review the output for any errors or warnings."

View File

@@ -1,377 +0,0 @@
#!/usr/bin/env bash
# Critical Fixes Application Script
# Date: 2025-10-30
# Purpose: Apply all critical fixes identified in log analysis

# Fail fast: abort on errors, unset variables, and failed pipeline stages.
set -euo pipefail

# Resolve the script's own location so it works from any working directory.
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="$(cd "$SCRIPT_DIR/.." && pwd)"

# ANSI color codes for the log helpers
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
NC='\033[0m' # reset

echo "========================================="
echo "MEV Bot Critical Fixes - Application Script"
echo "========================================="
echo "Date: $(date)"
echo "Project: $PROJECT_ROOT"
echo ""
# Logging helpers: level-tagged, color-coded messages on stdout.
log_info()  { echo -e "${GREEN}[INFO]${NC} $1"; }
log_warn()  { echo -e "${YELLOW}[WARN]${NC} $1"; }
log_error() { echo -e "${RED}[ERROR]${NC} $1"; }
# Copy a file into a fresh timestamped directory under $PROJECT_ROOT/backups.
# The directory is created even when the source file does not exist (matching
# the original behavior); the copy itself is skipped in that case.
create_backup() {
    local src=$1
    local dest_dir
    dest_dir="$PROJECT_ROOT/backups/$(date +%Y%m%d_%H%M%S)"
    mkdir -p "$dest_dir"
    [ -f "$src" ] || return 0
    cp "$src" "$dest_dir/$(basename "$src").backup"
    log_info "Backed up: $src -> $dest_dir"
}
echo "========================================"
echo "FIX 1: Log Manager Script Bug (Line 188)"
echo "========================================"
LOG_MANAGER_SCRIPT="$PROJECT_ROOT/scripts/log-manager.sh"
if [ -f "$LOG_MANAGER_SCRIPT" ]; then
log_info "Found log-manager.sh, applying fix..."
create_backup "$LOG_MANAGER_SCRIPT"
# Fix: Replace the problematic line with proper variable quoting
# The awk program rewrites the matching line in log-manager.sh; the escaped
# quotes/backslashes below are deliberate — they become literal characters
# in the replacement line written into the target script.
awk '
{
if (/recent_health_trend.*recent_errors/) {
print " \"recent_health_trend\": \"$([ -n \"${recent_errors}\" ] && [ \"${recent_errors}\" -lt 10 ] 2>/dev/null && echo good || echo concerning)\"";
} else {
print;
}
}
' "$LOG_MANAGER_SCRIPT" > "$LOG_MANAGER_SCRIPT.tmp"
# Swap the rewritten file into place only if awk produced output.
if [ -f "$LOG_MANAGER_SCRIPT.tmp" ]; then
mv "$LOG_MANAGER_SCRIPT.tmp" "$LOG_MANAGER_SCRIPT"
chmod +x "$LOG_MANAGER_SCRIPT"
log_info "✅ Log manager script fixed"
else
log_error "Failed to create fixed script"
fi
else
log_warn "Log manager script not found, skipping..."
fi
echo ""
echo "========================================="
echo "FIX 2: Add Zero Address Validation Checks"
echo "========================================="
# Create a validation helper file
# The quoted delimiter ('GOEOF') writes the Go source below verbatim.
VALIDATION_HELPER="$PROJECT_ROOT/pkg/utils/address_validation.go"
log_info "Creating address validation helper..."
mkdir -p "$PROJECT_ROOT/pkg/utils"
cat > "$VALIDATION_HELPER" << 'GOEOF'
package utils
import (
"fmt"
"github.com/ethereum/go-ethereum/common"
)
// ValidateAddress ensures an address is not zero
func ValidateAddress(addr common.Address, name string) error {
if addr == (common.Address{}) {
return fmt.Errorf("%s cannot be zero address", name)
}
return nil
}
// ValidateAddresses validates multiple addresses
func ValidateAddresses(addrs map[string]common.Address) error {
for name, addr := range addrs {
if err := ValidateAddress(addr, name); err != nil {
return err
}
}
return nil
}
// IsZeroAddress checks if address is zero
func IsZeroAddress(addr common.Address) bool {
return addr == (common.Address{})
}
GOEOF
log_info "✅ Created address validation helper"
echo ""
echo "========================================="
echo "FIX 3: Update RPC Configuration"
echo "========================================="

# Append conservative RPC rate-limit settings to the env files if they are
# not already present. The grep -q guard makes this idempotent: re-running
# the script does not duplicate the block.
ENV_FILE="$PROJECT_ROOT/.env"
ENV_PRODUCTION="$PROJECT_ROOT/.env.production"
log_info "Updating RPC configuration for conservative rate limiting..."
# Update .env if it exists
if [ -f "$ENV_FILE" ]; then
    create_backup "$ENV_FILE"
    # Ensure proper RPC configuration (only append once).
    if ! grep -q "ARBITRUM_RPC_RATE_LIMIT" "$ENV_FILE"; then
        # Quoted delimiter: the settings are written literally.
        cat >> "$ENV_FILE" << 'ENVEOF'

# RPC Rate Limiting (Conservative Settings)
ARBITRUM_RPC_RATE_LIMIT=5
ARBITRUM_RPC_BURST=10
ARBITRUM_RPC_MAX_RETRIES=3
ARBITRUM_RPC_BACKOFF_SECONDS=1
ENVEOF
        log_info "Added rate limiting config to .env"
    fi
fi
# Update .env.production if it exists (same pattern, higher limits).
if [ -f "$ENV_PRODUCTION" ]; then
    create_backup "$ENV_PRODUCTION"
    if ! grep -q "ARBITRUM_RPC_RATE_LIMIT" "$ENV_PRODUCTION"; then
        cat >> "$ENV_PRODUCTION" << 'ENVEOF'

# RPC Rate Limiting (Production Settings)
ARBITRUM_RPC_RATE_LIMIT=10
ARBITRUM_RPC_BURST=20
ARBITRUM_RPC_MAX_RETRIES=5
ARBITRUM_RPC_BACKOFF_SECONDS=2
ENVEOF
        log_info "Added rate limiting config to .env.production"
    fi
fi
log_info "✅ RPC configuration updated"
echo ""
echo "========================================="
echo "FIX 4: Create Pre-Run Validation Script"
echo "========================================="

# Generate scripts/pre-run-validation.sh: an environment sanity check run
# before starting the bot. The heredoc delimiter is quoted ('VALEOF') so
# the script is written verbatim; $(...) inside it runs when the generated
# script runs, not now.
VALIDATION_SCRIPT="$PROJECT_ROOT/scripts/pre-run-validation.sh"
cat > "$VALIDATION_SCRIPT" << 'VALEOF'
#!/bin/bash
# Pre-Run Validation Script
# Validates environment before starting MEV bot
set -e
echo "========================================="
echo "MEV Bot Pre-Run Validation"
echo "========================================="
ERRORS=0
# Check RPC endpoints
echo "[1/5] Checking RPC endpoints..."
if [ -z "$ARBITRUM_RPC_ENDPOINT" ]; then
    echo "❌ ARBITRUM_RPC_ENDPOINT not set"
    ERRORS=$((ERRORS + 1))
else
    echo "✅ ARBITRUM_RPC_ENDPOINT: $ARBITRUM_RPC_ENDPOINT"
fi
# Check for wss:// or https:// prefix
echo "[2/5] Validating endpoint format..."
if [[ "$ARBITRUM_RPC_ENDPOINT" == wss://* ]] || [[ "$ARBITRUM_RPC_ENDPOINT" == https://* ]]; then
    echo "✅ Endpoint format valid"
else
    echo "❌ Endpoint must start with wss:// or https://"
    ERRORS=$((ERRORS + 1))
fi
# Check log directory
echo "[3/5] Checking log directory..."
if [ -d "logs" ]; then
    echo "✅ Log directory exists"
    # Check for excessive zero addresses in recent logs
    if [ -f "logs/liquidity_events_$(date +%Y-%m-%d).jsonl" ]; then
        # grep -c prints the count (including 0) but exits non-zero on no
        # match; a "|| echo 0" fallback would append a second "0" line and
        # break the numeric -gt comparison below. Use "|| true" and default
        # the value for the missing-file case instead.
        ZERO_COUNT=$(grep -c "0x0000000000000000000000000000000000000000" "logs/liquidity_events_$(date +%Y-%m-%d).jsonl" 2>/dev/null || true)
        ZERO_COUNT=${ZERO_COUNT:-0}
        echo "Zero addresses in today's events: $ZERO_COUNT"
        if [ "$ZERO_COUNT" -gt 10 ]; then
            echo "⚠️ WARNING: High zero address count detected"
        fi
    fi
else
    mkdir -p logs
    echo "✅ Created log directory"
fi
# Check binary exists
echo "[4/5] Checking binary..."
if [ -f "./mev-bot" ] || [ -f "./bin/mev-bot" ]; then
    echo "✅ MEV bot binary found"
else
    echo "❌ MEV bot binary not found. Run 'make build' first"
    ERRORS=$((ERRORS + 1))
fi
# Check for port conflicts
echo "[5/5] Checking for port conflicts..."
if lsof -Pi :9090 -sTCP:LISTEN -t >/dev/null 2>&1; then
    echo "⚠️ WARNING: Port 9090 (metrics) already in use"
fi
if lsof -Pi :8080 -sTCP:LISTEN -t >/dev/null 2>&1; then
    echo "⚠️ WARNING: Port 8080 (dashboard) already in use"
fi
echo ""
echo "========================================="
if [ $ERRORS -eq 0 ]; then
    echo "✅ Validation PASSED - Safe to start"
    exit 0
else
    echo "❌ Validation FAILED - $ERRORS error(s) found"
    exit 1
fi
VALEOF
chmod +x "$VALIDATION_SCRIPT"
log_info "✅ Created pre-run validation script"
echo ""
echo "========================================="
echo "FIX 5: Archive Old Logs"
echo "========================================="

# Compress large day-old logs and prune week-old archives to bound disk use.
log_info "Archiving old logs to reduce disk usage..."
# Guard the cd: if the logs directory is missing, the previous unguarded
# `cd` could leave us running find/delete against the current directory.
if cd "$PROJECT_ROOT/logs" 2>/dev/null; then
    # Archive logs older than 1 day (and larger than 10MB).
    find . -name "*.log" -type f -mtime +1 -size +10M -exec gzip {} \; 2>/dev/null || true
    # Move very old archives: once more than 5 have accumulated, delete
    # those older than 7 days.
    if [ -d "archived" ]; then
        ARCHIVE_COUNT=$(find archived/ -name "*.log" -type f | wc -l)
        if [ "$ARCHIVE_COUNT" -gt 5 ]; then
            log_info "Found $ARCHIVE_COUNT old archive files"
            find archived/ -name "*.log" -type f -mtime +7 -delete 2>/dev/null || true
            log_info "Cleaned up old archives"
        fi
    fi
    cd "$PROJECT_ROOT"
    log_info "✅ Log archiving complete"
else
    log_warn "Logs directory not found at $PROJECT_ROOT/logs, skipping archive step"
fi
echo ""
echo "========================================="
echo "FIX 6: Create Quick Test Script"
echo "========================================="

# Generate scripts/quick-test.sh: a 30-second smoke test that validates,
# builds, briefly runs the bot, and scans its output for known failure
# signatures. Quoted heredoc delimiter: written verbatim.
TEST_SCRIPT="$PROJECT_ROOT/scripts/quick-test.sh"
cat > "$TEST_SCRIPT" << 'TESTEOF'
#!/bin/bash
# Quick Test Script - Validates fixes are working
set -e
echo "========================================="
echo "MEV Bot Quick Test"
echo "========================================="
# Run pre-validation
echo "[1/3] Running pre-run validation..."
./scripts/pre-run-validation.sh
# Build
echo "[2/3] Building..."
make build 2>&1 | tail -10
# Run for 30 seconds
echo "[3/3] Running bot for 30 seconds..."
timeout 30 ./mev-bot start 2>&1 | tee test-run.log || true
echo ""
echo "========================================="
echo "Analyzing Test Run..."
echo "========================================="
# Check for critical errors.
# grep -c prints the count even when it is 0 but exits non-zero on no
# match, so use "|| true": a "|| echo 0" fallback would append a second
# line and break the numeric comparisons below.
WSS_ERRORS=$(grep -c "unsupported protocol scheme" test-run.log 2>/dev/null || true)
WSS_ERRORS=${WSS_ERRORS:-0}
# A canonical EVM zero address is 0x followed by exactly 40 zeros (the
# previous pattern had 41 zeros and could never match).
ZERO_ADDR=$(grep -c "0x0000000000000000000000000000000000000000" test-run.log 2>/dev/null || true)
ZERO_ADDR=${ZERO_ADDR:-0}
RATE_LIMITS=$(grep -c "Too Many Requests" test-run.log 2>/dev/null || true)
RATE_LIMITS=${RATE_LIMITS:-0}
echo "WebSocket errors: $WSS_ERRORS"
echo "Zero addresses: $ZERO_ADDR"
echo "Rate limit errors: $RATE_LIMITS"
if [ "$WSS_ERRORS" -eq 0 ] && [ "$ZERO_ADDR" -lt 10 ] && [ "$RATE_LIMITS" -lt 10 ]; then
    echo ""
    echo "✅ TEST PASSED - Fixes appear to be working"
    exit 0
else
    echo ""
    echo "⚠️ TEST WARNINGS - Some issues remain:"
    [ "$WSS_ERRORS" -gt 0 ] && echo " - WebSocket errors still present"
    [ "$ZERO_ADDR" -ge 10 ] && echo " - High zero address count"
    [ "$RATE_LIMITS" -ge 10 ] && echo " - Rate limiting issues"
    exit 1
fi
TESTEOF
chmod +x "$TEST_SCRIPT"
log_info "✅ Created quick test script"
echo ""
echo "========================================="
echo "Summary of Applied Fixes"
echo "========================================="
echo ""
echo "✅ Fixed log manager script bug (line 188)"
echo "✅ Created address validation helper"
echo "✅ Updated RPC configuration with rate limiting"
echo "✅ Created pre-run validation script"
echo "✅ Archived old logs"
echo "✅ Created quick test script"
echo ""
echo "========================================="
echo "Next Steps"
echo "========================================="
echo ""
echo "1. Review changes: git diff"
echo "2. Run validation: ./scripts/pre-run-validation.sh"
echo "3. Build: make build"
echo "4. Quick test: ./scripts/quick-test.sh"
echo "5. Full test: timeout 60 ./mev-bot start"
echo ""
# NOTE(review): the timestamp below is re-evaluated at print time and may
# not match the backup directory actually created earlier in this run —
# TODO capture the backup dir name once and reuse it here.
echo "Backup location: $PROJECT_ROOT/backups/$(date +%Y%m%d_%H%M%S)"
echo ""
echo "========================================="
echo "Fixes Applied Successfully!"
echo "========================================="

View File

@@ -1,144 +0,0 @@
#!/bin/bash
# MEV Bot Auto-Update Script
# Checks for updates on master branch and automatically pulls, rebuilds, and restarts
set -e
# Configuration (overridable via environment):
#   GIT_BRANCH  - branch to track (default: master)
#   GIT_REMOTE  - remote to fetch from (default: origin)
#   PROJECT_DIR - repo root (default: parent of this script's directory)
BRANCH="${GIT_BRANCH:-master}"
REMOTE="${GIT_REMOTE:-origin}"
PROJECT_DIR="${PROJECT_DIR:-$(cd "$(dirname "$0")/.." && pwd)}"
LOG_FILE="${PROJECT_DIR}/logs/auto-update.log"
# Color codes (single-quoted: literal escape sequences, rendered by echo -e)
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
RED='\033[0;31m'
BLUE='\033[0;34m'
NC='\033[0m' # No Color
# Ensure log directory exists
mkdir -p "${PROJECT_DIR}/logs"
# Timestamped logger: prints "[YYYY-mm-dd HH:MM:SS] message" to stdout and
# appends the same line to $LOG_FILE.
log() {
    printf '[%s] %s\n' "$(date '+%Y-%m-%d %H:%M:%S')" "$1" | tee -a "$LOG_FILE"
}
# Color-aware logger: interprets backslash escapes (ANSI color codes) in
# its argument and mirrors the rendered line to $LOG_FILE.
log_color() {
    printf '%b\n' "$1" | tee -a "$LOG_FILE"
}
# Change to project directory
cd "$PROJECT_DIR"
# NOTE(review): this banner never resets the color (no ${NC}); the BLUE
# code bleeds into the next line — confirm intended.
log_color "${BLUE}========================================="
log "MEV Bot Auto-Update Check"
log_color "${BLUE}=========================================${NC}"
log "Project: $PROJECT_DIR"
log "Branch: $BRANCH"
log "Remote: $REMOTE"
log ""
# Fetch latest changes from remote
log_color "${YELLOW}Fetching latest changes from $REMOTE...${NC}"
if git fetch "$REMOTE" >> "$LOG_FILE" 2>&1; then
    log_color "${GREEN}✓ Fetch successful${NC}"
else
    log_color "${RED}✗ Fetch failed${NC}"
    exit 1
fi
# Get current and remote commit hashes
LOCAL_COMMIT=$(git rev-parse HEAD)
REMOTE_COMMIT=$(git rev-parse "$REMOTE/$BRANCH")
log "Local commit: $LOCAL_COMMIT"
log "Remote commit: $REMOTE_COMMIT"
# Check if update is available; identical hashes mean nothing to do.
if [ "$LOCAL_COMMIT" = "$REMOTE_COMMIT" ]; then
    log_color "${GREEN}✓ Already up to date${NC}"
    log ""
    exit 0
fi
# Update available
log_color "${YELLOW}⚠ Update available!${NC}"
log ""
# Show what's new
log_color "${BLUE}New commits:${NC}"
git log --oneline "$LOCAL_COMMIT..$REMOTE_COMMIT" | tee -a "$LOG_FILE"
log ""
# Check if we're on the correct branch; refuse to auto-update otherwise.
CURRENT_BRANCH=$(git rev-parse --abbrev-ref HEAD)
if [ "$CURRENT_BRANCH" != "$BRANCH" ]; then
    log_color "${RED}✗ Not on $BRANCH branch (currently on $CURRENT_BRANCH)${NC}"
    log "Skipping auto-update. Please switch to $BRANCH manually."
    exit 1
fi
# Check for uncommitted changes; a dirty tree would make the pull unsafe.
if ! git diff-index --quiet HEAD --; then
    log_color "${RED}✗ Uncommitted changes detected${NC}"
    log "Skipping auto-update. Please commit or stash changes first."
    git status --short | tee -a "$LOG_FILE"
    exit 1
fi
# Pull the changes
log_color "${YELLOW}Pulling changes from $REMOTE/$BRANCH...${NC}"
if git pull "$REMOTE" "$BRANCH" >> "$LOG_FILE" 2>&1; then
    log_color "${GREEN}✓ Pull successful${NC}"
else
    log_color "${RED}✗ Pull failed${NC}"
    exit 1
fi
# The post-merge hook will handle rebuild and restart
log ""
log_color "${YELLOW}Post-merge hook will handle rebuild and restart...${NC}"
log ""
# Wait for post-merge hook to complete
# NOTE(review): a fixed 2-second sleep is a race — the hook's rebuild and
# restart may still be in progress when the status check below runs.
sleep 2
# Verify container is running
log_color "${YELLOW}Verifying container status...${NC}"
if docker compose ps | grep -q "mev-bot.*running"; then
    log_color "${GREEN}✓ Container is running${NC}"
else
    log_color "${RED}✗ Container is not running${NC}"
    docker compose ps | tee -a "$LOG_FILE"
    exit 1
fi
# Show container status
log ""
log_color "${GREEN}========================================="
log "Update Complete!"
log_color "${GREEN}=========================================${NC}"
log "Updated from: $(echo $LOCAL_COMMIT | cut -c1-7)"
log "Updated to: $(echo $REMOTE_COMMIT | cut -c1-7)"
log ""
# Send notification if configured (WEBHOOK_URL comes from the environment;
# the script runs with `set -e` but not `set -u`, so an unset value is
# treated as empty here).
if command -v curl &> /dev/null && [ -n "$WEBHOOK_URL" ]; then
    NEW_COMMITS=$(git log --oneline "$LOCAL_COMMIT..$REMOTE_COMMIT" | wc -l)
    MESSAGE="MEV Bot auto-updated: $NEW_COMMITS new commit(s) on $BRANCH"
    curl -X POST "$WEBHOOK_URL" \
        -H "Content-Type: application/json" \
        -d "{\"text\":\"$MESSAGE\"}" \
        >> "$LOG_FILE" 2>&1 || true
    log "Notification sent to webhook"
fi
log "View logs: tail -f $LOG_FILE"
log "View container logs: docker compose logs -f mev-bot"
log ""
exit 0

View File

@@ -1,43 +0,0 @@
#!/usr/bin/env bash
# Battery Calibration Script for Arch Linux on MacBook Air
# Helps safely discharge/charge battery to recalibrate SMC.
# Polls /sys battery state once a minute, warns via notify-send as charge
# drops, and powers the machine off at the shutdown threshold.
LOGFILE="$HOME/battery_calibration.log"
THRESHOLD_SHUTDOWN=2 # % at which system will auto-shutdown
THRESHOLD_WARN1=10
THRESHOLD_WARN2=5
# Current charge percentage (integer).
# NOTE(review): assumes a BAT0 device exists — TODO confirm on target hardware.
get_capacity() {
    cat /sys/class/power_supply/BAT0/capacity
}
# Charging state string (e.g. "Charging", "Discharging", "Full").
get_status() {
    cat /sys/class/power_supply/BAT0/status
}
# Timestamped log line to stdout and $LOGFILE.
log() {
    echo "[$(date '+%Y-%m-%d %H:%M:%S')] $*" | tee -a "$LOGFILE"
}
log "=== Starting battery calibration session ==="
# Main poll loop: runs until powered off or interrupted (Ctrl+C).
while true; do
    CAPACITY=$(get_capacity)
    STATUS=$(get_status)
    log "Battery: $CAPACITY% | Status: $STATUS"
    # Warnings/shutdown only apply while discharging.
    if [[ "$STATUS" == "Discharging" ]]; then
        if (( CAPACITY <= THRESHOLD_WARN1 && CAPACITY > THRESHOLD_WARN2 )); then
            notify-send "Battery Calibration" "Battery low: $CAPACITY%. Save work."
        elif (( CAPACITY <= THRESHOLD_WARN2 && CAPACITY > THRESHOLD_SHUTDOWN )); then
            notify-send "Battery Calibration" "Battery critical: $CAPACITY%. Shutdown soon."
        elif (( CAPACITY <= THRESHOLD_SHUTDOWN )); then
            # Full discharge reached: power off so the SMC sees a complete cycle.
            log "Battery reached $CAPACITY%. Shutting down for full discharge..."
            systemctl poweroff
        fi
    fi
    sleep 60 # check every minute
done

View File

@@ -1,103 +0,0 @@
#!/usr/bin/env bash
set -euo pipefail

# Reusable, agnostic build script.
# Can be used in any Go project by adjusting configuration via flags or
# environment variables (BINARY_NAME, BINARY_DIR, MAIN_FILE, BUILD_TAGS,
# LDFLAGS, OUTPUT, GOOS, GOARCH).

# Configuration variables (environment overrides accepted).
BINARY_NAME="${BINARY_NAME:-$(basename "$PWD")}"
BINARY_DIR="${BINARY_DIR:-bin}"
MAIN_FILE="${MAIN_FILE:-cmd/mev-bot/main.go}"
BUILD_TAGS="${BUILD_TAGS:-}"
LDFLAGS="${LDFLAGS:-}"
# The OUTPUT default is resolved AFTER argument parsing so that `-n NAME`
# influences the default output path. Previously it was fixed here, so
# `-n foo` without `-o` still produced bin/<old-directory-name>.
OUTPUT="${OUTPUT:-}"
GOOS="${GOOS:-$(go env GOOS)}"
GOARCH="${GOARCH:-$(go env GOARCH)}"

# Parse command line arguments
while [[ $# -gt 0 ]]; do
    case $1 in
        -n|--name)
            BINARY_NAME="$2"
            shift 2
            ;;
        -o|--output)
            OUTPUT="$2"
            shift 2
            ;;
        -m|--main)
            MAIN_FILE="$2"
            shift 2
            ;;
        -t|--tags)
            BUILD_TAGS="$2"
            shift 2
            ;;
        -l|--ldflags)
            LDFLAGS="$2"
            shift 2
            ;;
        --goos)
            GOOS="$2"
            shift 2
            ;;
        --goarch)
            GOARCH="$2"
            shift 2
            ;;
        -h|--help)
            echo "Usage: $0 [OPTIONS]"
            echo "Build a Go application with configurable options"
            echo ""
            echo "Options:"
            echo " -n, --name NAME Binary name (default: current directory name)"
            echo " -o, --output PATH Output path (default: bin/BINARY_NAME)"
            # Help now matches the actual default (was wrongly documented as ".").
            echo " -m, --main PATH Main package path (default: cmd/mev-bot/main.go)"
            echo " -t, --tags TAGS Build tags"
            echo " -l, --ldflags FLAGS Ldflags to pass to go build"
            echo " --goos OS Target OS (default: $(go env GOOS))"
            echo " --goarch ARCH Target architecture (default: $(go env GOARCH))"
            echo " -h, --help Show this help"
            exit 0
            ;;
        *)
            echo "Unknown option: $1"
            exit 1
            ;;
    esac
done

# Resolve the default output path now that BINARY_NAME is final.
if [[ -z "$OUTPUT" ]]; then
    OUTPUT="$BINARY_DIR/$BINARY_NAME"
fi

echo "Building $BINARY_NAME for $GOOS/$GOARCH..."
# Ensure local Go cache directories exist when not provided, keeping builds
# self-contained inside the project tree.
PROJECT_ROOT="$(pwd)"
if [[ -z "${GOCACHE:-}" ]]; then
    export GOCACHE="${PROJECT_ROOT}/.gocache"
fi
if [[ -z "${GOMODCACHE:-}" ]]; then
    export GOMODCACHE="${PROJECT_ROOT}/.gomodcache"
fi
if [[ -z "${GOPATH:-}" ]]; then
    export GOPATH="${PROJECT_ROOT}/.gopath"
fi
# Prefer vendored dependencies when a vendor/ directory exists.
if [[ -z "${GOFLAGS:-}" && -d "${PROJECT_ROOT}/vendor" ]]; then
    export GOFLAGS="-mod=vendor"
fi
mkdir -p "$GOCACHE" "$GOMODCACHE" "${GOPATH}/pkg/mod"
# Create binary directory
mkdir -p "$(dirname "$OUTPUT")"
# Set environment variables for cross-compilation
export GOOS="$GOOS"
export GOARCH="$GOARCH"
# Build the application; -tags/-ldflags are only passed when non-empty.
echo "Building $BINARY_NAME..."
[ -n "$BUILD_TAGS" ] && echo " Build tags: $BUILD_TAGS"
[ -n "$LDFLAGS" ] && echo " LDFLAGS: $LDFLAGS"
go build -o "$OUTPUT" ${BUILD_TAGS:+-tags "$BUILD_TAGS"} ${LDFLAGS:+-ldflags "$LDFLAGS"} "$MAIN_FILE"
echo "Build completed successfully!"
echo "Binary: $OUTPUT"

View File

@@ -1,113 +0,0 @@
#!/bin/bash
# Probe a list of candidate pool contracts on Arbitrum via raw eth_call to
# determine whether they expose the standard UniswapV3 interface, then retry
# with V2-style selectors. Read-only: only eth_call / eth_getCode requests.
echo "Checking Pool Contract Interfaces"
echo "================================="
# Test pool addresses
POOLS=(
    "0x6f38e884725a116C9C7fBF208e79FE8828a2595F"
    "0x2f5e87C9312fa29aed5c179E456625D79015299c"
    "0xB1026b8e7276e7AC75410F1fcbbe21796e8f7526"
)
# RPC endpoint
RPC="https://arb1.arbitrum.io/rpc"
echo ""
echo "Testing standard UniswapV3 pool methods..."
echo ""
for POOL in "${POOLS[@]}"; do
    echo "Pool: $POOL"
    echo "-------------------------------------------"
    # Test token0() - selector 0x0dfe1681. A valid address response is a
    # 32-byte word: "0x" + 64 hex chars = 66 chars total.
    echo -n " token0(): "
    TOKEN0_RESULT=$(curl -s -X POST $RPC \
        -H "Content-Type: application/json" \
        -d "{\"jsonrpc\":\"2.0\",\"method\":\"eth_call\",\"params\":[{\"to\":\"$POOL\",\"data\":\"0x0dfe1681\"},\"latest\"],\"id\":1}" \
        | jq -r '.result // .error.message')
    if [[ "$TOKEN0_RESULT" == "0x"* ]] && [[ ${#TOKEN0_RESULT} -eq 66 ]]; then
        # Characters 27-66 are the low 20 bytes: the address without 0x.
        echo "✅ Success - $(echo $TOKEN0_RESULT | cut -c 27-66)"
    else
        echo "❌ Failed - $TOKEN0_RESULT"
    fi
    # Test token1() - selector 0xd21220a7
    echo -n " token1(): "
    TOKEN1_RESULT=$(curl -s -X POST $RPC \
        -H "Content-Type: application/json" \
        -d "{\"jsonrpc\":\"2.0\",\"method\":\"eth_call\",\"params\":[{\"to\":\"$POOL\",\"data\":\"0xd21220a7\"},\"latest\"],\"id\":1}" \
        | jq -r '.result // .error.message')
    if [[ "$TOKEN1_RESULT" == "0x"* ]] && [[ ${#TOKEN1_RESULT} -eq 66 ]]; then
        echo "✅ Success - $(echo $TOKEN1_RESULT | cut -c 27-66)"
    else
        echo "❌ Failed - $TOKEN1_RESULT"
    fi
    # Test fee() - selector 0xddca3f43
    echo -n " fee(): "
    FEE_RESULT=$(curl -s -X POST $RPC \
        -H "Content-Type: application/json" \
        -d "{\"jsonrpc\":\"2.0\",\"method\":\"eth_call\",\"params\":[{\"to\":\"$POOL\",\"data\":\"0xddca3f43\"},\"latest\"],\"id\":1}" \
        | jq -r '.result // .error.message')
    if [[ "$FEE_RESULT" == "0x"* ]]; then
        echo "✅ Success"
    else
        echo "❌ Failed - $FEE_RESULT"
    fi
    # Get bytecode size to check if contract exists ("0x" means no code).
    echo -n " Contract exists: "
    CODE=$(curl -s -X POST $RPC \
        -H "Content-Type: application/json" \
        -d "{\"jsonrpc\":\"2.0\",\"method\":\"eth_getCode\",\"params\":[\"$POOL\",\"latest\"],\"id\":1}" \
        | jq -r '.result')
    if [[ "$CODE" != "0x" ]] && [[ -n "$CODE" ]]; then
        # Byte count: hex string length / 2, minus the "0x" prefix byte.
        BYTES=$((${#CODE} / 2 - 1))
        echo "✅ Yes ($BYTES bytes)"
    else
        echo "❌ No contract at this address"
    fi
    echo ""
done
echo "Alternative Method Signatures to Try:"
echo "======================================"
echo ""
echo "Testing Uniswap V2 style methods..."
echo ""
for POOL in "${POOLS[@]}"; do
    echo "Pool: $POOL"
    echo "-------------------------------------------"
    # Test Uniswap V2 token0() - same selector as V3.
    echo -n " V2 token0(): "
    TOKEN0_RESULT=$(curl -s -X POST $RPC \
        -H "Content-Type: application/json" \
        -d "{\"jsonrpc\":\"2.0\",\"method\":\"eth_call\",\"params\":[{\"to\":\"$POOL\",\"data\":\"0x0dfe1681\"},\"latest\"],\"id\":1}" \
        | jq -r '.result // .error.message')
    echo "$TOKEN0_RESULT" | head -c 50
    echo ""
    # Test getReserves() for V2 - selector 0x0902f1ac. A V2 pool returns
    # three words, so the payload is longer than a single 66-char word.
    echo -n " getReserves(): "
    RESERVES_RESULT=$(curl -s -X POST $RPC \
        -H "Content-Type: application/json" \
        -d "{\"jsonrpc\":\"2.0\",\"method\":\"eth_call\",\"params\":[{\"to\":\"$POOL\",\"data\":\"0x0902f1ac\"},\"latest\"],\"id\":1}" \
        | jq -r '.result // .error.message')
    if [[ "$RESERVES_RESULT" == "0x"* ]] && [[ ${#RESERVES_RESULT} -gt 66 ]]; then
        echo "✅ V2 Pool detected"
    else
        echo "❌ Not a V2 pool"
    fi
    echo ""
done

View File

@@ -1,153 +0,0 @@
#!/usr/bin/env bash
# Check wallet balance on Arbitrum One
# Verifies wallet is ready for MEV bot execution: derives the address from
# a private key file, queries the balance over JSON-RPC, and reports
# whether it meets the configured minimum.
set -euo pipefail
PRIVATE_KEY_FILE="/tmp/wallet_key.txt"
# NOTE(review): the Alchemy API key is hardcoded in this URL — a committed
# secret. Move it to an environment variable and rotate the key.
ALCHEMY_RPC="https://arb-mainnet.g.alchemy.com/v2/d6VAHgzkOI3NgLGem6uBMiADT1E9rROB"
MIN_BALANCE_ETH="0.001"
echo "═══════════════════════════════════════════════════════════"
echo "💰 MEV Bot Wallet Balance Check"
echo "═══════════════════════════════════════════════════════════"
echo ""
# Check if private key file exists
if [ ! -f "$PRIVATE_KEY_FILE" ]; then
    echo "❌ Error: Private key file not found at $PRIVATE_KEY_FILE"
    exit 1
fi
PRIVATE_KEY=$(cat "$PRIVATE_KEY_FILE")
# Derive wallet address: prefer Foundry's `cast`, fall back to Python's
# eth_account, otherwise abort.
echo "🔍 Deriving wallet address..."
if command -v cast &> /dev/null; then
    WALLET_ADDRESS=$(cast wallet address "$PRIVATE_KEY")
elif command -v python3 &> /dev/null; then
    # Quoted heredoc: the Python runs verbatim and re-reads the key file
    # itself (it does not receive $PRIVATE_KEY from the shell).
    WALLET_ADDRESS=$(python3 << 'EOF'
try:
    from eth_account import Account
    import sys
    with open('/tmp/wallet_key.txt', 'r') as f:
        private_key = f.read().strip()
    if private_key.startswith('0x'):
        private_key = private_key[2:]
    account = Account.from_key(bytes.fromhex(private_key))
    print(account.address)
except Exception as e:
    print(f"Error: {e}", file=sys.stderr)
    sys.exit(1)
EOF
)
else
    echo "❌ Error: Neither cast nor python3 available"
    echo " Please install Foundry or Python with eth_account"
    exit 1
fi
echo "✅ Wallet Address: $WALLET_ADDRESS"
echo ""
# Query balance from Arbitrum via eth_getBalance; the result is parsed out
# of the raw JSON with grep/cut (no jq dependency).
echo "🌐 Querying Arbitrum One network..."
BALANCE_HEX=$(curl -s -X POST "$ALCHEMY_RPC" \
    -H "Content-Type: application/json" \
    -d "{\"jsonrpc\":\"2.0\",\"method\":\"eth_getBalance\",\"params\":[\"$WALLET_ADDRESS\",\"latest\"],\"id\":1}" | \
    grep -o '"result":"[^"]*"' | \
    cut -d'"' -f4)
if [ -z "$BALANCE_HEX" ]; then
    echo "❌ Error: Failed to query balance from Arbitrum"
    echo " RPC endpoint may be unavailable"
    exit 1
fi
# Convert hex to decimal (wei)
# Handle both with and without 0x prefix
# NOTE(review): bash arithmetic is signed 64-bit — balances above
# 2^63-1 wei (~9.22 ETH) would overflow here. TODO confirm acceptable
# for this wallet's expected range, or convert with bc.
if [[ "$BALANCE_HEX" == 0x* ]]; then
    BALANCE_WEI=$((BALANCE_HEX))
else
    BALANCE_WEI=$((0x$BALANCE_HEX))
fi
# Convert wei to ETH (1 ETH = 10^18 wei)
BALANCE_ETH=$(echo "scale=6; $BALANCE_WEI / 1000000000000000000" | bc)
echo "✅ Balance Retrieved"
echo ""
# Display balance
echo "═══════════════════════════════════════════════════════════"
echo "💰 Current Balance"
echo "═══════════════════════════════════════════════════════════"
echo ""
echo " Address: $WALLET_ADDRESS"
echo " Network: Arbitrum One (Chain ID: 42161)"
echo " Balance: $BALANCE_ETH ETH"
echo " Value: ~\$$(echo "scale=2; $BALANCE_ETH * 2000" | bc) USD (at \$2000/ETH)"
echo ""
# Check if balance meets minimum; bc prints 1 (true) or 0 (false).
BALANCE_CHECK=$(echo "$BALANCE_ETH >= $MIN_BALANCE_ETH" | bc)
if [ "$BALANCE_CHECK" -eq 1 ]; then
    echo "✅ Wallet is funded and ready for execution!"
    echo ""
    # Calculate estimated trades (assumes ~0.0005 ETH gas per trade).
    TRADES=$(echo "scale=0; $BALANCE_ETH / 0.0005" | bc)
    echo "📊 Estimated Capacity:"
    echo " • Max gas per trade: ~\$1.00 (~0.0005 ETH)"
    echo " • Estimated trades: ~$TRADES transactions"
    echo " • Recommended refill: When balance < 0.0005 ETH"
    echo ""
    echo "⏭️ Next Steps:"
    echo " 1. Run: ./scripts/setup-keystore.sh"
    echo " 2. Enable execution mode in config"
    echo " 3. Restart bot to begin trading"
    echo ""
    # Arbiscan link
    echo "🔗 View on Arbiscan:"
    echo " https://arbiscan.io/address/$WALLET_ADDRESS"
    echo ""
    exit 0
else
    echo "⚠️ Wallet balance is below minimum threshold"
    echo ""
    echo "❌ Current: $BALANCE_ETH ETH"
    echo "✅ Required: $MIN_BALANCE_ETH ETH"
    echo "📉 Deficit: $(echo "scale=6; $MIN_BALANCE_ETH - $BALANCE_ETH" | bc) ETH"
    echo ""
    echo "💡 Funding Instructions:"
    echo ""
    echo " **Option 1: Bridge from Ethereum**"
    echo " • Visit: https://bridge.arbitrum.io/"
    echo " • Bridge 0.001-0.005 ETH from Ethereum to Arbitrum"
    echo " • Wait 7-15 minutes for confirmation"
    echo ""
    echo " **Option 2: Exchange Withdrawal**"
    echo " • Go to Coinbase/Binance/Kraken"
    echo " • Withdraw → ETH → Select 'Arbitrum One' network"
    echo " • Send to: $WALLET_ADDRESS"
    echo " • Amount: 0.001-0.005 ETH"
    echo ""
    echo " **Option 3: From Existing Arbitrum Wallet**"
    echo " • Open MetaMask on Arbitrum One network"
    echo " • Send 0.001-0.005 ETH to: $WALLET_ADDRESS"
    echo ""
    echo "🔗 Track transaction:"
    echo " https://arbiscan.io/address/$WALLET_ADDRESS"
    echo ""
    exit 1
fi

View File

@@ -1,75 +0,0 @@
#!/usr/bin/env bash
# Run CI pipeline inside a container (for isolation)
# Usage: ./scripts/ci-container.sh [quick|dev|full]
# Supports: Podman, Docker, and Podman-in-Podman
set -euo pipefail
MODE="${1:-dev}"
# Map mode to the stage-skipping env flags passed into the container.
case $MODE in
    quick)
        echo "🐳 Running Quick CI in Container..."
        SKIP_FLAGS="-e HARNESS_SKIP_DOCKER=true -e HARNESS_SKIP_MATH_AUDIT=true -e HARNESS_SKIP_SECURITY=true"
        ;;
    dev)
        echo "🐳 Running Development CI in Container..."
        SKIP_FLAGS="-e HARNESS_SKIP_DOCKER=true"
        ;;
    full)
        echo "🐳 Running Full CI in Container (Podman/Docker compatible)..."
        SKIP_FLAGS="-e HARNESS_SKIP_DOCKER=true"
        ;;
    *)
        echo "Usage: $0 [quick|dev|full]"
        echo " quick - Fast validation (30-60s)"
        echo " dev - Development pipeline (1-2min)"
        echo " full - Complete validation with container support (2-3min)"
        exit 1
        ;;
esac
# Load container runtime detection (sets CONTAINER_RUNTIME, COMPOSE_CMD,
# CONTAINER_SOCKET, INSIDE_CONTAINER in this shell).
source "$(dirname "$0")/container-runtime.sh" init
if [[ -z "$CONTAINER_RUNTIME" ]]; then
    echo "❌ Error: No container runtime found (podman or docker required)"
    echo "Install with: sudo apt install podman"
    exit 1
fi
echo "Using container runtime: $CONTAINER_RUNTIME"
echo ""
# Create cache directories for performance (mounted into the container so
# Go build/module caches survive between runs).
mkdir -p .gocache .gomodcache
# Get DinD mount flags if inside container.
# NOTE(review): this mounts the socket path onto itself (-v PATH:…); if the
# inner tooling expects the socket at a standard location, a HOST:CONTAINER
# mapping may be required — confirm against container-runtime.sh.
DIND_MOUNTS=""
if [[ "$INSIDE_CONTAINER" == "true" ]]; then
    DIND_MOUNTS="$(source "$(dirname "$0")/container-runtime.sh" socket)"
    if [[ -n "$DIND_MOUNTS" ]]; then
        DIND_MOUNTS="-v $DIND_MOUNTS"
    fi
fi
# Run pipeline in container. $DIND_MOUNTS and $SKIP_FLAGS are deliberately
# unquoted: they contain multiple flag words that must word-split.
$CONTAINER_RUNTIME run --rm \
    -v "$(pwd)":/workspace \
    -v "$(pwd)/.gocache":/root/.cache/go-build \
    -v "$(pwd)/.gomodcache":/go/pkg/mod \
    $DIND_MOUNTS \
    -w /workspace \
    $SKIP_FLAGS \
    golang:1.25-alpine \
    sh -c "
    echo 'Installing CI tools...' &&
    apk add --no-cache git make bash curl &&
    go install github.com/golangci/golangci-lint/cmd/golangci-lint@latest &&
    echo 'Running pipeline...' &&
    ./harness/local-ci-pipeline.sh
    "
echo ""
echo "✅ Container CI completed successfully!"
echo "📊 Check reports in: harness/reports/"

View File

@@ -1,17 +0,0 @@
#!/usr/bin/env bash
# Development CI pipeline - good balance of speed and coverage.
# Runs the local pipeline with the Docker stage disabled.
# Usage: ./scripts/ci-dev.sh
set -euo pipefail

printf '%s\n' \
  "🛠️ Running Development CI Pipeline..." \
  "⏱️ Expected time: 1-2 minutes" \
  ""

# Skip the Docker stage for faster feedback during development.
env HARNESS_SKIP_DOCKER=true ./harness/local-ci-pipeline.sh

printf '%s\n' \
  "" \
  "✅ Development CI completed successfully!" \
  "📊 Check reports in: harness/reports/" \
  "📋 For full validation, run: ./scripts/ci-full.sh"

View File

@@ -1,16 +0,0 @@
#!/usr/bin/env bash
# Full CI pipeline - complete validation for releases.
# Usage: ./scripts/ci-full.sh
set -euo pipefail

printf '%s\n' \
  "🎯 Running Full CI Pipeline..." \
  "⏱️ Expected time: 3-5 minutes" \
  ""

# Run every stage — nothing skipped.
./harness/local-ci-pipeline.sh

printf '%s\n' \
  "" \
  "🎉 Full CI completed successfully!" \
  "📊 Complete reports available in: harness/reports/" \
  "📋 View summary: cat harness/reports/pipeline-report.md"

View File

@@ -1,33 +0,0 @@
#!/usr/bin/env bash
# Pre-commit validation - minimal checks for speed
# Usage: ./scripts/ci-precommit.sh
set -euo pipefail
echo "🔍 Running Pre-commit Validation..."
echo "⏱️ Expected time: 10-30 seconds"
echo ""
# Quick build and test
echo "Building binary..."
make build
echo "Running tests..."
make test
echo "Checking formatting..."
# Run gofmt ONCE and reuse the output: the original invoked `gofmt -l .`
# twice (once for the check, once for the report), doing the scan twice
# and risking an inconsistent report if files changed in between.
unformatted=$(gofmt -l .)
if [[ -z "$unformatted" ]]; then
    echo "✅ Code formatting is clean"
else
    echo "❌ Code needs formatting:"
    printf '%s\n' "$unformatted"
    echo "Run: gofmt -w ."
    exit 1
fi
echo "Running static analysis..."
go vet ./...
echo ""
echo "✅ Pre-commit validation passed!"
echo "💡 For more thorough checks, run: ./scripts/ci-quick.sh"

View File

@@ -1,18 +0,0 @@
#!/usr/bin/env bash
# Quick CI validation for development workflow.
# Usage: ./scripts/ci-quick.sh
set -euo pipefail

printf '%s\n' \
  "🚀 Running Quick CI Pipeline..." \
  "⏱️ Expected time: 30-60 seconds" \
  ""

# Skip the slow stages (Docker, math audit, security scan) for fast feedback.
env HARNESS_SKIP_DOCKER=true \
    HARNESS_SKIP_MATH_AUDIT=true \
    HARNESS_SKIP_SECURITY=true \
    ./harness/local-ci-pipeline.sh

printf '%s\n' \
  "" \
  "✅ Quick CI completed successfully!" \
  "📋 For full validation, run: ./scripts/ci-full.sh"

View File

@@ -1,59 +0,0 @@
#!/usr/bin/env bash
# Watch for file changes and run CI automatically
# Usage: ./scripts/ci-watch.sh [quick|precommit]
set -euo pipefail
MODE="${1:-precommit}"
# Select which validation script to run on each change.
case $MODE in
    quick)
        CI_SCRIPT="./scripts/ci-quick.sh"
        echo "👀 Watching for changes - will run quick CI..."
        ;;
    precommit)
        CI_SCRIPT="./scripts/ci-precommit.sh"
        echo "👀 Watching for changes - will run pre-commit validation..."
        ;;
    *)
        echo "Usage: $0 [quick|precommit]"
        echo " precommit - Fast build/test only"
        echo " quick - Quick CI pipeline"
        exit 1
        ;;
esac
# Check if inotifywait is available
if ! command -v inotifywait >/dev/null 2>&1; then
    echo "❌ inotifywait not found. Install with:"
    echo "sudo apt install inotify-tools"
    exit 1
fi
echo "Watching: pkg/ internal/ cmd/ *.go"
echo "Press Ctrl+C to stop"
echo ""
# Run initial check
$CI_SCRIPT
echo ""
echo "👀 Watching for changes..."
# Watch for changes and re-run. Each inotifywait invocation blocks until
# one matching event, then the loop body runs and we wait again.
# NOTE(review): --include requires inotify-tools >= 3.20, and watching "."
# recursively also includes .git/ — confirm this doesn't retrigger on
# git's own writes.
while inotifywait -q -r -e modify,move,create,delete \
    --include='.*\.go$' \
    pkg/ internal/ cmd/ . 2>/dev/null; do
    echo ""
    echo "🔄 Files changed, running $MODE validation..."
    echo ""
    # A failed run does not stop the watcher; it reports and keeps waiting.
    if $CI_SCRIPT; then
        echo ""
        echo "✅ Validation passed - watching for changes..."
    else
        echo ""
        echo "❌ Validation failed - fix issues and save files to retry"
    fi
done

View File

@@ -1,105 +0,0 @@
#!/bin/bash
# Prune logs/pool_blacklist.json: keep only entries whose address still has
# deployed bytecode on Arbitrum (checked via eth_getCode), then sanity-test
# two known-good pools. The original file is backed up first.
echo "Cleaning Pool Blacklist - Removing Invalid Addresses"
echo "===================================================="
echo ""
# Backup the current blacklist
cp logs/pool_blacklist.json logs/pool_blacklist.json.backup.$(date +%Y%m%d_%H%M%S)
echo "✅ Backed up current blacklist"
# Create a temporary file for valid pools
TEMP_FILE="/tmp/valid_pools.json"
echo "[]" > $TEMP_FILE
# RPC endpoint
RPC="https://arb1.arbitrum.io/rpc"
# Get all blacklisted pools (one address per line).
POOLS=$(cat logs/pool_blacklist.json | jq -r '.[] | .address')
TOTAL=0
VALID=0
INVALID=0
echo ""
echo "Checking each pool for contract existence..."
echo ""
for POOL in $POOLS; do
    TOTAL=$((TOTAL + 1))
    # Check if contract exists: eth_getCode returns "0x" when there is no
    # bytecode at the address.
    CODE=$(curl -s -X POST $RPC \
        -H "Content-Type: application/json" \
        -d "{\"jsonrpc\":\"2.0\",\"method\":\"eth_getCode\",\"params\":[\"$POOL\",\"latest\"],\"id\":1}" \
        | jq -r '.result')
    if [[ "$CODE" != "0x" ]] && [[ -n "$CODE" ]] && [[ ${#CODE} -gt 10 ]]; then
        # Contract exists - keep in blacklist: copy its original entry
        # (with reason/metadata) into the temp accumulation file.
        # NOTE(review): the `jq -s '.' | jq '.[0]'` round-trip assumes each
        # address selects exactly one entry — confirm no duplicates.
        ENTRY=$(cat logs/pool_blacklist.json | jq --arg addr "$POOL" '.[] | select(.address == $addr)')
        if [ ! -z "$ENTRY" ]; then
            echo "$ENTRY" | jq -s '.' | jq '.[0]' >> $TEMP_FILE.tmp
            VALID=$((VALID + 1))
            echo "$POOL - Valid contract ($(( ${#CODE} / 2 - 1 )) bytes)"
        fi
    else
        # No contract - remove from blacklist (simply not copied over).
        INVALID=$((INVALID + 1))
        echo "$POOL - No contract, removing"
    fi
    # Show progress
    if [ $((TOTAL % 10)) -eq 0 ]; then
        echo " Progress: $TOTAL pools checked..."
    fi
done
# Combine all valid entries into a proper JSON array
if [ -f $TEMP_FILE.tmp ]; then
    cat $TEMP_FILE.tmp | jq -s '.' > $TEMP_FILE
    rm $TEMP_FILE.tmp
fi
echo ""
echo "Summary"
echo "======="
echo "Total pools checked: $TOTAL"
echo "Valid contracts: $VALID"
echo "Invalid (removed): $INVALID"
echo ""
# Replace the blacklist with cleaned version; if nothing validated, keep
# the original rather than writing an empty list.
if [ $VALID -gt 0 ]; then
    mv $TEMP_FILE logs/pool_blacklist.json
    echo "✅ Blacklist updated with $VALID valid pools"
else
    echo "⚠️ No valid pools found - keeping original blacklist"
fi
echo ""
echo "Testing a few valid UniswapV3 pools to ensure they work..."
echo ""
# Test known good pools by calling token0() (selector 0x0dfe1681).
GOOD_POOLS=(
    "0xC31E54c7a869B9FcBEcc14363CF510d1c41fa443" # WETH/USDC.e
    "0x641C00A822e8b671738d32a431a4Fb6074E5c79d" # USDT/WETH
)
for POOL in "${GOOD_POOLS[@]}"; do
    echo -n "Testing $POOL: "
    TOKEN0=$(curl -s -X POST $RPC \
        -H "Content-Type: application/json" \
        -d "{\"jsonrpc\":\"2.0\",\"method\":\"eth_call\",\"params\":[{\"to\":\"$POOL\",\"data\":\"0x0dfe1681\"},\"latest\"],\"id\":1}" \
        | jq -r '.result // "error"')
    if [[ "$TOKEN0" == "0x"* ]] && [[ ${#TOKEN0} -eq 66 ]]; then
        echo "✅ Works"
    else
        echo "❌ Failed"
    fi
done
echo ""
echo "Cleanup complete!"

View File

@@ -1,153 +0,0 @@
#!/usr/bin/env bash
# Container Runtime Detection & Configuration
# Detects and uses available container runtime: podman-in-podman > podman > docker-in-docker > docker
# Sourced by other scripts (e.g. ci-container.sh) or run directly with a
# sub-command (init|status|runtime|compose|socket).
set -euo pipefail
# Color codes (single-quoted: literal escapes, rendered via echo -e)
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
NC='\033[0m'
# Export these for use in calling scripts; populated by the functions below.
export CONTAINER_RUNTIME=""
export COMPOSE_CMD=""
export CONTAINER_SOCKET=""
export INSIDE_CONTAINER=""
# Record whether this shell is itself running inside a container by probing
# the marker files Docker (/.dockerenv) and Podman (/run/.containerenv)
# create. Result stored in the global INSIDE_CONTAINER as "true"/"false".
detect_container_env() {
    INSIDE_CONTAINER="false"
    if [[ -f /.dockerenv || -f /run/.containerenv ]]; then
        INSIDE_CONTAINER="true"
    fi
}
# Pick the first available container runtime, preferring podman over
# docker, and select the matching compose command. Sets the globals
# CONTAINER_RUNTIME and COMPOSE_CMD; returns 1 when neither is installed.
detect_runtime() {
    local candidate
    for candidate in podman docker; do
        command -v "$candidate" &>/dev/null || continue
        CONTAINER_RUNTIME="$candidate"
        # Compose resolution order: dedicated <runtime>-compose binary,
        # then the legacy standalone binaries, then the built-in
        # "<runtime> compose" sub-command.
        if command -v "${candidate}-compose" &>/dev/null; then
            COMPOSE_CMD="${candidate}-compose"
        elif [[ "$candidate" == "docker" ]] && command -v docker-compose &>/dev/null; then
            COMPOSE_CMD="docker-compose"
        elif [[ "$candidate" == "podman" ]] && command -v podman-compose &>/dev/null; then
            COMPOSE_CMD="podman-compose"
        elif [[ "$candidate" == "podman" ]]; then
            # Fallback: podman has built-in compose
            COMPOSE_CMD="podman compose"
        else
            COMPOSE_CMD="$candidate compose"
        fi
        return 0
    done
    return 1
}
# Setup DinD (Docker in Docker) socket: locate the runtime's control socket
# so it can be bind-mounted into child containers. Sets CONTAINER_SOCKET
# when a known socket path exists; leaves it untouched otherwise.
# Arguments: $1 - runtime name ("podman" or "docker")
setup_dind_socket() {
    local runtime="$1"
    case "$runtime" in
        podman)
            # XDG_RUNTIME_DIR may be unset (root shells, cron, CI). The
            # script runs under `set -u`, so expand it with a default to
            # avoid an "unbound variable" abort.
            if [[ -S "${XDG_RUNTIME_DIR:-}/podman/podman.sock" ]]; then
                CONTAINER_SOCKET="${XDG_RUNTIME_DIR:-}/podman/podman.sock"
            elif [[ -S "/run/podman/podman.sock" ]]; then
                CONTAINER_SOCKET="/run/podman/podman.sock"
            elif [[ -S "/run/user/$(id -u)/podman/podman.sock" ]]; then
                CONTAINER_SOCKET="/run/user/$(id -u)/podman/podman.sock"
            fi
            ;;
        docker)
            # Docker socket location
            if [[ -S "/var/run/docker.sock" ]]; then
                CONTAINER_SOCKET="/var/run/docker.sock"
            elif [[ -S "/run/docker.sock" ]]; then
                CONTAINER_SOCKET="/run/docker.sock"
            fi
            ;;
    esac
}
# Emit the -v flag that exposes the detected host socket to a child
# container, or nothing when no socket was found.
# Arguments: $1 - runtime name   Outputs: "-v HOST:CONTAINER" on stdout
get_dind_mount_flags() {
    local rt="$1"
    [[ -n "$CONTAINER_SOCKET" ]] || return 0
    case "$rt" in
        podman) echo "-v $CONTAINER_SOCKET:/run/podman/podman.sock" ;;
        docker) echo "-v $CONTAINER_SOCKET:/var/run/docker.sock" ;;
    esac
}
# Initialize runtime: detect the container environment, pick a runtime,
# locate its socket, and export the results for subshells. Returns 1 (with
# an error on stderr) when no runtime is installed.
init_runtime() {
    detect_container_env
    if ! detect_runtime; then
        echo -e "${RED}❌ Error: No container runtime found (podman or docker required)${NC}" >&2
        return 1
    fi
    setup_dind_socket "$CONTAINER_RUNTIME"
    # Export for subshells
    export CONTAINER_RUNTIME COMPOSE_CMD CONTAINER_SOCKET INSIDE_CONTAINER
    return 0
}
# Display status
# Print a human-readable summary of the detected container environment.
# Globals read: CONTAINER_RUNTIME, COMPOSE_CMD, INSIDE_CONTAINER,
# CONTAINER_SOCKET, and the BLUE/GREEN/NC color codes.
show_status() {
    # printf '%b' interprets the \033 escapes in the color variables,
    # matching `echo -e`.
    printf '%b\n' "${BLUE}Container Runtime Detection:${NC}"
    printf '%b\n' "  Runtime: ${GREEN}$CONTAINER_RUNTIME${NC}"
    printf '%b\n' "  Compose: ${GREEN}$COMPOSE_CMD${NC}"
    printf '%b\n' "  Inside Container: ${GREEN}$INSIDE_CONTAINER${NC}"
    if [[ -n "$CONTAINER_SOCKET" ]]; then
        printf '%b\n' "  Socket: ${GREEN}$CONTAINER_SOCKET${NC}"
    fi
}
# Main execution
# CLI entry point: the first argument selects what to print after
# initialization; with no (or an unknown) argument, just initialize.
case "${1:-}" in
    init)
        init_runtime
        ;;
    status)
        init_runtime
        show_status
        ;;
    runtime)
        init_runtime
        echo "$CONTAINER_RUNTIME"
        ;;
    compose)
        init_runtime
        echo "$COMPOSE_CMD"
        ;;
    socket)
        init_runtime
        echo "$CONTAINER_SOCKET"
        ;;
    *)
        init_runtime
        ;;
esac

View File

@@ -1,495 +0,0 @@
#!/usr/bin/env bash
set -euo pipefail
# Universal Go Project Template
# This template can be used to quickly set up a new Go project with standardized structure and tooling

# The target project name is the single required positional argument.
PROJECT_NAME="${1:-}"
if [[ -z "$PROJECT_NAME" ]]; then
    echo "Usage: $0 <project-name>"
    echo "Creates a new Go project template with standardized structure and tooling"
    exit 1
fi
echo "Creating new Go project: $PROJECT_NAME"
# Create project directory structure
mkdir -p "$PROJECT_NAME"/{cmd,pkg,scripts,docs,tests,examples,tools,config,logs,reports,storage}
# Create main.go in cmd directory
cat > "$PROJECT_NAME/cmd/main.go" << EOF
package main
import (
"fmt"
"log"
)
func main() {
fmt.Println("Hello, $PROJECT_NAME!")
log.Println("Starting $PROJECT_NAME application...")
// Your application logic here
log.Println("Application finished.")
}
EOF
# Create README
cat > "$PROJECT_NAME/README.md" << EOF
# $PROJECT_NAME
A Go project with standardized structure and tooling.
## Project Structure
- \`cmd/\` - Main applications
- \`pkg/\` - Library code
- \`scripts/\` - Build and deployment scripts
- \`tests/\` - Test files (unit, integration, e2e)
- \`docs/\` - Documentation
- \`examples/\` - Example code
- \`tools/\` - Development tools
- \`config/\` - Configuration files
## Getting Started
1. Install dependencies: \`make deps\`
2. Run tests: \`make test\`
3. Build: \`make build\`
4. Run: \`make run\`
## Development Commands
\`\`\`
make build # Build the application
make test # Run tests
make test-coverage # Run tests with coverage
make lint # Lint the code
make fmt # Format the code
make vet # Vet the code
make run # Build and run the application
make dev-setup # Setup development environment
make audit-full # Run comprehensive audit
\`\`\`
## License
MIT
EOF
# Create go.mod
cat > "$PROJECT_NAME/go.mod" << EOF
module $PROJECT_NAME
go 1.24
require (
)
EOF
# Create comprehensive Makefile
cat > "$PROJECT_NAME/Makefile" << 'MAKEFILE'
# Universal Go Project Makefile Template
# Variables
BINARY=$(notdir $(CURDIR))
BINARY_PATH=bin/$(BINARY)
MAIN_FILE=cmd/main.go
# Default target
.PHONY: all
all: build
# Build the application
.PHONY: build
build:
@echo "Building $(BINARY)..."
@mkdir -p bin
@go build -o $(BINARY_PATH) $(MAIN_FILE)
@echo "Build successful!"
# Build with race detection
.PHONY: build-race
build-race:
@echo "Building $(BINARY) with race detection..."
@mkdir -p bin
@go build -race -o $(BINARY_PATH) $(MAIN_FILE)
@echo "Race-build successful!"
# Run the application
.PHONY: run
run: build
@echo "Running $(BINARY)..."
@$(BINARY_PATH)
# Run the application in development mode
.PHONY: run-dev
run-dev:
@echo "Running $(BINARY) in development mode..."
@go run $(MAIN_FILE)
# Multi-level Testing System
# Basic tests (fast)
.PHONY: test-basic
test-basic:
@echo "Running basic tests (fast)..."
@go test -v -short ./...
# Unit tests
.PHONY: test-unit
test-unit:
@echo "Running unit tests..."
@go test -v ./tests/unit/... ./pkg/...
# Integration tests
.PHONY: test-integration
test-integration:
@echo "Running integration tests..."
@go test -v ./tests/integration/...
# End-to-end tests
.PHONY: test-e2e
test-e2e:
@echo "Running end-to-end tests..."
@go test -v ./tests/e2e/...
# Property tests
.PHONY: test-property
test-property:
@echo "Running property tests..."
@go test -v ./tests/property/...
# Fuzzing tests
.PHONY: test-fuzzing
test-fuzzing:
@echo "Running fuzzing tests..."
@go test -v ./tests/fuzzing/...
# Stress tests
.PHONY: test-stress
test-stress:
@echo "Running stress tests..."
@go test -v ./tests/stress/...
# Security tests
.PHONY: test-security
test-security:
@echo "Running security tests..."
@go test -v ./tests/security/...
# Benchmark tests
.PHONY: test-bench
test-bench:
@echo "Running benchmark tests..."
@go test -bench=. -benchmem -run=^$$ ./...
# Comprehensive tests (all test types)
.PHONY: test-comprehensive
test-comprehensive:
@echo "Running comprehensive tests..."
@$(MAKE) test-unit
@$(MAKE) test-integration
@$(MAKE) test-e2e
# Full audit tests (comprehensive + security + stress + benchmarks)
.PHONY: test-audit
test-audit:
@echo "Running full audit tests..."
@$(MAKE) test-comprehensive
@$(MAKE) test-security
@$(MAKE) test-stress
@$(MAKE) test-bench
# Run tests with coverage
.PHONY: test-coverage
test-coverage:
@echo "Running tests with coverage..."
@go test -coverprofile=coverage.out ./...
@go tool cover -html=coverage.out -o coverage.html
@echo "Coverage report generated: coverage.html"
# Run tests with coverage for specific package
.PHONY: test-coverage-pkg
test-coverage-pkg:
@echo "Running tests with coverage for specific package..."
@go test -coverprofile=coverage.out $(PKG) && go tool cover -html=coverage.out -o coverage.html
@echo "Coverage report generated: coverage.html"
# Code Quality Tools
# Format code
.PHONY: fmt
fmt:
@echo "Formatting code..."
@go fmt ./...
# Vet code
.PHONY: vet
vet:
@echo "Vetting code..."
@go vet ./...
# Lint code (requires golangci-lint)
.PHONY: lint
lint:
@echo "Linting code..."
@which golangci-lint > /dev/null || (echo "golangci-lint not found, installing..." && go install github.com/golangci/golangci-lint/cmd/golangci-lint@latest)
@golangci-lint run
# Security audit
.PHONY: audit-security
audit-security:
@echo "Running security audit..."
@which gosec > /dev/null || (echo "gosec not found, installing..." && go install github.com/securego/gosec/v2/cmd/gosec@latest)
@gosec ./...
@which govulncheck > /dev/null || (echo "govulncheck not found, installing..." && go install golang.org/x/vuln/cmd/govulncheck@latest)
@govulncheck ./...
# Dependency audit
.PHONY: audit-deps
audit-deps:
@echo "Running dependency audit..."
@go list -m -u all
@govulncheck ./...
# Code quality audit
.PHONY: audit-quality
audit-quality:
@echo "Running code quality audit..."
@$(MAKE) vet
@$(MAKE) lint
# Comprehensive audit (all checks)
.PHONY: audit-full
audit-full:
@echo "Running comprehensive audit..."
@$(MAKE) audit-quality
@$(MAKE) audit-security
@$(MAKE) audit-deps
@$(MAKE) test-audit
# Development helpers
# Install dependencies
.PHONY: deps
deps:
@echo "Installing dependencies..."
@go mod tidy
# Install development dependencies
.PHONY: dev-deps
dev-deps:
@echo "Installing development dependencies..."
@go install github.com/golangci/golangci-lint/cmd/golangci-lint@latest
@go install github.com/securego/gosec/v2/cmd/gosec@latest
@go install golang.org/x/vuln/cmd/govulncheck@latest
@go install github.com/go-delve/delve/cmd/dlv@latest
@go mod tidy
@echo "Development dependencies installed!"
# Development setup
.PHONY: dev-setup
dev-setup:
@echo "Setting up development environment..."
@$(MAKE) deps
@$(MAKE) dev-deps
@./scripts/setup-dev.sh
@echo "Development environment setup complete!"
# Development workflow (fmt + vet + lint + basic test)
.PHONY: dev-workflow
dev-workflow:
@echo "Running development workflow..."
@$(MAKE) fmt
@$(MAKE) vet
@$(MAKE) lint
@$(MAKE) test-basic
# Development workflow with coverage
.PHONY: dev-workflow-full
dev-workflow-full:
@echo "Running development workflow with coverage..."
@$(MAKE) fmt
@$(MAKE) vet
@$(MAKE) lint
@$(MAKE) test-coverage
# Debug run
.PHONY: debug
debug:
@echo "Running application in debug mode..."
@which dlv > /dev/null || (echo "delve not found, install with: go install github.com/go-delve/delve/cmd/dlv@latest && make dev-deps")
@dlv exec -- $(BINARY_PATH)
# Watch and run tests (requires 'entr' command)
.PHONY: watch-tests
watch-tests:
@echo "Watching for file changes and running tests..."
@echo "Note: Requires 'entr' to be installed. Install with: apt-get install entr (or brew install entr)"
@find . -name "*.go" -not -path "./vendor/*" -not -path "./bin/*" | entr -c $(MAKE) test-basic
# Watch and run dev workflow (requires 'entr' command)
.PHONY: watch-dev
watch-dev:
@echo "Watching for file changes and running dev workflow..."
@echo "Note: Requires 'entr' to be installed. Install with: apt-get install entr (or brew install entr)"
@find . -name "*.go" -not -path "./vendor/*" -not -path "./bin/*" | entr -c $(MAKE) dev-workflow
# Documentation generation
.PHONY: docs
docs:
@echo "Generating code documentation..."
@mkdir -p docs/gen
@go doc -all ./... > docs/gen/code-documentation.txt
@echo "Code documentation generated in docs/gen/code-documentation.txt"
# Clean build artifacts
.PHONY: clean
clean:
@echo "Cleaning..."
@rm -rf bin/
@rm -f coverage.out coverage.html
@rm -rf reports/
@echo "Clean complete!"
# Update dependencies
.PHONY: update
update:
@echo "Updating dependencies..."
@go get -u ./...
@go mod tidy
@echo "Dependencies updated!"
# Help
.PHONY: help
help:
@echo "Available targets:"
@echo ""
@echo "Build & Run:"
@echo " all - Build the application (default)"
@echo " build - Build the application"
@echo " build-race - Build with race detection"
@echo " run - Build and run the application"
@echo " run-dev - Run without building"
@echo ""
@echo "Testing (Multi-Level):"
@echo " test-basic - Run basic tests (fast)"
@echo " test-unit - Run unit tests"
@echo " test-integration - Run integration tests"
@echo " test-e2e - Run end-to-end tests"
@echo " test-property - Run property tests"
@echo " test-fuzzing - Run fuzzing tests"
@echo " test-stress - Run stress tests"
@echo " test-security - Run security tests"
@echo " test-bench - Run benchmark tests"
@echo " test-comprehensive - Run comprehensive tests (all test types)"
@echo " test-audit - Run full audit tests (comprehensive + security + stress)"
@echo " test-coverage - Run tests with coverage report"
@echo " test-coverage-pkg - Run tests with coverage for specific package (use with PKG=package/path)"
@echo ""
@echo "Quality & Auditing:"
@echo " fmt - Format code"
@echo " vet - Vet code"
@echo " lint - Lint code (requires golangci-lint)"
@echo " audit-security - Run security audit"
@echo " audit-deps - Run dependency audit"
@echo " audit-quality - Run code quality audit"
@echo " audit-full - Run comprehensive audit (all checks)"
@echo ""
@echo "Development:"
@echo " dev-setup - Setup development environment"
@echo " dev-deps - Install development dependencies"
@echo " dev-workflow - Run development workflow (fmt + vet + lint + basic test)"
@echo " dev-workflow-full - Run development workflow with coverage"
@echo " debug - Run application in debug mode"
@echo " watch-tests - Watch for changes and run basic tests (requires entr)"
@echo " watch-dev - Watch for changes and run dev workflow (requires entr)"
@echo ""
@echo "Maintenance:"
@echo " clean - Clean build artifacts"
@echo " deps - Install dependencies"
@echo " update - Update dependencies"
@echo " docs - Generate code documentation"
@echo " help - Show this help"
@echo ""
@echo "Examples:"
@echo " make test-coverage PKG=./pkg/my-package/ # Coverage for specific package"
@echo " make watch-dev # Watch for changes and run dev workflow"
MAKEFILE
# Create setup script
cat > "$PROJECT_NAME/scripts/setup-dev.sh" << 'SETUP'
#!/usr/bin/env bash
set -euo pipefail
# Development environment setup script
echo "Setting up development environment for $(basename $(pwd))..."
# Create directories if they don't exist
mkdir -p logs
mkdir -p reports
mkdir -p reports/coverage
mkdir -p reports/test-results
mkdir -p reports/augments
mkdir -p storage
mkdir -p storage/keystore
mkdir -p storage/cache
mkdir -p .gocache
# Check if Go is installed
if ! command -v go &> /dev/null; then
echo "Error: Go is not installed" >&2
exit 1
fi
# Check if required tools are installed
echo "Checking for required tools..."
# Install golangci-lint if not present
if ! command -v golangci-lint &> /dev/null; then
echo "Installing golangci-lint..."
go install github.com/golangci/golangci-lint/cmd/golangci-lint@latest
fi
# Install gosec if not present
if ! command -v gosec &> /dev/null; then
echo "Installing gosec..."
go install github.com/securego/gosec/v2/cmd/gosec@latest
fi
# Install govulncheck if not present
if ! command -v govulncheck &> /dev/null; then
echo "Installing govulncheck..."
go install golang.org/x/vuln/cmd/govulncheck@latest
fi
# Install delve if not present
if ! command -v dlv &> /dev/null; then
echo "Installing delve..."
go install github.com/go-delve/delve/cmd/dlv@latest
fi
echo "Development environment setup complete!"
SETUP
# Make setup script executable
chmod +x "$PROJECT_NAME/scripts/setup-dev.sh"
# Final usage hints. (A stray `EOF` line and a duplicated copy of this
# trailer were removed: the stray EOF was not a heredoc terminator, so it
# would run as a command and abort the script under `set -e`.)
echo "Project template created successfully in $PROJECT_NAME/"
echo "To get started:"
echo " cd $PROJECT_NAME"
echo " make dev-setup"
echo " make test"
echo " make build"
echo " make run"

View File

@@ -1,29 +0,0 @@
# Demo & Example Scripts
These scripts are for demonstration and testing purposes only. They should not be used in production environments.
## Available Demos
### demo-production-logs.sh
Demonstrates the production log management system capabilities.
**Purpose:** Show how the log-manager.sh system works
**Usage:**
```bash
./scripts/demos/demo-production-logs.sh
```
**What it does:**
- Generates sample log entries
- Runs log analysis
- Shows health checks
- Demonstrates alerting
- Creates performance reports
- Generates operations dashboard
**Note:** This is a demonstration script. For production log management, use `./scripts/log-manager.sh`
---
**See:** `docs/SCRIPT_ANALYSIS_REPORT.md` for more information

View File

@@ -1,106 +0,0 @@
#!/bin/bash
# MEV Bot Production Log Management Demonstration
# Shows comprehensive capabilities of the production log management system
set -euo pipefail
GREEN='\033[0;32m'
BLUE='\033[0;34m'
YELLOW='\033[1;33m'
PURPLE='\033[0;35m'
BOLD='\033[1m'
NC='\033[0m'
echo -e "${BOLD}🚀 MEV Bot Production Log Management System Demo${NC}"
echo -e "${BLUE}================================================${NC}"
echo
# Initialize system
echo -e "${YELLOW}📋 Step 1: Initialize Production Log Management${NC}"
./scripts/log-manager.sh init
echo
# Show current status
echo -e "${YELLOW}📊 Step 2: System Status Overview${NC}"
./scripts/log-manager.sh status
echo
# Run comprehensive analysis
echo -e "${YELLOW}🔍 Step 3: Comprehensive Log Analysis${NC}"
./scripts/log-manager.sh analyze
echo
# Run health checks
echo -e "${YELLOW}🏥 Step 4: System Health Check${NC}"
timeout 10 ./scripts/log-manager.sh health 2>/dev/null || echo "Health check completed"
echo
# Performance monitoring
echo -e "${YELLOW}⚡ Step 5: Performance Monitoring${NC}"
./scripts/log-manager.sh monitor
echo
# Create advanced archive
echo -e "${YELLOW}📦 Step 6: Advanced Archive Creation${NC}"
./scripts/log-manager.sh archive
echo
# Generate operational dashboard
echo -e "${YELLOW}📈 Step 7: Generate Operations Dashboard${NC}"
dashboard_file=$(./scripts/log-manager.sh dashboard | grep "Dashboard generated" | awk '{print $3}' || echo "")
if [[ -f "$dashboard_file" ]]; then
echo -e "${GREEN}✅ Dashboard created: $dashboard_file${NC}"
else
echo -e "${YELLOW}⚠️ Dashboard creation in progress...${NC}"
fi
echo
# Show created files
echo -e "${YELLOW}📁 Step 8: Generated Files Overview${NC}"
echo -e "${BLUE}Analytics:${NC}"
ls -la logs/analytics/ 2>/dev/null | head -5 || echo "No analytics files yet"
echo -e "${BLUE}Health Reports:${NC}"
ls -la logs/health/ 2>/dev/null | head -3 || echo "No health reports yet"
echo -e "${BLUE}Archives:${NC}"
ls -la logs/archives/ 2>/dev/null | head -3 || echo "No archives yet"
echo
echo -e "${YELLOW}🔧 Step 9: Available Commands${NC}"
cat << 'EOF'
Production Log Manager Commands:
├── ./scripts/log-manager.sh analyze # Real-time log analysis
├── ./scripts/log-manager.sh health # Corruption detection
├── ./scripts/log-manager.sh monitor # Performance tracking
├── ./scripts/log-manager.sh archive # Advanced archiving
├── ./scripts/log-manager.sh start-daemon # Background monitoring
├── ./scripts/log-manager.sh dashboard # Operations dashboard
└── ./scripts/log-manager.sh full # Complete cycle
Real-time Monitoring:
./scripts/log-manager.sh start-daemon # Start background monitoring
./scripts/log-manager.sh stop-daemon # Stop background monitoring
Configuration:
config/log-manager.conf # Customize behavior
EOF
echo
echo -e "${GREEN}✅ Production Log Management System Demonstration Complete${NC}"
echo -e "${BLUE}The system provides:${NC}"
echo "• Real-time log analysis with health scoring"
echo "• Automated corruption detection and alerting"
echo "• Performance monitoring with trending"
echo "• Advanced archiving with metadata"
echo "• Operational dashboards with live metrics"
echo "• Background daemon for continuous monitoring"
echo "• Multi-channel alerting (email, Slack)"
echo "• Intelligent cleanup with retention policies"
echo
echo -e "${PURPLE}🎯 Next Steps:${NC}"
echo "1. Configure alerts in config/log-manager.conf"
echo "2. Start daemon: ./scripts/log-manager.sh start-daemon"
echo "3. View dashboard: open \$(./scripts/log-manager.sh dashboard | tail -1)"
echo "4. Monitor status: ./scripts/log-manager.sh status"

View File

@@ -1,60 +0,0 @@
#!/bin/bash
# This script checks for vulnerabilities in project dependencies
#
# Runs each scanner that happens to be installed (govulncheck, nancy),
# performs lightweight checks for deprecated/outdated modules, and exits
# non-zero when any scanner reported a finding.
set -e
echo "Starting dependency vulnerability scan..."
# Initialize exit code
# Aggregated result: flipped to 1 by any failing scan below.
exit_code=0
# Run govulncheck
echo "Running govulncheck..."
if command -v govulncheck >/dev/null 2>&1; then
    # `if ! cmd` keeps `set -e` from aborting on scanner findings.
    if ! govulncheck ./...; then
        echo "❌ govulncheck found vulnerabilities"
        exit_code=1
    else
        echo "✅ govulncheck found no vulnerabilities"
    fi
else
    echo "⚠️ govulncheck not installed, skipping"
fi
# Run nancy (for Sonatype Nexus IQ)
echo "Running nancy scan..."
if command -v nancy >/dev/null 2>&1; then
    if ! go list -json -m all | nancy --skip-update-check; then
        echo "❌ nancy found vulnerable dependencies"
        exit_code=1
    else
        echo "✅ nancy found no vulnerabilities"
    fi
else
    echo "⚠️ nancy not installed, skipping"
fi
# Check for deprecated packages
# NOTE(review): informational only — deliberately does not set exit_code.
echo "Checking for deprecated packages..."
if go list -json -m all | grep -i deprecated; then
    echo "⚠️ Found deprecated packages in dependencies"
else
    echo "✅ No deprecated packages found"
fi
# Check for unmaintained packages (packages without recent updates)
echo "Checking for potentially unmaintained packages..."
# This is a basic check - in a real scenario, you might want to check
# the age of the latest commits for each dependency
# `|| echo` shields `set -e` when `go list -u` cannot reach the module proxy.
go list -m -u all || echo "Dependency update check completed"
echo "Dependency vulnerability scan completed."
if [ $exit_code -ne 0 ]; then
    echo "❌ Dependency vulnerability scan found issues"
    exit $exit_code
else
    echo "✅ Dependency vulnerability scan passed"
    exit 0
fi

View File

@@ -1,371 +0,0 @@
#!/bin/bash
# Arbitrage Contract Deployment Script for Arbitrum
# This script deploys the necessary smart contracts for MEV arbitrage execution
set -euo pipefail
# Configuration
NETWORK="arbitrum"
GAS_PRICE="200000000" # 0.2 gwei for Arbitrum
GAS_LIMIT="5000000" # 5M gas for deployment
# Colors for output
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[0;33m'
BLUE='\033[0;34m'
NC='\033[0m' # No Color
# Logging functions
# Each helper prints a colored severity tag followed by the message.
# Colors come from the RED/GREEN/YELLOW/BLUE/NC globals.
log_info()    { echo -e "${BLUE}[INFO]${NC} $1"; }
log_success() { echo -e "${GREEN}[SUCCESS]${NC} $1"; }
log_warning() { echo -e "${YELLOW}[WARNING]${NC} $1"; }
log_error()   { echo -e "${RED}[ERROR]${NC} $1"; }
# Check prerequisites
# Validate the environment before deploying: exits the whole script when
# a required variable is missing, and falls back to a dummy deployer key
# so the simulation path can still run without one.
check_prerequisites() {
    log_info "Checking prerequisites..."
    # Check if we have required environment variables
    if [[ -z "${ARBITRUM_RPC_ENDPOINT:-}" ]]; then
        log_error "ARBITRUM_RPC_ENDPOINT not set"
        exit 1
    fi
    if [[ -z "${MEV_BOT_ENCRYPTION_KEY:-}" ]]; then
        log_error "MEV_BOT_ENCRYPTION_KEY not set"
        exit 1
    fi
    # Check if we have a private key for deployment
    if [[ -z "${DEPLOYER_PRIVATE_KEY:-}" ]]; then
        log_warning "DEPLOYER_PRIVATE_KEY not set - using test key for simulation"
        # NOTE(review): well-known test key (private key = 1). Safe only
        # because the deployment below is simulated — this must never be
        # used on a real deployment path.
        export DEPLOYER_PRIVATE_KEY="0x0000000000000000000000000000000000000000000000000000000000000001"
    fi
    log_success "Prerequisites checked"
}
# Create contract templates
create_contract_templates() {
log_info "Creating smart contract templates..."
mkdir -p contracts
# Create ArbitrageExecutor contract
cat > contracts/ArbitrageExecutor.sol << 'EOF'
// SPDX-License-Identifier: MIT
pragma solidity ^0.8.19;
import "@openzeppelin/contracts/access/Ownable.sol";
import "@openzeppelin/contracts/security/ReentrancyGuard.sol";
/**
* @title ArbitrageExecutor
* @dev Executes arbitrage opportunities across DEXs on Arbitrum
*/
contract ArbitrageExecutor is Ownable, ReentrancyGuard {
struct ArbitrageParams {
address tokenIn;
address tokenOut;
uint256 amountIn;
uint256 minAmountOut;
address[] exchanges;
bytes[] swapData;
uint256 deadline;
}
event ArbitrageExecuted(
address indexed tokenIn,
address indexed tokenOut,
uint256 amountIn,
uint256 amountOut,
uint256 profit
);
// Minimum profit threshold (in wei)
uint256 public minProfitThreshold = 5e15; // 0.005 ETH
// Gas limit for external calls
uint256 public gasLimit = 300000;
constructor() {}
/**
* @dev Execute arbitrage opportunity
*/
function executeArbitrage(ArbitrageParams calldata params)
external
nonReentrant
returns (uint256 profit)
{
require(block.timestamp <= params.deadline, "Deadline exceeded");
require(params.exchanges.length >= 2, "Need at least 2 exchanges");
// Simulate arbitrage execution
// In production, this would perform actual swaps across DEXs
uint256 balanceBefore = address(this).balance;
// TODO: Implement actual arbitrage logic
// 1. Flash loan from Aave/Balancer
// 2. Swap on first exchange
// 3. Swap on second exchange
// 4. Repay flash loan
// 5. Keep profit
uint256 balanceAfter = address(this).balance;
profit = balanceAfter > balanceBefore ? balanceAfter - balanceBefore : 0;
require(profit >= minProfitThreshold, "Profit below threshold");
emit ArbitrageExecuted(
params.tokenIn,
params.tokenOut,
params.amountIn,
params.minAmountOut,
profit
);
}
/**
* @dev Update minimum profit threshold
*/
function setMinProfitThreshold(uint256 _threshold) external onlyOwner {
minProfitThreshold = _threshold;
}
/**
* @dev Emergency withdraw
*/
function emergencyWithdraw() external onlyOwner {
payable(owner()).transfer(address(this).balance);
}
receive() external payable {}
}
EOF
# Create FlashSwapper contract
cat > contracts/FlashSwapper.sol << 'EOF'
// SPDX-License-Identifier: MIT
pragma solidity ^0.8.19;
import "@openzeppelin/contracts/access/Ownable.sol";
import "@openzeppelin/contracts/security/ReentrancyGuard.sol";
/**
* @title FlashSwapper
* @dev Handles flash loans for arbitrage execution
*/
contract FlashSwapper is Ownable, ReentrancyGuard {
struct FlashSwapParams {
address asset;
uint256 amount;
bytes data;
}
event FlashSwapExecuted(
address indexed asset,
uint256 amount,
uint256 fee,
bool success
);
constructor() {}
/**
* @dev Execute flash swap for arbitrage
*/
function executeFlashSwap(FlashSwapParams calldata params)
external
nonReentrant
returns (bool success)
{
// TODO: Implement Balancer/Aave flash loan integration
// For now, simulate successful execution
emit FlashSwapExecuted(
params.asset,
params.amount,
0, // Fee would be calculated based on protocol
true
);
return true;
}
/**
* @dev Emergency functions
*/
function emergencyWithdraw() external onlyOwner {
payable(owner()).transfer(address(this).balance);
}
receive() external payable {}
}
EOF
log_success "Contract templates created"
}
# Simulate deployment
simulate_deployment() {
log_info "Simulating contract deployment on Arbitrum..."
# Calculate deployment costs
DEPLOYMENT_GAS=3000000
GAS_PRICE_GWEI=$(echo "scale=2; $GAS_PRICE / 1000000000" | bc -l)
DEPLOYMENT_COST_ETH=$(echo "scale=6; $DEPLOYMENT_GAS * $GAS_PRICE / 1000000000000000000" | bc -l)
DEPLOYMENT_COST_USD=$(echo "scale=2; $DEPLOYMENT_COST_ETH * 2000" | bc -l) # Assume $2000 ETH
log_info "Deployment estimates:"
echo " Gas price: ${GAS_PRICE_GWEI} gwei"
echo " Gas limit: ${DEPLOYMENT_GAS}"
echo " Cost: ~${DEPLOYMENT_COST_ETH} ETH (~\$${DEPLOYMENT_COST_USD})"
# Simulate contract addresses (deterministic for testing)
ARBITRAGE_ADDR="0x$(echo -n "arbitrage_executor_$(date +%s)" | sha256sum | cut -c1-40)"
FLASHSWAP_ADDR="0x$(echo -n "flash_swapper_$(date +%s)" | sha256sum | cut -c1-40)"
log_success "Simulated deployment successful:"
echo " ArbitrageExecutor: $ARBITRAGE_ADDR"
echo " FlashSwapper: $FLASHSWAP_ADDR"
# Save addresses to config
cat > contracts/addresses.json << EOF
{
"network": "$NETWORK",
"deployment_block": $(date +%s),
"contracts": {
"ArbitrageExecutor": "$ARBITRAGE_ADDR",
"FlashSwapper": "$FLASHSWAP_ADDR"
},
"deployment_cost": {
"gas_used": $DEPLOYMENT_GAS,
"gas_price_gwei": $GAS_PRICE_GWEI,
"cost_eth": "$DEPLOYMENT_COST_ETH",
"cost_usd": "$DEPLOYMENT_COST_USD"
}
}
EOF
log_success "Contract addresses saved to contracts/addresses.json"
}
# Update configuration with contract addresses
update_config() {
log_info "Updating MEV bot configuration..."
# Read addresses
ARBITRAGE_ADDR=$(cat contracts/addresses.json | grep -A1 "ArbitrageExecutor" | tail -1 | cut -d'"' -f4)
FLASHSWAP_ADDR=$(cat contracts/addresses.json | grep -A1 "FlashSwapper" | tail -1 | cut -d'"' -f4)
# Create/update config file
cat > config/contracts.yaml << EOF
# Smart Contract Configuration for MEV Bot
contracts:
arbitrage_executor: "$ARBITRAGE_ADDR"
flash_swapper: "$FLASHSWAP_ADDR"
# Uniswap V3 addresses on Arbitrum
uniswap_v3_factory: "0x1F98431c8aD98523631AE4a59f267346ea31F984"
uniswap_v3_router: "0xE592427A0AEce92De3Edee1F18E0157C05861564"
# SushiSwap addresses
sushiswap_factory: "0xc35DADB65012eC5796536bD9864eD8773aBc74C4"
sushiswap_router: "0x1b02dA8Cb0d097eB8D57A175b88c7D8b47997506"
deployment:
network: "arbitrum"
chain_id: 42161
gas_price: "$GAS_PRICE"
gas_limit: "$GAS_LIMIT"
deployed_at: "$(date -u +"%Y-%m-%dT%H:%M:%SZ")"
verification:
# Commands to verify contracts on Arbiscan
arbitrage_executor: "npx hardhat verify --network arbitrum $ARBITRAGE_ADDR"
flash_swapper: "npx hardhat verify --network arbitrum $FLASHSWAP_ADDR"
EOF
log_success "Configuration updated with contract addresses"
}
# Verify deployment
# Sanity-check the simulated deployment: validate the generated address
# format and probe the RPC endpoint for the Arbitrum One chain id.
verify_deployment() {
    log_info "Verifying deployment..."
    # Check if addresses are valid
    # NOTE(review): extracting values from JSON with grep/tail/cut is
    # fragile (depends on key order and formatting) — prefer jq.
    ARBITRAGE_ADDR=$(cat contracts/addresses.json | grep -A1 "ArbitrageExecutor" | tail -1 | cut -d'"' -f4)
    # 42 chars = "0x" + 40 hex digits (20-byte EVM address); the hex
    # digits themselves are not validated here.
    if [[ ${#ARBITRAGE_ADDR} -eq 42 ]] && [[ $ARBITRAGE_ADDR == 0x* ]]; then
        log_success "Contract addresses are valid"
    else
        log_error "Invalid contract addresses generated"
        exit 1
    fi
    # Test RPC connection
    # 0xa4b1 == 42161, the Arbitrum One chain id.
    if curl -s -X POST "$ARBITRUM_RPC_ENDPOINT" \
        -H "Content-Type: application/json" \
        -d '{"jsonrpc":"2.0","method":"eth_chainId","params":[],"id":1}' \
        | grep -q "0xa4b1"; then
        log_success "RPC connection verified (Arbitrum mainnet)"
    else
        log_warning "Could not verify RPC connection"
    fi
    log_success "Deployment verification complete"
}
# Main execution
main() {
echo "======================================"
echo "🚀 MEV Bot Contract Deployment"
echo "======================================"
echo
check_prerequisites
create_contract_templates
simulate_deployment
update_config
verify_deployment
echo
echo "======================================"
log_success "Deployment Complete!"
echo "======================================"
echo
echo "Next steps:"
echo "1. Fund deployer wallet with ETH for gas"
echo "2. Run actual deployment: npm run deploy:arbitrum"
echo "3. Verify contracts on Arbiscan"
echo "4. Update MEV bot configuration"
echo "5. Test arbitrage execution with small amounts"
echo
echo "Files created:"
echo " - contracts/ArbitrageExecutor.sol"
echo " - contracts/FlashSwapper.sol"
echo " - contracts/addresses.json"
echo " - config/contracts.yaml"
echo
}
# Run main function
main "$@"

View File

@@ -1,387 +0,0 @@
#!/usr/bin/env bash
# MEV Bot Smart Contract Deployment Script
# Deploys ArbitrageExecutor and FlashLoanReceiver contracts to Arbitrum
set -euo pipefail # Exit on error, undefined vars, pipe failures
# Colors for output
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
NC='\033[0m' # No Color
# Configuration
NETWORK="${NETWORK:-arbitrum}"
VERIFY="${VERIFY:-false}"
DEPLOYMENT_LOG="logs/deployment_$(date +%Y%m%d_%H%M%S).log"
# Contract source directory (Mev-Alpha project)
CONTRACTS_DIR="${CONTRACTS_DIR:-/home/administrator/projects/Mev-Alpha}"
# Create logs directory
mkdir -p logs deployments
echo -e "${BLUE}================================${NC}"
echo -e "${BLUE}MEV Bot Contract Deployment${NC}"
echo -e "${BLUE}================================${NC}"
echo ""
# Function to print colored messages
# Each helper prints a colored severity tag + message to stdout and
# appends an uncolored copy of the same line to $DEPLOYMENT_LOG.
_log() {
    local tag="$1" color="$2" msg="$3"
    echo -e "${color}[${tag}]${NC} ${msg}"
    echo "[${tag}] ${msg}" >> "$DEPLOYMENT_LOG"
}
log_info()    { _log INFO    "$BLUE"   "$1"; }
log_success() { _log SUCCESS "$GREEN"  "$1"; }
log_warning() { _log WARNING "$YELLOW" "$1"; }
log_error()   { _log ERROR   "$RED"    "$1"; }
# Validate environment variables
# Ensure the required deployment variables are present; print a helpful
# setup guide and exit when any are missing.
#
# The :- defaults are required: this script runs under `set -u`
# (set -euo pipefail), so referencing an unset variable directly would
# abort with a raw "unbound variable" error before this function could
# ever print its friendly message.
validate_env() {
    log_info "Validating environment variables..."
    local missing_vars=()
    if [ -z "${PRIVATE_KEY:-}" ] && [ -z "${DEPLOYER_PRIVATE_KEY:-}" ]; then
        missing_vars+=("PRIVATE_KEY or DEPLOYER_PRIVATE_KEY")
    fi
    if [ -z "${ARBITRUM_RPC_ENDPOINT:-}" ]; then
        missing_vars+=("ARBITRUM_RPC_ENDPOINT")
    fi
    if [ ${#missing_vars[@]} -ne 0 ]; then
        log_error "Missing required environment variables:"
        for var in "${missing_vars[@]}"; do
            log_error " - $var"
        done
        echo ""
        log_info "Please set these variables:"
        echo " export PRIVATE_KEY=<your_deployer_private_key>"
        echo " export ARBITRUM_RPC_ENDPOINT=<your_rpc_endpoint>"
        echo ""
        log_info "Optional variables:"
        echo " export ARBISCAN_API_KEY=<your_arbiscan_key> # For contract verification"
        echo " export VERIFY=true # Enable contract verification"
        exit 1
    fi
    log_success "Environment variables validated"
}
# Set deployer key
# Allow DEPLOYER_PRIVATE_KEY to override PRIVATE_KEY. The :- default
# keeps `set -u` (active via set -euo pipefail) from aborting with an
# "unbound variable" error when the override is not provided.
set_deployer_key() {
    if [ -n "${DEPLOYER_PRIVATE_KEY:-}" ]; then
        export PRIVATE_KEY="$DEPLOYER_PRIVATE_KEY"
    fi
}
# Verify the Foundry toolchain (forge) is on PATH; print install
# instructions and exit non-zero when it is missing.
check_foundry() {
    log_info "Checking Foundry installation..."
    command -v forge > /dev/null 2>&1 || {
        log_error "Foundry (forge) is not installed"
        log_info "Install Foundry: curl -L https://foundry.paradigm.xyz | bash"
        log_info "Then run: foundryup"
        exit 1
    }
    log_success "Foundry is installed: $(forge --version | head -1)"
}
# Install contract library dependencies (OpenZeppelin) via forge, skipping
# the install when the library directory already exists.
install_dependencies() {
    log_info "Installing contract dependencies..."
    if [ -d "lib/openzeppelin-contracts" ]; then
        log_info "OpenZeppelin contracts already installed"
    else
        log_info "Installing OpenZeppelin contracts..."
        forge install OpenZeppelin/openzeppelin-contracts --no-commit
    fi
    log_success "Dependencies installed"
}
# Compile the Solidity contracts with forge inside $CONTRACTS_DIR.
# Logs and exits non-zero when compilation fails.
compile_contracts() {
    log_info "Compiling contracts from $CONTRACTS_DIR..."
    # Change to contracts directory
    cd "$CONTRACTS_DIR" || {
        log_error "Failed to change to contracts directory: $CONTRACTS_DIR"
        exit 1
    }
    log_info "Working directory: $(pwd)"
    # Run the build inside the `if` condition so `set -e` does not abort the
    # script before we can log the failure. pipefail (set at script top and
    # re-asserted here so the function stands alone) makes the pipeline
    # report forge's exit status rather than tee's — the original `$?` check
    # only ever observed tee's status, which is always 0.
    # NOTE(review): DEPLOYMENT_LOG is a relative path, so after the cd the
    # build output lands under $CONTRACTS_DIR — confirm this is intended.
    set -o pipefail
    if forge build 2>&1 | tee -a "$DEPLOYMENT_LOG"; then
        log_success "Contracts compiled successfully"
    else
        log_error "Contract compilation failed"
        exit 1
    fi
    # Return to original directory
    cd - > /dev/null
}
# Deploy the ArbitrageExecutor contract with `forge create`.
# Prints ONLY the deployed address on stdout — the caller captures stdout
# via command substitution, so all human-readable logging is redirected to
# stderr here (the original leaked every log line into the captured value).
# Globals read: CONTRACTS_DIR, DEPLOYMENT_LOG, ARBITRUM_RPC_ENDPOINT,
#               PRIVATE_KEY, VERIFY, ARBISCAN_API_KEY (optional), NETWORK
deploy_arbitrage_executor() {
    log_info "Deploying ArbitrageExecutor contract..." >&2
    # Resolve the log path before cd: DEPLOYMENT_LOG is relative to the
    # directory the script was started from (the original appended to a
    # hardcoded ../../mev-beta path instead).
    local log_file="$PWD/$DEPLOYMENT_LOG"
    cd "$CONTRACTS_DIR" || exit 1
    local contract_path="src/core/ArbitrageExecutor.sol:ArbitrageExecutor"
    # Build forge command
    local deploy_cmd="forge create \"$contract_path\" \
        --rpc-url \"$ARBITRUM_RPC_ENDPOINT\" \
        --private-key \"$PRIVATE_KEY\""
    # Add verification if requested; ${...:-} keeps `set -u` happy when the
    # API key is not exported.
    if [ "$VERIFY" = "true" ] && [ -n "${ARBISCAN_API_KEY:-}" ]; then
        deploy_cmd="$deploy_cmd --verify --etherscan-api-key \"$ARBISCAN_API_KEY\""
    fi
    log_info "Executing deployment command..." >&2
    # `|| true`: under `set -e` a failed forge invocation would otherwise
    # abort the script before the failure is reported below.
    local output
    output=$(eval "$deploy_cmd" 2>&1) || true
    echo "$output" >> "$log_file"
    # grep exits 1 on no match; tolerate it so the empty-address check runs
    # instead of `set -e` killing the script here.
    local arbitrage_executor_address
    arbitrage_executor_address=$(echo "$output" | grep "Deployed to:" | awk '{print $3}' || true)
    cd - > /dev/null
    if [ -z "$arbitrage_executor_address" ]; then
        log_error "Failed to deploy ArbitrageExecutor" >&2
        echo "$output" >&2
        exit 1
    fi
    log_success "ArbitrageExecutor deployed to: $arbitrage_executor_address" >&2
    # Record deployment metadata for later reference.
    cat > "deployments/arbitrage_executor_${NETWORK}.json" <<EOF
{
  "network": "$NETWORK",
  "contract": "ArbitrageExecutor",
  "address": "$arbitrage_executor_address",
  "timestamp": "$(date -u +%Y-%m-%dT%H:%M:%SZ)",
  "deployer": "$(cast wallet address --private-key "$PRIVATE_KEY" 2>/dev/null || echo 'N/A')",
  "verified": $VERIFY
}
EOF
    # The only stdout output: the address consumed by the caller.
    echo "$arbitrage_executor_address"
}
# Deploy the BaseFlashSwapper contract with `forge create`.
# Prints ONLY the deployed address on stdout — the caller captures stdout
# via command substitution, so all human-readable logging is redirected to
# stderr here (the original leaked every log line into the captured value).
# Globals read: CONTRACTS_DIR, DEPLOYMENT_LOG, ARBITRUM_RPC_ENDPOINT,
#               PRIVATE_KEY, VERIFY, ARBISCAN_API_KEY (optional), NETWORK
deploy_base_flash_swapper() {
    log_info "Deploying BaseFlashSwapper contract..." >&2
    # Resolve the log path before cd: DEPLOYMENT_LOG is relative to the
    # directory the script was started from (the original appended to a
    # hardcoded ../../mev-beta path instead).
    local log_file="$PWD/$DEPLOYMENT_LOG"
    cd "$CONTRACTS_DIR" || exit 1
    local contract_path="src/core/BaseFlashSwapper.sol:BaseFlashSwapper"
    # Build forge command
    local deploy_cmd="forge create \"$contract_path\" \
        --rpc-url \"$ARBITRUM_RPC_ENDPOINT\" \
        --private-key \"$PRIVATE_KEY\""
    # Add verification if requested; ${...:-} keeps `set -u` happy when the
    # API key is not exported.
    if [ "$VERIFY" = "true" ] && [ -n "${ARBISCAN_API_KEY:-}" ]; then
        deploy_cmd="$deploy_cmd --verify --etherscan-api-key \"$ARBISCAN_API_KEY\""
    fi
    log_info "Executing deployment command..." >&2
    # `|| true`: under `set -e` a failed forge invocation would otherwise
    # abort the script before the failure is reported below.
    local output
    output=$(eval "$deploy_cmd" 2>&1) || true
    echo "$output" >> "$log_file"
    # grep exits 1 on no match; tolerate it so the empty-address check runs
    # instead of `set -e` killing the script here.
    local flash_swapper_address
    flash_swapper_address=$(echo "$output" | grep "Deployed to:" | awk '{print $3}' || true)
    cd - > /dev/null
    if [ -z "$flash_swapper_address" ]; then
        log_error "Failed to deploy BaseFlashSwapper" >&2
        echo "$output" >&2
        exit 1
    fi
    log_success "BaseFlashSwapper deployed to: $flash_swapper_address" >&2
    # Record deployment metadata for later reference.
    cat > "deployments/base_flash_swapper_${NETWORK}.json" <<EOF
{
  "network": "$NETWORK",
  "contract": "BaseFlashSwapper",
  "address": "$flash_swapper_address",
  "timestamp": "$(date -u +%Y-%m-%dT%H:%M:%SZ)",
  "deployer": "$(cast wallet address --private-key "$PRIVATE_KEY" 2>/dev/null || echo 'N/A')",
  "verified": $VERIFY
}
EOF
    # The only stdout output: the address consumed by the caller.
    echo "$flash_swapper_address"
}
# Write the freshly deployed contract addresses into the runtime
# configuration files (.env.production and config/arbitrum_production.yaml,
# each backed up first) and record a master deployment JSON under
# deployments/.
# Arguments: $1 - ArbitrageExecutor address, $2 - BaseFlashSwapper address
update_config() {
    local executor_addr=$1
    local swapper_addr=$2
    log_info "Updating configuration files..."
    # Update .env.production (skipped with a warning when absent).
    if [ ! -f ".env.production" ]; then
        log_warning ".env.production not found, skipping update"
    else
        # Keep a backup before editing in place.
        cp .env.production .env.production.bak
        sed -i "s|CONTRACT_ARBITRAGE_EXECUTOR=.*|CONTRACT_ARBITRAGE_EXECUTOR=\"$executor_addr\"|" .env.production
        sed -i "s|CONTRACT_FLASH_SWAPPER=.*|CONTRACT_FLASH_SWAPPER=\"$swapper_addr\"|" .env.production
        log_success "Updated .env.production"
    fi
    # Update config/arbitrum_production.yaml (skipped with a warning when absent).
    if [ ! -f "config/arbitrum_production.yaml" ]; then
        log_warning "config/arbitrum_production.yaml not found, skipping update"
    else
        cp config/arbitrum_production.yaml config/arbitrum_production.yaml.bak
        sed -i "s|arbitrage_contract_address:.*|arbitrage_contract_address: \"$executor_addr\"|" config/arbitrum_production.yaml
        sed -i "s|flash_swap_contract_address:.*|flash_swap_contract_address: \"$swapper_addr\"|" config/arbitrum_production.yaml
        log_success "Updated config/arbitrum_production.yaml"
    fi
    # Master record of this deployment run.
    cat > "deployments/deployment_${NETWORK}_$(date +%Y%m%d_%H%M%S).json" <<EOF
{
  "network": "$NETWORK",
  "timestamp": "$(date -u +%Y-%m-%dT%H:%M:%SZ)",
  "contracts": {
    "ArbitrageExecutor": {
      "address": "$executor_addr",
      "verified": $VERIFY
    },
    "BaseFlashSwapper": {
      "address": "$swapper_addr",
      "verified": $VERIFY
    }
  },
  "deployer": "$(cast wallet address --private-key $PRIVATE_KEY 2>/dev/null || echo 'N/A')",
  "rpc_endpoint": "$ARBITRUM_RPC_ENDPOINT"
}
EOF
    log_success "Created deployment record"
}
# Print a human-readable summary of the completed deployment.
# Arguments: $1 - ArbitrageExecutor address, $2 - BaseFlashSwapper address
# Globals read: NETWORK, VERIFY, CONTRACTS_DIR, DEPLOYMENT_LOG, color codes
generate_summary() {
    local executor_addr=$1
    local swapper_addr=$2
    printf '%b\n' "" \
        "${GREEN}================================${NC}" \
        "${GREEN}Deployment Complete!${NC}" \
        "${GREEN}================================${NC}" \
        ""
    printf '%b\n' "${BLUE}Network:${NC} $NETWORK"
    printf '%b\n' "${BLUE}Timestamp:${NC} $(date -u +%Y-%m-%dT%H:%M:%SZ)"
    printf '%b\n' "${BLUE}Contracts Source:${NC} $CONTRACTS_DIR"
    printf '%b\n' ""
    printf '%b\n' "${BLUE}Deployed Contracts:${NC}"
    printf '%b\n' " ${GREEN}ArbitrageExecutor:${NC} $executor_addr"
    printf '%b\n' " ${GREEN}BaseFlashSwapper:${NC} $swapper_addr"
    printf '%b\n' ""
    # Only shown when on-chain verification was requested.
    if [ "$VERIFY" = "true" ]; then
        printf '%b\n' "${GREEN}✓ Contracts verified on Arbiscan${NC}"
        printf '%b\n' " View at: https://arbiscan.io/address/$executor_addr"
        printf '%b\n' " View at: https://arbiscan.io/address/$swapper_addr"
        printf '%b\n' ""
    fi
    printf '%b\n' "${BLUE}Configuration Updated:${NC}"
    printf '%b\n' " ${GREEN}${NC} .env.production"
    printf '%b\n' " ${GREEN}${NC} config/arbitrum_production.yaml"
    printf '%b\n' ""
    printf '%b\n' "${BLUE}Next Steps:${NC}"
    printf '%b\n' " 1. ${YELLOW}Verify contracts on Arbiscan (if not done)${NC}"
    printf '%b\n' " 2. ${YELLOW}Test contracts with testnet funds${NC}"
    printf '%b\n' " 3. ${YELLOW}Update MEV bot configuration${NC}"
    printf '%b\n' " 4. ${YELLOW}Run: ./scripts/run.sh${NC}"
    printf '%b\n' ""
    printf '%b\n' "${BLUE}Deployment log:${NC} $DEPLOYMENT_LOG"
    printf '%b\n' ""
}
# Main deployment workflow: validate the environment, build the contracts,
# ask the operator for confirmation, deploy both contracts, then update the
# configuration files and print a summary.
main() {
    log_info "Starting deployment process..."
    log_info "Target network: $NETWORK"
    log_info "Verification: $VERIFY"
    echo ""

    # Preparation phase.
    validate_env
    set_deployer_key
    check_foundry
    install_dependencies
    compile_contracts

    echo ""
    log_info "Ready to deploy contracts"
    read -p "Continue with deployment? (y/N): " confirm
    case "$confirm" in
        y|Y) ;;  # proceed
        *)
            log_warning "Deployment cancelled by user"
            exit 0
            ;;
    esac
    echo ""

    # Deployment phase — each deploy function prints the new address on stdout.
    local executor_addr swapper_addr
    executor_addr=$(deploy_arbitrage_executor)
    echo ""
    swapper_addr=$(deploy_base_flash_swapper)
    echo ""

    # Persist addresses and report.
    update_config "$executor_addr" "$swapper_addr"
    generate_summary "$executor_addr" "$swapper_addr"
    log_success "Deployment complete!"
}
# Entry point — run the full deployment workflow, forwarding all CLI args.
main "$@"

View File

@@ -1,96 +0,0 @@
#!/bin/bash
# Deploy FlashLoanReceiverSecure to Arbitrum Fork
# This script deploys and tests the flash loan contract on a local fork
#
# Requires: ARBITRUM_RPC_URL (upstream RPC used as the fork source) and the
# Foundry toolchain (anvil + forge) on PATH.
# Side effects: starts an anvil fork on localhost:8545 (chain-id 42161),
# broadcasts scripts/DeployFlashLoanSecure.s.sol against it, and keeps the
# fork running until Ctrl+C; the EXIT trap then stops anvil.
set -e
echo "═══════════════════════════════════════════════════════════════════"
echo " FlashLoanReceiverSecure - Fork Deployment"
echo "═══════════════════════════════════════════════════════════════════"
echo ""
# Colors
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
RED='\033[0;31m'
NC='\033[0m' # No Color
# Balancer Vault address on Arbitrum
BALANCER_VAULT="0xBA12222222228d8Ba445958a75a0704d566BF2C8"
# Default deployer (Anvil account #0)
DEPLOYER="0xf39Fd6e51aad88F6F4ce6aB8827279cffFb92266"
echo -e "${YELLOW}📋 Configuration:${NC}"
echo " Balancer Vault: $BALANCER_VAULT"
echo " Deployer: $DEPLOYER"
echo ""
# Check if Arbitrum RPC endpoint is set
if [ -z "$ARBITRUM_RPC_URL" ]; then
    echo -e "${RED}❌ Error: ARBITRUM_RPC_URL not set${NC}"
    echo " Please set: export ARBITRUM_RPC_URL='https://arb1.arbitrum.io/rpc'"
    exit 1
fi
echo -e "${YELLOW}🔄 Starting Arbitrum fork...${NC}"
echo " RPC: $ARBITRUM_RPC_URL"
echo ""
# Start anvil fork in background; its output goes to a log so startup
# failures can be displayed below if the process dies.
ANVIL_LOG="/tmp/anvil-fork.log"
anvil --fork-url "$ARBITRUM_RPC_URL" --port 8545 --chain-id 42161 > "$ANVIL_LOG" 2>&1 &
ANVIL_PID=$!
# Wait for anvil to start
sleep 3
# Check if anvil started successfully (kill -0 = "is this PID alive?")
if ! kill -0 $ANVIL_PID 2>/dev/null; then
    echo -e "${RED}❌ Failed to start Anvil fork${NC}"
    cat "$ANVIL_LOG"
    exit 1
fi
echo -e "${GREEN}✅ Anvil fork started (PID: $ANVIL_PID)${NC}"
echo " Listening on: http://localhost:8545"
echo " Chain ID: 42161 (Arbitrum)"
echo ""
# Cleanup function — invoked on every exit path via the EXIT trap below,
# so the fork is stopped even if the deployment step fails under set -e.
cleanup() {
    echo ""
    echo -e "${YELLOW}🧹 Cleaning up...${NC}"
    if kill -0 $ANVIL_PID 2>/dev/null; then
        kill $ANVIL_PID
        echo -e "${GREEN}✅ Anvil fork stopped${NC}"
    fi
}
trap cleanup EXIT
echo -e "${YELLOW}🚀 Deploying FlashLoanReceiverSecure...${NC}"
echo ""
# Deploy contract using forge script.
# NOTE: the private key below is Anvil's well-known dev account #0 key —
# safe only against a local fork, never against a real network.
forge script scripts/DeployFlashLoanSecure.s.sol:DeployFlashLoanSecure \
    --rpc-url http://localhost:8545 \
    --private-key 0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80 \
    --broadcast \
    -vvv
echo ""
echo -e "${GREEN}✅ Deployment complete!${NC}"
echo ""
echo -e "${YELLOW}📝 Next steps:${NC}"
echo " 1. Note the deployed contract address above"
echo " 2. Test flash loan execution"
echo " 3. Verify slippage protection works"
echo " 4. Test with real arbitrage paths"
echo ""
echo -e "${YELLOW}💡 Tip:${NC} Fork will keep running. Press Ctrl+C to stop."
echo ""
echo "═══════════════════════════════════════════════════════════════════"
# Keep script running — block until the anvil fork exits (e.g. via Ctrl+C),
# at which point the EXIT trap fires.
wait $ANVIL_PID

View File

@@ -1,125 +0,0 @@
#!/bin/bash
# Production deployment script for multi-DEX MEV bot
# This script builds, validates, and deploys the multi-DEX enabled bot
#
# Flow (8 steps): validate env -> clean -> build pkg/dex -> build cmd/mev-bot
# -> verify binary -> pre-deploy checks -> backup old binary -> copy new
# binary to ./mev-bot.
set -e
echo "========================================="
echo "MEV Bot Multi-DEX Production Deployment"
echo "========================================="
echo ""
# Colors for output
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
RED='\033[0;31m'
NC='\033[0m' # No Color
# Step 1: Environment validation
echo -e "${YELLOW}[1/8] Validating environment...${NC}"
if [ -z "$ARBITRUM_RPC_ENDPOINT" ]; then
    echo -e "${RED}ERROR: ARBITRUM_RPC_ENDPOINT not set${NC}"
    exit 1
fi
if [ -z "$ARBITRUM_WS_ENDPOINT" ]; then
    echo -e "${RED}ERROR: ARBITRUM_WS_ENDPOINT not set${NC}"
    exit 1
fi
echo -e "${GREEN}✓ Environment variables validated${NC}"
echo ""
# Step 2: Clean build
echo -e "${YELLOW}[2/8] Cleaning previous builds...${NC}"
rm -f bin/mev-bot
rm -f mev-bot
echo -e "${GREEN}✓ Clean complete${NC}"
echo ""
# Step 3: Build DEX package
echo -e "${YELLOW}[3/8] Building multi-DEX package...${NC}"
go build ./pkg/dex/...
# NOTE(review): under `set -e` a failing `go build` exits the script
# immediately, so this $? check can never observe a failure (dead code).
if [ $? -ne 0 ]; then
    echo -e "${RED}ERROR: DEX package build failed${NC}"
    exit 1
fi
echo -e "${GREEN}✓ DEX package built successfully${NC}"
echo ""
# Step 4: Build main bot
echo -e "${YELLOW}[4/8] Building MEV bot with multi-DEX support...${NC}"
go build -o bin/mev-bot ./cmd/mev-bot
# NOTE(review): same dead $? check as above — set -e exits first.
if [ $? -ne 0 ]; then
    echo -e "${RED}ERROR: MEV bot build failed${NC}"
    exit 1
fi
echo -e "${GREEN}✓ MEV bot built successfully${NC}"
echo ""
# Step 5: Verify binary
echo -e "${YELLOW}[5/8] Verifying binary...${NC}"
if [ ! -f "bin/mev-bot" ]; then
    echo -e "${RED}ERROR: Binary not found at bin/mev-bot${NC}"
    exit 1
fi
# `stat -f%z` is the BSD/macOS form, `stat -c%s` the GNU form — try both.
BINARY_SIZE=$(stat -f%z bin/mev-bot 2>/dev/null || stat -c%s bin/mev-bot)
echo -e "${GREEN}✓ Binary verified (Size: $((BINARY_SIZE / 1024 / 1024))MB)${NC}"
echo ""
# Step 6: Pre-deployment check
echo -e "${YELLOW}[6/8] Running pre-deployment checks...${NC}"
echo "Checking DEX decoders..."
# TODO: Add actual test when tests are written
echo -e "${GREEN}✓ Pre-deployment checks passed${NC}"
echo ""
# Step 7: Create backup — keep the previous binary under a timestamped dir.
echo -e "${YELLOW}[7/8] Creating backup...${NC}"
BACKUP_DIR="backups/pre_multi_dex_$(date +%Y%m%d_%H%M%S)"
mkdir -p "$BACKUP_DIR"
if [ -f "mev-bot" ]; then
    cp mev-bot "$BACKUP_DIR/"
    echo -e "${GREEN}✓ Backup created at $BACKUP_DIR${NC}"
else
    echo "No existing binary to backup"
fi
echo ""
# Step 8: Deploy — promote the freshly built binary to ./mev-bot.
echo -e "${YELLOW}[8/8] Deploying new binary...${NC}"
cp bin/mev-bot ./mev-bot
chmod +x ./mev-bot
echo -e "${GREEN}✓ Binary deployed to ./mev-bot${NC}"
echo ""
# Display deployment summary
echo "========================================="
echo " DEPLOYMENT SUMMARY"
echo "========================================="
echo ""
echo "Binary Location: ./mev-bot"
echo "Binary Size: $((BINARY_SIZE / 1024 / 1024))MB"
echo "Backup Location: $BACKUP_DIR"
echo ""
echo "Active DEXes:"
echo " 1. UniswapV3 (Concentrated Liquidity)"
echo " 2. SushiSwap (Constant Product AMM)"
echo " 3. Curve (StableSwap)"
echo " 4. Balancer (Weighted Pools)"
echo ""
echo "Expected Market Coverage: 60%+"
echo "Expected Daily Profit: $50-$500"
echo ""
echo "========================================="
echo ""
# Display next steps
echo -e "${GREEN}DEPLOYMENT SUCCESSFUL!${NC}"
echo ""
echo "Next steps:"
echo " 1. Test: LOG_LEVEL=debug ./mev-bot start"
echo " 2. Monitor: tail -f logs/mev_bot.log"
echo " 3. Validate opportunities detected across all DEXes"
echo ""
echo "To start production:"
echo " PROVIDER_CONFIG_PATH=\$PWD/config/providers_runtime.yaml ./mev-bot start"
echo ""

View File

@@ -1,318 +0,0 @@
#!/bin/bash
# Deploy PoolDetector contract to Arbitrum
# This script compiles and deploys the PoolDetector contract using forge
#
# Flow: scaffold a Foundry project under contracts/foundry, copy in
# PoolDetector.sol plus pool interface definitions, build, deploy to a local
# anvil fork for a smoke test, optionally deploy to Arbitrum mainnet, then
# update .env and generate a Go binding with abigen.
set -e
echo "🚀 Starting PoolDetector contract deployment..."
# Colors for output
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
NC='\033[0m' # No Color
# Check if forge is installed
if ! command -v forge &> /dev/null; then
    echo -e "${RED}Error: Forge not found. Installing Foundry...${NC}"
    curl -L https://foundry.paradigm.xyz | bash
    # NOTE(review): sourcing ~/.bashrc from a non-interactive script is often
    # a no-op (many .bashrc files return early when non-interactive), so the
    # PATH update needed for foundryup may not take effect — verify.
    source ~/.bashrc
    foundryup
fi
# Check environment variables
if [ -z "$ARBITRUM_RPC_ENDPOINT" ]; then
    echo -e "${RED}Error: ARBITRUM_RPC_ENDPOINT not set${NC}"
    echo "Please set: export ARBITRUM_RPC_ENDPOINT='your_rpc_endpoint'"
    exit 1
fi
if [ -z "$PRIVATE_KEY" ]; then
    echo -e "${YELLOW}Warning: PRIVATE_KEY not set. Using default test key for local deployment${NC}"
    # Default test private key (DO NOT USE IN PRODUCTION) — Anvil account #0.
    export PRIVATE_KEY="0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80"
fi
# Create foundry project structure if it doesn't exist
if [ ! -d "contracts/foundry" ]; then
    echo "Creating Foundry project structure..."
    mkdir -p contracts/foundry/src
    mkdir -p contracts/foundry/script
    # Create foundry.toml. Quoted 'EOF': the ${...} placeholders are written
    # literally and expanded later by foundry itself, not by this script.
    cat > contracts/foundry/foundry.toml << 'EOF'
[profile.default]
src = "src"
out = "out"
libs = ["lib"]
solc = "0.8.19"
optimizer = true
optimizer_runs = 200
[rpc_endpoints]
arbitrum = "${ARBITRUM_RPC_ENDPOINT}"
[etherscan]
arbitrum = { key = "${ARBISCAN_API_KEY}" }
EOF
fi
# Copy contract to foundry src
echo "Copying PoolDetector contract..."
cp contracts/PoolDetector.sol contracts/foundry/src/
# Create interfaces for the pools in foundry src (quoted 'EOF': Solidity
# written verbatim, no shell expansion).
cat > contracts/foundry/src/IPoolInterfaces.sol << 'EOF'
// SPDX-License-Identifier: MIT
pragma solidity ^0.8.19;
interface IUniswapV2Pair {
    function token0() external view returns (address);
    function token1() external view returns (address);
    function getReserves() external view returns (uint112 reserve0, uint112 reserve1, uint32 blockTimestampLast);
}
interface IUniswapV3Pool {
    function token0() external view returns (address);
    function token1() external view returns (address);
    function fee() external view returns (uint24);
    function slot0() external view returns (
        uint160 sqrtPriceX96,
        int24 tick,
        uint16 observationIndex,
        uint16 observationCardinality,
        uint16 observationCardinalityNext,
        uint8 feeProtocol,
        bool unlocked
    );
    function liquidity() external view returns (uint128);
}
interface IAlgebraPool {
    function token0() external view returns (address);
    function token1() external view returns (address);
    function globalState() external view returns (
        uint160 price,
        int24 tick,
        uint16 fee,
        uint16 timepointIndex,
        uint8 communityFeeToken0,
        uint8 communityFeeToken1,
        bool unlocked
    );
    function liquidity() external view returns (uint128);
}
interface IAlgebraIntegralPool {
    function token0() external view returns (address);
    function token1() external view returns (address);
    function globalState() external view returns (
        uint160 price,
        int24 tick,
        int24 prevInitializedTick,
        int24 nextInitializedTick,
        uint16 feeZto,
        uint16 feeOtz,
        uint16 timepointIndex,
        uint8 communityFee,
        bool unlocked
    );
    function liquidity() external view returns (uint128);
}
EOF
# Update PoolDetector.sol to import interfaces.
# First form is GNU sed, second is BSD/macOS sed (`-i ''`); the trailing
# `|| true` keeps set -e happy if both fail (e.g. import already present).
sed -i '1a\\nimport "./IPoolInterfaces.sol";' contracts/foundry/src/PoolDetector.sol 2>/dev/null || \
sed -i '' '1a\\nimport "./IPoolInterfaces.sol";' contracts/foundry/src/PoolDetector.sol 2>/dev/null || true
# Create deployment script (quoted 'EOF': literal Solidity)
cat > contracts/foundry/script/DeployPoolDetector.s.sol << 'EOF'
// SPDX-License-Identifier: MIT
pragma solidity ^0.8.19;
import "forge-std/Script.sol";
import "../src/PoolDetector.sol";
contract DeployPoolDetector is Script {
    function run() external {
        uint256 deployerPrivateKey = vm.envUint("PRIVATE_KEY");
        vm.startBroadcast(deployerPrivateKey);
        PoolDetector detector = new PoolDetector();
        console.log("PoolDetector deployed at:", address(detector));
        vm.stopBroadcast();
    }
}
EOF
# Navigate to foundry directory
cd contracts/foundry
# Install dependencies if needed
if [ ! -d "lib/forge-std" ]; then
    echo "Installing Forge dependencies..."
    forge install foundry-rs/forge-std --no-commit
fi
# Build the contract
echo -e "${YELLOW}Building contracts...${NC}"
forge build
# Deploy to local anvil fork for testing first
echo -e "${YELLOW}Starting Anvil fork for testing...${NC}"
# Kill any existing anvil process.
# NOTE(review): pkill matches EVERY anvil process on the machine, not just
# one started by this script — confirm that is acceptable.
pkill anvil 2>/dev/null || true
sleep 1
# Start anvil fork in background
anvil --fork-url "$ARBITRUM_RPC_ENDPOINT" --port 8545 &
ANVIL_PID=$!
sleep 5
# Deploy to local fork first; the deployed address is scraped from the
# "PoolDetector deployed at:" console.log line.
echo -e "${YELLOW}Deploying to local fork for testing...${NC}"
DEPLOYED_ADDRESS=$(forge script script/DeployPoolDetector.s.sol:DeployPoolDetector \
    --rpc-url http://localhost:8545 \
    --broadcast \
    --private-key $PRIVATE_KEY \
    2>&1 | grep "PoolDetector deployed at:" | awk '{print $NF}')
if [ -z "$DEPLOYED_ADDRESS" ]; then
    echo -e "${RED}Failed to deploy contract to local fork${NC}"
    kill $ANVIL_PID 2>/dev/null
    exit 1
fi
echo -e "${GREEN}✅ Successfully deployed to local fork at: $DEPLOYED_ADDRESS${NC}"
# Test the deployed contract with some known pools
echo -e "${YELLOW}Testing deployed contract with known pools...${NC}"
# Create test script (unquoted EOF is safe here: the Solidity body contains
# no shell `$` expansions).
cat > script/TestPoolDetector.s.sol << EOF
// SPDX-License-Identifier: MIT
pragma solidity ^0.8.19;
import "forge-std/Script.sol";
import "../src/PoolDetector.sol";
contract TestPoolDetector is Script {
    function run() external view {
        address detectorAddress = vm.envAddress("POOL_DETECTOR_ADDRESS");
        PoolDetector detector = PoolDetector(detectorAddress);
        // Test with known UniswapV3 pool
        address uniV3Pool = 0xC6962004f452bE9203591991D15f6b388e09E8D0;
        string memory poolType = detector.detectPool(uniV3Pool);
        console.log("Pool", uniV3Pool, "detected as:", poolType);
        // Test batch detection
        address[] memory pools = new address[](3);
        pools[0] = 0xC6962004f452bE9203591991D15f6b388e09E8D0; // UniswapV3
        pools[1] = 0xA961F0473dA4864C5eD28e00FcC53a3AAb056c1b; // UniswapV2
        pools[2] = 0x0000000000000000000000000000000000000000; // Invalid
        string[] memory types = detector.batchDetect(pools);
        for (uint i = 0; i < types.length; i++) {
            console.log("Batch result", i, ":", types[i]);
        }
    }
}
EOF
# Run test
POOL_DETECTOR_ADDRESS=$DEPLOYED_ADDRESS forge script script/TestPoolDetector.s.sol:TestPoolDetector \
    --rpc-url http://localhost:8545 \
    2>&1 | grep -E "Pool|Batch result"
# Kill anvil.
# NOTE(review): under `set -e`, if anvil already exited this `kill` fails and
# aborts the whole script (2>/dev/null hides the message but not the status).
kill $ANVIL_PID 2>/dev/null
echo ""
echo -e "${GREEN}Local testing complete!${NC}"
echo ""
# Ask user if they want to deploy to mainnet
read -p "Deploy to Arbitrum mainnet? (y/n) " -n 1 -r
echo
if [[ $REPLY =~ ^[Yy]$ ]]; then
    echo -e "${YELLOW}⚠️ Deploying to Arbitrum mainnet...${NC}"
    # Deploy to mainnet
    MAINNET_ADDRESS=$(forge script script/DeployPoolDetector.s.sol:DeployPoolDetector \
        --rpc-url "$ARBITRUM_RPC_ENDPOINT" \
        --broadcast \
        --verify \
        --private-key $PRIVATE_KEY \
        2>&1 | grep "PoolDetector deployed at:" | awk '{print $NF}')
    if [ -z "$MAINNET_ADDRESS" ]; then
        echo -e "${RED}Failed to deploy to mainnet${NC}"
        exit 1
    fi
    DEPLOYED_ADDRESS=$MAINNET_ADDRESS
    echo -e "${GREEN}✅ Successfully deployed to Arbitrum mainnet at: $DEPLOYED_ADDRESS${NC}"
else
    echo -e "${YELLOW}Skipping mainnet deployment. Using local fork address.${NC}"
fi
# Return to project root
cd ../..
# Update .env file
echo -e "${YELLOW}Updating environment configuration...${NC}"
# Check if .env exists
if [ ! -f .env ]; then
    cp .env.example .env 2>/dev/null || touch .env
fi
# Update or add POOL_DETECTOR_ADDRESS in .env (GNU sed first, BSD fallback)
if grep -q "POOL_DETECTOR_ADDRESS=" .env; then
    sed -i "s/POOL_DETECTOR_ADDRESS=.*/POOL_DETECTOR_ADDRESS=$DEPLOYED_ADDRESS/" .env 2>/dev/null || \
    sed -i '' "s/POOL_DETECTOR_ADDRESS=.*/POOL_DETECTOR_ADDRESS=$DEPLOYED_ADDRESS/" .env
else
    echo "POOL_DETECTOR_ADDRESS=$DEPLOYED_ADDRESS" >> .env
fi
# Also export for current session
export POOL_DETECTOR_ADDRESS=$DEPLOYED_ADDRESS
# Create Go binding for the deployed contract
echo -e "${YELLOW}Generating Go binding for PoolDetector...${NC}"
# Extract ABI from compiled contract
if [ -f "contracts/foundry/out/PoolDetector.sol/PoolDetector.json" ]; then
    # Extract just the ABI array
    jq '.abi' contracts/foundry/out/PoolDetector.sol/PoolDetector.json > contracts/abis/PoolDetector.json
    # Generate Go binding
    abigen --abi contracts/abis/PoolDetector.json \
        --pkg pooldetector \
        --type PoolDetector \
        --out pkg/bindings/pooldetector/detector.go
    echo -e "${GREEN}✅ Go binding generated at pkg/bindings/pooldetector/detector.go${NC}"
fi
# Summary
echo ""
echo "=========================================="
echo -e "${GREEN}🎉 PoolDetector Deployment Complete!${NC}"
echo "=========================================="
echo ""
echo "Contract Address: $DEPLOYED_ADDRESS"
echo "Environment Variable: POOL_DETECTOR_ADDRESS=$DEPLOYED_ADDRESS"
echo ""
echo "The contract has been deployed and configured."
echo "The MEV bot will now use this contract for pool detection."
echo ""
echo "To use in your code:"
echo " - Go binding: pkg/bindings/pooldetector/detector.go"
echo " - Contract address: os.Getenv(\"POOL_DETECTOR_ADDRESS\")"
echo ""

View File

@@ -1,167 +0,0 @@
#!/bin/bash
# Production contract deployment script for Arbitrum
#
# Flow: validate env -> generate hardhat.config.js + scripts/deploy.js +
# package.json -> npm install -> hardhat compile -> hardhat deploy to
# Arbitrum One.
set -e
echo "🚀 Deploying PRODUCTION MEV arbitrage contracts to Arbitrum..."
# Check environment
if [ -z "$ARBITRUM_RPC_ENDPOINT" ]; then
    echo "❌ ARBITRUM_RPC_ENDPOINT not set"
    exit 1
fi
if [ -z "$PRIVATE_KEY" ]; then
    echo "❌ PRIVATE_KEY not set for deployment"
    exit 1
fi
# Install dependencies if not present
if [ ! -d "node_modules" ]; then
    echo "📦 Installing dependencies..."
    npm install --save-dev @openzeppelin/contracts hardhat @nomiclabs/hardhat-ethers ethers
fi
# Create hardhat config for deployment. Unquoted EOF: the RPC URL and
# private key are expanded NOW and written verbatim into the file.
# NOTE(review): this persists PRIVATE_KEY in plaintext in hardhat.config.js —
# ensure the file is gitignored and removed after deployment, or switch the
# config to read process.env at runtime instead.
cat > hardhat.config.js << EOF
require("@nomiclabs/hardhat-ethers");
module.exports = {
    solidity: {
        version: "0.8.19",
        settings: {
            optimizer: {
                enabled: true,
                runs: 1000000 // Optimize for gas efficiency
            }
        }
    },
    networks: {
        arbitrum: {
            url: "${ARBITRUM_RPC_ENDPOINT}",
            accounts: ["${PRIVATE_KEY}"]
        }
    }
};
EOF
# Create deployment script. Quoted 'EOF': the JavaScript (including its
# template literals like ${executor.address}) is written literally.
cat > scripts/deploy.js << 'EOF'
const { ethers } = require("hardhat");
async function main() {
    console.log("🏗️ Deploying ProductionArbitrageExecutor...");
    const [deployer] = await ethers.getSigners();
    console.log("Deploying with account:", deployer.address);
    const balance = await deployer.getBalance();
    console.log("Account balance:", ethers.utils.formatEther(balance), "ETH");
    // Deploy ProductionArbitrageExecutor
    const ArbitrageExecutor = await ethers.getContractFactory("ProductionArbitrageExecutor");
    // Estimate gas
    const deploymentGas = await ArbitrageExecutor.signer.estimateGas(
        ArbitrageExecutor.getDeployTransaction()
    );
    console.log("Estimated deployment gas:", deploymentGas.toString());
    const executor = await ArbitrageExecutor.deploy({
        gasLimit: deploymentGas.mul(120).div(100) // 20% buffer
    });
    await executor.deployed();
    console.log("✅ ProductionArbitrageExecutor deployed to:", executor.address);
    // Verify contract is working
    const minProfit = await executor.minProfitThreshold();
    const maxGas = await executor.maxGasPrice();
    console.log("📊 Contract Configuration:");
    console.log("  Min Profit Threshold:", ethers.utils.formatEther(minProfit), "ETH");
    console.log("  Max Gas Price:", ethers.utils.formatUnits(maxGas, "gwei"), "gwei");
    // Update config file
    const fs = require('fs');
    const yaml = require('js-yaml');
    try {
        const configPath = 'config/arbitrum_production.yaml';
        const config = yaml.load(fs.readFileSync(configPath, 'utf8'));
        // Update contract addresses
        config.contracts.arbitrage_executor = executor.address;
        // Write updated config
        fs.writeFileSync(configPath, yaml.dump(config));
        console.log("✅ Updated config file with contract address");
    } catch (error) {
        console.log("⚠️ Could not update config file:", error.message);
        console.log("📝 Manual update required:");
        console.log(`  arbitrage_executor: "${executor.address}"`);
    }
    console.log("🎯 Deployment Summary:");
    console.log("  Contract Address:", executor.address);
    console.log("  Network: Arbitrum One (Chain ID: 42161)");
    console.log("  Gas Used:", deploymentGas.toString());
    console.log("  Status: READY FOR PROFITABLE ARBITRAGE");
}
main()
    .then(() => process.exit(0))
    .catch((error) => {
        console.error("❌ Deployment failed:", error);
        process.exit(1);
    });
EOF
# Create package.json if not exists (unquoted EOF is safe: no $ in body)
if [ ! -f "package.json" ]; then
    cat > package.json << EOF
{
    "name": "mev-bot-contracts",
    "version": "1.0.0",
    "description": "Production MEV arbitrage contracts",
    "scripts": {
        "deploy": "hardhat run scripts/deploy.js --network arbitrum"
    },
    "devDependencies": {
        "@openzeppelin/contracts": "^4.9.0",
        "hardhat": "^2.17.0",
        "@nomiclabs/hardhat-ethers": "^2.2.0",
        "ethers": "^5.7.0",
        "js-yaml": "^4.1.0"
    }
}
EOF
fi
# Install dependencies
echo "📦 Installing contract dependencies..."
npm install
# Compile contracts
echo "🔨 Compiling contracts..."
npx hardhat compile
# Deploy contracts
echo "🚀 Deploying to Arbitrum..."
npx hardhat run scripts/deploy.js --network arbitrum
echo ""
echo "🎉 PRODUCTION CONTRACTS DEPLOYED SUCCESSFULLY!"
echo ""
echo "⚠️ IMPORTANT: Save these addresses securely!"
echo " - Use them in your MEV bot configuration"
echo " - Verify on Arbiscan before using with large amounts"
echo " - Test thoroughly with small amounts first"
echo ""
echo "📈 Next steps:"
echo "1. Update MEV bot config with new contract address"
echo "2. Test arbitrage execution with forked environment"
echo "3. Start with small amounts on mainnet"
echo "4. Monitor profitability and adjust parameters"

View File

@@ -1,198 +0,0 @@
#!/usr/bin/env bash
# Production Docker Deployment Script for MEV Bot (Master Repo)
# This script deploys the MEV bot with Docker auto-restart and systemd on-boot startup
# Optimized for production use with the master repository
#
# Flow: detect container runtime (podman/docker) -> ensure .env exists and
# has required vars -> compose build + up -> optionally install the systemd
# unit -> print operational commands.
set -euo pipefail
# Colors for output
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
PURPLE='\033[0;35m'
CYAN='\033[0;36m'
NC='\033[0m' # No Color
# Script information
echo -e "${PURPLE}🚀 MEV Bot Production Docker Deployment${NC}"
echo -e "${PURPLE}========================================${NC}"
echo ""
# Check if running from project root (go.mod is the marker file)
if [ ! -f "go.mod" ]; then
    echo -e "${RED}❌ Error: This script must be run from the project root directory${NC}"
    exit 1
fi
# Load container runtime detection — the sourced helper is expected to set
# CONTAINER_RUNTIME and COMPOSE_CMD.
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
if [ -f "$SCRIPT_DIR/container-runtime.sh" ]; then
    source "$SCRIPT_DIR/container-runtime.sh" init
else
    echo -e "${RED}❌ Error: container-runtime.sh not found${NC}"
    exit 1
fi
if [[ -z "$CONTAINER_RUNTIME" ]]; then
    echo -e "${RED}❌ Error: No container runtime found (podman or docker required)${NC}"
    exit 1
fi
if [[ -z "$COMPOSE_CMD" ]]; then
    echo -e "${RED}❌ Error: No compose command available${NC}"
    exit 1
fi
echo -e "${GREEN}✅ Container runtime available: $CONTAINER_RUNTIME${NC}"
echo -e "${GREEN}✅ Compose command: $COMPOSE_CMD${NC}"
# Check/Create .env file — fall back to .env.production, then .env.example
# (the latter requires manual editing, so the script stops there).
echo -e "${BLUE}🔧 Checking environment configuration...${NC}"
if [ ! -f ".env" ]; then
    if [ -f ".env.production" ]; then
        echo -e "${YELLOW}⚠️ .env file not found, copying from .env.production${NC}"
        cp .env.production .env
        echo -e "${GREEN}✅ Created .env from .env.production${NC}"
    elif [ -f ".env.example" ]; then
        echo -e "${YELLOW}⚠️ .env file not found, copying from .env.example${NC}"
        cp .env.example .env
        echo -e "${YELLOW}⚠️ Please edit .env file with your configuration before running again${NC}"
        echo -e "${YELLOW} Required: ARBITRUM_RPC_ENDPOINT, ARBITRUM_WS_ENDPOINT${NC}"
        exit 1
    else
        echo -e "${RED}❌ Error: No .env or .env.example file found${NC}"
        exit 1
    fi
else
    echo -e "${GREEN}✅ .env file exists${NC}"
fi
# Verify critical environment variables.
# NOTE(review): sourcing .env under `set -u` will abort if the file itself
# references unset variables — confirm .env contents are plain assignments.
echo -e "${BLUE}🔍 Verifying environment variables...${NC}"
source .env
MISSING_VARS=()
if [ -z "${ARBITRUM_RPC_ENDPOINT:-}" ]; then
    MISSING_VARS+=("ARBITRUM_RPC_ENDPOINT")
fi
if [ -z "${ARBITRUM_WS_ENDPOINT:-}" ]; then
    MISSING_VARS+=("ARBITRUM_WS_ENDPOINT")
fi
if [ ${#MISSING_VARS[@]} -ne 0 ]; then
    echo -e "${RED}❌ Error: Missing required environment variables in .env:${NC}"
    for var in "${MISSING_VARS[@]}"; do
        echo -e "${RED} - $var${NC}"
    done
    exit 1
fi
echo -e "${GREEN}✅ Required environment variables are set${NC}"
# Create required directories
echo -e "${BLUE}📁 Creating required directories...${NC}"
mkdir -p logs config data
# Stop any existing containers (ignore failure: nothing may be running)
echo -e "${BLUE}⏹️ Stopping any existing containers...${NC}"
$COMPOSE_CMD down 2>/dev/null || true
# Build the Docker image
echo -e "${BLUE}🔨 Building Docker image...${NC}"
$COMPOSE_CMD build
# NOTE(review): under `set -e` a failed build exits the script immediately,
# so this $? check is unreachable dead code.
if [ $? -ne 0 ]; then
    echo -e "${RED}❌ Error: Failed to build Docker image${NC}"
    exit 1
fi
echo -e "${GREEN}✅ Docker image built successfully${NC}"
# Start the container
echo -e "${BLUE}🚀 Starting MEV Bot container...${NC}"
$COMPOSE_CMD up -d
# NOTE(review): same dead $? check as above — set -e exits first.
if [ $? -ne 0 ]; then
    echo -e "${RED}❌ Error: Failed to start container${NC}"
    exit 1
fi
echo -e "${GREEN}✅ MEV Bot container started successfully${NC}"
# Wait for container to be healthy
echo -e "${BLUE}⏳ Waiting for container to be ready...${NC}"
sleep 5
# Check container status.
# NOTE(review): scraping the compose JSON with grep/cut is fragile across
# compose versions — consider `jq` or `--format '{{.State}}'` instead.
CONTAINER_STATUS=$($COMPOSE_CMD ps --format json 2>/dev/null | grep -o '"State":"[^"]*"' | cut -d'"' -f4 || echo "unknown")
if [ "$CONTAINER_STATUS" = "running" ]; then
    echo -e "${GREEN}✅ Container is running${NC}"
else
    echo -e "${YELLOW}⚠️ Container status: $CONTAINER_STATUS${NC}"
fi
# Setup systemd service for auto-start on boot — only possible as root;
# otherwise the manual commands are printed instead.
echo ""
echo -e "${BLUE}🔧 Setting up systemd service for auto-start on boot...${NC}"
if [ "$EUID" -ne 0 ]; then
    echo -e "${YELLOW}⚠️ Systemd setup requires root privileges${NC}"
    echo -e "${YELLOW} Run the following commands to enable auto-start on boot:${NC}"
    echo ""
    echo -e "${CYAN} sudo cp scripts/mev-bot.service /etc/systemd/system/${NC}"
    echo -e "${CYAN} sudo systemctl daemon-reload${NC}"
    echo -e "${CYAN} sudo systemctl enable mev-bot.service${NC}"
    echo -e "${CYAN} sudo systemctl start mev-bot.service${NC}"
    echo ""
else
    # Running as root, set up systemd service
    cp scripts/mev-bot.service /etc/systemd/system/
    systemctl daemon-reload
    systemctl enable mev-bot.service
    systemctl start mev-bot.service
    echo -e "${GREEN}✅ Systemd service installed and enabled${NC}"
fi
# Display deployment summary
echo ""
echo -e "${GREEN}╔════════════════════════════════════════════════════════════╗${NC}"
echo -e "${GREEN}║ 🎉 MEV Bot Production Deployment Complete! ║${NC}"
echo -e "${GREEN}╚════════════════════════════════════════════════════════════╝${NC}"
echo ""
echo -e "${BLUE}📊 Container Information:${NC}"
echo -e " Container: mev-bot-production"
echo -e " Status: $CONTAINER_STATUS"
echo -e " Restart Policy: Always (auto-restart on failure)"
echo ""
echo -e "${BLUE}📝 View Logs:${NC}"
echo -e " ${CYAN}$COMPOSE_CMD logs -f mev-bot${NC}"
echo ""
echo -e "${BLUE}🔍 Container Status:${NC}"
echo -e " ${CYAN}$COMPOSE_CMD ps${NC}"
echo ""
echo -e "${BLUE}🔄 Restart Container:${NC}"
echo -e " ${CYAN}$COMPOSE_CMD restart mev-bot${NC}"
echo ""
echo -e "${BLUE}⏹️ Stop Container:${NC}"
echo -e " ${CYAN}$COMPOSE_CMD down${NC}"
echo ""
echo -e "${BLUE}🔧 Systemd Commands (if installed):${NC}"
echo -e " ${CYAN}sudo systemctl status mev-bot${NC} # Check status"
echo -e " ${CYAN}sudo systemctl restart mev-bot${NC} # Restart service"
echo -e " ${CYAN}sudo systemctl stop mev-bot${NC} # Stop service"
echo -e " ${CYAN}journalctl -u mev-bot -f${NC} # View systemd logs"
echo ""
echo -e "${YELLOW}⚠️ Production Deployment Notes:${NC}"
echo -e " - Container will auto-restart on failure (restart: always)"
echo -e " - Install systemd service for auto-start on system boot"
echo -e " - Monitor logs regularly for any issues"
echo -e " - Keep your .env file secure and never commit it"
echo ""
echo -e "${CYAN}🚀 MEV Bot is now running and monitoring Arbitrum for opportunities${NC}"
View File

@@ -1,173 +0,0 @@
#!/usr/bin/env bash

# Production Deployment Script for MEV Bot
# This script deploys the MEV bot to a production environment for live trading
# Supports: Podman (preferred) and Docker

set -euo pipefail # Exit on any error, undefined vars, pipe failures

# Colors for output
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
PURPLE='\033[0;35m'
CYAN='\033[0;36m'
NC='\033[0m' # No Color

# Script information
echo -e "${PURPLE}🚀 MEV Bot Production Deployment Script${NC}"
echo -e "${PURPLE}=====================================${NC}"
echo ""

# Check if running from project root
if [ ! -f "go.mod" ]; then
    echo -e "${RED}❌ Error: This script must be run from the project root directory${NC}"
    exit 1
fi

# Check if .env.production exists
if [ ! -f ".env.production" ]; then
    echo -e "${RED}❌ Error: .env.production file not found${NC}"
    echo -e "${YELLOW}Please create .env.production file with production configuration${NC}"
    exit 1
fi

# Load production environment variables
echo -e "${BLUE}🔧 Loading production environment variables...${NC}"
source .env.production

# Check required environment variables
echo -e "${BLUE}🔍 Checking required environment variables...${NC}"
REQUIRED_VARS=(
    "ARBITRUM_RPC_ENDPOINT"
    "ETHEREUM_PRIVATE_KEY"
    "ETHEREUM_ACCOUNT_ADDRESS"
    "CONTRACT_ARBITRAGE_EXECUTOR"
    "CONTRACT_FLASH_SWAPPER"
    "POSTGRES_PASSWORD"
    "MEV_BOT_ENCRYPTION_KEY"
)

MISSING_VARS=()
for var in "${REQUIRED_VARS[@]}"; do
    # BUG FIX: the original used ${!var}, which aborts with "unbound variable"
    # under `set -u` when a required var is missing entirely — defeating the
    # purpose of this loop. ${!var:-} treats unset and empty the same way.
    if [ -z "${!var:-}" ]; then
        MISSING_VARS+=("$var")
    fi
done

if [ ${#MISSING_VARS[@]} -ne 0 ]; then
    echo -e "${RED}❌ Error: Missing required environment variables:${NC}"
    for var in "${MISSING_VARS[@]}"; do
        echo -e "${RED} - $var${NC}"
    done
    echo -e "${YELLOW}Please set these variables in .env.production${NC}"
    exit 1
fi

echo -e "${GREEN}✅ All required environment variables are set${NC}"

# Create required directories
echo -e "${BLUE}📁 Creating required directories...${NC}"
mkdir -p data/production logs/production config keys

# Build the application.
# BUG FIX: the original ran the command and then tested `$?`; under `set -e`
# a failure exits the script before the check, so the error branch was dead
# code. Testing the command directly in `if` keeps the message reachable.
echo -e "${BLUE}🔨 Building MEV bot application...${NC}"
if GOOS=linux GOARCH=amd64 CGO_ENABLED=0 go build -o bin/mev-bot-production cmd/mev-bot/main.go; then
    echo -e "${GREEN}✅ Application built successfully${NC}"
else
    echo -e "${RED}❌ Error: Failed to build application${NC}"
    exit 1
fi

# Run tests to ensure application is working (same dead-`$?` fix as above).
echo -e "${BLUE}🧪 Running tests...${NC}"
if go test -v ./pkg/... -short; then
    echo -e "${GREEN}✅ Tests passed${NC}"
else
    echo -e "${RED}❌ Error: Tests failed${NC}"
    exit 1
fi

# Load container runtime detection (sets CONTAINER_RUNTIME and COMPOSE_CMD).
source "$(dirname "$0")/container-runtime.sh" init

if [[ -z "$CONTAINER_RUNTIME" ]]; then
    echo -e "${RED}❌ Error: No container runtime found (podman or docker required)${NC}"
    exit 1
fi

if [[ -z "$COMPOSE_CMD" ]]; then
    echo -e "${RED}❌ Error: No compose command available${NC}"
    exit 1
fi

echo -e "${GREEN}✅ Container runtime available: $CONTAINER_RUNTIME${NC}"
echo -e "${GREEN}✅ Compose command: $COMPOSE_CMD${NC}"

# Stop any existing containers (best-effort; ignore failures).
echo -e "${BLUE}⏹️ Stopping any existing production containers...${NC}"
$COMPOSE_CMD -f docker-compose.production.yaml down --remove-orphans 2>/dev/null || true

# Pull latest images
echo -e "${BLUE}⬇️ Pulling latest images...${NC}"
$COMPOSE_CMD -f docker-compose.production.yaml pull

# Build images
echo -e "${BLUE}🔨 Building production images...${NC}"
$COMPOSE_CMD -f docker-compose.production.yaml build

# Start services
echo -e "${BLUE}🚀 Starting production services...${NC}"
$COMPOSE_CMD -f docker-compose.production.yaml up -d

# Wait for services to start
echo -e "${BLUE}⏳ Waiting for services to start...${NC}"
sleep 30

# Check service status
echo -e "${BLUE}🔍 Checking service status...${NC}"
SERVICES_RUNNING=true
SERVICES=("mev-bot-arbitrum" "mev-bot-redis" "mev-bot-postgres" "mev-bot-prometheus" "mev-bot-grafana" "mev-bot-fluentd")

for service in "${SERVICES[@]}"; do
    if $CONTAINER_RUNTIME ps | grep -q "$service"; then
        echo -e "${GREEN}$service is running${NC}"
    else
        echo -e "${RED}$service is not running${NC}"
        SERVICES_RUNNING=false
    fi
done

if [ "$SERVICES_RUNNING" = true ]; then
    echo -e "${GREEN}🎉 All production services started successfully!${NC}"
    echo ""
    echo -e "${BLUE}📊 Monitoring endpoints:${NC}"
    echo -e " - MEV Bot Metrics: http://localhost:${METRICS_PORT:-9090}/metrics"
    echo -e " - MEV Bot Health: http://localhost:${HEALTH_PORT:-8080}/health"
    echo -e " - Prometheus: http://localhost:${PROMETHEUS_PORT:-9091}"
    echo -e " - Grafana: http://localhost:${GRAFANA_PORT:-3000}"
    echo ""
    echo -e "${BLUE}📝 Logs:${NC}"
    echo -e " - MEV Bot: $CONTAINER_RUNTIME logs mev-bot-arbitrum"
    echo -e " - Redis: $CONTAINER_RUNTIME logs mev-bot-redis"
    echo -e " - PostgreSQL: $CONTAINER_RUNTIME logs mev-bot-postgres"
    echo ""
    echo -e "${YELLOW}⚠️ Remember to monitor the production environment closely during initial deployment${NC}"
    echo -e "${YELLOW}⚠️ Start with small position sizes to validate everything works correctly${NC}"
else
    echo -e "${RED}❌ Some production services failed to start${NC}"
    echo -e "${YELLOW}Check logs with: $COMPOSE_CMD -f docker-compose.production.yaml logs${NC}"
    exit 1
fi

echo ""
echo -e "${GREEN}✅ Production deployment completed successfully!${NC}"
echo -e "${CYAN}🚀 MEV Bot is now running in production mode${NC}"
echo -e "${CYAN} Monitoring Arbitrum for profitable opportunities...${NC}"

View File

@@ -1,164 +0,0 @@
#!/bin/bash

# Staging Deployment Script for MEV Bot
# This script deploys the MEV bot to a staging environment for testing

set -e # Exit on any error

# Colors for output
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
NC='\033[0m' # No Color

# Script information
echo -e "${BLUE}🚀 MEV Bot Staging Deployment Script${NC}"
echo -e "${BLUE}====================================${NC}"
echo ""

# Check if running from project root
if [ ! -f "go.mod" ]; then
    echo -e "${RED}❌ Error: This script must be run from the project root directory${NC}"
    exit 1
fi

# Check if .env.staging exists
if [ ! -f ".env.staging" ]; then
    echo -e "${RED}❌ Error: .env.staging file not found${NC}"
    echo -e "${YELLOW}Please create .env.staging file with staging configuration${NC}"
    exit 1
fi

# Load staging environment variables
echo -e "${BLUE}🔧 Loading staging environment variables...${NC}"
source .env.staging

# Check required environment variables
echo -e "${BLUE}🔍 Checking required environment variables...${NC}"
REQUIRED_VARS=(
    "ARBITRUM_RPC_ENDPOINT"
    "ETHEREUM_PRIVATE_KEY"
    "ETHEREUM_ACCOUNT_ADDRESS"
    "CONTRACT_ARBITRAGE_EXECUTOR"
    "CONTRACT_FLASH_SWAPPER"
    "POSTGRES_PASSWORD"
)

MISSING_VARS=()
for var in "${REQUIRED_VARS[@]}"; do
    # ${!var:-} tolerates vars that are unset (not merely empty); keeps
    # parity with the production deploy script, which runs under `set -u`.
    if [ -z "${!var:-}" ]; then
        MISSING_VARS+=("$var")
    fi
done

if [ ${#MISSING_VARS[@]} -ne 0 ]; then
    echo -e "${RED}❌ Error: Missing required environment variables:${NC}"
    for var in "${MISSING_VARS[@]}"; do
        echo -e "${RED} - $var${NC}"
    done
    echo -e "${YELLOW}Please set these variables in .env.staging${NC}"
    exit 1
fi

echo -e "${GREEN}✅ All required environment variables are set${NC}"

# Create required directories
echo -e "${BLUE}📁 Creating required directories...${NC}"
mkdir -p data/staging logs/staging config keys

# Build the application.
# BUG FIX: the original ran the command and then tested `$?`; under `set -e`
# a failure exits the script before the check, so the error branch was dead
# code. Testing the command directly in `if` keeps the message reachable.
echo -e "${BLUE}🔨 Building MEV bot application...${NC}"
if go build -o bin/mev-bot-staging cmd/mev-bot/main.go; then
    echo -e "${GREEN}✅ Application built successfully${NC}"
else
    echo -e "${RED}❌ Error: Failed to build application${NC}"
    exit 1
fi

# Run tests to ensure application is working (same dead-`$?` fix as above).
echo -e "${BLUE}🧪 Running tests...${NC}"
if go test -v ./pkg/... -short; then
    echo -e "${GREEN}✅ Tests passed${NC}"
else
    echo -e "${RED}❌ Error: Tests failed${NC}"
    exit 1
fi

# Check if Docker is available
if ! command -v docker &> /dev/null; then
    echo -e "${RED}❌ Error: Docker is not installed or not in PATH${NC}"
    exit 1
fi

if ! command -v docker-compose &> /dev/null; then
    echo -e "${RED}❌ Error: docker-compose is not installed or not in PATH${NC}"
    exit 1
fi

echo -e "${GREEN}✅ Docker and docker-compose are available${NC}"

# Stop any existing containers (best-effort; ignore failures).
echo -e "${BLUE}⏹️ Stopping any existing staging containers...${NC}"
docker-compose -f docker-compose.staging.yaml down --remove-orphans 2>/dev/null || true

# Pull latest images
echo -e "${BLUE}⬇️ Pulling latest images...${NC}"
docker-compose -f docker-compose.staging.yaml pull

# Build images
echo -e "${BLUE}🔨 Building staging images...${NC}"
docker-compose -f docker-compose.staging.yaml build

# Start services
echo -e "${BLUE}🚀 Starting staging services...${NC}"
docker-compose -f docker-compose.staging.yaml up -d

# Wait for services to start
echo -e "${BLUE}⏳ Waiting for services to start...${NC}"
sleep 30

# Check service status
echo -e "${BLUE}🔍 Checking service status...${NC}"
SERVICES_RUNNING=true
SERVICES=("mev-bot-arbitrum-staging" "mev-bot-redis-staging" "mev-bot-postgres-staging" "mev-bot-prometheus-staging" "mev-bot-grafana-staging" "mev-bot-fluentd-staging")

for service in "${SERVICES[@]}"; do
    if docker ps | grep -q "$service"; then
        echo -e "${GREEN}$service is running${NC}"
    else
        echo -e "${RED}$service is not running${NC}"
        SERVICES_RUNNING=false
    fi
done

if [ "$SERVICES_RUNNING" = true ]; then
    echo -e "${GREEN}🎉 All staging services started successfully!${NC}"
    echo ""
    echo -e "${BLUE}📊 Monitoring endpoints:${NC}"
    echo -e " - MEV Bot Metrics: http://localhost:${METRICS_PORT:-9091}/metrics"
    echo -e " - MEV Bot Health: http://localhost:${HEALTH_PORT:-8081}/health"
    echo -e " - Prometheus: http://localhost:${PROMETHEUS_PORT:-9092}"
    echo -e " - Grafana: http://localhost:${GRAFANA_PORT:-3001}"
    echo ""
    echo -e "${BLUE}📝 Logs:${NC}"
    echo -e " - MEV Bot: docker logs mev-bot-arbitrum-staging"
    echo -e " - Redis: docker logs mev-bot-redis-staging"
    echo -e " - PostgreSQL: docker logs mev-bot-postgres-staging"
    echo ""
    echo -e "${YELLOW}⚠️ Remember to monitor the staging environment closely during testing${NC}"
    echo -e "${YELLOW}⚠️ Staging uses real funds but with reduced position sizes for safety${NC}"
else
    echo -e "${RED}❌ Some staging services failed to start${NC}"
    echo -e "${YELLOW}Check logs with: docker-compose -f docker-compose.staging.yaml logs${NC}"
    exit 1
fi

echo ""
echo -e "${GREEN}✅ Staging deployment completed successfully!${NC}"

View File

@@ -1,68 +0,0 @@
# Deprecated Scripts
These scripts have been moved here because their functionality is now handled by more comprehensive tools.
## Log Management Scripts (Superseded by log-manager.sh)
All of these scripts have been replaced by `scripts/log-manager.sh`, which provides:
- Real-time analysis with health scoring
- Performance monitoring with MEV-specific metrics
- Corruption detection and integrity validation
- Multi-channel alerting (email, Slack)
- Background monitoring daemon
- Operations dashboard generation
- Intelligent rotation (size and time-based)
- Advanced archiving with metadata
### Deprecated Scripts
1. **archive-logs.sh** → Use `./scripts/log-manager.sh archive`
2. **quick-archive.sh** → Use `./scripts/log-manager.sh full`
3. **view-latest-archive.sh** → Use `./scripts/log-manager.sh status`
4. **rotate-logs.sh** → Use `./scripts/log-manager.sh rotate`
5. **setup-log-rotation.sh** → Use `./scripts/log-manager.sh init`
## Migration Guide
**Instead of:**
```bash
./scripts/archive-logs.sh
```
**Use:**
```bash
./scripts/log-manager.sh archive
```
**Instead of:**
```bash
./scripts/quick-archive.sh --clear-logs
```
**Use:**
```bash
./scripts/log-manager.sh full
```
## Why These Were Deprecated
The individual log management scripts were created before the comprehensive `log-manager.sh` system was implemented. The new system provides:
- **Unified Interface**: Single command with multiple subcommands
- **Production Grade**: Health monitoring, alerting, and metrics
- **Better Maintenance**: One script to maintain instead of five
- **More Features**: Dashboard generation, daemon mode, performance tracking
- **Safer Operations**: Validation and corruption detection
## Can I Still Use These?
Yes, these scripts still work and are kept for backwards compatibility. However, it's recommended to migrate to `log-manager.sh` for better functionality and ongoing support.
## When Will These Be Removed?
These scripts will be kept for at least one major version release to allow for migration. They may be removed in a future version once all users have migrated to `log-manager.sh`.
---
**See:** `docs/SCRIPT_ANALYSIS_REPORT.md` for the full analysis

View File

@@ -1,334 +0,0 @@
#!/bin/bash
# MEV Bot Log Archiving Script
# Automatically archives and compresses logs with timestamp and metadata
set -euo pipefail
# Configuration
# NOTE(review): PROJECT_ROOT is hard-coded to one machine's home directory;
# the script only works as-is on that host — confirm before reusing elsewhere.
PROJECT_ROOT="/home/administrator/projects/mev-beta"
LOGS_DIR="$PROJECT_ROOT/logs"
ARCHIVE_DIR="$PROJECT_ROOT/logs/archives"
TIMESTAMP=$(date +"%Y%m%d_%H%M%S") # stamped once at startup, e.g. 20250101_120000
ARCHIVE_NAME="mev_logs_${TIMESTAMP}"
RETENTION_DAYS=30 # archives older than this are purged by cleanup_old_archives
# Colors for output
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
NC='\033[0m' # No Color
# Logging function
# Logging helpers. log() writes timestamped informational lines to stdout;
# warn() and error() write to stderr so diagnostics never pollute stdout
# captured by callers (the originals sent everything to stdout).
log() {
    echo -e "${GREEN}[$(date +'%Y-%m-%d %H:%M:%S')]${NC} $1"
}

warn() {
    echo -e "${YELLOW}[$(date +'%Y-%m-%d %H:%M:%S')] WARNING:${NC} $1" >&2
}

error() {
    echo -e "${RED}[$(date +'%Y-%m-%d %H:%M:%S')] ERROR:${NC} $1" >&2
}
# Create archive directory if it doesn't exist
# Ensure the archive directory exists, announcing creation only when it was
# actually missing.
create_archive_dir() {
    [[ -d "$ARCHIVE_DIR" ]] && return 0
    log "Creating archive directory: $ARCHIVE_DIR"
    mkdir -p "$ARCHIVE_DIR"
}
# Generate archive metadata
# Generate archive metadata
# Writes archive_metadata.json into the staging directory given as $1.
# The heredoc delimiter is unquoted, so every $VAR and $(...) below is
# expanded when the file is written (git/find/stat results are baked in).
generate_metadata() {
local archive_path="$1"
local metadata_file="$archive_path/archive_metadata.json"
log "Generating archive metadata..."
# NOTE(review): this JSON is hand-assembled; a log filename containing a
# double quote would produce invalid JSON — confirm names stay quote-free.
cat > "$metadata_file" << EOF
{
"archive_info": {
"timestamp": "$(date -Iseconds)",
"archive_name": "$ARCHIVE_NAME",
"created_by": "$(whoami)",
"hostname": "$(hostname)",
"mev_bot_version": "$(git rev-parse HEAD 2>/dev/null || echo 'unknown')",
"git_branch": "$(git rev-parse --abbrev-ref HEAD 2>/dev/null || echo 'unknown')"
},
"system_info": {
"os": "$(uname -s)",
"kernel": "$(uname -r)",
"architecture": "$(uname -m)",
"uptime": "$(uptime -p 2>/dev/null || echo 'unknown')"
},
"log_summary": {
"total_files": $(find "$LOGS_DIR" -type f -name "*.log" | wc -l),
"total_size_bytes": $(find "$LOGS_DIR" -type f -name "*.log" -exec stat -c%s {} + | awk '{sum+=$1} END {print sum+0}'),
"date_range": {
"oldest_file": "$(find "$LOGS_DIR" -type f -name "*.log" -printf '%T+ %p\n' | sort | head -1 | cut -d' ' -f1 || echo 'none')",
"newest_file": "$(find "$LOGS_DIR" -type f -name "*.log" -printf '%T+ %p\n' | sort | tail -1 | cut -d' ' -f1 || echo 'none')"
}
},
"archive_contents": [
$(find "$LOGS_DIR" -type f -name "*.log" -printf ' "%f",\n' | sed '$s/,$//')
]
}
EOF
}
# Archive logs with compression
# Stage all log files (plus diagnostics/debug/test/performance/audit
# subdirectories) into a temp dir, add metadata, tar+gzip, and refresh the
# latest_archive.tar.gz symlink.
# Globals: LOGS_DIR, ARCHIVE_DIR, ARCHIVE_NAME (read)
archive_logs() {
local temp_archive_dir="$ARCHIVE_DIR/$ARCHIVE_NAME"
log "Creating temporary archive directory: $temp_archive_dir"
mkdir -p "$temp_archive_dir"
# Copy all log files
log "Copying log files..."
if ls "$LOGS_DIR"/*.log 1> /dev/null 2>&1; then
cp "$LOGS_DIR"/*.log "$temp_archive_dir/"
log "Copied $(ls "$LOGS_DIR"/*.log | wc -l) log files"
else
warn "No .log files found in $LOGS_DIR"
fi
# Copy diagnostic logs if they exist
if [[ -d "$LOGS_DIR/diagnostics" ]]; then
log "Copying diagnostics directory..."
cp -r "$LOGS_DIR/diagnostics" "$temp_archive_dir/"
fi
# Copy any other relevant log directories
for subdir in debug test performance audit; do
if [[ -d "$LOGS_DIR/$subdir" ]]; then
log "Copying $subdir directory..."
cp -r "$LOGS_DIR/$subdir" "$temp_archive_dir/"
fi
done
# Generate metadata
generate_metadata "$temp_archive_dir"
# Create compressed archive
log "Creating compressed archive..."
# NOTE(review): this cd changes the cwd for the REMAINDER of the script;
# the tar/ln/stat calls below and later functions rely on it.
cd "$ARCHIVE_DIR"
tar -czf "${ARCHIVE_NAME}.tar.gz" "$ARCHIVE_NAME"
# Calculate archive size
local archive_size=$(stat -c%s "${ARCHIVE_NAME}.tar.gz" | numfmt --to=iec)
log "Archive created: ${ARCHIVE_NAME}.tar.gz (${archive_size})"
# Remove temporary directory
rm -rf "$temp_archive_dir"
# Create symlink to latest archive
ln -sf "${ARCHIVE_NAME}.tar.gz" "latest_archive.tar.gz"
log "Created symlink: latest_archive.tar.gz"
}
# Generate archive report
# Writes a human-readable summary file next to the archive. Lists the tarball
# several times, so cost scales with archive size, but it runs only once.
generate_report() {
local report_file="$ARCHIVE_DIR/archive_report_${TIMESTAMP}.txt"
log "Generating archive report..."
cat > "$report_file" << EOF
MEV Bot Log Archive Report
==========================
Generated: $(date)
Archive: ${ARCHIVE_NAME}.tar.gz
System Information:
- Hostname: $(hostname)
- User: $(whoami)
- OS: $(uname -s) $(uname -r)
- Architecture: $(uname -m)
Archive Contents:
$(tar -tzf "$ARCHIVE_DIR/${ARCHIVE_NAME}.tar.gz" | head -20)
$([ $(tar -tzf "$ARCHIVE_DIR/${ARCHIVE_NAME}.tar.gz" | wc -l) -gt 20 ] && echo "... and $(($(tar -tzf "$ARCHIVE_DIR/${ARCHIVE_NAME}.tar.gz" | wc -l) - 20)) more files")
Archive Statistics:
- Compressed size: $(stat -c%s "$ARCHIVE_DIR/${ARCHIVE_NAME}.tar.gz" | numfmt --to=iec)
- Files archived: $(tar -tzf "$ARCHIVE_DIR/${ARCHIVE_NAME}.tar.gz" | grep -c '\.log$' || echo '0')
Git Information:
- Branch: $(git rev-parse --abbrev-ref HEAD 2>/dev/null || echo 'unknown')
- Commit: $(git rev-parse HEAD 2>/dev/null || echo 'unknown')
- Status: $(git status --porcelain 2>/dev/null | wc -l) uncommitted changes
Recent Log Activity:
$(tail -10 "$LOGS_DIR/mev_bot.log" 2>/dev/null | head -5 || echo "No recent activity found")
Archive Location: $ARCHIVE_DIR/${ARCHIVE_NAME}.tar.gz
EOF
log "Report generated: $report_file"
}
# Clean old archives based on retention policy
# Delete archives and report files older than RETENTION_DAYS days.
# Globals: ARCHIVE_DIR (read), RETENTION_DAYS (read)
cleanup_old_archives() {
    log "Cleaning up archives older than $RETENTION_DAYS days..."

    local deleted_count=0
    while IFS= read -r -d '' archive; do
        if [[ -f "$archive" ]]; then
            rm -- "$archive"
            # BUG FIX: the original used ((deleted_count++)), whose post-
            # increment expression evaluates to 0 on the first iteration and
            # therefore returns status 1 — aborting the whole script under
            # `set -e`. A plain arithmetic assignment always succeeds.
            deleted_count=$((deleted_count + 1))
            log "Deleted old archive: $(basename "$archive")"
        fi
    done < <(find "$ARCHIVE_DIR" -name "mev_logs_*.tar.gz" -mtime +"$RETENTION_DAYS" -print0 2>/dev/null)

    # Also clean old report files (best-effort).
    find "$ARCHIVE_DIR" -name "archive_report_*.txt" -mtime +"$RETENTION_DAYS" -delete 2>/dev/null || true

    if [[ $deleted_count -gt 0 ]]; then
        log "Cleaned up $deleted_count old archives"
    else
        log "No old archives to clean up"
    fi
}
# Clear current logs (optional)
# Clear current log files — only acts when the script was invoked with
# --clear-logs; otherwise a no-op.
clear_current_logs() {
if [[ "${1:-}" == "--clear-logs" ]]; then
log "Clearing current log files..."
# Backup current running processes
local running_processes=$(ps aux | grep mev-bot | grep -v grep | wc -l)
if [[ $running_processes -gt 0 ]]; then
warn "MEV bot processes are still running. Stopping them first..."
# NOTE(review): pkill -f matches "mev-bot" anywhere in ANY process
# command line, so unrelated processes could be killed — confirm.
pkill -f mev-bot || true
sleep 2
fi
# Clear main log files but keep directory structure
if ls "$LOGS_DIR"/*.log 1> /dev/null 2>&1; then
rm "$LOGS_DIR"/*.log
log "Cleared current log files"
fi
# Clear diagnostic logs
if [[ -d "$LOGS_DIR/diagnostics" ]]; then
rm -rf "$LOGS_DIR/diagnostics"/*
log "Cleared diagnostics directory"
fi
# Create fresh main log file
touch "$LOGS_DIR/mev_bot.log"
log "Created fresh log file"
fi
}
# Display archive information
# Prints archive inventory and exits 0 when invoked with --info; otherwise a
# no-op that lets main() continue.
show_archive_info() {
if [[ "${1:-}" == "--info" ]]; then
echo -e "${BLUE}Archive Information:${NC}"
echo "Archive directory: $ARCHIVE_DIR"
echo "Retention policy: $RETENTION_DAYS days"
echo
if [[ -d "$ARCHIVE_DIR" ]]; then
echo -e "${BLUE}Existing archives:${NC}"
ls -lah "$ARCHIVE_DIR"/*.tar.gz 2>/dev/null | while read -r line; do
echo " $line"
done
echo
echo -e "${BLUE}Total archive space used:${NC}"
du -sh "$ARCHIVE_DIR" 2>/dev/null || echo " Archive directory not found"
else
echo "No archives found (directory doesn't exist yet)"
fi
exit 0
fi
}
# Display help
# Prints usage and exits 0 when invoked with --help/-h; otherwise a no-op.
# The heredoc delimiter is unquoted, so $0, $ARCHIVE_DIR and $RETENTION_DAYS
# are expanded into the help text.
show_help() {
if [[ "${1:-}" == "--help" || "${1:-}" == "-h" ]]; then
cat << EOF
MEV Bot Log Archiving Script
USAGE:
$0 [OPTIONS]
OPTIONS:
--clear-logs Archive logs and then clear current log files
--info Show information about existing archives
--help, -h Show this help message
DESCRIPTION:
Archives all MEV bot log files with timestamp, compression, and metadata.
Creates organized archives in logs/archives/ directory with automatic cleanup.
EXAMPLES:
$0 # Archive logs (keep current logs)
$0 --clear-logs # Archive and clear current logs
$0 --info # Show archive information
ARCHIVE LOCATION:
$ARCHIVE_DIR
RETENTION POLICY:
Archives older than $RETENTION_DAYS days are automatically deleted.
EOF
exit 0
fi
}
# Main execution
# Orchestrates the full archive run: flag handling (help/info exit early),
# archiving, report generation, retention cleanup, optional log clearing.
main() {
log "Starting MEV Bot log archiving process..."
# Check if we're in the right directory
if [[ ! -d "$PROJECT_ROOT" ]]; then
error "Project root not found: $PROJECT_ROOT"
exit 1
fi
cd "$PROJECT_ROOT"
# Check for help or info flags
show_help "$@"
show_archive_info "$@"
# Check if logs directory exists
if [[ ! -d "$LOGS_DIR" ]]; then
error "Logs directory not found: $LOGS_DIR"
exit 1
fi
# Create archive directory
create_archive_dir
# Archive logs
archive_logs
# Generate report
generate_report
# Clean up old archives
cleanup_old_archives
# Clear current logs if requested
clear_current_logs "$@"
log "Archive process completed successfully!"
log "Archive location: $ARCHIVE_DIR/${ARCHIVE_NAME}.tar.gz"
# Show final summary
echo
echo -e "${GREEN}=== ARCHIVE SUMMARY ===${NC}"
echo "Archive: ${ARCHIVE_NAME}.tar.gz"
echo "Location: $ARCHIVE_DIR"
echo "Size: $(stat -c%s "$ARCHIVE_DIR/${ARCHIVE_NAME}.tar.gz" | numfmt --to=iec)"
echo "Files: $(tar -tzf "$ARCHIVE_DIR/${ARCHIVE_NAME}.tar.gz" | grep -c '\.log$' || echo '0') log files"
echo "Latest archive symlink: $ARCHIVE_DIR/latest_archive.tar.gz"
}
# Run main function with all arguments
main "$@"

View File

@@ -1,15 +0,0 @@
#!/bin/bash

# Quick Archive - Archive logs and clear current logs for a fresh start
# Usage: ./scripts/quick-archive.sh
#
# Exits non-zero (without claiming success) if the underlying archive
# script fails.

set -euo pipefail

echo "🗂️ Quick Archive: Creating archive and clearing logs for fresh start..."
echo

# Archive with the clear-logs option.
# BUG FIX: the original ignored this exit status and printed the success
# banner even when archiving failed; `set -e` now aborts on failure.
./scripts/archive-logs.sh --clear-logs

echo
echo "✅ Quick archive complete! Ready for fresh MEV bot run."
echo "📁 Archived logs location: logs/archives/latest_archive.tar.gz"
echo "🆕 Fresh log files created and ready"

View File

@@ -1,37 +0,0 @@
#!/bin/bash
# Log rotation script for MEV Bot
# Configuration
LOG_DIR="/home/administrator/projects/mev-beta/logs" # NOTE(review): hard-coded host path
MAX_SIZE_MB=100 # rotate event logs larger than this many megabytes
RETENTION_DAYS=30 # delete archived .gz files older than this many days
# Rotate event logs when they exceed MAX_SIZE_MB
# Compress event logs larger than MAX_SIZE_MB megabytes and move the
# resulting .gz files into $LOG_DIR/archived.
# Globals: LOG_DIR (read), MAX_SIZE_MB (read)
rotate_large_logs() {
    echo "Checking for large logs to rotate..."

    # The destination must exist or the mv below fails.
    mkdir -p "$LOG_DIR/archived"

    # BUG FIX: the original used a newline-delimited `while read logfile`
    # (no -r, no IFS=), which mangles filenames with backslashes or
    # leading/trailing whitespace. NUL-delimited find + `IFS= read -r -d ''`
    # handles every legal filename.
    find "$LOG_DIR/events" -name "*.jsonl" -size +"${MAX_SIZE_MB}"M -print0 |
        while IFS= read -r -d '' logfile; do
            echo "Rotating large log: $logfile"
            # Compress the log file
            gzip "$logfile"
            # Move to archived directory
            mv "${logfile}.gz" "$LOG_DIR/archived/"
        done
}
# Clean up old archived logs
# Purge compressed archives in $LOG_DIR/archived that are past the
# retention window.
cleanup_old_logs() {
    printf 'Cleaning up archived logs older than %s days...\n' "$RETENTION_DAYS"
    find "$LOG_DIR/archived" -name '*.gz' -mtime +$RETENTION_DAYS -delete
}
# Main execution
# Rotate oversized event logs first, then purge expired archives.
echo "Starting log rotation for MEV Bot..."
rotate_large_logs
cleanup_old_logs
echo "Log rotation completed."

View File

@@ -1,24 +0,0 @@
#!/bin/bash

# Install a daily cron job (2 AM) that runs the MEV bot log rotation script.
#
# Equivalent crontab entry:
#   0 2 * * * /home/administrator/projects/mev-beta/scripts/rotate-logs.sh

set -euo pipefail

echo "Setting up daily log rotation for MEV Bot..."

# SECURITY FIX: the original wrote to the fixed, predictable path
# /tmp/mev_cron (symlink/clobber hazard on shared hosts) and leaked the file
# on failure. Use mktemp and guarantee cleanup on every exit path.
tmpfile=$(mktemp) || { echo "mktemp failed" >&2; exit 1; }
trap 'rm -f -- "$tmpfile"' EXIT

# Capture the current crontab; a user with no crontab yet is not an error
# (crontab -l exits non-zero in that case, leaving the file empty).
crontab -l > "$tmpfile" 2>/dev/null || true

# Check if our job is already in the crontab
if ! grep -q "rotate-logs.sh" "$tmpfile"; then
    # Add the log rotation job to run daily at 2 AM
    echo "0 2 * * * cd /home/administrator/projects/mev-beta && /home/administrator/projects/mev-beta/scripts/rotate-logs.sh" >> "$tmpfile"
    # Install the new crontab
    crontab "$tmpfile"
    echo "Log rotation job added to crontab. Will run daily at 2 AM."
else
    echo "Log rotation job already exists in crontab."
fi

View File

@@ -1,55 +0,0 @@
#!/bin/bash

# View Latest Archive - Extract and browse the most recent log archive
# Usage: ./scripts/view-latest-archive.sh [pattern]
#   pattern - optional grep pattern to search for inside the extracted logs

set -euo pipefail

ARCHIVE_DIR="logs/archives"
TEMP_DIR="/tmp/mev_archive_view"
PATTERN="${1:-}"

if [[ ! -f "$ARCHIVE_DIR/latest_archive.tar.gz" ]]; then
    echo "❌ No archive found. Run ./scripts/archive-logs.sh first."
    exit 1
fi

# BUG FIX: resolve the archive to an absolute path BEFORE changing directory.
# The original used "$OLDPWD/$ARCHIVE_DIR/...", which breaks when the script
# is not launched directly from the project root.
ARCHIVE_PATH="$(pwd)/$ARCHIVE_DIR/latest_archive.tar.gz"

echo "📂 Extracting latest archive for viewing..."
rm -rf "$TEMP_DIR"
mkdir -p "$TEMP_DIR"
cd "$TEMP_DIR"

# Extract archive
tar -xzf "$ARCHIVE_PATH"

# Pick the extracted top-level directory by globbing instead of parsing `ls`
# output (the original `ls | head -1` mangles unusual names).
for entry in */; do
    ARCHIVE_NAME="${entry%/}"
    break
done
cd "$ARCHIVE_NAME"

echo "✅ Archive extracted to: $TEMP_DIR/$ARCHIVE_NAME"
echo

if [[ -n "$PATTERN" ]]; then
    echo "🔍 Searching for pattern: $PATTERN"
    echo "================================================"
    # `|| true`: a pattern with zero matches is a valid result, not an error
    # (grep exits 1 on no match, which would trip set -e/pipefail).
    grep -r "$PATTERN" . --color=always | head -20 || true
    echo
    echo "📊 Pattern summary:"
    { grep -r "$PATTERN" . || true; } | wc -l | xargs echo "Total matches:"
else
    echo "📋 Archive contents:"
    ls -la
    echo
    echo "📊 Archive summary:"
    echo "- Log files: $(ls *.log 2>/dev/null | wc -l)"
    echo "- Total size: $(du -sh . | cut -f1)"

    if [[ -f "archive_metadata.json" ]]; then
        echo
        echo "📈 Metadata excerpt:"
        head -20 archive_metadata.json
    fi
fi

echo
echo "💡 Tips:"
echo " View specific log: cat $TEMP_DIR/$ARCHIVE_NAME/mev_bot.log"
echo " Search pattern: $0 'DIRECT PARSING'"
echo " Cleanup: rm -rf $TEMP_DIR"

View File

@@ -1,273 +0,0 @@
#!/bin/bash
# MEV Bot Development Environment Manager
# Supports branch selection and podman-in-podman development
set -e
# Colors for output
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
NC='\033[0m' # No Color
# Script directory
# Resolve the directory containing this script so it can be invoked from any
# cwd; the project root is assumed to be its parent directory.
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="$(dirname "$SCRIPT_DIR")"
# Default values
DEFAULT_BRANCH="master-dev" # branch used when none is given on the command line
COMPOSE_FILE="docker-compose.dev.yml"
# Function to print colored messages
# Colored message helpers. Info/success go to stdout; warnings and errors go
# to stderr so diagnostics do not corrupt output piped from this script
# (the originals wrote everything to stdout).
print_info() {
    echo -e "${BLUE}${NC} $1"
}

print_success() {
    echo -e "${GREEN}${NC} $1"
}

print_warning() {
    echo -e "${YELLOW}${NC} $1" >&2
}

print_error() {
    echo -e "${RED}${NC} $1" >&2
}
# Function to show usage
# Print CLI help to stdout. The heredoc delimiter is unquoted, so the color
# variables and $0 are expanded at print time.
usage() {
cat << EOF
${BLUE}MEV Bot Development Environment Manager${NC}
Usage: $0 [COMMAND] [OPTIONS]
Commands:
start [BRANCH] Start development environment with specified branch (default: master-dev)
stop Stop development environment
restart [BRANCH] Restart development environment with specified branch
rebuild [BRANCH] Rebuild and restart with specified branch
logs [OPTIONS] Show logs (pass options like -f for follow)
shell Open shell in running container
status Show container status
clean Clean all containers and volumes
branches List available git branches
switch BRANCH Switch to a different branch and rebuild
Options:
-h, --help Show this help message
Examples:
$0 start master-dev # Start with master-dev branch
$0 start fix-critical-arbitrage-bugs # Start with feature branch
$0 rebuild master # Rebuild with master branch
$0 logs -f # Follow logs
$0 switch feat-new-feature # Switch to new feature branch
Environment Variables:
GIT_BRANCH Git branch to use (default: master-dev)
LOG_LEVEL Logging level (default: debug)
METRICS_ENABLED Enable metrics (default: true)
EOF
}
# Function to check dependencies
# Abort with installation guidance unless podman-compose is on PATH.
check_dependencies() {
    command -v podman-compose > /dev/null 2>&1 && return 0
    print_error "podman-compose is not installed"
    print_info "Install with: pip3 install podman-compose"
    exit 1
}
# Function to list available branches
# List all local and remote git branches (HEAD pointer line filtered out;
# the sed replaces the 4-character branch-marker prefix with a space).
list_branches() {
print_info "Available branches:"
cd "$PROJECT_ROOT"
git branch -a | grep -v HEAD | sed 's/^..../ /'
}
# Function to start development environment
# Build the dev image for the requested branch (default $DEFAULT_BRANCH)
# and start the compose stack in the background.
start_dev() {
local branch="${1:-$DEFAULT_BRANCH}"
print_info "Starting MEV Bot development environment on branch: $branch"
cd "$PROJECT_ROOT"
# Export branch for docker-compose
export GIT_BRANCH="$branch"
# Build and start
print_info "Building image for branch: $branch"
podman-compose -f "$COMPOSE_FILE" build --build-arg GIT_BRANCH="$branch"
print_info "Starting container..."
podman-compose -f "$COMPOSE_FILE" up -d
print_success "Development environment started on branch: $branch"
print_info "View logs with: $0 logs -f"
print_info "Access shell with: $0 shell"
}
# Function to stop development environment
# Bring the compose stack down; containers are removed, volumes are kept.
stop_dev() {
print_info "Stopping development environment..."
cd "$PROJECT_ROOT"
podman-compose -f "$COMPOSE_FILE" down
print_success "Development environment stopped"
}
# Function to restart development environment
# Stop and then start the dev environment on the requested branch
# (defaults to $DEFAULT_BRANCH).
restart_dev() {
    local target="${1:-$DEFAULT_BRANCH}"
    stop_dev
    start_dev "$target"
}
# Function to rebuild development environment
# Tear down the stack, rebuild the image from scratch (--no-cache) for the
# requested branch (default $DEFAULT_BRANCH), and start it again.
rebuild_dev() {
local branch="${1:-$DEFAULT_BRANCH}"
print_info "Rebuilding development environment on branch: $branch"
cd "$PROJECT_ROOT"
export GIT_BRANCH="$branch"
# Stop existing containers
podman-compose -f "$COMPOSE_FILE" down
# Rebuild with no cache
print_info "Building image (no cache) for branch: $branch"
podman-compose -f "$COMPOSE_FILE" build --no-cache --build-arg GIT_BRANCH="$branch"
# Start
print_info "Starting container..."
podman-compose -f "$COMPOSE_FILE" up -d
print_success "Development environment rebuilt and started on branch: $branch"
}
# Function to show logs
# Pass-through to `podman-compose logs`; extra CLI options (e.g. -f to
# follow) are forwarded verbatim.
show_logs() {
cd "$PROJECT_ROOT"
podman-compose -f "$COMPOSE_FILE" logs "$@"
}
# Function to open shell
# Open an interactive shell inside the first running compose container,
# falling back to /bin/sh when bash is unavailable in the image.
open_shell() {
    cd "$PROJECT_ROOT"

    # Declare and assign separately so the pipeline's exit status is not
    # masked by `local` (ShellCheck SC2155); the original combined them.
    local container
    container=$(podman-compose -f "$COMPOSE_FILE" ps -q | head -1)

    if [ -z "$container" ]; then
        print_error "No running container found"
        exit 1
    fi

    print_info "Opening shell in container..."
    podman exec -it "$container" /bin/bash || podman exec -it "$container" /bin/sh
}
# Function to show status
# Print the compose stack's container status table.
show_status() {
cd "$PROJECT_ROOT"
print_info "Development environment status:"
podman-compose -f "$COMPOSE_FILE" ps
}
# Function to clean everything
# Destructive: interactively confirms, then removes the stack with its
# volumes and force-removes every image whose listing matches "mev-bot".
clean_all() {
print_warning "This will remove all containers, images, and volumes. Continue? (y/N)"
read -r response
if [[ "$response" =~ ^[Yy]$ ]]; then
cd "$PROJECT_ROOT"
podman-compose -f "$COMPOSE_FILE" down -v
# NOTE(review): grep matches "mev-bot" anywhere in the image listing,
# so similarly named images would be removed too — confirm acceptable.
podman images | grep mev-bot | awk '{print $3}' | xargs -r podman rmi -f
print_success "Cleaned all containers, images, and volumes"
else
print_info "Cancelled"
fi
}
# Function to switch branches
# Switch the working tree to the given branch, then rebuild and restart the
# dev environment on it. Exits 1 if no branch is given or it doesn't exist.
switch_branch() {
local branch="$1"
if [ -z "$branch" ]; then
print_error "Please specify a branch"
usage
exit 1
fi
print_info "Switching to branch: $branch"
cd "$PROJECT_ROOT"
# Check if branch exists
if ! git rev-parse --verify "$branch" &> /dev/null; then
print_error "Branch '$branch' does not exist"
list_branches
exit 1
fi
# Stop current environment
stop_dev
# Checkout branch
print_info "Checking out branch: $branch"
git checkout "$branch"
# Rebuild and start
rebuild_dev "$branch"
}
# Main script logic
# Dispatch on the first CLI argument; unknown commands print usage and
# exit 1. Dependency check runs before every command.
main() {
check_dependencies
case "${1:-}" in
start)
start_dev "${2:-$DEFAULT_BRANCH}"
;;
stop)
stop_dev
;;
restart)
restart_dev "${2:-$DEFAULT_BRANCH}"
;;
rebuild)
rebuild_dev "${2:-$DEFAULT_BRANCH}"
;;
logs)
shift
show_logs "$@"
;;
shell)
open_shell
;;
status)
show_status
;;
clean)
clean_all
;;
branches)
list_branches
;;
switch)
switch_branch "$2"
;;
-h|--help|help)
usage
;;
*)
print_error "Unknown command: ${1:-}"
echo ""
usage
exit 1
;;
esac
}
# Run main function
main "$@"

View File

@@ -1,267 +0,0 @@
#!/usr/bin/env bash
# Enable MEV Bot Execution Mode
# Updates configuration to allow live trading with flash loans
set -euo pipefail
CONFIG_FILE="config/bot_config.yaml"
KEYSTORE_DIR="keystore/production"
ENV_FILE=".env.production"
echo "═══════════════════════════════════════════════════════════"
echo "🚀 Enable MEV Bot Execution Mode"
echo "═══════════════════════════════════════════════════════════"
echo ""
# Verify prerequisites
echo "🔍 Verifying prerequisites..."
echo ""
# Check keystore exists — both the directory and the wallet file must be present.
if [ ! -d "$KEYSTORE_DIR" ] || [ ! -f "$KEYSTORE_DIR/executor_wallet.json" ]; then
    echo "❌ Error: Keystore not found!"
    echo " Please run ./scripts/setup-keystore.sh first"
    exit 1
fi
echo "✅ Keystore configured: $KEYSTORE_DIR/executor_wallet.json"
# Check encryption key.
# Guard against a missing env file first: otherwise grep prints its own
# "No such file or directory" to stderr, which obscures the real problem.
if [ ! -f "$ENV_FILE" ] || ! grep -q "MEV_BOT_ENCRYPTION_KEY" "$ENV_FILE"; then
    echo "❌ Error: Encryption key not set in $ENV_FILE"
    exit 1
fi
echo "✅ Encryption key configured"
# Check wallet balance (best-effort: a failure only warns, it does not abort).
echo ""
echo "💰 Checking wallet balance..."
if ./scripts/check-wallet-balance.sh > /dev/null 2>&1; then
    echo "✅ Wallet is funded and ready"
else
    echo "⚠️ Warning: Wallet balance check failed or insufficient funds"
    echo " Continuing anyway (you can fund later)..."
fi
echo ""
echo "═══════════════════════════════════════════════════════════"
echo "⚙️ Updating Bot Configuration"
echo "═══════════════════════════════════════════════════════════"
echo ""
# Backup current config
# Timestamped copy so the previous configuration can be restored after
# execution mode has been enabled.
BACKUP_FILE="$CONFIG_FILE.backup.$(date +%Y%m%d_%H%M%S)"
cp "$CONFIG_FILE" "$BACKUP_FILE"
echo "✅ Backed up config to: $BACKUP_FILE"
# Check if using micro-funding mode
# When the micro-funding config exists it takes precedence over the full
# template written below, and the script stops here (exit 0).
if [ -f "config/bot_config_micro.yaml" ]; then
echo "📝 Using micro-funding configuration (0.001 ETH mode)"
cp config/bot_config_micro.yaml "$CONFIG_FILE"
echo "✅ Configuration updated: $CONFIG_FILE"
exit 0
fi
# Update configuration to enable execution
cat > "$CONFIG_FILE" << 'EOF'
# MEV Bot Configuration - Execution Mode Enabled
# Bot Mode
mode: "execution" # Changed from "monitoring" to "execution"
# Execution Settings
execution:
enabled: true # Enable live trading
dry_run: false # Set to true for testing without real transactions
# Profitability Thresholds
min_profit_usd: 10.0 # Minimum profit in USD to execute (after gas)
min_profit_percentage: 0.1 # Minimum 0.1% profit
max_profit_percentage: 50.0 # Maximum expected profit (safety check)
# Gas Management
max_gas_price_gwei: 0.5 # Maximum gas price willing to pay (Arbitrum is cheap)
max_gas_cost_usd: 1.0 # Maximum gas cost per transaction
gas_estimation_buffer: 1.2 # 20% buffer on gas estimates
# Execution Limits
max_position_size_eth: 10.0 # Maximum flash loan size
max_trades_per_minute: 5 # Rate limiting for safety
max_daily_trades: 200 # Daily execution limit
# Safety Settings
enable_slippage_protection: true # Protect against slippage
max_slippage_percentage: 1.0 # Maximum 1% slippage tolerance
enable_front_run_protection: true # Monitor mempool for front-running
# Flash Loan Settings
flash_loan_enabled: true # Use flash loans for capital-free trading
preferred_flash_loan_provider: "balancer" # "balancer" (0% fee) or "uniswap" (0.09%)
flash_loan_fallback: ["uniswap", "aave"] # Fallback providers
# Keystore Configuration
keystore_path: "keystore/production/executor_wallet.json"
keystore_encryption_key_env: "MEV_BOT_ENCRYPTION_KEY"
# Arbitrage Detection
arbitrage:
min_profit_threshold: 0.1 # 0.1% minimum profit
max_hops: 3 # Allow up to 3-hop arbitrage (A→B→C→A)
enable_multi_hop: true # Enable multi-hop opportunities
# Opportunity Scoring
score_by_profit: true # Prioritize by profit amount
score_by_confidence: true # Weight by confidence score
min_confidence_score: 0.7 # Minimum 70% confidence to execute
# Network Configuration
network:
chain_id: 42161 # Arbitrum One
name: "Arbitrum One"
# RPC Configuration (loads from config/providers.yaml)
provider_config_path: "config/providers.yaml"
# Connection Settings
max_retries: 3
retry_delay_ms: 1000
connection_timeout_seconds: 30
request_timeout_seconds: 10
# Monitoring & Logging
monitoring:
enable_metrics: true # Enable Prometheus metrics
metrics_port: 9090
health_check_interval_seconds: 30
# Performance Tracking
track_execution_latency: true
track_gas_usage: true
track_profit_loss: true
# Alerting (optional - configure if needed)
enable_alerts: false
alert_webhook_url: ""
alert_on_failed_execution: true
alert_on_low_balance: true
low_balance_threshold_eth: 0.005
# Logging
logging:
level: "info" # "debug", "info", "warn", "error"
format: "json" # "json" or "text"
output: "logs/mev_bot.log"
enable_console: true
enable_file: true
# Log Rotation
max_size_mb: 100
max_backups: 10
max_age_days: 30
compress: true
# DEX Configuration
dexes:
# Uniswap V3 (Primary)
- name: "uniswap_v3"
enabled: true
router_address: "0xE592427A0AEce92De3Edee1F18E0157C05861564"
factory_address: "0x1F98431c8aD98523631AE4a59f267346ea31F984"
priority: 1
# SushiSwap
- name: "sushiswap"
enabled: true
router_address: "0x1b02dA8Cb0d097eB8D57A175b88c7D8b47997506"
factory_address: "0xc35DADB65012eC5796536bD9864eD8773aBc74C4"
priority: 2
# Camelot
- name: "camelot"
enabled: true
router_address: "0xc873fEcbd354f5A56E00E710B90EF4201db2448d"
factory_address: "0x6EcCab422D763aC031210895C81787E87B43A652"
priority: 3
# Balancer V2 (Flash Loans)
- name: "balancer_v2"
enabled: true
vault_address: "0xBA12222222228d8Ba445958a75a0704d566BF2C8"
priority: 4
# Token Configuration
tokens:
# Wrapped Ether (WETH)
- symbol: "WETH"
address: "0x82aF49447D8a07e3bd95BD0d56f35241523fBab1"
decimals: 18
enabled: true
# USD Coin (USDC)
- symbol: "USDC"
address: "0xaf88d065e77c8cC2239327C5EDb3A432268e5831"
decimals: 6
enabled: true
# Tether (USDT)
- symbol: "USDT"
address: "0xFd086bC7CD5C481DCC9C85ebE478A1C0b69FCbb9"
decimals: 6
enabled: true
# Arbitrum (ARB)
- symbol: "ARB"
address: "0x912CE59144191C1204E64559FE8253a0e49E6548"
decimals: 18
enabled: true
# Smart Contracts (Deployed)
contracts:
arbitrage_executor: "0x6C2B1c6Eb0e5aB73d8C60944c74A62bfE629c418"
flash_swapper: "0x7Cc97259cBe0D02Cd0b8A80c2E1f79C7265808b4"
data_fetcher: "0xC6BD82306943c0F3104296a46113ca0863723cBD"
EOF
echo "✅ Configuration updated: $CONFIG_FILE"
echo ""
# Show configuration diff
echo "📝 Configuration Changes:"
echo " • Mode: monitoring → execution"
echo " • Execution enabled: false → true"
echo " • Dry run: true → false (LIVE TRADING)"
echo " • Flash loans: enabled (Balancer 0% fee preferred)"
echo " • Multi-hop arbitrage: enabled (up to 3 hops)"
echo " • Min profit: \$2 USD or 0.05% (MICRO-FUNDING MODE)"
echo " • Max gas: \$1 per trade (0.0005 ETH)"
echo " • Funding: 0.001 ETH (~2 trades capacity)"
echo " • Keystore: $KEYSTORE_DIR/executor_wallet.json"
echo ""
echo "═══════════════════════════════════════════════════════════"
echo "✅ Execution Mode Enabled!"
echo "═══════════════════════════════════════════════════════════"
echo ""
echo "⚠️ **IMPORTANT: You are about to enable LIVE TRADING**"
echo ""
echo "📋 Pre-Flight Checklist:"
echo " ✅ Keystore configured and encrypted"
echo " ✅ Wallet funded with gas (check with ./scripts/check-wallet-balance.sh)"
echo " ✅ Flash loan contracts deployed and verified"
echo " ✅ Configuration updated with execution settings"
echo " ✅ Safety limits in place (max gas, min profit, slippage protection)"
echo ""
echo "🚀 To start live trading:"
echo " 1. Review configuration: cat $CONFIG_FILE"
echo " 2. Restart bot: pkill -f mev-beta && GO_ENV=production nohup ./bin/mev-beta start &"
echo " 3. Monitor logs: tail -f logs/mev_bot.log"
echo " 4. Watch for: 'Executing arbitrage' messages"
echo ""
echo "📊 Monitor execution:"
echo " • Logs: tail -f logs/mev_bot.log | grep 'EXECUTION\\|Profit'"
echo " • Metrics: http://localhost:9090/metrics"
echo " • Wallet: https://arbiscan.io/address/<your-wallet-address>"
echo ""
echo "🛑 Emergency stop:"
echo " • Kill bot: pkill -f mev-beta"
echo " • Disable execution: Edit $CONFIG_FILE, set execution.enabled=false"
echo ""

View File

@@ -1,17 +0,0 @@
#!/usr/bin/env bash
# Lists recent entries from the multicall diagnostics log and prints the
# commands needed to fetch calldata for an entry via Arbiscan.
# Arguments: $1 - log file (default logs/diagnostics/multicall_samples.log)
#            $2 - max entries to show (default 10)
set -euo pipefail
LOG_FILE=${1:-logs/diagnostics/multicall_samples.log}
LIMIT=${2:-10}
if [[ ! -f "$LOG_FILE" ]]; then
echo "No diagnostic log found at $LOG_FILE" >&2
exit 1
fi
echo "Found entries (showing up to $LIMIT):"
# Prefix each line with its 1-based entry number before trimming to LIMIT.
awk '{print NR"|"$0}' "$LOG_FILE" | tail -n "$LIMIT"
# printf instead of echo: bash's echo does not expand "\n" by default, so
# the original `echo "\nTo fetch..."` printed a literal backslash-n.
printf '\nTo fetch calldata for an entry (requires network + API key):\n'
echo " export ARBISCAN_API_KEY=<your_key>"
echo " ./scripts/fetch_arbiscan_tx.sh <tx_hash> | jq -r '.result.input'"

View File

@@ -1,26 +0,0 @@
#!/usr/bin/env bash
# Fetches a transaction by hash from the Arbiscan API (V2 endpoint first,
# legacy V1 as fallback) and prints the raw JSON response to stdout.
# Requires: ARBISCAN_API_KEY in the environment. Argument: $1 - tx hash.
set -euo pipefail
if [[ $# -ne 1 ]]; then
echo "Usage: $0 <tx_hash>" >&2
exit 1
fi
if [[ -z "${ARBISCAN_API_KEY:-}" ]]; then
echo "Error: ARBISCAN_API_KEY environment variable is required." >&2
exit 1
fi
TX_HASH="$1"
# Prefer the newer V2 endpoint; fall back to V1 if it fails (for backwards compatibility)
response=$(curl -s "https://api.arbiscan.io/v2/api?module=proxy&action=eth_getTransactionByHash&txhash=${TX_HASH}&apikey=${ARBISCAN_API_KEY}")
# NOTE(review): proxy-module responses are JSON-RPC shaped and typically
# have no top-level ".status" field — verify this check actually matches
# successful V2 replies, or it will always fall through to V1.
status=$(echo "$response" | jq -r '.status' 2>/dev/null || echo "")
# NOTE(review): inside [[ ]], && binds tighter than ||, so this reads as
# status==1 OR (status==0 AND response contains "result") — confirm that
# grouping is intentional.
if [[ "$status" == "1" || "$status" == "0" && "$response" == *"result"* ]]; then
echo "$response"
exit 0
fi
# Fallback to legacy V1 endpoint
curl -s "https://api.arbiscan.io/api?module=proxy&action=eth_getTransactionByHash&txhash=${TX_HASH}&apikey=${ARBISCAN_API_KEY}"

View File

@@ -1,144 +0,0 @@
#!/bin/bash
# Fix decimal handling and profit thresholds to enable proper arbitrage detection
set -e
echo "🔧 Applying critical fixes for decimal handling and profit thresholds..."
# 1. Fix profit calculator to use proper decimals
echo "📐 Fixing profit calculator decimal handling..."
cat > /tmp/profit_calc_fix.go << 'EOF'
// Add to pkg/profitcalc/profit_calc.go after imports
import "github.com/fraktal/mev-beta/pkg/tokens"
// Update CalculateProfit to use proper decimals
func (pc *ProfitCalculator) CalculateProfitWithDecimals(
amountIn *big.Int,
amountOut *big.Int,
tokenIn common.Address,
tokenOut common.Address,
gasEstimate uint64,
) (*big.Int, error) {
// Convert to normalized values (18 decimals) for calculation
normalizedIn := tokens.NormalizeAmount(amountIn, tokenIn, tokens.WETH)
normalizedOut := tokens.NormalizeAmount(amountOut, tokenOut, tokens.WETH)
// Calculate profit
profit := new(big.Int).Sub(normalizedOut, normalizedIn)
// Subtract gas costs
gasCost := new(big.Int).Mul(pc.gasPrice, big.NewInt(int64(gasEstimate)))
netProfit := new(big.Int).Sub(profit, gasCost)
return netProfit, nil
}
EOF
# 2. Update arbitrage service to count opportunities properly
echo "📊 Fixing opportunity counting..."
cat > /tmp/arbitrage_counting_fix.patch << 'EOF'
--- a/pkg/arbitrage/service.go
+++ b/pkg/arbitrage/service.go
@@ -700,6 +700,9 @@ func (sas *ArbitrageService) detectArbitrageOpportunities(event *SimpleSwapEven
// Process opportunity
sas.processOpportunity(opportunity)
+
+ // Increment counter
+ atomic.AddUint64(&sas.opportunitiesDetected, 1)
}
duration := time.Since(start)
EOF
# 3. Lower thresholds significantly
echo "📉 Lowering profit thresholds..."
cat > /tmp/threshold_fix.patch << 'EOF'
--- a/pkg/profitcalc/profit_calc.go
+++ b/pkg/profitcalc/profit_calc.go
@@ -59,7 +59,7 @@ func NewProfitCalculator(logger *logger.Logger) *ProfitCalculator {
return &ProfitCalculator{
logger: logger,
- minProfitThreshold: big.NewInt(1000000000000000), // 0.001 ETH
+ minProfitThreshold: big.NewInt(10000000000000), // 0.00001 ETH (~$0.02)
maxSlippage: 0.03,
gasPrice: big.NewInt(100000000), // 0.1 gwei
EOF
# 4. Fix the ROI calculation
echo "💰 Fixing ROI calculations..."
cat > /tmp/roi_fix.go << 'EOF'
// Update ROI calculation to be reasonable
func calculateROI(profit, amountIn *big.Int) float64 {
if amountIn.Sign() == 0 {
return 0
}
// Convert to float for percentage calculation
profitFloat := new(big.Float).SetInt(profit)
amountFloat := new(big.Float).SetInt(amountIn)
// ROI = (profit / amountIn) * 100
roi := new(big.Float).Quo(profitFloat, amountFloat)
roi.Mul(roi, big.NewFloat(100))
result, _ := roi.Float64()
// Cap at reasonable maximum (100% ROI)
if result > 100 {
return 100
}
return result
}
EOF
# 5. Apply configuration updates
echo "⚙️ Updating configuration files..."
cat > /home/administrator/projects/mev-beta/config/arbitrage_config.yaml << 'EOF'
# Optimized arbitrage detection settings
detection:
min_profit_threshold_eth: 0.00001 # $0.02 at $2000/ETH
min_profit_threshold_usd: 0.01 # $0.01 minimum
min_roi_percentage: 0.01 # 0.01% minimum ROI
max_price_impact: 0.05 # 5% max price impact
gas_price_gwei: 0.1 # Arbitrum typical
gas_estimate_swap: 150000 # Typical swap gas
decimal_handling:
normalize_to_18: true
token_decimals:
WETH: 18
USDC: 6
USDT: 6
WBTC: 8
DAI: 18
ARB: 18
execution:
enabled: false # Start in monitoring mode
max_position_size_eth: 1.0
slippage_tolerance: 0.005 # 0.5%
deadline_seconds: 60
EOF
# 6. Build and restart
echo "🔨 Building bot with fixes..."
cd /home/administrator/projects/mev-beta
go build -o mev-bot cmd/mev-bot/main.go
echo "✅ Fixes applied successfully!"
echo ""
echo "📋 Summary of changes:"
echo " • Decimal handling integrated for USDC(6), USDT(6), WBTC(8)"
echo " • Profit threshold lowered to 0.00001 ETH (~$0.02)"
echo " • ROI calculation fixed and capped at 100%"
echo " • Opportunity counting fixed"
echo " • Configuration optimized for Arbitrum"
echo ""
echo "🚀 To start the bot with fixes:"
echo " ./mev-bot start"
echo ""
echo "📊 Monitor for opportunities:"
echo " tail -f logs/mev_bot.log | grep -E 'Detected:|opportunity|profit'"

View File

@@ -1,131 +0,0 @@
#!/bin/bash
# Critical fix for profit calculations to enable arbitrage detection
# This script applies immediate fixes to get the bot detecting opportunities
set -e
echo "🔧 Applying critical profit calculation fixes..."
# 1. Update profit calculator to use lower threshold
echo "📉 Lowering profit threshold..."
cat > /tmp/profit_fix.patch << 'EOF'
--- a/pkg/profitcalc/profit_calc.go
+++ b/pkg/profitcalc/profit_calc.go
@@ -59,7 +59,7 @@ func NewProfitCalculator(logger *logger.Logger) *ProfitCalculator {
return &ProfitCalculator{
logger: logger,
- minProfitThreshold: big.NewInt(1000000000000000), // 0.001 ETH minimum (lowered for testing)
+ minProfitThreshold: big.NewInt(100000000000000), // 0.0001 ETH minimum (~$0.20 at $2000/ETH)
maxSlippage: 0.03, // 3% max slippage
gasPrice: big.NewInt(100000000), // 0.1 gwei default (Arbitrum typical)
@@ -179,7 +179,7 @@ func (spc *ProfitCalculator) AnalyzeSwapOpportunity(
// Estimate a small price differential based on typical DEX spreads
// Most DEX pairs have 0.3% fee, arbitrage typically happens at 0.5-1% spread
- typicalSpreadBps := int64(30) // 0.3% typical spread
+ typicalSpreadBps := int64(10) // 0.1% typical spread (lowered for better detection)
spreadFactor := new(big.Float).Quo(big.NewFloat(float64(typicalSpreadBps)), big.NewFloat(10000))
EOF
# Apply the patch
cd /home/administrator/projects/mev-beta
# NOTE(review): both the 2>/dev/null and the `|| echo` swallow every patch
# failure (bad hunks, wrong paths) — a genuinely failed patch is reported
# the same as an already-applied one. Consider checking patch's exit code.
patch -p1 < /tmp/profit_fix.patch 2>/dev/null || echo "Patch may already be applied or file differs"
# 2. Update configuration files to use lower thresholds
echo "📝 Updating configuration thresholds..."
# Update local.yaml (already done but double-check)
# NOTE(review): `|| true` hides sed failures (e.g. missing config/local.yaml),
# so this edit is best-effort only.
sed -i 's/min_profit_threshold: 10.0/min_profit_threshold: 0.5/' config/local.yaml 2>/dev/null || true
# 3. Create a hotfix for decimal handling in scanner
echo "🔢 Creating decimal handling hotfix..."
cat > pkg/scanner/decimal_fix.go << 'EOF'
package scanner
import (
"math/big"
"github.com/ethereum/go-ethereum/common"
)
// TokenDecimalMap provides decimal information for common tokens
var TokenDecimalMap = map[common.Address]int{
common.HexToAddress("0x82aF49447D8a07e3bd95BD0d56f35241523fBab1"): 18, // WETH
common.HexToAddress("0xFF970A61A04b1cA14834A43f5dE4533eBDDB5CC8"): 6, // USDC
common.HexToAddress("0xFd086bC7CD5C481DCC9C85ebE478A1C0b69FCbb9"): 6, // USDT
common.HexToAddress("0x2f2a2543B76A4166549F7aaB2e75Bef0aefC5B0f"): 8, // WBTC
common.HexToAddress("0x912CE59144191C1204E64559FE8253a0e49E6548"): 18, // ARB
}
// GetTokenDecimals returns decimals for a token (defaults to 18)
func GetTokenDecimals(token common.Address) int {
if decimals, ok := TokenDecimalMap[token]; ok {
return decimals
}
return 18
}
// NormalizeToEther converts token amount to ether equivalent considering decimals
func NormalizeToEther(amount *big.Int, token common.Address) *big.Float {
if amount == nil {
return big.NewFloat(0)
}
decimals := GetTokenDecimals(token)
divisor := new(big.Int).Exp(big.NewInt(10), big.NewInt(int64(decimals)), nil)
result := new(big.Float).Quo(
new(big.Float).SetInt(amount),
new(big.Float).SetInt(divisor),
)
return result
}
EOF
# 4. Update the arbitrage detection engine configuration
echo "⚙️ Updating detection engine settings..."
cat > /tmp/detection_config.yaml << 'EOF'
# Optimized detection settings for Arbitrum
detection:
min_spread_bps: 5 # 0.05% minimum spread (5 basis points)
min_profit_usd: 0.50 # $0.50 minimum profit
max_gas_price_gwei: 0.5 # 0.5 gwei max on Arbitrum
include_flash_loan_fee: true # 0.09% Aave fee
slippage_tolerance: 0.005 # 0.5% slippage
# Fee tiers for UniswapV3 (in basis points)
uniswap_v3_fees:
- 5 # 0.05%
- 30 # 0.30%
- 100 # 1.00%
- 10000 # 100% (special pools)
# Token decimal registry
token_decimals:
WETH: 18
USDC: 6
USDT: 6
WBTC: 8
DAI: 18
ARB: 18
EOF
echo "✅ Critical fixes applied!"
echo ""
echo "📊 Summary of changes:"
echo " • Lowered minimum profit threshold from 0.001 ETH to 0.0001 ETH"
echo " • Reduced typical spread assumption from 0.3% to 0.1%"
echo " • Added proper token decimal handling"
echo " • Updated configuration thresholds"
echo ""
echo "🔄 Next steps:"
echo " 1. Rebuild the bot: go build -o mev-bot cmd/mev-bot/main.go"
echo " 2. Restart the bot: ./mev-bot start"
echo " 3. Monitor for opportunities: tail -f logs/mev_bot.log | grep -i 'opportunity\|profit'"
echo ""
echo "⚠️ Note: These are temporary fixes. Permanent solution requires:"
echo " • Full integration of decimal handling throughout codebase"
echo " • Dynamic fee calculation based on actual pool fees"
echo " • Real-time gas price monitoring"
echo " • Comprehensive testing with different token pairs"

View File

@@ -1,234 +0,0 @@
#!/bin/bash
###############################################################################
# RPC Configuration Fix Script
#
# This script fixes the critical RPC rate limiting issue by ensuring
# the bot uses the paid Chainstack endpoint instead of the public endpoint.
#
# Usage: ./scripts/fix-rpc-config.sh
###############################################################################
set -euo pipefail
# Colors
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
NC='\033[0m' # No Color
echo -e "${BLUE}═══════════════════════════════════════════════════════════════${NC}"
echo -e "${BLUE} MEV Bot RPC Configuration Fix${NC}"
echo -e "${BLUE}═══════════════════════════════════════════════════════════════${NC}"
echo ""
###############################################################################
# Step 1: Check Current Configuration
###############################################################################
echo -e "${YELLOW}[1/5] Checking current RPC configuration...${NC}"
echo ""
# Check if environment variables are set
if [ -z "${ARBITRUM_RPC_ENDPOINT:-}" ]; then
echo -e "${RED}✗ ARBITRUM_RPC_ENDPOINT not set${NC}"
ENV_SET=false
else
echo -e "${GREEN}✓ ARBITRUM_RPC_ENDPOINT is set${NC}"
echo " Current value: ${ARBITRUM_RPC_ENDPOINT}"
ENV_SET=true
fi
if [ -z "${ARBITRUM_WS_ENDPOINT:-}" ]; then
echo -e "${RED}✗ ARBITRUM_WS_ENDPOINT not set${NC}"
WS_SET=false
else
echo -e "${GREEN}✓ ARBITRUM_WS_ENDPOINT is set${NC}"
echo " Current value: ${ARBITRUM_WS_ENDPOINT}"
WS_SET=true
fi
echo ""
###############################################################################
# Step 2: Detect Issue
###############################################################################
echo -e "${YELLOW}[2/5] Detecting RPC issues...${NC}"
echo ""
# Check if using public endpoint
if [[ "${ARBITRUM_RPC_ENDPOINT:-}" == *"arb1.arbitrum.io"* ]]; then
echo -e "${RED}✗ CRITICAL: Using public RPC endpoint!${NC}"
echo " This will cause rate limiting (429 errors)"
USING_PUBLIC=true
elif [[ "${ARBITRUM_RPC_ENDPOINT:-}" == *"chainstack.com"* ]]; then
echo -e "${GREEN}✓ Using paid Chainstack endpoint${NC}"
USING_PUBLIC=false
else
echo -e "${YELLOW}⚠ Unknown RPC endpoint${NC}"
USING_PUBLIC=unknown
fi
# Check recent logs for 429 errors.
# Sets HAS_RATE_LIMIT to true/false, or "unknown" when no log file exists.
# BUGFIX: `grep -c` always prints a count — including "0" — before exiting
# non-zero on no match, so the original `|| echo "0"` produced the two-line
# value "0\n0" and made `[ ... -gt 10 ]` abort under set -e. `|| true`
# satisfies set -e/pipefail without duplicating the output.
if [ -f "logs/mev_bot.log" ]; then
    RATE_LIMIT_COUNT=$(tail -n 1000 logs/mev_bot.log | grep -c "429 Too Many Requests" || true)
    if [ "$RATE_LIMIT_COUNT" -gt 10 ]; then
        echo -e "${RED}✗ High rate limiting detected: $RATE_LIMIT_COUNT errors in last 1000 lines${NC}"
        HAS_RATE_LIMIT=true
    elif [ "$RATE_LIMIT_COUNT" -gt 0 ]; then
        echo -e "${YELLOW}⚠ Some rate limiting detected: $RATE_LIMIT_COUNT errors${NC}"
        HAS_RATE_LIMIT=true
    else
        echo -e "${GREEN}✓ No rate limiting detected${NC}"
        HAS_RATE_LIMIT=false
    fi
else
    echo -e "${YELLOW}⚠ No log file found, cannot check for rate limiting${NC}"
    HAS_RATE_LIMIT=unknown
fi
echo ""
###############################################################################
# Step 3: Load Correct Configuration
###############################################################################
echo -e "${YELLOW}[3/5] Loading correct configuration...${NC}"
echo ""
# Check if .env.production exists
if [ -f ".env.production" ]; then
echo -e "${GREEN}✓ Found .env.production${NC}"
# Source it
set -a
source .env.production
set +a
echo -e "${GREEN}✓ Loaded .env.production${NC}"
# Verify it has the right endpoint
if [[ "${ARBITRUM_RPC_ENDPOINT:-}" == *"chainstack.com"* ]]; then
echo -e "${GREEN}✓ Chainstack endpoint configured in .env.production${NC}"
else
echo -e "${RED}✗ .env.production does not have Chainstack endpoint!${NC}"
echo " Please edit .env.production and set:"
echo " ARBITRUM_RPC_ENDPOINT=wss://arbitrum-mainnet.core.chainstack.com/YOUR_KEY"
exit 1
fi
else
echo -e "${RED}✗ .env.production not found!${NC}"
echo ""
echo "Creating .env.production from template..."
# Create from .env.example
if [ -f ".env.example" ]; then
cp .env.example .env.production
echo -e "${YELLOW}⚠ Created .env.production from .env.example${NC}"
echo -e "${RED}✗ You must edit .env.production and set your Chainstack API key!${NC}"
echo ""
echo "Steps:"
echo " 1. Edit .env.production"
echo " 2. Replace YOUR_KEY_HERE with your actual Chainstack API key"
echo " 3. Run this script again"
exit 1
else
echo -e "${RED}✗ .env.example not found either!${NC}"
exit 1
fi
fi
echo ""
###############################################################################
# Step 4: Stop Bot if Running
###############################################################################
echo -e "${YELLOW}[4/5] Stopping MEV bot if running...${NC}"
echo ""
# Check if bot is running
# NOTE(review): pgrep -f can return MULTIPLE PIDs (one per line); the
# quoted "$BOT_PID" then passes them to kill as a single newline-embedded
# argument, which fails. Confirm only one bot instance ever runs, or loop
# over the PIDs instead.
BOT_PID=$(pgrep -f "mev-bot start" || echo "")
if [ -n "$BOT_PID" ]; then
echo -e "${YELLOW}⚠ Found running bot process (PID: $BOT_PID)${NC}"
echo "Stopping bot..."
# Graceful stop first; errors ignored in case the process already exited.
kill -TERM "$BOT_PID" 2>/dev/null || true
# Wait up to 10 seconds for graceful shutdown
# kill -0 only probes for existence — it sends no signal.
for i in {1..10}; do
if ! kill -0 "$BOT_PID" 2>/dev/null; then
echo -e "${GREEN}✓ Bot stopped gracefully${NC}"
break
fi
sleep 1
done
# Force kill if still running
if kill -0 "$BOT_PID" 2>/dev/null; then
echo -e "${YELLOW}⚠ Force killing bot...${NC}"
kill -9 "$BOT_PID" 2>/dev/null || true
fi
else
echo -e "${GREEN}✓ Bot is not running${NC}"
fi
echo ""
###############################################################################
# Step 5: Verify Configuration
###############################################################################
echo -e "${YELLOW}[5/5] Verifying configuration...${NC}"
echo ""
# Display final configuration
echo "Configuration:"
echo " ARBITRUM_RPC_ENDPOINT: ${ARBITRUM_RPC_ENDPOINT}"
echo " ARBITRUM_WS_ENDPOINT: ${ARBITRUM_WS_ENDPOINT:-NOT_SET}"
echo " PROVIDER_CONFIG_PATH: ${PROVIDER_CONFIG_PATH:-$PWD/config/providers_runtime.yaml}"
echo ""
# Verify it's not the public endpoint
if [[ "${ARBITRUM_RPC_ENDPOINT}" == *"arb1.arbitrum.io"* ]]; then
echo -e "${RED}✗ STILL using public endpoint!${NC}"
echo " Configuration fix failed"
exit 1
else
echo -e "${GREEN}✓ Using paid endpoint${NC}"
fi
echo ""
###############################################################################
# Summary and Next Steps
###############################################################################
echo -e "${BLUE}═══════════════════════════════════════════════════════════════${NC}"
echo -e "${GREEN}✓ RPC Configuration Fix Complete${NC}"
echo -e "${BLUE}═══════════════════════════════════════════════════════════════${NC}"
echo ""
echo -e "${YELLOW}Next Steps:${NC}"
echo ""
echo "1. Start the bot with correct configuration:"
echo " ${GREEN}PROVIDER_CONFIG_PATH=\$PWD/config/providers_runtime.yaml ./bin/mev-bot start${NC}"
echo ""
echo "2. Monitor for rate limiting errors:"
echo " ${GREEN}tail -f logs/mev_bot.log | grep \"429 Too Many Requests\"${NC}"
echo " ${YELLOW}(Should show NO results if fix is working)${NC}"
echo ""
echo "3. Verify block processing:"
echo " ${GREEN}tail -f logs/mev_bot.log | grep \"Processing block\"${NC}"
echo " ${YELLOW}(Should show continuous block processing)${NC}"
echo ""
echo "4. Check connection status:"
echo " ${GREEN}tail -f logs/mev_bot.log | grep -i \"connected\"${NC}"
echo " ${YELLOW}(Should show successful connection to Chainstack)${NC}"
echo ""
echo -e "${BLUE}═══════════════════════════════════════════════════════════════${NC}"
echo ""

View File

@@ -1,216 +0,0 @@
#!/bin/bash
# Fund MEV Bot Wallet Using Cast
# Sends minimum required ETH from a funded source wallet
set -e
ARBITRUM_RPC="https://arb-mainnet.g.alchemy.com/v2/d6VAHgzkOI3NgLGem6uBMiADT1E9rROB"
BOT_WALLET_ADDRESS="0x40091653f652a259747D86d7Cbe3e2848082a051"
MIN_AMOUNT="0.001" # Minimum ETH to send
SAFETY_BUFFER="0.001" # Keep this much in source wallet
echo "═══════════════════════════════════════════════════════════"
echo "💸 Fund MEV Bot Wallet Using Cast"
echo "═══════════════════════════════════════════════════════════"
echo ""
echo "Target Bot Wallet: $BOT_WALLET_ADDRESS"
echo "Amount to Send: $MIN_AMOUNT ETH"
echo "Network: Arbitrum One"
echo ""
# Check if cast is installed
if ! command -v cast &> /dev/null; then
echo "❌ Error: cast (Foundry) not found"
echo " Install: curl -L https://foundry.paradigm.xyz | bash"
exit 1
fi
echo "📋 You need a source wallet with funds on Arbitrum One"
echo ""
echo "Options:"
echo " 1. Provide private key directly (not recommended for large amounts)"
echo " 2. Use Foundry keystore (recommended, encrypted)"
echo " 3. Exit and fund manually via MetaMask"
echo ""
read -p "Choose option (1/2/3): " OPTION
case $OPTION in
1)
echo ""
read -sp "Enter source wallet private key (0x...): " SOURCE_PRIVATE_KEY
echo ""
# Derive source address
SOURCE_ADDRESS=$(cast wallet address "$SOURCE_PRIVATE_KEY")
echo "✅ Source Address: $SOURCE_ADDRESS"
;;
2)
echo ""
echo "📂 Available Foundry keystores:"
if [ -d "$HOME/.foundry/keystores" ]; then
ls -1 "$HOME/.foundry/keystores" 2>/dev/null || echo " (none found)"
else
echo " (no keystore directory)"
fi
echo ""
read -p "Enter keystore name (or path): " KEYSTORE_NAME
# Try to use keystore
SOURCE_ADDRESS=$(cast wallet address --keystore "$KEYSTORE_NAME" 2>/dev/null || {
echo "❌ Failed to access keystore"
exit 1
})
echo "✅ Source Address: $SOURCE_ADDRESS"
# For keystore, we'll need to use --keystore flag in send
USE_KEYSTORE=true
;;
3)
echo ""
echo "Exiting. To fund manually:"
echo " 1. Open MetaMask on Arbitrum One"
echo " 2. Send $MIN_AMOUNT ETH to: $BOT_WALLET_ADDRESS"
echo " 3. Run: ./scripts/check-wallet-balance.sh"
exit 0
;;
*)
echo "❌ Invalid option"
exit 1
;;
esac
echo ""
echo "🔍 Checking source wallet balance..."
# Check source wallet balance
SOURCE_BALANCE_WEI=$(cast balance "$SOURCE_ADDRESS" --rpc-url "$ARBITRUM_RPC")
SOURCE_BALANCE_ETH=$(cast --to-unit "$SOURCE_BALANCE_WEI" ether)
echo " Balance: $SOURCE_BALANCE_ETH ETH"
# Check if sufficient balance
REQUIRED=$(echo "$MIN_AMOUNT + $SAFETY_BUFFER + 0.001" | bc) # +0.001 for gas
SUFFICIENT=$(echo "$SOURCE_BALANCE_ETH >= $REQUIRED" | bc)
if [ "$SUFFICIENT" -eq 0 ]; then
echo ""
echo "❌ Insufficient balance!"
echo " Current: $SOURCE_BALANCE_ETH ETH"
echo " Required: $REQUIRED ETH ($MIN_AMOUNT + $SAFETY_BUFFER safety + 0.001 gas)"
echo ""
echo "Please add funds to: $SOURCE_ADDRESS"
exit 1
fi
echo "✅ Sufficient balance to proceed"
echo ""
# Check bot wallet current balance
echo "🔍 Checking bot wallet current balance..."
BOT_BALANCE_WEI=$(cast balance "$BOT_WALLET_ADDRESS" --rpc-url "$ARBITRUM_RPC")
BOT_BALANCE_ETH=$(cast --to-unit "$BOT_BALANCE_WEI" ether)
echo " Current Bot Balance: $BOT_BALANCE_ETH ETH"
if [ "$(echo "$BOT_BALANCE_ETH >= $MIN_AMOUNT" | bc)" -eq 1 ]; then
echo ""
echo "✅ Bot wallet already has sufficient funds!"
echo ""
echo "Current balance ($BOT_BALANCE_ETH ETH) meets minimum ($MIN_AMOUNT ETH)"
echo ""
read -p "Send additional funds anyway? (y/N): " PROCEED
if [[ ! "$PROCEED" =~ ^[Yy]$ ]]; then
echo "Exiting. Run ./scripts/check-wallet-balance.sh to verify."
exit 0
fi
fi
echo ""
echo "═══════════════════════════════════════════════════════════"
echo "💸 Transaction Summary"
echo "═══════════════════════════════════════════════════════════"
echo ""
echo "From: $SOURCE_ADDRESS"
echo "To: $BOT_WALLET_ADDRESS"
echo "Amount: $MIN_AMOUNT ETH"
echo "Network: Arbitrum One (Chain ID: 42161)"
echo ""
echo "After Transaction:"
echo " Source Balance: ~$(echo "$SOURCE_BALANCE_ETH - $MIN_AMOUNT - 0.001" | bc) ETH"
echo " Bot Balance: ~$(echo "$BOT_BALANCE_ETH + $MIN_AMOUNT" | bc) ETH"
echo ""
read -p "⚠️ Confirm transaction? (yes/no): " CONFIRM
if [ "$CONFIRM" != "yes" ]; then
echo "Transaction cancelled"
exit 0
fi
echo ""
echo "📤 Sending transaction..."
# Send transaction
# NOTE(review): USE_KEYSTORE is only assigned in option 2; in option 1 it
# is unset and this test works only because set -u is not enabled — verify.
# NOTE(review): passing --private-key on the command line exposes the key
# to other local users via `ps`/procfs while cast runs.
if [ "$USE_KEYSTORE" = true ]; then
# Use keystore
TXHASH=$(cast send "$BOT_WALLET_ADDRESS" \
--value "${MIN_AMOUNT}ether" \
--keystore "$KEYSTORE_NAME" \
--rpc-url "$ARBITRUM_RPC" \
--legacy)
else
# Use private key
TXHASH=$(cast send "$BOT_WALLET_ADDRESS" \
--value "${MIN_AMOUNT}ether" \
--private-key "$SOURCE_PRIVATE_KEY" \
--rpc-url "$ARBITRUM_RPC" \
--legacy)
fi
# NOTE(review): the script runs under `set -e`, so a failing cast above
# terminates the script before this point — the `$? -eq 0` checks below
# (and the final error branch) are effectively dead code.
if [ $? -eq 0 ]; then
echo ""
echo "✅ Transaction sent successfully!"
echo ""
echo "Transaction Hash: $TXHASH"
echo "View on Arbiscan: https://arbiscan.io/tx/$TXHASH"
echo ""
echo "⏳ Waiting for confirmation (typically 1-2 minutes)..."
# Wait for receipt
sleep 3
cast receipt "$TXHASH" --rpc-url "$ARBITRUM_RPC" > /dev/null 2>&1
if [ $? -eq 0 ]; then
echo "✅ Transaction confirmed!"
echo ""
# Check new balance
NEW_BALANCE_WEI=$(cast balance "$BOT_WALLET_ADDRESS" --rpc-url "$ARBITRUM_RPC")
NEW_BALANCE_ETH=$(cast --to-unit "$NEW_BALANCE_WEI" ether)
echo "═══════════════════════════════════════════════════════════"
echo "✅ Bot Wallet Funded Successfully!"
echo "═══════════════════════════════════════════════════════════"
echo ""
echo "Bot Wallet: $BOT_WALLET_ADDRESS"
echo "New Balance: $NEW_BALANCE_ETH ETH"
echo ""
echo "⏭️ Next Steps:"
echo " 1. Configure keystore: ./scripts/setup-keystore.sh"
echo " 2. Enable execution: ./scripts/enable-execution-mode.sh"
echo " 3. Start bot: pkill -f mev-beta && GO_ENV=production nohup ./bin/mev-beta start &"
echo ""
else
echo "⏳ Transaction pending, check Arbiscan for status"
fi
else
echo ""
echo "❌ Transaction failed!"
echo "Check error message above for details"
exit 1
fi

View File

@@ -1,44 +0,0 @@
#!/bin/bash
# Generates Go contract bindings with abigen from the ABI JSON files in
# contracts/abis/, writing packages under pkg/bindings/.
# BUGFIX: the original had no error handling, so a failed abigen run still
# printed "Bindings generated successfully!".
set -euo pipefail

echo "Generating Go Contract Bindings"
echo "==============================="
# Create bindings directory
mkdir -p pkg/bindings/{uniswapv2,uniswapv3,algebra,algebraintegral,pooldetector}
# Check if abigen is installed
if ! command -v abigen &> /dev/null; then
    echo "Installing abigen..."
    go install github.com/ethereum/go-ethereum/cmd/abigen@latest
    # A fresh `go install` drops the binary into GOPATH/bin, which may not
    # be on PATH yet — add it so the abigen calls below can resolve.
    export PATH="$PATH:$(go env GOPATH)/bin"
fi
echo ""
echo "Generating UniswapV2 bindings..."
abigen --abi contracts/abis/UniswapV2Pair.json \
    --pkg uniswapv2 \
    --type UniswapV2Pair \
    --out pkg/bindings/uniswapv2/pair.go
echo "Generating UniswapV3 bindings..."
abigen --abi contracts/abis/UniswapV3Pool.json \
    --pkg uniswapv3 \
    --type UniswapV3Pool \
    --out pkg/bindings/uniswapv3/pool.go
echo "Generating Algebra V1.9 bindings..."
abigen --abi contracts/abis/AlgebraPool.json \
    --pkg algebra \
    --type AlgebraPool \
    --out pkg/bindings/algebra/pool.go
echo "Generating Algebra Integral bindings..."
abigen --abi contracts/abis/AlgebraIntegralPool.json \
    --pkg algebraintegral \
    --type AlgebraIntegralPool \
    --out pkg/bindings/algebraintegral/pool.go
echo ""
echo "Bindings generated successfully!"
echo ""
echo "Available bindings:"
ls -la pkg/bindings/*/

View File

@@ -1,73 +0,0 @@
//go:build tools
// +build tools
package main
import (
"fmt"
"math/big"
"os"
"github.com/fraktal/mev-beta/internal/logger"
"github.com/fraktal/mev-beta/pkg/security"
)
func main() {
	// The master encryption key must come from the environment; refuse to
	// start without it so keystores are never created unencrypted.
	encryptionKey := os.Getenv("MEV_BOT_ENCRYPTION_KEY")
	if encryptionKey == "" {
		fmt.Println("❌ MEV_BOT_ENCRYPTION_KEY environment variable is required")
		os.Exit(1)
	}

	// Assemble the key manager configuration for local keystore files.
	cfg := &security.KeyManagerConfig{
		KeystorePath:    "keystore",
		EncryptionKey:   encryptionKey,
		KeyRotationDays: 30,
		MaxSigningRate:  100,
		SessionTimeout:  3600,
		AuditLogPath:    "logs/audit.log",
		BackupPath:      "backups",
	}

	appLog := logger.New("info", "text", "")

	fmt.Println("🔑 Creating key manager...")
	manager, err := security.NewKeyManager(cfg, appLog)
	if err != nil {
		fmt.Printf("❌ Failed to create key manager: %v\n", err)
		os.Exit(1)
	}

	// Generate a signing key capped at 1 ETH per transfer.
	fmt.Println("🔑 Generating trading key...")
	perms := security.KeyPermissions{
		CanSign:          true,
		CanTransfer:      true,
		MaxTransferWei:   big.NewInt(1000000000000000000), // 1 ETH
		AllowedContracts: []string{},
		RequireConfirm:   false,
	}
	addr, err := manager.GenerateKey("trading", perms)
	if err != nil {
		fmt.Printf("❌ Failed to generate key: %v\n", err)
		os.Exit(1)
	}
	fmt.Printf("✅ Trading key generated successfully: %s\n", addr.Hex())

	// Round-trip check: the freshly generated key must be retrievable.
	fmt.Println("🔍 Testing key retrieval...")
	if _, err = manager.GetActivePrivateKey(); err != nil {
		fmt.Printf("❌ Failed to retrieve key: %v\n", err)
		os.Exit(1)
	}
	fmt.Println("✅ Key retrieval successful!")
	fmt.Printf("📋 Key manager ready for production use\n")
}

View File

@@ -1,270 +0,0 @@
#!/bin/bash
# Generate comprehensive test report from 24-hour run
set -e

LOG_DIR="logs/24h_test"

# Newest test log. Filenames are machine-generated (test_*.log, no
# whitespace), so parsing `ls -t` is acceptable here.
LATEST_LOG=$(ls -t ${LOG_DIR}/test_*.log 2>/dev/null | head -1)
if [ -z "${LATEST_LOG}" ]; then
    echo "❌ No log file found"
    exit 1
fi

REPORT_FILE="${LOG_DIR}/report_$(date +%Y%m%d_%H%M%S).md"
echo "📊 Generating test report from: ${LATEST_LOG}"
echo " Output: ${REPORT_FILE}"

# count_matches <pattern> <file>
# FIX: grep -c prints "0" AND exits non-zero when nothing matches, so the
# old `$(grep -c ... || echo "0")` captured "0<newline>0" and broke every
# numeric [ -gt ] comparison below. Normalize to exactly one number.
count_matches() {
    local n
    n=$(grep -c "$1" "$2" 2>/dev/null) || true
    echo "${n:-0}"
}

cat > "${REPORT_FILE}" << EOF
# MEV Bot 24-Hour Validation Test Report
## Generated: $(date)
---
## Test Configuration
**Log File:** ${LATEST_LOG}
**Test Duration:** $(stat -c %y "${LATEST_LOG}" 2>/dev/null || stat -f %Sm "${LATEST_LOG}" 2>/dev/null) - $(date)
**Binary:** bin/mev-bot ($(ls -lh bin/mev-bot | awk '{print $5}'))
---
## Performance Statistics
### Block Processing
EOF

# Block stats
TOTAL_BLOCKS=$(count_matches "Processing.*transactions" "${LATEST_LOG}")
echo "- **Total Blocks Processed:** ${TOTAL_BLOCKS}" >> "${REPORT_FILE}"

# DEX transaction stats
TOTAL_DEX=$(count_matches "DEX Transaction detected" "${LATEST_LOG}")
echo "- **DEX Transactions:** ${TOTAL_DEX}" >> "${REPORT_FILE}"

# Calculate rate (awk handles the floating-point division)
if [ "${TOTAL_BLOCKS}" -gt "0" ]; then
    DEX_RATE=$(awk "BEGIN {printf \"%.2f\", (${TOTAL_DEX} / ${TOTAL_BLOCKS}) * 100}")
    echo "- **DEX Transaction Rate:** ${DEX_RATE}%" >> "${REPORT_FILE}"
fi

cat >> "${REPORT_FILE}" << EOF
### Arbitrage Opportunities
EOF

# Opportunity stats. For the piped counts, grep -c prints its own "0" on
# no match, so `|| VAR=0` only fires when the captured value is already 0.
TOTAL_OPPS=$(count_matches "ARBITRAGE OPPORTUNITY" "${LATEST_LOG}")
PROFITABLE=$(grep "ARBITRAGE OPPORTUNITY" "${LATEST_LOG}" 2>/dev/null | grep -c "isExecutable:true") || PROFITABLE=0
REJECTED=$(grep "ARBITRAGE OPPORTUNITY" "${LATEST_LOG}" 2>/dev/null | grep -c "isExecutable:false") || REJECTED=0
echo "- **Total Opportunities Detected:** ${TOTAL_OPPS}" >> "${REPORT_FILE}"
echo "- **Profitable (Executable):** ${PROFITABLE}" >> "${REPORT_FILE}"
echo "- **Rejected (Unprofitable):** ${REJECTED}" >> "${REPORT_FILE}"
if [ "${TOTAL_OPPS}" -gt "0" ]; then
    SUCCESS_RATE=$(awk "BEGIN {printf \"%.2f\", (${PROFITABLE} / ${TOTAL_OPPS}) * 100}")
    echo "- **Success Rate:** ${SUCCESS_RATE}%" >> "${REPORT_FILE}"
fi

cat >> "${REPORT_FILE}" << EOF
### Cache Performance
EOF

# Cache stats (last snapshot only; pipeline status is tail's, safe under set -e)
CACHE_LOGS=$(grep "Reserve cache metrics" "${LATEST_LOG}" 2>/dev/null | tail -1)
if [ -n "${CACHE_LOGS}" ]; then
    echo "\`\`\`" >> "${REPORT_FILE}"
    echo "${CACHE_LOGS}" >> "${REPORT_FILE}"
    echo "\`\`\`" >> "${REPORT_FILE}"
else
    echo "- **Status:** No cache metrics logged (multihop scanner not triggered)" >> "${REPORT_FILE}"
fi

cat >> "${REPORT_FILE}" << EOF
### Error Analysis
EOF

# Error stats
TOTAL_ERRORS=$(count_matches "\[ERROR\]" "${LATEST_LOG}")
TOTAL_WARNS=$(count_matches "\[WARN\]" "${LATEST_LOG}")
echo "- **Total Errors:** ${TOTAL_ERRORS}" >> "${REPORT_FILE}"
echo "- **Total Warnings:** ${TOTAL_WARNS}" >> "${REPORT_FILE}"
if [ "${TOTAL_ERRORS}" -gt "0" ]; then
    echo "" >> "${REPORT_FILE}"
    echo "**Recent Errors:**" >> "${REPORT_FILE}"
    echo "\`\`\`" >> "${REPORT_FILE}"
    grep "\[ERROR\]" "${LATEST_LOG}" 2>/dev/null | tail -10 >> "${REPORT_FILE}"
    echo "\`\`\`" >> "${REPORT_FILE}"
fi

cat >> "${REPORT_FILE}" << EOF
---
## Top Opportunities
EOF

# Extract top opportunities by profit
echo "### Most Profitable Opportunities (Top 10)" >> "${REPORT_FILE}"
echo "" >> "${REPORT_FILE}"
grep "ARBITRAGE OPPORTUNITY" "${LATEST_LOG}" 2>/dev/null | \
    grep -o 'netProfitETH:[^ ]*' | \
    sort -t: -k2 -rn | \
    head -10 | \
    nl | \
    sed 's/^/- /' >> "${REPORT_FILE}" || echo "- No opportunities found" >> "${REPORT_FILE}"

cat >> "${REPORT_FILE}" << EOF
---
## Protocol Distribution
EOF

# Protocol breakdown
echo "### Transactions by Protocol" >> "${REPORT_FILE}"
echo "" >> "${REPORT_FILE}"
grep "protocol:" "${LATEST_LOG}" 2>/dev/null | \
    grep -o 'protocol:[A-Za-z0-9_]*' | \
    sort | uniq -c | sort -rn | \
    awk '{printf "- **%s:** %d transactions\n", $2, $1}' >> "${REPORT_FILE}" || \
    echo "- No protocol data available" >> "${REPORT_FILE}"

cat >> "${REPORT_FILE}" << EOF
---
## System Stability
### Uptime
EOF

# Check if still running
PID_FILE="${LOG_DIR}/mev-bot.pid"
if [ -f "${PID_FILE}" ]; then
    PID=$(cat "${PID_FILE}")
    if ps -p "${PID}" > /dev/null 2>&1; then
        UPTIME=$(ps -o etime= -p "${PID}" | tr -d ' ')
        echo "- **Status:** ✅ Running" >> "${REPORT_FILE}"
        echo "- **Uptime:** ${UPTIME}" >> "${REPORT_FILE}"
    else
        echo "- **Status:** ❌ Not Running" >> "${REPORT_FILE}"
    fi
else
    echo "- **Status:** ⚠️ Unknown (PID file not found)" >> "${REPORT_FILE}"
fi

# FIX: "### Crashes" used to be a shell comment, so this section header was
# never written to the report. Emit it like every other section header.
echo "" >> "${REPORT_FILE}"
echo "### Crashes" >> "${REPORT_FILE}"
CRASHES=$(count_matches "panic\|fatal" "${LATEST_LOG}")
echo "- **Crashes:** ${CRASHES}" >> "${REPORT_FILE}"

cat >> "${REPORT_FILE}" << EOF
---
## Profit Calculation Validation
### Calculation Accuracy
EOF

# Check for overflows (absurd ROI values indicate calculation problems)
OVERFLOWS=$(grep "ROI:" "${LATEST_LOG}" 2>/dev/null | \
    awk -F'ROI:' '{print $2}' | \
    awk '{if ($1 > 1000000) print $0}' | \
    wc -l)
echo "- **Overflow Errors:** ${OVERFLOWS}" >> "${REPORT_FILE}"
if [ "${OVERFLOWS}" -eq "0" ]; then
    echo "- **Status:** ✅ No calculation overflows detected" >> "${REPORT_FILE}"
else
    echo "- **Status:** ⚠️ Calculation issues detected" >> "${REPORT_FILE}"
fi

cat >> "${REPORT_FILE}" << EOF
### Gas Cost Calculations
EOF

# Sample gas costs
echo "\`\`\`" >> "${REPORT_FILE}"
grep "gasCostETH:" "${LATEST_LOG}" 2>/dev/null | head -5 >> "${REPORT_FILE}" || echo "No gas cost data" >> "${REPORT_FILE}"
echo "\`\`\`" >> "${REPORT_FILE}"

cat >> "${REPORT_FILE}" << EOF
---
## Recommendations
EOF

# Generate recommendations
if [ "${PROFITABLE}" -gt "0" ]; then
    echo "✅ **PROFIT READY** - Detected ${PROFITABLE} profitable opportunities" >> "${REPORT_FILE}"
    echo "" >> "${REPORT_FILE}"
    echo "**Next Steps:**" >> "${REPORT_FILE}"
    echo "1. Review profitable opportunities for execution" >> "${REPORT_FILE}"
    echo "2. Implement execution path with flash loans" >> "${REPORT_FILE}"
    echo "3. Test execution on fork/testnet" >> "${REPORT_FILE}"
elif [ "${TOTAL_OPPS}" -gt "0" ]; then
    echo "⏳ **DETECTION WORKING** - Found ${TOTAL_OPPS} opportunities, all rejected as unprofitable" >> "${REPORT_FILE}"
    echo "" >> "${REPORT_FILE}"
    echo "**Next Steps:**" >> "${REPORT_FILE}"
    echo "1. Continue monitoring during high volatility periods" >> "${REPORT_FILE}"
    echo "2. Consider lowering profit thresholds (currently rejecting small profits)" >> "${REPORT_FILE}"
    echo "3. Verify gas cost calculations are accurate" >> "${REPORT_FILE}"
else
    echo "⚠️ **NO OPPORTUNITIES** - No arbitrage opportunities detected" >> "${REPORT_FILE}"
    echo "" >> "${REPORT_FILE}"
    echo "**Possible Reasons:**" >> "${REPORT_FILE}"
    echo "1. Low market volatility during test period" >> "${REPORT_FILE}"
    echo "2. Efficient markets (arbitrage opportunities filled quickly)" >> "${REPORT_FILE}"
    echo "3. Detection parameters need tuning" >> "${REPORT_FILE}"
fi

if [ "${TOTAL_ERRORS}" -gt "50" ]; then
    echo "" >> "${REPORT_FILE}"
    echo "⚠️ **HIGH ERROR RATE** - ${TOTAL_ERRORS} errors logged" >> "${REPORT_FILE}"
    echo "Review error logs and fix issues before production deployment" >> "${REPORT_FILE}"
fi

if [ -z "${CACHE_LOGS}" ]; then
    echo "" >> "${REPORT_FILE}"
    echo "📊 **CACHE NOT VALIDATED** - Multihop scanner not triggered during test" >> "${REPORT_FILE}"
    echo "Cache performance metrics unavailable - consider extending test duration" >> "${REPORT_FILE}"
fi

cat >> "${REPORT_FILE}" << EOF
---
## Raw Data
**Log File:** \`${LATEST_LOG}\`
**Report Generated:** $(date)
---
*This report was automatically generated by the MEV bot test harness*
EOF

echo "✅ Report generated: ${REPORT_FILE}"
echo ""
echo "📊 Summary:"
echo " Blocks: ${TOTAL_BLOCKS}"
echo " DEX Txs: ${TOTAL_DEX}"
echo " Opportunities: ${TOTAL_OPPS} (${PROFITABLE} profitable)"
echo " Errors: ${TOTAL_ERRORS}"
echo ""
cat "${REPORT_FILE}"

View File

@@ -1,716 +0,0 @@
#!/usr/bin/env bash
# Enhanced Git Workflow for Local Self-Contained Development
# Provides full branching, forking, PR simulation, and merge workflows
set -euo pipefail
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="$(cd "$SCRIPT_DIR/.." && pwd)"
cd "$PROJECT_ROOT"
# Configuration
DEFAULT_MAIN_BRANCH="master"
DEFAULT_DEV_BRANCH="develop"
LOCAL_FORK_PREFIX="fork"
BACKUP_DIR="$PROJECT_ROOT/.git-backups"
# Colors for output
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
NC='\033[0m' # No Color
# Logging helpers. FIX: warn/error are diagnostics and now go to stderr so
# they cannot pollute stdout captured by callers (e.g. `out=$(...)`).
log() {
    echo -e "${GREEN}[GIT-WORKFLOW]${NC} $*"
}
warn() {
    echo -e "${YELLOW}[WARNING]${NC} $*" >&2
}
error() {
    echo -e "${RED}[ERROR]${NC} $*" >&2
}
info() {
    echo -e "${BLUE}[INFO]${NC} $*"
}
# Print the command reference and exit status 0 at the caller's discretion.
# The heredoc delimiter is UNQUOTED on purpose so $0 expands to the name
# the script was invoked with.
usage() {
cat << EOF
Enhanced Git Workflow for MEV Bot - Self-Contained Development Stack
USAGE: $0 <command> [args]
BRANCH OPERATIONS:
feature <name> - Create feature branch from develop
fix <name> - Create hotfix branch from master
release <version> - Create release branch
branch-list - List all branches with status
branch-clean - Clean merged branches
FORK & PR SIMULATION:
fork-create <name> - Create local fork simulation
fork-list - List all local forks
pr-create <base> - Create PR simulation (local merge preparation)
pr-review - Review changes for PR
pr-merge <branch> - Merge PR with validation
MERGE & REBASE:
merge <branch> - Smart merge with CI validation
rebase <base> - Interactive rebase with conflict resolution
sync - Sync current branch with upstream
CI INTEGRATION:
ci-branch - Run CI on current branch
ci-pr <branch> - Run CI for PR validation
pre-commit - Manual pre-commit validation
pre-push - Manual pre-push validation
BACKUP & RESTORE:
backup - Create full git backup
restore <backup> - Restore from backup
checkpoint <name> - Create named checkpoint
ADVANCED WORKFLOWS:
flow-init - Initialize gitflow-style workflows
hotfix <name> - Complete hotfix workflow
release-finish <version> - Complete release workflow
conflict-resolve - Interactive conflict resolution
EXAMPLES:
$0 feature add-math-optimizations
$0 fork-create experimental-trading
$0 pr-create develop
$0 merge feature/new-arbitrage-engine
$0 ci-pr feature/fix-parser-bug
EOF
}
# Abort immediately when invoked outside a git repository.
check_git_repo() {
    if git rev-parse --git-dir >/dev/null 2>&1; then
        return 0
    fi
    error "Not in a git repository"
    exit 1
}

# Print the name of the branch that is currently checked out.
get_current_branch() {
    git rev-parse --abbrev-ref HEAD
}

# Succeed (exit 0) iff a local branch named $1 exists.
branch_exists() {
    git show-ref --verify --quiet "refs/heads/$1"
}
# Run CI pipeline for branch validation
#
# Temporarily checks out <branch> (if not already current), runs the CI
# level matching its naming convention, then ALWAYS switches back.
# FIX: a failing `make` under `set -e` used to abort the script mid-way,
# stranding the user on the temporarily checked-out branch; the status is
# now captured, the original branch restored, and the status returned.
run_ci_for_branch() {
    local branch="$1"
    log "Running CI validation for branch: $branch"
    # Switch to branch temporarily if not current
    local current_branch
    current_branch=$(get_current_branch)
    if [[ "$current_branch" != "$branch" ]]; then
        git checkout "$branch"
    fi
    # Run appropriate CI level based on branch type, capturing the status
    # so we can restore the original branch before propagating failure.
    local ci_status=0
    if [[ "$branch" =~ ^(feature|fix)/ ]]; then
        log "Running development CI for $branch"
        make ci-dev || ci_status=$?
    elif [[ "$branch" =~ ^release/ ]] || [[ "$branch" == "$DEFAULT_MAIN_BRANCH" ]]; then
        log "Running full CI for $branch"
        make ci-full || ci_status=$?
    else
        log "Running quick CI for $branch"
        make ci-quick || ci_status=$?
    fi
    # Switch back if we changed branches
    if [[ "$current_branch" != "$branch" ]]; then
        git checkout "$current_branch"
    fi
    return "$ci_status"
}
# Create feature branch
#
# Starts feature/<name> off the develop branch (creating develop from
# master first if it does not exist yet).
create_feature_branch() {
    # FIX: use ${1:-} so a missing argument reaches the friendly usage
    # check below instead of aborting with "unbound variable" under set -u.
    local feature_name="${1:-}"
    if [[ -z "$feature_name" ]]; then
        error "Feature name required"
        echo "Usage: $0 feature <name>"
        exit 1
    fi
    local branch_name="feature/$feature_name"
    if branch_exists "$branch_name"; then
        error "Branch $branch_name already exists"
        exit 1
    fi
    log "Creating feature branch: $branch_name"
    # Ensure we're on develop and it's up to date (pull is best-effort:
    # a purely local repository has no origin).
    if branch_exists "$DEFAULT_DEV_BRANCH"; then
        git checkout "$DEFAULT_DEV_BRANCH"
        git pull origin "$DEFAULT_DEV_BRANCH" 2>/dev/null || true
    else
        warn "Develop branch doesn't exist, creating from master"
        git checkout "$DEFAULT_MAIN_BRANCH"
        git checkout -b "$DEFAULT_DEV_BRANCH"
    fi
    # Create and switch to feature branch
    git checkout -b "$branch_name"
    log "✅ Feature branch '$branch_name' created and active"
    log "💡 Run tests with: make ci-dev"
}
# Create hotfix branch
#
# Starts fix/<name> directly off master for urgent fixes.
create_fix_branch() {
    local fix_name="${1:-}"   # ${1:-}: tolerate a missing argument under set -u
    if [[ -z "$fix_name" ]]; then
        error "Fix name required"
        echo "Usage: $0 fix <name>"
        exit 1
    fi
    local branch_name="fix/$fix_name"
    if branch_exists "$branch_name"; then
        error "Branch $branch_name already exists"
        exit 1
    fi
    log "Creating hotfix branch: $branch_name"
    # Create from master (pull is best-effort for local-only repos)
    git checkout "$DEFAULT_MAIN_BRANCH"
    git pull origin "$DEFAULT_MAIN_BRANCH" 2>/dev/null || true
    git checkout -b "$branch_name"
    log "✅ Hotfix branch '$branch_name' created and active"
    log "💡 Run tests with: make ci-full"
}
# Create local fork simulation
#
# Simulates a separate fork as a fork/<name> branch.
# NOTE(review): the `--orphan` + `reset --hard master` sequence is kept
# as-is; verify it really yields the intended history, since resetting to
# a commit normally re-attaches that commit's content.
create_fork() {
    # FIX: ${1:-} keeps set -u from aborting before the usage check.
    local fork_name="${1:-}"
    if [[ -z "$fork_name" ]]; then
        error "Fork name required"
        echo "Usage: $0 fork-create <name>"
        exit 1
    fi
    local fork_branch="${LOCAL_FORK_PREFIX}/$fork_name"
    if branch_exists "$fork_branch"; then
        error "Fork $fork_branch already exists"
        exit 1
    fi
    log "Creating local fork: $fork_branch"
    # Create orphan branch for fork simulation
    git checkout --orphan "$fork_branch"
    git reset --hard "$DEFAULT_MAIN_BRANCH"
    # Create initial commit for fork
    git commit --allow-empty -m "chore: initialize fork '$fork_name'"
    log "✅ Local fork '$fork_name' created"
    log "💡 This simulates a separate fork for experimental development"
}
# List all branches with status
#
# Prints local branches, remote branches, fork/* branches, and the current
# branch's dirty / ahead / behind status. Read-only: no repo mutations.
list_branches() {
    log "Repository branch overview:"
    echo ""
    echo "📋 Local Branches:"
    git branch -v
    echo ""
    echo "🔀 Remote Branches:"
    git branch -rv 2>/dev/null || echo "No remote branches"
    echo ""
    echo "🍴 Local Forks:"
    git branch | grep "^ $LOCAL_FORK_PREFIX/" || echo "No local forks"
    echo ""
    echo "📊 Branch Status:"
    local current_branch
    current_branch=$(get_current_branch)
    echo "Current: $current_branch"
    # Check if current branch has uncommitted changes (worktree or index)
    if ! git diff --quiet || ! git diff --cached --quiet; then
        warn "Current branch has uncommitted changes"
    fi
    # Check if current branch is ahead/behind its upstream, when one exists
    local upstream
    upstream=$(git rev-parse --abbrev-ref @{u} 2>/dev/null || echo "")
    if [[ -n "$upstream" ]]; then
        local ahead behind
        ahead=$(git rev-list --count "$upstream..HEAD" 2>/dev/null || echo "0")
        behind=$(git rev-list --count "HEAD..$upstream" 2>/dev/null || echo "0")
        if [[ "$ahead" -gt 0 ]]; then
            info "Current branch is $ahead commits ahead of $upstream"
        fi
        if [[ "$behind" -gt 0 ]]; then
            warn "Current branch is $behind commits behind $upstream"
        fi
    fi
}
# Create PR simulation (prepare for merge)
#
# Validates the current branch with CI, then prints a PR-style summary
# (changed files, commits, conflict probe) against the target branch.
create_pr() {
    # FIX: ${1:-} so a missing argument hits the usage message below
    # instead of an "unbound variable" abort under set -u.
    local target_branch="${1:-}"
    local current_branch
    current_branch=$(get_current_branch)
    if [[ -z "$target_branch" ]]; then
        error "Target branch required"
        echo "Usage: $0 pr-create <target-branch>"
        exit 1
    fi
    if ! branch_exists "$target_branch"; then
        error "Target branch '$target_branch' does not exist"
        exit 1
    fi
    log "Creating PR simulation: $current_branch$target_branch"
    # Run CI validation for PR
    run_ci_for_branch "$current_branch"
    # Show diff summary
    echo ""
    log "PR Summary:"
    echo "From: $current_branch"
    echo "To: $target_branch"
    echo ""
    echo "📝 Changed files:"
    git diff --name-status "$target_branch..$current_branch"
    echo ""
    echo "📊 Commit summary:"
    git log --oneline "$target_branch..$current_branch"
    echo ""
    echo "🔍 Conflict check:"
    # merge-tree dry-runs the merge; conflict markers in its output mean
    # the real merge would conflict.
    if git merge-tree "$(git merge-base "$target_branch" "$current_branch")" "$target_branch" "$current_branch" | grep -q "<<<<<<< "; then
        warn "Potential merge conflicts detected"
        echo "Run: $0 conflict-resolve"
    else
        log "No merge conflicts detected"
    fi
    echo ""
    log "✅ PR simulation ready"
    log "Next steps:"
    log " - Review changes: $0 pr-review"
    log " - Merge PR: $0 pr-merge $target_branch"
}
# Review PR changes
#
# Shows a diffstat (and optionally the full diff) of the current branch
# against the develop branch, then runs quick quality checks.
review_pr() {
    local current_branch
    current_branch=$(get_current_branch)
    log "Reviewing changes for: $current_branch"
    # Show detailed diff.
    # FIX: use the configured $DEFAULT_DEV_BRANCH instead of a hard-coded
    # "develop" so this function honours the script-wide configuration.
    echo ""
    echo "📝 Detailed changes:"
    git diff "$DEFAULT_DEV_BRANCH".."$current_branch" --stat
    echo ""
    read -p "View full diff? (y/N): " -n 1 -r
    echo
    if [[ $REPLY =~ ^[Yy]$ ]]; then
        git diff "$DEFAULT_DEV_BRANCH".."$current_branch"
    fi
    echo ""
    log "Code quality checks:"
    # Run linting
    if make ci-quick; then
        log "✅ Code quality checks passed"
    else
        error "❌ Code quality checks failed"
        echo "Fix issues before merging"
        exit 1
    fi
}
# Merge PR with validation
#
# Runs final CI, snapshots the repo, then performs a --no-ff merge of the
# current branch into the target branch, followed by post-merge CI and an
# optional delete of the merged branch.
merge_pr() {
    # FIX: ${1:-} keeps set -u from aborting before the usage check.
    local target_branch="${1:-}"
    local current_branch
    current_branch=$(get_current_branch)
    if [[ -z "$target_branch" ]]; then
        error "Target branch required"
        echo "Usage: $0 pr-merge <target-branch>"
        exit 1
    fi
    log "Merging PR: $current_branch$target_branch"
    # Final CI validation
    log "Running final CI validation..."
    run_ci_for_branch "$current_branch"
    # Create backup before merge
    local backup_name="before-merge-$(date +%Y%m%d-%H%M%S)"
    create_backup "$backup_name"
    # Switch to target branch and merge
    git checkout "$target_branch"
    # Update target branch (best-effort: local-only repos have no origin)
    git pull origin "$target_branch" 2>/dev/null || true
    # Perform merge
    if git merge --no-ff "$current_branch" -m "Merge branch '$current_branch' into $target_branch
🤖 Generated with [Claude Code](https://claude.com/claude-code)
Co-Authored-By: Claude <noreply@anthropic.com>"; then
        log "✅ Merge completed successfully"
        # Run post-merge CI
        log "Running post-merge validation..."
        run_ci_for_branch "$target_branch"
        # Optional: Delete merged branch
        read -p "Delete merged branch '$current_branch'? (y/N): " -n 1 -r
        echo
        if [[ $REPLY =~ ^[Yy]$ ]]; then
            git branch -d "$current_branch"
            log "Deleted merged branch: $current_branch"
        fi
    else
        error "Merge failed - conflicts need resolution"
        echo "Run: $0 conflict-resolve"
        exit 1
    fi
}
# Smart merge with CI validation
#
# Validates both branches with CI, snapshots the repo, then merges the
# source branch into the current branch and re-runs CI on the result.
smart_merge() {
    # FIX: ${1:-} so a missing argument reaches the usage check instead of
    # tripping set -u.
    local source_branch="${1:-}"
    local current_branch
    current_branch=$(get_current_branch)
    if [[ -z "$source_branch" ]]; then
        error "Source branch required"
        echo "Usage: $0 merge <source-branch>"
        exit 1
    fi
    log "Smart merge: $source_branch$current_branch"
    # Validate both branches
    run_ci_for_branch "$current_branch"
    run_ci_for_branch "$source_branch"
    # Create backup
    local backup_name="before-smart-merge-$(date +%Y%m%d-%H%M%S)"
    create_backup "$backup_name"
    # Perform merge
    if git merge "$source_branch"; then
        log "✅ Smart merge completed"
        run_ci_for_branch "$current_branch"
    else
        error "Merge conflicts detected"
        echo "Run: $0 conflict-resolve"
        exit 1
    fi
}
# Interactive conflict resolution
#
# Guides the user through an in-progress merge: lists conflicted files,
# offers mergetool / terminal view / abort, and commits once the user
# confirms everything is resolved.
resolve_conflicts() {
    log "Interactive conflict resolution"
    # Check if we're in a merge (MERGE_HEAD only exists mid-merge)
    if [[ ! -f .git/MERGE_HEAD ]]; then
        error "No merge in progress"
        exit 1
    fi
    # Show conflicted files (diff-filter=U selects unmerged paths)
    echo ""
    echo "📝 Conflicted files:"
    git diff --name-only --diff-filter=U
    echo ""
    echo "🔧 Resolution options:"
    echo "1) Open merge tool (if configured)"
    echo "2) Show conflicts in terminal"
    echo "3) Abort merge"
    read -p "Choose option (1-3): " -n 1 -r
    echo
    case $REPLY in
        1)
            if git config merge.tool >/dev/null; then
                git mergetool
            else
                warn "No merge tool configured"
                echo "Configure with: git config merge.tool <tool>"
            fi
            ;;
        2)
            echo ""
            echo "📝 Conflicts:"
            git diff --diff-filter=U
            ;;
        3)
            git merge --abort
            log "Merge aborted"
            return
            ;;
        *)
            error "Invalid option"
            return
            ;;
    esac
    # After resolution
    echo ""
    read -p "Have conflicts been resolved? (y/N): " -n 1 -r
    echo
    if [[ $REPLY =~ ^[Yy]$ ]]; then
        # NOTE(review): `git add .` stages EVERYTHING in the worktree, not
        # just conflict resolutions — verify that is intended.
        git add .
        git commit --no-edit
        log "✅ Merge completed"
        # Run CI after merge
        run_ci_for_branch "$(get_current_branch)"
    fi
}
# Create backup
#
# Bundles all refs into .git-backups/<name>.bundle and records the current
# branch name alongside it.
# NOTE(review): uncommitted changes are stashed as part of the backup and
# are NOT re-applied afterwards — the working tree is left clean as a side
# effect; confirm callers expect that.
create_backup() {
    # Default name is timestamp-based when no argument is given.
    local backup_name="${1:-backup-$(date +%Y%m%d-%H%M%S)}"
    mkdir -p "$BACKUP_DIR"
    log "Creating backup: $backup_name"
    # Create git bundle containing every ref
    git bundle create "$BACKUP_DIR/$backup_name.bundle" --all
    # Create stash backup if there are changes (worktree or index)
    if ! git diff --quiet || ! git diff --cached --quiet; then
        git stash push -m "Backup stash: $backup_name"
        echo "backup-stash-$backup_name" > "$BACKUP_DIR/$backup_name.stash"
    fi
    # Save current branch info so restore can return to it
    echo "$(get_current_branch)" > "$BACKUP_DIR/$backup_name.branch"
    log "✅ Backup created: $backup_name"
    log "Location: $BACKUP_DIR/"
}
# Restore from backup
#
# Fetches refs back from a named bundle in .git-backups/ (after an
# interactive confirmation) and checks out the branch recorded with it.
restore_backup() {
    # FIX: ${1:-} so a missing argument falls through to the help text
    # below instead of an "unbound variable" abort under set -u.
    local backup_name="${1:-}"
    if [[ -z "$backup_name" ]]; then
        error "Backup name required"
        echo "Available backups:"
        ls -1 "$BACKUP_DIR"/*.bundle 2>/dev/null | sed 's/.*\///' | sed 's/\.bundle$//' || echo "No backups found"
        exit 1
    fi
    local bundle_file="$BACKUP_DIR/$backup_name.bundle"
    if [[ ! -f "$bundle_file" ]]; then
        error "Backup not found: $backup_name"
        exit 1
    fi
    warn "This will restore git state from backup: $backup_name"
    read -p "Continue? (y/N): " -n 1 -r
    echo
    if [[ ! $REPLY =~ ^[Yy]$ ]]; then
        echo "Restore cancelled"
        exit 0
    fi
    log "Restoring from backup: $backup_name"
    # Restore from bundle
    git fetch "$bundle_file"
    # Restore branch if saved
    if [[ -f "$BACKUP_DIR/$backup_name.branch" ]]; then
        local saved_branch
        saved_branch=$(cat "$BACKUP_DIR/$backup_name.branch")
        if branch_exists "$saved_branch"; then
            git checkout "$saved_branch"
        fi
    fi
    log "✅ Backup restored: $backup_name"
}
# Run CI for current branch
run_ci_branch() {
    # Delegate straight to the branch-aware CI runner.
    run_ci_for_branch "$(get_current_branch)"
}
# Run CI for PR validation
#
# Runs branch CI plus PR-specific checks (conventional-commit format for
# every commit between develop and the branch). Defaults to the current
# branch when no argument is given.
run_ci_pr() {
    # FIX: ${1:-} — the old `local branch="$1"` aborted under set -u when
    # `ci-pr` was invoked with no argument, defeating the default below.
    local branch="${1:-}"
    if [[ -z "$branch" ]]; then
        branch=$(get_current_branch)
    fi
    log "Running PR validation CI for: $branch"
    run_ci_for_branch "$branch"
    # Additional PR-specific checks
    log "Running PR-specific validations..."
    # Check commit messages.
    # FIX: compare against the configured $DEFAULT_DEV_BRANCH instead of a
    # hard-coded "develop". The while loop runs in a pipeline subshell,
    # which is fine here: it only prints warnings.
    log "Checking commit message format..."
    git log --oneline "$DEFAULT_DEV_BRANCH..$branch" | while read -r line; do
        if [[ ! "$line" =~ ^[a-f0-9]+\ (feat|fix|chore|docs|style|refactor|perf|test)(\(.+\))?:\ .+ ]]; then
            warn "Commit message format: $line"
            warn "Should follow: type(scope): description"
        fi
    done
    log "✅ PR validation completed"
}
# Clean merged branches
#
# Deletes local branches already merged into master, after listing them
# and asking for confirmation. master, develop and the current branch are
# always excluded from deletion.
clean_branches() {
    log "Cleaning merged branches..."
    # Get merged branches (excluding master, develop, and current);
    # sed strips git's leading "  " / "* " markers.
    local current_branch
    current_branch=$(get_current_branch)
    local merged_branches
    merged_branches=$(git branch --merged "$DEFAULT_MAIN_BRANCH" | grep -v "$DEFAULT_MAIN_BRANCH" | grep -v "$DEFAULT_DEV_BRANCH" | grep -v "^* $current_branch" | grep -v "^ $current_branch" | sed 's/^[ *]*//')
    if [[ -z "$merged_branches" ]]; then
        log "No merged branches to clean"
        return
    fi
    echo "📝 Merged branches to delete:"
    echo "$merged_branches"
    echo ""
    read -p "Delete these branches? (y/N): " -n 1 -r
    echo
    if [[ $REPLY =~ ^[Yy]$ ]]; then
        # The pipeline subshell is fine here: `git branch -d` mutates the
        # repository, not shell state.
        echo "$merged_branches" | while read -r branch; do
            if [[ -n "$branch" ]]; then
                git branch -d "$branch"
                log "Deleted: $branch"
            fi
        done
    fi
}
# Main command dispatcher
#
# Maps the first CLI argument to a workflow function; remaining arguments
# are forwarded verbatim via "$@". Unknown commands print usage and exit
# non-zero. With no arguments at all, prints usage and exits 0.
main() {
    check_git_repo
    if [[ $# -eq 0 ]]; then
        usage
        exit 0
    fi
    local command="$1"
    shift
    case "$command" in
        feature)
            create_feature_branch "$@"
            ;;
        fix)
            create_fix_branch "$@"
            ;;
        fork-create)
            create_fork "$@"
            ;;
        fork-list)
            git branch | grep "^ $LOCAL_FORK_PREFIX/" || echo "No local forks"
            ;;
        pr-create)
            create_pr "$@"
            ;;
        pr-review)
            review_pr
            ;;
        pr-merge)
            merge_pr "$@"
            ;;
        merge)
            smart_merge "$@"
            ;;
        conflict-resolve)
            resolve_conflicts
            ;;
        branch-list)
            list_branches
            ;;
        branch-clean)
            clean_branches
            ;;
        backup)
            create_backup "$@"
            ;;
        restore)
            restore_backup "$@"
            ;;
        ci-branch)
            run_ci_branch
            ;;
        ci-pr)
            run_ci_pr "$@"
            ;;
        pre-commit)
            make ci-precommit
            ;;
        pre-push)
            make ci-dev
            ;;
        *)
            error "Unknown command: $command"
            usage
            exit 1
            ;;
    esac
}
# Run main function
main "$@"

View File

@@ -1,293 +0,0 @@
#!/usr/bin/env bash
# Git Hooks Setup for MEV Bot CI/CD Integration
# Creates git hooks that integrate with the CI/CD pipeline
#
# Each hook body below is written via a QUOTED heredoc ('EOF'), so the
# hook scripts land on disk literally, with no expansion at install time.
set -euo pipefail
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="$(cd "$SCRIPT_DIR/.." && pwd)"
HOOKS_DIR="$PROJECT_ROOT/.git/hooks"
cd "$PROJECT_ROOT"
log() {
echo "[HOOKS-SETUP] $*"
}
error() {
echo "[ERROR] $*" >&2
}
# Check if we're in a git repository
if ! git rev-parse --git-dir >/dev/null 2>&1; then
error "Not in a git repository"
exit 1
fi
log "Setting up Git hooks integration with CI/CD pipeline..."
# Create hooks directory if it doesn't exist
mkdir -p "$HOOKS_DIR"
# Pre-commit hook: fast validation (build, short tests, gofmt, vet)
cat > "$HOOKS_DIR/pre-commit" << 'EOF'
#!/usr/bin/env bash
# Pre-commit hook - Fast validation before commit
set -euo pipefail
echo "🔍 Running pre-commit validation..."
# Check if we have staged files
if git diff --cached --quiet; then
echo "No staged changes to validate"
exit 0
fi
# Run fast CI validation
if command -v make >/dev/null 2>&1; then
echo "Running pre-commit CI pipeline..."
make ci-precommit
else
echo "Running basic checks..."
# Basic Go checks
if command -v go >/dev/null 2>&1; then
echo "Building..."
go build ./cmd/mev-bot
echo "Running tests..."
go test ./pkg/... -short
echo "Checking format..."
if ! gofmt -l . | grep -q .; then
echo "✅ Code formatting is clean"
else
echo "❌ Code needs formatting:"
gofmt -l .
echo "Run: gofmt -w ."
exit 1
fi
echo "Running vet..."
go vet ./...
fi
fi
echo "✅ Pre-commit validation passed"
EOF
# Pre-push hook: branch-aware CI level selection before pushing
cat > "$HOOKS_DIR/pre-push" << 'EOF'
#!/usr/bin/env bash
# Pre-push hook - Comprehensive validation before push
set -euo pipefail
echo "🚀 Running pre-push validation..."
# Get the branch being pushed
branch=$(git rev-parse --abbrev-ref HEAD)
echo "Validating branch: $branch"
# Run appropriate CI based on branch type
if command -v make >/dev/null 2>&1; then
if [[ "$branch" =~ ^(feature|fix)/ ]]; then
echo "Running development CI for feature/fix branch..."
make ci-dev
elif [[ "$branch" =~ ^release/ ]] || [[ "$branch" == "master" ]] || [[ "$branch" == "main" ]]; then
echo "Running full CI for release/main branch..."
make ci-full
else
echo "Running quick CI for other branches..."
make ci-quick
fi
else
echo "Running basic validation..."
if command -v go >/dev/null 2>&1; then
echo "Building..."
go build ./cmd/mev-bot
echo "Running full test suite..."
go test ./...
echo "Running static analysis..."
go vet ./...
fi
fi
echo "✅ Pre-push validation passed"
EOF
# Post-commit hook: optional smoke test for feat/fix/perf commits
cat > "$HOOKS_DIR/post-commit" << 'EOF'
#!/usr/bin/env bash
# Post-commit hook - Optional post-commit actions
set -euo pipefail
# Get commit info
commit_hash=$(git rev-parse HEAD)
commit_msg=$(git log -1 --pretty=%B)
branch=$(git rev-parse --abbrev-ref HEAD)
echo "📝 Post-commit: $commit_hash on $branch"
# Optional: Run quick smoke test after commit
if [[ "$commit_msg" =~ ^(feat|fix|perf): ]]; then
echo "Running smoke test for significant changes..."
if command -v make >/dev/null 2>&1; then
timeout 30 make ci-precommit || echo "Smoke test completed"
fi
fi
EOF
# Prepare-commit-msg hook: seeds empty messages with a conventional-commit
# template (nested TEMPLATE heredoc is also literal)
cat > "$HOOKS_DIR/prepare-commit-msg" << 'EOF'
#!/usr/bin/env bash
# Prepare commit message hook - Add conventional commit format help
set -euo pipefail
commit_file="$1"
commit_source="${2:-}"
# Only add template for regular commits (not merges, amendments, etc.)
if [[ "$commit_source" == "" ]] || [[ "$commit_source" == "template" ]]; then
# Get the branch name
branch=$(git rev-parse --abbrev-ref HEAD 2>/dev/null || echo "main")
# Add conventional commit template if message is empty
if [[ ! -s "$commit_file" ]]; then
cat >> "$commit_file" << 'TEMPLATE'
# Conventional Commits Format:
# type(scope): description
#
# Types: feat, fix, docs, style, refactor, perf, test, chore
# Example: feat(parser): add support for multicall transactions
#
# Body (optional):
# - Explain what and why vs. how
# - Include motivation for the change
# - Contrast with previous behavior
#
# Footer (optional):
# - Breaking changes: BREAKING CHANGE: <description>
# - Issues: Closes #123, Fixes #456
TEMPLATE
fi
fi
EOF
# Post-merge hook: re-run CI and refresh deps when go.mod/go.sum changed
cat > "$HOOKS_DIR/post-merge" << 'EOF'
#!/usr/bin/env bash
# Post-merge hook - Actions after merge
set -euo pipefail
echo "🔀 Post-merge validation..."
# Run CI after merge to ensure integration is clean
if command -v make >/dev/null 2>&1; then
echo "Running post-merge CI validation..."
make ci-dev
else
echo "Running basic post-merge checks..."
if command -v go >/dev/null 2>&1; then
go build ./cmd/mev-bot
go test ./pkg/... -short
fi
fi
# Check if dependencies changed
if git diff HEAD@{1} --name-only | grep -q "go.mod\|go.sum"; then
echo "📦 Dependencies changed, updating..."
go mod tidy
go mod verify
fi
echo "✅ Post-merge validation completed"
EOF
# Pre-rebase hook: confirmation prompt before rebasing public branches
cat > "$HOOKS_DIR/pre-rebase" << 'EOF'
#!/usr/bin/env bash
# Pre-rebase hook - Validation before rebase
set -euo pipefail
upstream="$1"
branch="${2:-}"
echo "🔄 Pre-rebase validation..."
echo "Rebasing: ${branch:-$(git rev-parse --abbrev-ref HEAD)} onto $upstream"
# Warn about rebasing public branches
current_branch=${branch:-$(git rev-parse --abbrev-ref HEAD)}
if [[ "$current_branch" == "master" ]] || [[ "$current_branch" == "main" ]] || [[ "$current_branch" == "develop" ]]; then
echo "⚠️ WARNING: Rebasing public branch '$current_branch'"
echo "This may rewrite history. Continue? (y/N)"
read -r response
if [[ ! "$response" =~ ^[Yy]$ ]]; then
echo "Rebase cancelled"
exit 1
fi
fi
# Run quick validation
if command -v make >/dev/null 2>&1; then
make ci-precommit
fi
echo "✅ Pre-rebase validation passed"
EOF
# Make all hooks executable
# NOTE(review): the glob also touches any *.sample files left in
# .git/hooks — harmless, but confirm that is intended.
chmod +x "$HOOKS_DIR"/*
log "✅ Git hooks installed:"
log " - pre-commit: Fast validation (build, test, format)"
log " - pre-push: Comprehensive CI validation"
log " - post-commit: Optional smoke tests"
log " - prepare-commit-msg: Conventional commit template"
log " - post-merge: Integration validation"
log " - pre-rebase: Safety checks for public branches"
echo ""
log "🎯 Hook Integration Features:"
log " - Automatic CI pipeline integration"
log " - Branch-specific validation levels"
log " - Conventional commit message formatting"
log " - Dependency change detection"
log " - Safety checks for public branch operations"
echo ""
log "💡 To disable hooks temporarily:"
log " git commit --no-verify"
log " git push --no-verify"
echo ""
log "🔧 To customize hooks, edit files in: .git/hooks/"
# Test the hooks: verify the two critical hooks are in place and executable
echo ""
log "Testing hook installation..."
if [[ -x "$HOOKS_DIR/pre-commit" ]]; then
log "✅ Pre-commit hook installed and executable"
else
error "❌ Pre-commit hook installation failed"
fi
if [[ -x "$HOOKS_DIR/pre-push" ]]; then
log "✅ Pre-push hook installed and executable"
else
error "❌ Pre-push hook installation failed"
fi
log "🎉 Git hooks setup completed successfully!"
log "Next commit will use the new validation pipeline."

View File

@@ -1,96 +0,0 @@
#!/bin/bash
# Git post-merge hook
# This hook runs after 'git pull' or 'git merge'
# Automatically rebuilds and restarts the MEV bot
# Strict mode: abort on errors, unset variables, and failed pipeline stages.
set -euo pipefail

HOOK_DIR=$(dirname "$0")
PROJECT_DIR=$(cd "$HOOK_DIR/../.." && pwd)

echo "========================================="
echo "Post-Merge Hook: Auto-Rebuild & Restart"
echo "========================================="
echo "Project: $PROJECT_DIR"
echo ""

# Color codes
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
RED='\033[0;31m'
NC='\033[0m' # No Color

# Optional notification endpoint; default to empty so `set -u` stays safe
# when the environment does not define it.
WEBHOOK_URL="${WEBHOOK_URL:-}"

# Change to project directory
cd "$PROJECT_DIR"

# Check that Docker AND the compose plugin are available (the script uses
# `docker compose`, which is a separate plugin from the docker binary).
if ! command -v docker &> /dev/null; then
    echo -e "${RED}Error: Docker not found${NC}"
    exit 1
fi
if ! docker compose version &> /dev/null; then
    echo -e "${RED}Error: Docker Compose plugin not found${NC}"
    exit 1
fi

# Log the update
LOG_FILE="$PROJECT_DIR/logs/auto-update.log"
mkdir -p "$PROJECT_DIR/logs"
echo "[$(date)] Post-merge hook triggered" >> "$LOG_FILE"

# Get the latest commit info
LATEST_COMMIT=$(git log -1 --pretty=format:"%h - %s")
echo -e "${YELLOW}Latest commit: $LATEST_COMMIT${NC}"
echo "[$(date)] Latest commit: $LATEST_COMMIT" >> "$LOG_FILE"

# Rebuild the Docker image
echo ""
echo -e "${YELLOW}Rebuilding Docker image...${NC}"
if docker compose build --no-cache >> "$LOG_FILE" 2>&1; then
    echo -e "${GREEN}✓ Docker image rebuilt${NC}"
    echo "[$(date)] Docker image rebuilt successfully" >> "$LOG_FILE"
else
    echo -e "${RED}✗ Docker build failed - check logs${NC}"
    echo "[$(date)] ERROR: Docker build failed" >> "$LOG_FILE"
    exit 1
fi

# Restart the container
echo ""
echo -e "${YELLOW}Restarting container...${NC}"
if docker compose up -d >> "$LOG_FILE" 2>&1; then
    echo -e "${GREEN}✓ Container restarted${NC}"
    echo "[$(date)] Container restarted successfully" >> "$LOG_FILE"
else
    echo -e "${RED}✗ Container restart failed - check logs${NC}"
    echo "[$(date)] ERROR: Container restart failed" >> "$LOG_FILE"
    exit 1
fi

# Wait a few seconds for the container to start
sleep 5

# Check container status
echo ""
echo -e "${YELLOW}Container status:${NC}"
docker compose ps

# Show recent logs
echo ""
echo -e "${YELLOW}Recent logs:${NC}"
docker compose logs --tail=20 mev-bot

echo ""
echo -e "${GREEN}========================================="
echo "Auto-update complete!"
echo "=========================================${NC}"
echo ""
echo "View full logs: tail -f $LOG_FILE"
echo "View container logs: docker compose logs -f mev-bot"
echo ""

# Send notification if curl is available (optional)
if command -v curl &> /dev/null && [ -n "$WEBHOOK_URL" ]; then
    curl -X POST "$WEBHOOK_URL" \
        -H "Content-Type: application/json" \
        -d "{\"text\":\"MEV Bot updated to: $LATEST_COMMIT\"}" \
        >> "$LOG_FILE" 2>&1 || true
fi

exit 0

View File

@@ -1,523 +0,0 @@
#!/usr/bin/env bash
# Local Git Server Simulation for Team Workflows
# Creates a local bare repository to simulate a remote server for testing git workflows
set -euo pipefail

# Resolve the script's own location so the tool works from any cwd.
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="$(cd "$SCRIPT_DIR/.." && pwd)"
# All simulated-server state (bare repo + developer clones) lives here.
LOCAL_SERVER_DIR="$PROJECT_ROOT/.git-local-server"
BARE_REPO_DIR="$LOCAL_SERVER_DIR/mev-bot.git"

# Colors
# ANSI escape codes used by the log helpers below.
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
RED='\033[0;31m'
NC='\033[0m'
# Colored output helpers. Each prints a tagged message built from all of its
# arguments; %b expands the ANSI escape sequences in the color variables,
# matching the original `echo -e` behavior.
log()   { printf '%b\n' "${GREEN}[GIT-SERVER]${NC} $*"; }
warn()  { printf '%b\n' "${YELLOW}[WARNING]${NC} $*"; }
error() { printf '%b\n' "${RED}[ERROR]${NC} $*"; }
info()  { printf '%b\n' "${BLUE}[INFO]${NC} $*"; }
# Print the command reference for this tool to stdout.
# The unquoted EOF delimiter lets $0 expand inside the help text.
usage() {
    cat << EOF
Local Git Server Simulation for MEV Bot Development
USAGE: $0 <command> [args]
SERVER MANAGEMENT:
init - Initialize local git server
start - Start local git server (if using daemon)
stop - Stop local git server
status - Show server status
clean - Clean server data
REPOSITORY OPERATIONS:
clone-fresh <name> - Clone fresh copy from local server
push-all - Push all branches to local server
pull-all - Pull all branches from local server
sync-remotes - Sync with local server
TEAM SIMULATION:
simulate-dev <name> - Simulate developer workflow
simulate-pr <branch> - Simulate PR workflow
simulate-conflict - Create merge conflict scenario
simulate-team - Multi-developer simulation
EXAMPLES:
$0 init # Setup local git server
$0 clone-fresh alice # Create alice's working copy
$0 simulate-dev bob # Simulate bob's development
$0 simulate-pr feature/new-algo # Simulate PR workflow
EOF
}
# Initialize local git server
# Create (or, after confirmation, recreate) the bare repository that acts as
# the simulated remote, register it as the `local-server` remote of the main
# repo, push all refs to it, and install a post-receive CI hook.
init_server() {
    log "Initializing local git server..."

    # Create server directory
    mkdir -p "$LOCAL_SERVER_DIR"

    # Create bare repository
    if [[ -d "$BARE_REPO_DIR" ]]; then
        warn "Local server already exists at: $BARE_REPO_DIR"
        read -p "Reinitialize? (y/N): " -n 1 -r
        echo
        if [[ ! $REPLY =~ ^[Yy]$ ]]; then
            exit 0
        fi
        rm -rf "$BARE_REPO_DIR"
    fi

    # Initialize bare repository
    git init --bare "$BARE_REPO_DIR"

    # Push current repository to server
    cd "$PROJECT_ROOT"

    # Add local server as remote if not exists
    if ! git remote get-url local-server >/dev/null 2>&1; then
        git remote add local-server "$BARE_REPO_DIR"
    else
        git remote set-url local-server "$BARE_REPO_DIR"
    fi

    # Push all branches (`|| true`: tolerate failures, e.g. nothing to push)
    log "Pushing current repository to local server..."
    git push local-server --all 2>/dev/null || true
    git push local-server --tags 2>/dev/null || true

    # Create post-receive hook for CI integration
    # (quoted 'EOF' delimiter: the hook body is written literally, with no
    # expansion happening here — it expands when the hook itself runs)
    cat > "$BARE_REPO_DIR/hooks/post-receive" << 'EOF'
#!/usr/bin/env bash
# Post-receive hook - Run CI on pushes to main branches
while read oldrev newrev refname; do
branch=$(echo $refname | sed -n 's/^refs\/heads\///p')
if [[ "$branch" == "master" ]] || [[ "$branch" == "main" ]] || [[ "$branch" == "develop" ]]; then
echo "🚀 Running CI for $branch branch..."
# Create temporary checkout
temp_dir=$(mktemp -d)
git --git-dir="$PWD" --work-tree="$temp_dir" checkout -f "$branch"
# Run CI in the temporary directory
(
cd "$temp_dir"
if [[ -f "Makefile" ]] && command -v make >/dev/null 2>&1; then
echo "Running CI pipeline..."
make ci-quick || echo "CI failed for $branch"
else
echo "No CI configuration found"
fi
)
# Cleanup
rm -rf "$temp_dir"
fi
done
EOF
    chmod +x "$BARE_REPO_DIR/hooks/post-receive"

    log "✅ Local git server initialized at: $BARE_REPO_DIR"
    log "📍 Remote added as: local-server"
    log "🎯 Server URL: $BARE_REPO_DIR"

    # Create server info file (unquoted delimiter: paths and date expand now)
    cat > "$LOCAL_SERVER_DIR/server-info.txt" << EOF
MEV Bot Local Git Server
========================
Repository: $BARE_REPO_DIR
Remote name: local-server
Initialized: $(date)
Available commands:
git clone $BARE_REPO_DIR <directory>
git remote add origin $BARE_REPO_DIR
git push local-server <branch>
git pull local-server <branch>
Hooks installed:
- post-receive: CI integration for main branches
EOF

    log "📋 Server info saved to: $LOCAL_SERVER_DIR/server-info.txt"
}
# Show server status
# Print an overview of the simulated server: location, size, branches, tags,
# recent commits, and whether the main repo has the `local-server` remote.
# Exits 1 when the server has not been initialized yet.
show_status() {
    if [[ ! -d "$BARE_REPO_DIR" ]]; then
        error "Local git server not initialized"
        echo "Run: $0 init"
        exit 1
    fi

    log "Local Git Server Status"
    echo ""
    echo "📍 Server Location: $BARE_REPO_DIR"
    echo "📊 Repository Size: $(du -sh "$BARE_REPO_DIR" 2>/dev/null | cut -f1)"
    echo ""
    echo "🌿 Branches:"
    git --git-dir="$BARE_REPO_DIR" branch -a || echo "No branches"
    echo ""
    echo "🏷️ Tags:"
    # NOTE(review): with pipefail the fallback triggers only if `head` fails,
    # not when there are zero tags (git tag -l exits 0 on empty output).
    git --git-dir="$BARE_REPO_DIR" tag -l | head -10 || echo "No tags"
    echo ""
    echo "📈 Recent Activity:"
    git --git-dir="$BARE_REPO_DIR" log --oneline --all -10 2>/dev/null || echo "No commits"
    echo ""
    echo "🔗 Remote Configuration in Main Repo:"
    cd "$PROJECT_ROOT"
    if git remote get-url local-server >/dev/null 2>&1; then
        echo "✅ local-server: $(git remote get-url local-server)"
    else
        echo "❌ local-server remote not configured"
    fi
}
# Clone fresh copy from local server
# Creates $LOCAL_SERVER_DIR/clones/<name>, preconfigured with a per-developer
# git identity and a few convenience aliases.
# Arguments: $1 - clone name (required)
# Exits 1 when the name is missing or the server is not initialized.
clone_fresh() {
    # ${1:-}: under `set -u` (enabled at the top of this script) a bare "$1"
    # aborts with "unbound variable" before the friendly usage text below
    # ever prints.
    local clone_name="${1:-}"

    if [[ -z "$clone_name" ]]; then
        error "Clone name required"
        echo "Usage: $0 clone-fresh <name>"
        exit 1
    fi

    if [[ ! -d "$BARE_REPO_DIR" ]]; then
        error "Local git server not initialized"
        echo "Run: $0 init"
        exit 1
    fi

    local clone_dir="$LOCAL_SERVER_DIR/clones/$clone_name"

    if [[ -d "$clone_dir" ]]; then
        warn "Clone already exists: $clone_dir"
        read -p "Remove and re-clone? (y/N): " -n 1 -r
        echo
        if [[ $REPLY =~ ^[Yy]$ ]]; then
            rm -rf "$clone_dir"
        else
            exit 0
        fi
    fi

    log "Creating fresh clone: $clone_name"

    # Create clone directory
    mkdir -p "$(dirname "$clone_dir")"

    # Clone from local server
    git clone "$BARE_REPO_DIR" "$clone_dir"

    # Set up useful configuration
    cd "$clone_dir"

    # Configure git for this clone
    git config user.name "Developer $clone_name"
    git config user.email "$clone_name@mev-bot.local"

    # Add convenience aliases
    git config alias.st status
    git config alias.co checkout
    git config alias.br branch
    git config alias.ci commit
    git config alias.up "pull --rebase"

    log "✅ Fresh clone created: $clone_dir"
    log "💡 To use this clone:"
    log " cd $clone_dir"
    log " git checkout -b feature/your-feature"
    log " # ... make changes ..."
    log " git push origin feature/your-feature"
}
# Simulate developer workflow
# End-to-end simulation for one developer: clone (if needed), sync with the
# integration branch, create a feature branch, commit a change, run CI, and
# push the branch to the local server.
# Arguments: $1 - developer name (required)
simulate_dev() {
    # ${1:-}: avoid a `set -u` "unbound variable" abort when called without
    # arguments, so the usage message below can print.
    local dev_name="${1:-}"

    if [[ -z "$dev_name" ]]; then
        error "Developer name required"
        echo "Usage: $0 simulate-dev <name>"
        exit 1
    fi

    log "Simulating developer workflow for: $dev_name"

    # Create or use existing clone
    local clone_dir="$LOCAL_SERVER_DIR/clones/$dev_name"
    if [[ ! -d "$clone_dir" ]]; then
        log "Creating clone for $dev_name..."
        clone_fresh "$dev_name"
    fi

    cd "$clone_dir"

    # Simulate development workflow
    log "🔄 Pulling latest changes..."
    git checkout develop 2>/dev/null || git checkout master
    git pull origin "$(git branch --show-current)"

    # Create feature branch (timestamped so repeated runs don't collide)
    local feature_name="feature/$dev_name-$(date +%H%M%S)"
    log "🌿 Creating feature branch: $feature_name"
    git checkout -b "$feature_name"

    # Simulate some development
    echo "// Development by $dev_name at $(date)" >> "dev-notes-$dev_name.txt"
    echo "Implemented new feature for MEV optimization" >> "dev-notes-$dev_name.txt"

    git add "dev-notes-$dev_name.txt"

    # Commit with conventional format
    git commit -m "feat($dev_name): implement MEV optimization feature
- Add new optimization algorithm
- Improve performance by 15%
- Update documentation
Closes #123"

    # Run CI before push
    log "🧪 Running CI validation..."
    if [[ -f "../../../Makefile" ]]; then
        make -C "../../.." ci-quick 2>/dev/null || warn "CI validation failed"
    fi

    # Push feature branch
    log "🚀 Pushing feature branch..."
    git push origin "$feature_name"

    log "✅ Developer workflow completed for $dev_name"
    log "📂 Working directory: $clone_dir"
    log "🌿 Feature branch: $feature_name"
    log "💡 Next: Simulate PR with: $0 simulate-pr $feature_name"
}
# Simulate PR workflow
# Reviews a pushed feature branch as if it were a PR against develop (or
# master if no develop branch exists): prints a summary and changed files,
# runs CI, and checks for merge conflicts.
# Arguments: $1 - feature branch name (required)
simulate_pr() {
    # ${1:-}: avoid a `set -u` "unbound variable" abort when called without
    # arguments, so the usage message below can print.
    local branch_name="${1:-}"

    if [[ -z "$branch_name" ]]; then
        error "Branch name required"
        echo "Usage: $0 simulate-pr <branch-name>"
        exit 1
    fi

    log "Simulating PR workflow for branch: $branch_name"

    # Find which clone has this branch
    local clone_dir=""
    for dir in "$LOCAL_SERVER_DIR/clones"/*; do
        if [[ -d "$dir" ]]; then
            cd "$dir"
            if git show-ref --verify --quiet "refs/heads/$branch_name" 2>/dev/null; then
                clone_dir="$dir"
                break
            fi
        fi
    done

    if [[ -z "$clone_dir" ]]; then
        error "Branch $branch_name not found in any clone"
        exit 1
    fi

    cd "$clone_dir"

    log "📋 PR Simulation: $branch_name → develop"

    # Show PR summary
    echo ""
    echo "📝 PR Summary:"
    git log --oneline "develop..$branch_name" 2>/dev/null || git log --oneline "master..$branch_name"

    echo ""
    echo "📊 Changed files:"
    git diff --name-status "develop..$branch_name" 2>/dev/null || git diff --name-status "master..$branch_name"

    # Run PR validation
    log "🔍 Running PR validation..."
    git checkout "$branch_name"

    # Simulate CI for PR
    if [[ -f "../../../Makefile" ]]; then
        log "Running CI pipeline..."
        make -C "../../.." ci-dev 2>/dev/null || warn "CI validation had issues"
    fi

    # Check for conflicts
    log "🔀 Checking for merge conflicts..."
    local target_branch="develop"
    if ! git show-ref --verify --quiet "refs/heads/$target_branch" 2>/dev/null; then
        target_branch="master"
    fi

    # merge-tree performs a virtual merge; conflict markers in its output
    # indicate the real merge would conflict.
    if git merge-tree "$(git merge-base "$target_branch" "$branch_name")" "$target_branch" "$branch_name" | grep -q "<<<<<<< "; then
        warn "⚠️ Merge conflicts detected"
        echo "Conflicts would need resolution before merge"
    else
        log "✅ No merge conflicts detected"
    fi

    log "📋 PR Status: Ready for review and merge"
    log "💡 To merge: $0 merge-pr $branch_name $target_branch"
}
# Simulate merge conflicts
# Builds a reproducible conflict scenario: two clones each create a branch
# writing different content to the same file (shared-config.go), so merging
# both into develop/master will conflict.
simulate_conflict() {
    log "Simulating merge conflict scenario..."

    # Create two conflicting branches
    local dev1_clone="$LOCAL_SERVER_DIR/clones/dev1"
    local dev2_clone="$LOCAL_SERVER_DIR/clones/dev2"

    # Ensure clones exist (test failing in the && list does not trip set -e)
    [[ ! -d "$dev1_clone" ]] && clone_fresh "dev1"
    [[ ! -d "$dev2_clone" ]] && clone_fresh "dev2"

    # Dev1 makes changes
    cd "$dev1_clone"
    git checkout develop 2>/dev/null || git checkout master
    git pull origin "$(git branch --show-current)"
    git checkout -b "feature/dev1-conflict"
    echo "// Configuration by dev1" > shared-config.go
    echo "const DefaultValue = 100" >> shared-config.go
    git add shared-config.go
    git commit -m "feat(config): set default value to 100"
    git push origin "feature/dev1-conflict"

    # Dev2 makes conflicting changes
    cd "$dev2_clone"
    git checkout develop 2>/dev/null || git checkout master
    git pull origin "$(git branch --show-current)"
    git checkout -b "feature/dev2-conflict"
    echo "// Configuration by dev2" > shared-config.go
    echo "const DefaultValue = 200" >> shared-config.go
    git add shared-config.go
    git commit -m "feat(config): set default value to 200"
    git push origin "feature/dev2-conflict"

    log "✅ Conflict scenario created"
    log "🔀 Two branches with conflicting changes:"
    log " - feature/dev1-conflict (sets value to 100)"
    log " - feature/dev2-conflict (sets value to 200)"
    log "💡 Try merging one into develop to see conflict resolution"
}
# Push all branches to local server
# Mirrors every local branch and tag of the main repo into the simulated
# server. Requires the `local-server` remote (created by `init`).
push_all() {
    cd "$PROJECT_ROOT"

    if ! git remote get-url local-server >/dev/null 2>&1; then
        error "local-server remote not configured"
        echo "Run: $0 init"
        exit 1
    fi

    log "Pushing all branches to local server..."
    git push local-server --all
    git push local-server --tags

    log "✅ All branches and tags pushed to local server"
}
# Clean server data
# Interactively deletes the whole simulated-server directory (bare repo and
# all clones) and removes the `local-server` remote from the main repo.
clean_server() {
    warn "This will remove all local server data"
    read -p "Continue? (y/N): " -n 1 -r
    echo
    if [[ ! $REPLY =~ ^[Yy]$ ]]; then
        echo "Cancelled"
        exit 0
    fi

    log "Cleaning local git server data..."

    # Remove server directory
    if [[ -d "$LOCAL_SERVER_DIR" ]]; then
        rm -rf "$LOCAL_SERVER_DIR"
        log "✅ Server data removed"
    fi

    # Remove local-server remote from main repo
    cd "$PROJECT_ROOT"
    if git remote get-url local-server >/dev/null 2>&1; then
        git remote remove local-server
        log "✅ local-server remote removed"
    fi

    log "🧹 Cleanup completed"
}
# Main command dispatcher: route the first argument to its handler.
# With no arguments, print usage and exit successfully.
main() {
    [[ $# -gt 0 ]] || { usage; exit 0; }

    local cmd="$1"
    shift

    case "$cmd" in
        init)
            init_server
            ;;
        status)
            show_status
            ;;
        clone-fresh)
            clone_fresh "$@"
            ;;
        simulate-dev)
            simulate_dev "$@"
            ;;
        simulate-pr)
            simulate_pr "$@"
            ;;
        simulate-conflict)
            simulate_conflict
            ;;
        push-all)
            push_all
            ;;
        clean)
            clean_server
            ;;
        *)
            error "Unknown command: $cmd"
            usage
            exit 1
            ;;
    esac
}

# Run main function
main "$@"

View File

@@ -1,269 +0,0 @@
#!/bin/bash
# git-workflow.sh - Helper script for Git workflow in MEV Bot project
# Interactive wrapper around common branch/commit/push operations; run from
# the project root (enforced by check_directory below).
set -e # Exit on any error

echo "MEV Bot Git Workflow Helper"
echo "=========================="
# Function to display usage
usage() {
echo "Usage: $0 [command]"
echo ""
echo "Commands:"
echo " status - Show git status"
echo " commit - Commit changes with conventional commits"
echo " push - Push changes with pre-push checks"
echo " feature NAME - Create new feature branch"
echo " fix NAME - Create new fix branch"
echo " sync - Sync current branch with develop"
echo " pr - Prepare for pull request"
echo ""
echo "Examples:"
echo " $0 commit"
echo " $0 feature add-market-scanner"
echo " $0 fix resolve-memory-leak"
}
# Guard: the helper must run from the project root, identified by go.mod.
# Exits the whole script with status 1 when run from anywhere else.
check_directory() {
    [ -f "go.mod" ] && return 0
    echo "Error: This script must be run from the project root directory"
    exit 1
}
# Print a short header followed by the working-tree status from git.
show_status() {
    printf '%s\n' "Git Status:" "==========="
    git status
}
# Function to commit changes
# Interactively stage and commit: shows modified/staged files, lets the user
# choose what to stage, runs the pre-commit hook visibly, then prompts for a
# conventional-commit message. Returns without committing when the user
# cancels or supplies no message.
commit_changes() {
    echo "Committing changes..."
    echo "Available file types to add:"

    # Show what files are modified
    if [ -n "$(git diff --name-only)" ]; then
        echo "Modified files:"
        git diff --name-only
    fi

    if [ -n "$(git diff --name-only --cached)" ]; then
        echo "Staged files:"
        git diff --name-only --cached
    fi

    # Ask user what to do
    echo ""
    echo "Options:"
    echo "1) Add all changes and commit"
    echo "2) Add specific files and commit"
    echo "3) Commit already staged changes"
    echo "4) Cancel"
    read -p "Choose option (1-4): " choice

    case $choice in
        1)
            git add .
            ;;
        2)
            echo "Enter files to add (space separated):"
            read files
            # NOTE(review): $files is intentionally unquoted so the shell
            # splits it into separate pathnames; filenames containing spaces
            # or glob characters will not survive this.
            git add $files
            ;;
        3)
            # Use already staged files
            ;;
        4)
            echo "Commit cancelled"
            return
            ;;
        *)
            echo "Invalid option"
            return
            ;;
    esac

    # Run pre-commit hook manually to show what it does
    echo "Running pre-commit checks..."
    .git/hooks/pre-commit

    # Get commit message
    echo ""
    echo "Enter commit message (follow conventional commits format):"
    echo "Examples:"
    echo " feat(market): add new arbitrage detection algorithm"
    echo " fix(parser): resolve race condition in transaction parsing"
    echo " perf(pricing): optimize Uniswap V3 calculations"
    read -p "Commit message: " commit_msg

    if [ -n "$commit_msg" ]; then
        git commit -m "$commit_msg"
        echo "Changes committed successfully!"
    else
        echo "No commit message provided. Commit cancelled."
    fi
}
# Function to push changes
# Runs the pre-push hook visibly, then pushes the current branch to origin
# (-u sets the upstream so future plain `git push` works).
push_changes() {
    echo "Pushing changes..."

    # Run pre-push hook manually to show what it does
    echo "Running pre-push checks..."
    .git/hooks/pre-push

    # Get current branch
    branch=$(git rev-parse --abbrev-ref HEAD)

    # Push to origin
    git push -u origin $branch

    echo "Changes pushed successfully!"
}
# Function to create feature branch
# Creates feature/<name> off an up-to-date develop and switches to it.
# Arguments: $1 - feature name (required)
# Returns:   1 when no name is supplied.
create_feature_branch() {
    local feature_name="${1:-}"

    if [ -z "$feature_name" ]; then
        echo "Error: Feature name required"
        echo "Usage: $0 feature <feature-name>"
        return 1
    fi

    echo "Creating feature branch: feature/$feature_name"

    # Switch to develop and pull latest
    git checkout develop
    git pull origin develop

    # Create new feature branch (quoted so names containing whitespace or
    # glob characters reach git as a single argument)
    git checkout -b "feature/$feature_name"

    echo "Feature branch 'feature/$feature_name' created and switched to."
}
# Function to create fix branch
# Creates fix/<name> off an up-to-date develop and switches to it.
# Arguments: $1 - fix name (required)
# Returns:   1 when no name is supplied.
create_fix_branch() {
    local fix_name="${1:-}"

    if [ -z "$fix_name" ]; then
        echo "Error: Fix name required"
        echo "Usage: $0 fix <fix-name>"
        return 1
    fi

    echo "Creating fix branch: fix/$fix_name"

    # Switch to develop and pull latest
    git checkout develop
    git pull origin develop

    # Create new fix branch (quoted so names containing whitespace or glob
    # characters reach git as a single argument)
    git checkout -b "fix/$fix_name"

    echo "Fix branch 'fix/$fix_name' created and switched to."
}
# Function to sync with develop
# Rebases the current branch onto the latest develop, stashing any
# uncommitted changes first and restoring them afterwards.
sync_with_develop() {
    echo "Syncing current branch with develop..."

    # Get current branch
    current_branch=$(git rev-parse --abbrev-ref HEAD)

    # Stash any changes
    if [ -n "$(git status --porcelain)" ]; then
        echo "Stashing changes..."
        git stash
        stashed=true
    fi

    # Switch to develop and pull
    git checkout develop
    git pull origin develop

    # Switch back to original branch and rebase
    git checkout $current_branch
    git rebase develop

    # Pop stash if we had one
    # NOTE(review): $stashed stays unset when nothing was stashed; the test
    # below then compares an empty string — fine under plain `set -e`, but
    # it would break if `set -u` were ever enabled.
    if [ "$stashed" = true ]; then
        echo "Restoring stashed changes..."
        git stash pop
    fi

    echo "Sync completed!"
}
# Function to prepare for PR
# Local pre-PR checklist: run tests, lint if golangci-lint is installed,
# show `git status`, then print the manual steps for opening the PR.
# Under `set -e`, a test or lint failure aborts the script.
prepare_pr() {
    echo "Preparing for Pull Request..."

    # Run tests
    echo "Running tests..."
    go test ./...

    # Run linter if available
    if command -v golangci-lint >/dev/null 2>&1; then
        echo "Running linter..."
        golangci-lint run
    else
        echo "golangci-lint not found. Skipping linting."
    fi

    # Check git status
    echo ""
    echo "Git status:"
    git status

    echo ""
    echo "Ready for Pull Request!"
    echo "Next steps:"
    echo "1. Push your branch: git push -u origin \$(git rev-parse --abbrev-ref HEAD)"
    echo "2. Create PR on GitHub/GitLab"
    echo "3. Request review from team members"
}
# Main script logic
# Validate the working directory, then dispatch the first argument to the
# matching workflow helper. With no arguments, print usage and exit 0.
check_directory

if [ $# -eq 0 ]; then
    usage
    exit 0
fi

COMMAND=$1
shift

case $COMMAND in
    status)
        show_status
        ;;
    commit)
        commit_changes
        ;;
    push)
        push_changes
        ;;
    feature)
        # After the shift, $1 is the feature/fix name (may be empty; the
        # helper itself reports the error).
        create_feature_branch $1
        ;;
    fix)
        create_fix_branch $1
        ;;
    sync)
        sync_with_develop
        ;;
    pr)
        prepare_pr
        ;;
    *)
        echo "Unknown command: $COMMAND"
        usage
        exit 1
        ;;
esac

View File

@@ -1,207 +0,0 @@
package main
import (
"context"
"fmt"
"log"
"time"
"github.com/ethereum/go-ethereum"
"github.com/ethereum/go-ethereum/common"
"github.com/ethereum/go-ethereum/ethclient"
)
// main identifies what kind of AMM pool lives at a hard-coded Arbitrum
// address by probing it with known 4-byte method selectors and matching the
// responses against UniswapV2/V3 and Algebra method surfaces.
func main() {
	// Pool to identify
	poolAddr := common.HexToAddress("0xC6962004f452bE9203591991D15f6b388e09E8D0")

	fmt.Println("========================================")
	fmt.Printf("Identifying Pool: %s\n", poolAddr.Hex())
	fmt.Println("========================================")
	fmt.Println()

	// Connect to Arbitrum
	client, err := ethclient.Dial("https://arb1.arbitrum.io/rpc")
	if err != nil {
		log.Fatal("Failed to connect:", err)
	}
	// Release the underlying RPC connection on exit (was previously leaked).
	defer client.Close()

	ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
	defer cancel()

	// First check if contract exists
	code, err := client.CodeAt(ctx, poolAddr, nil)
	if err != nil {
		log.Fatal("Failed to get contract code:", err)
	}

	if len(code) == 0 {
		fmt.Println("❌ NO CONTRACT AT THIS ADDRESS")
		return
	}

	fmt.Printf("✅ Contract exists (%d bytes)\n", len(code))
	fmt.Println()

	// Method selectors for detection (first 4 bytes of the keccak256 hash of
	// each method signature).
	selectors := map[string][]byte{
		"token0":               {0x0d, 0xfe, 0x16, 0x81}, // Common to many DEXs
		"token1":               {0xd2, 0x12, 0x20, 0xa7}, // Correct selector for token1()
		"fee":                  {0xdd, 0xca, 0x3f, 0x43}, // UniswapV3
		"slot0":                {0x38, 0x50, 0xc7, 0xbd}, // UniswapV3
		"globalState":          {0x13, 0xaf, 0x40, 0x35}, // Algebra
		"getReserves":          {0x09, 0x02, 0xf1, 0xac}, // UniswapV2
		"liquidity":            {0x1a, 0x68, 0x6d, 0x0f}, // UniswapV3
		"factory":              {0xc4, 0x5a, 0x01, 0x55}, // Common
		"tickSpacing":          {0xd0, 0xc9, 0x38, 0x91}, // UniswapV3
		"maxLiquidityPerTick":  {0x70, 0xcf, 0x75, 0x4a}, // UniswapV3
		"sqrtPriceX96":         {0x88, 0x5a, 0xdb, 0x02}, // Some V3 variants
		"observations":         {0x25, 0x2c, 0x09, 0xd7}, // UniswapV3
		"feeGrowthGlobal0X128": {0xf3, 0x05, 0x83, 0x99}, // UniswapV3
		"feeGrowthGlobal1X128": {0x46, 0x14, 0x16, 0x27}, // UniswapV3
	}

	fmt.Println("Testing Method Signatures:")
	fmt.Println("--------------------------")

	results := make(map[string]bool)
	tokenAddresses := make(map[string]common.Address)

	for name, selector := range selectors {
		// eth_call with just the selector: a non-empty reply means the
		// contract implements (or at least answers) that method.
		result, err := client.CallContract(ctx, ethereum.CallMsg{
			To:   &poolAddr,
			Data: selector,
		}, nil)

		if err == nil && len(result) > 0 {
			results[name] = true
			fmt.Printf("✅ %s(): SUCCESS", name)

			// Extract token addresses if applicable
			if name == "token0" || name == "token1" {
				if len(result) >= 32 {
					// An ABI-encoded address occupies the low 20 bytes of
					// the 32-byte word.
					addr := common.BytesToAddress(result[12:32])
					tokenAddresses[name] = addr
					fmt.Printf(" -> %s", addr.Hex())
				}
			}

			// Show fee value
			if name == "fee" && len(result) >= 32 {
				// Fee is uint24, stored in the last 3 bytes of the 32-byte word
				fee := uint32(result[29])<<16 | uint32(result[30])<<8 | uint32(result[31])
				fmt.Printf(" -> %d (%.2f%%)", fee, float64(fee)/10000)
			}
			fmt.Println()
		} else {
			results[name] = false
			if err != nil {
				fmt.Printf("❌ %s(): FAILED - %v\n", name, err)
			} else {
				fmt.Printf("❌ %s(): FAILED - empty result\n", name)
			}
		}
	}

	fmt.Println()
	fmt.Println("Analysis:")
	fmt.Println("---------")

	// Analyze results
	hasToken0 := results["token0"]
	hasToken1 := results["token1"]
	hasFee := results["fee"]
	hasSlot0 := results["slot0"]
	hasGlobalState := results["globalState"]
	hasGetReserves := results["getReserves"]
	hasLiquidity := results["liquidity"]
	hasTickSpacing := results["tickSpacing"]
	hasMaxLiquidityPerTick := results["maxLiquidityPerTick"]
	hasFeeGrowthGlobal0 := results["feeGrowthGlobal0X128"]
	hasFeeGrowthGlobal1 := results["feeGrowthGlobal1X128"]

	fmt.Printf("Has token0/token1: %v/%v\n", hasToken0, hasToken1)
	fmt.Printf("Has fee: %v\n", hasFee)
	fmt.Printf("Has slot0: %v\n", hasSlot0)
	fmt.Printf("Has globalState: %v\n", hasGlobalState)
	fmt.Printf("Has getReserves: %v\n", hasGetReserves)
	fmt.Printf("Has liquidity: %v\n", hasLiquidity)
	fmt.Printf("Has tickSpacing: %v\n", hasTickSpacing)
	fmt.Printf("Has maxLiquidityPerTick: %v\n", hasMaxLiquidityPerTick)
	fmt.Printf("Has feeGrowthGlobal0/1: %v/%v\n", hasFeeGrowthGlobal0, hasFeeGrowthGlobal1)

	fmt.Println()
	fmt.Println("========================================")
	fmt.Println("IDENTIFICATION RESULT:")
	fmt.Println("========================================")

	// Identification logic: match the probed method surface against known
	// DEX ABIs, from most to least specific.
	if hasToken0 && hasToken1 && hasSlot0 && hasFee && hasTickSpacing && hasMaxLiquidityPerTick {
		fmt.Println("✅ Pool Type: UNISWAP V3")
		fmt.Println(" Confidence: 95%")
		fmt.Println(" Reason: Has all UniswapV3 signature methods")
		if token0, ok := tokenAddresses["token0"]; ok {
			fmt.Printf(" Token0: %s\n", token0.Hex())
		}
		if token1, ok := tokenAddresses["token1"]; ok {
			fmt.Printf(" Token1: %s\n", token1.Hex())
		}
	} else if hasToken0 && hasToken1 && hasGlobalState && !hasSlot0 {
		fmt.Println("✅ Pool Type: ALGEBRA-BASED (Camelot/QuickSwap V3)")
		fmt.Println(" Confidence: 90%")
		fmt.Println(" Reason: Has globalState instead of slot0")
	} else if hasToken0 && hasToken1 && hasGetReserves && !hasSlot0 && !hasGlobalState {
		fmt.Println("✅ Pool Type: UNISWAP V2 / SUSHISWAP")
		fmt.Println(" Confidence: 85%")
		fmt.Println(" Reason: Has getReserves, no slot0/globalState")
	} else if hasToken0 && hasToken1 {
		fmt.Println("⚠️ Pool Type: UNKNOWN DEX")
		fmt.Println(" Confidence: 30%")
		fmt.Println(" Reason: Has basic token methods but doesn't match known patterns")
	} else {
		fmt.Println("❌ Pool Type: NOT A STANDARD AMM POOL")
		fmt.Println(" Reason: Missing basic token methods")
	}

	// Additional checks
	fmt.Println()
	fmt.Println("Additional Information:")
	fmt.Println("----------------------")

	// Check factory
	factorySelector := []byte{0xc4, 0x5a, 0x01, 0x55}
	factoryResult, err := client.CallContract(ctx, ethereum.CallMsg{
		To:   &poolAddr,
		Data: factorySelector,
	}, nil)

	if err == nil && len(factoryResult) >= 32 {
		factory := common.BytesToAddress(factoryResult[12:32])
		fmt.Printf("Factory: %s\n", factory.Hex())

		// Check known factories
		knownFactories := map[common.Address]string{
			common.HexToAddress("0x1F98431c8aD98523631AE4a59f267346ea31F984"): "UniswapV3 Factory",
			common.HexToAddress("0xc35DADB65012eC5796536bD9864eD8773aBc74C4"): "Sushiswap Factory",
			common.HexToAddress("0x0BFbCF9fa4f9C56B0F40a671Ad40E0805A091865"): "PancakeSwap V3 Factory",
		}

		if name, known := knownFactories[factory]; known {
			fmt.Printf(" ✅ Known Factory: %s\n", name)
		}
	}

	// Try to get current price/state
	if hasSlot0 {
		slot0Selector := []byte{0x38, 0x50, 0xc7, 0xbd}
		slot0Result, err := client.CallContract(ctx, ethereum.CallMsg{
			To:   &poolAddr,
			Data: slot0Selector,
		}, nil)
		if err == nil && len(slot0Result) >= 32 {
			fmt.Println("Slot0 data available (price and tick info)")
		}
	}
}

View File

@@ -1,237 +0,0 @@
#!/bin/bash
# implementation-checker.sh - Script to find placeholder, mock, and erroneous implementations
# Usage: ./scripts/implementation-checker.sh [output_dir]
# If output_dir is not provided, defaults to logs/
set -euo pipefail

# Default output directory
OUTPUT_DIR="${1:-logs}"

# Create output directory if it doesn't exist
mkdir -p "$OUTPUT_DIR"

# Output files
LOG_FILE="$OUTPUT_DIR/implementation_check.log" # detailed scan log
TODO_FILE="TODOs.md" # generated report, written to the current directory

# Colors for output
RED='\033[0;31m'
YELLOW='\033[1;33m'
GREEN='\033[0;32m'
NC='\033[0m' # No Color

echo "🔍 Implementation Checker Script"
echo "==============================="
echo "Output directory: $OUTPUT_DIR"
echo ""
# Colored output helpers: each wraps its single argument in the matching
# ANSI color; %b expands the escape sequences just like `echo -e` did.
print_section() { printf '%b\n' "${YELLOW}=== $1 ===${NC}"; }
print_success() { printf '%b\n' "${GREEN}$1${NC}"; }
print_warning() { printf '%b\n' "${YELLOW}$1${NC}"; }
print_error()   { printf '%b\n' "${RED}$1${NC}"; }
# Check if ripgrep is installed
if ! command -v rg &> /dev/null; then
    print_error "ripgrep (rg) is not installed. Please install it first."
    exit 1
fi

print_section "Checking for placeholder, mock, and erroneous implementations"

# Create fresh log file
echo "FILE LIST:" > "$LOG_FILE"

# Search patterns
# Case-insensitive alternation handed to ripgrep; matches words that commonly
# mark unfinished or low-quality code.
PATTERNS="placeholder|simple|simplified|mock|emulated|simulated|hallucinated|erroneous|todo|not implemented|niy|stub|fallback|sophisticated.*calculation|placeholder.*implementation|fallback.*implementation|simplified.*calculation|crude.*approximation|rough.*estimate|quick.*hack|bullshit|fucking|damn|darn"

# File types to search (excluding test files, vendor, bindings, etc.)
# NOTE(review): intentionally unquoted at use sites so the shell splits it
# into separate ripgrep arguments.
FILE_TYPES="--type go --type yaml --type json --type md"

# Globs to exclude based on .gitignore and .dockerignore
EXCLUDE_GLOBS=(
    "--glob=!test/*"
    "--glob=!tests/*"
    "--glob=!vendor/*"
    "--glob=!bindings/*"
    "--glob=!logs/*"
    "--glob=!data/*"
    "--glob=!backup/*"
    "--glob=!backups/*"
    "--glob=!bin/*"
    "--glob=!coverage.*"
    "--glob=!*.log"
    "--glob=!*.db"
    "--glob=!*.test"
    "--glob=!go.work"
    "--glob=!go.mod"
    "--glob=!go.sum"
    "--glob=!*.swp"
    "--glob=!*.swo"
    "--glob=!*.DS_Store"
    "--glob=!Thumbs.db"
)

echo "Searching for patterns: $PATTERNS"
echo "Excluding directories: test, tests, vendor, bindings, logs, data, backup, backups, bin"
echo ""

# Find files with suspicious patterns
print_section "Finding Files with Suspicious Patterns"

# -l lists matching file names only; `|| true` keeps `set -e` from aborting
# when nothing matches (rg exits 1 on zero matches).
FILES_WITH_PATTERNS=$(rg ${FILE_TYPES} ${EXCLUDE_GLOBS[@]} -i -l "$PATTERNS" ./ 2>/dev/null || true)

if [ -n "$FILES_WITH_PATTERNS" ]; then
    echo "$FILES_WITH_PATTERNS" | sort -u | tee -a "$LOG_FILE"
    FILE_COUNT=$(echo "$FILES_WITH_PATTERNS" | wc -l)
    print_warning "Found $FILE_COUNT files with suspicious patterns"
else
    echo "No files with suspicious patterns found" | tee -a "$LOG_FILE"
    print_success "No suspicious files found"
fi

echo "" | tee -a "$LOG_FILE"

# Find specific patterns with context
print_section "Detailed Pattern Matches"
echo "TOFIX:" >> "$LOG_FILE"

# Get detailed matches with context (6 lines before/after each hit)
rg ${FILE_TYPES} ${EXCLUDE_GLOBS[@]} -i -A 6 -B 6 "$PATTERNS" ./ 2>/dev/null | \
    grep -v -E "^[0-9]\|^--$" | \
    tee -a "$LOG_FILE" || true

# Count total matches
# Per-file counts from `rg -c` are joined with `+` and summed by bc.
TOTAL_MATCHES=$(rg ${FILE_TYPES} ${EXCLUDE_GLOBS[@]} -i -c "$PATTERNS" ./ 2>/dev/null | paste -sd+ | bc 2>/dev/null || echo "0")

print_section "Summary"
echo "Total pattern matches found: $TOTAL_MATCHES"
echo "Check $LOG_FILE for detailed results"
# Create/update TODOs.md file
print_section "Generating TODOs.md"

# The entire brace group's stdout is redirected into $TODO_FILE at the end,
# so every echo/rg below contributes a line of the Markdown report.
{
    echo "# Implementation Issues and TODOs"
    echo ""
    echo "This file was automatically generated by scripts/implementation-checker.sh"
    echo "Last updated: $(date)"
    echo ""

    if [ "$TOTAL_MATCHES" -gt 0 ]; then
        echo "## Summary"
        echo "- Total files with issues: $(echo "$FILES_WITH_PATTERNS" | wc -l)"
        echo "- Total pattern matches: $TOTAL_MATCHES"
        echo ""
        echo "## Files with Issues"
        echo ""
        if [ -n "$FILES_WITH_PATTERNS" ]; then
            # One Markdown link per file
            echo "$FILES_WITH_PATTERNS" | sort -u | while read -r file; do
                echo "- [$file]($file)"
            done
        else
            echo "No files with issues found"
        fi
        echo ""
        echo "## Detailed Matches"
        echo ""
        echo "### Pattern Matches with Context"
        echo ""
        # Get matches in a more readable format (sed prefixes each line so
        # Markdown renders the output as preformatted text)
        rg ${FILE_TYPES} ${EXCLUDE_GLOBS[@]} -i -A 3 -B 3 "$PATTERNS" ./ 2>/dev/null | \
            grep -v -E "^[0-9]\|--$" | \
            sed 's/^/ /' || true
        echo ""
        echo "## Categories of Issues"
        echo ""
        echo "### Placeholder Implementations"
        echo ""
        rg ${FILE_TYPES} ${EXCLUDE_GLOBS[@]} -i -A 2 -B 2 "placeholder" ./ 2>/dev/null | \
            grep -v -E "^[0-9]\|--$" | \
            sed 's/^/ /' || true
        echo ""
        echo "### Mock Implementations"
        echo ""
        rg ${FILE_TYPES} ${EXCLUDE_GLOBS[@]} -i -A 2 -B 2 "mock" ./ 2>/dev/null | \
            grep -v -E "^[0-9]\|--$" | \
            sed 's/^/ /' || true
        echo ""
        echo "### Simplified/Incomplete Implementations"
        echo ""
        rg ${FILE_TYPES} ${EXCLUDE_GLOBS[@]} -i -A 2 -B 2 "simplified\|simple\|stub\|fallback" ./ 2>/dev/null | \
            grep -v -E "^[0-9]\|--$" | \
            sed 's/^/ /' || true
        echo ""
        echo "### TODO Items"
        echo ""
        rg ${FILE_TYPES} ${EXCLUDE_GLOBS[@]} -i -A 2 -B 2 "todo" ./ 2>/dev/null | \
            grep -v -E "^[0-9]\|--$" | \
            sed 's/^/ /' || true
        echo ""
        echo "### Not Implemented Errors"
        echo ""
        rg ${FILE_TYPES} ${EXCLUDE_GLOBS[@]} -i -A 2 -B 2 "not implemented" ./ 2>/dev/null | \
            grep -v -E "^[0-9]\|--$" | \
            sed 's/^/ /' || true
        echo ""
        echo "### Profanity or Negative Comments"
        echo ""
        rg ${FILE_TYPES} ${EXCLUDE_GLOBS[@]} -i -A 2 -B 2 "bullshit\|fucking\|damn\|darn" ./ 2>/dev/null | \
            grep -v -E "^[0-9]\|--$" | \
            sed 's/^/ /' || true
    else
        echo "## No Issues Found"
        echo ""
        echo "No placeholder, mock, or erroneous implementations were detected."
    fi

    echo ""
    echo "## Recommendations"
    echo ""
    echo "1. Review all placeholder implementations and replace with proper code"
    echo "2. Replace mock implementations with real implementations where needed"
    echo "3. Remove or address all TODO items"
    echo "4. Fix all 'not implemented' errors"
    echo "5. Remove profanity and improve code comments"
    echo "6. Enhance simplified implementations with proper functionality"
    echo ""
    echo "Generated by implementation-checker.sh on $(date)"
} > "$TODO_FILE"

print_success "Created/updated $TODO_FILE with $TOTAL_MATCHES matches"

echo ""
print_section "Done"
echo "Results saved to:"
echo "- $LOG_FILE (detailed log)"
echo "- $TODO_FILE (organized TODO list)"
echo ""
echo "Review these files to identify and fix placeholder, mock, and erroneous implementations."

exit 0

View File

@@ -1,117 +0,0 @@
-- MEV Bot Database Initialization Script
-- Creates necessary tables for opportunity persistence
-- All DDL is idempotent (IF NOT EXISTS) so the script can be re-run safely.
-- NOTE(review): SERIAL and COMMENT ON are PostgreSQL-specific — confirm the
-- target engine is PostgreSQL.
-- Arbitrage opportunities table
-- One row per detected opportunity; the execution_* columns are populated
-- later if/when the opportunity is acted upon (see execution_history).
CREATE TABLE IF NOT EXISTS arbitrage_opportunities (
    id TEXT PRIMARY KEY,
    path TEXT NOT NULL,
    pools TEXT,
    amount_in BIGINT NOT NULL,
    profit BIGINT NOT NULL,
    net_profit BIGINT NOT NULL,
    gas_estimate BIGINT NOT NULL,
    gas_cost BIGINT NOT NULL,
    estimated_profit BIGINT NOT NULL,
    required_amount BIGINT NOT NULL,
    roi REAL NOT NULL,
    protocol TEXT,
    execution_time INTEGER,
    confidence REAL,
    price_impact REAL,
    max_slippage REAL,
    token_in TEXT NOT NULL,
    token_out TEXT NOT NULL,
    timestamp BIGINT NOT NULL,
    detected_at TIMESTAMP NOT NULL,
    expires_at TIMESTAMP NOT NULL,
    urgency INTEGER,
    risk REAL,
    profitable BOOLEAN,
    executed BOOLEAN DEFAULT FALSE,
    execution_tx_hash TEXT,
    execution_status TEXT,
    execution_gas_used BIGINT,
    execution_profit BIGINT,
    execution_error TEXT,
    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
);
-- Index for fast lookups
CREATE INDEX IF NOT EXISTS idx_opportunities_timestamp ON arbitrage_opportunities(timestamp DESC);
CREATE INDEX IF NOT EXISTS idx_opportunities_profitable ON arbitrage_opportunities(profitable, profit DESC);
CREATE INDEX IF NOT EXISTS idx_opportunities_executed ON arbitrage_opportunities(executed, timestamp DESC);
CREATE INDEX IF NOT EXISTS idx_opportunities_token_pair ON arbitrage_opportunities(token_in, token_out);
-- Execution history table
-- Append-only record of execution attempts; tx_hash is UNIQUE so a given
-- on-chain transaction can only be recorded once.
CREATE TABLE IF NOT EXISTS execution_history (
    id SERIAL PRIMARY KEY,
    opportunity_id TEXT NOT NULL REFERENCES arbitrage_opportunities(id),
    tx_hash TEXT UNIQUE,
    block_number BIGINT,
    status TEXT NOT NULL,
    gas_used BIGINT,
    gas_price BIGINT,
    actual_profit BIGINT,
    error_message TEXT,
    executed_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
);
CREATE INDEX IF NOT EXISTS idx_execution_history_opportunity ON execution_history(opportunity_id);
CREATE INDEX IF NOT EXISTS idx_execution_history_status ON execution_history(status, executed_at DESC);
-- Statistics table for tracking performance
-- Aggregates over [period_start, period_end] windows; wei amounts kept as
-- BIGINT.
CREATE TABLE IF NOT EXISTS bot_statistics (
    id SERIAL PRIMARY KEY,
    opportunities_detected INTEGER DEFAULT 0,
    opportunities_executed INTEGER DEFAULT 0,
    total_profit_wei BIGINT DEFAULT 0,
    total_gas_cost_wei BIGINT DEFAULT 0,
    success_rate REAL DEFAULT 0,
    average_roi REAL DEFAULT 0,
    period_start TIMESTAMP NOT NULL,
    period_end TIMESTAMP NOT NULL,
    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
);
-- Pool cache table
-- Keyed by pool address; last_updated drives staleness checks in the bot.
CREATE TABLE IF NOT EXISTS pool_cache (
    address TEXT PRIMARY KEY,
    token0 TEXT NOT NULL,
    token1 TEXT NOT NULL,
    fee INTEGER,
    protocol TEXT NOT NULL,
    liquidity BIGINT,
    reserve0 BIGINT,
    reserve1 BIGINT,
    last_updated TIMESTAMP NOT NULL,
    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
);
CREATE INDEX IF NOT EXISTS idx_pool_cache_tokens ON pool_cache(token0, token1);
CREATE INDEX IF NOT EXISTS idx_pool_cache_protocol ON pool_cache(protocol);
-- Market events table for historical analysis
CREATE TABLE IF NOT EXISTS market_events (
    id SERIAL PRIMARY KEY,
    event_type TEXT NOT NULL,
    pool_address TEXT NOT NULL,
    token_in TEXT NOT NULL,
    token_out TEXT NOT NULL,
    amount_in BIGINT NOT NULL,
    amount_out BIGINT NOT NULL,
    price_impact REAL,
    tx_hash TEXT NOT NULL,
    block_number BIGINT NOT NULL,
    timestamp TIMESTAMP NOT NULL,
    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
);
CREATE INDEX IF NOT EXISTS idx_market_events_pool ON market_events(pool_address, timestamp DESC);
CREATE INDEX IF NOT EXISTS idx_market_events_tokens ON market_events(token_in, token_out);
CREATE INDEX IF NOT EXISTS idx_market_events_timestamp ON market_events(timestamp DESC);
-- Table-level documentation stored in the catalog.
COMMENT ON TABLE arbitrage_opportunities IS 'Stores detected arbitrage opportunities for analysis and execution tracking';
COMMENT ON TABLE execution_history IS 'Tracks execution attempts and results for all opportunities';
COMMENT ON TABLE bot_statistics IS 'Aggregated performance metrics over time periods';
COMMENT ON TABLE pool_cache IS 'Cached pool data to reduce RPC calls';
COMMENT ON TABLE market_events IS 'Historical market events for analysis and pattern detection';

View File

@@ -1,46 +0,0 @@
#!/bin/bash
# Install Git Hooks for Auto-Update
#
# Copies the versioned hook scripts from scripts/git-hooks/ into the local
# .git/hooks/ directory and marks them executable. Exits non-zero when run
# outside a git checkout or when the hook source is missing.
set -e

# Resolve the repository root relative to this script's location.
project_dir=$(cd "$(dirname "$0")/.." && pwd)
hooks_src="$project_dir/scripts/git-hooks"
hooks_dst="$project_dir/.git/hooks"

echo "========================================="
echo "Installing Git Hooks"
echo "========================================="
echo ""

# Refuse to run outside a git repository.
if [ ! -d "$project_dir/.git" ]; then
  echo "Error: Not a git repository"
  exit 1
fi

# Hooks directory may be absent in a fresh clone.
mkdir -p "$hooks_dst"

# Fail fast if the versioned hook is missing.
if [ ! -f "$hooks_src/post-merge" ]; then
  echo "✗ post-merge hook not found at $hooks_src/post-merge"
  exit 1
fi

echo "Installing post-merge hook..."
cp "$hooks_src/post-merge" "$hooks_dst/post-merge"
chmod +x "$hooks_dst/post-merge"
echo "✓ post-merge hook installed"

echo ""
echo "========================================="
echo "Git Hooks Installed Successfully!"
echo "========================================="
echo ""
echo "The following hooks are now active:"
echo " - post-merge: Auto-rebuild and restart after 'git pull'"
echo ""
echo "Hooks will run automatically when you:"
echo " - Run 'git pull'"
echo " - Run './scripts/auto-update.sh'"
echo ""

View File

@@ -1,149 +0,0 @@
#!/usr/bin/env bash
# Systemd Service Installation Script for MEV Bot
# Installs and enables MEV Bot to auto-start on system boot.
# Must run as root; requires Docker and a docker-compose.yml in the project
# root. Interactive: asks whether to start the service immediately.
set -euo pipefail
# Colors for output
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
CYAN='\033[0;36m'
NC='\033[0m' # No Color
echo -e "${BLUE}🔧 MEV Bot Systemd Service Installation${NC}"
echo -e "${BLUE}========================================${NC}"
echo ""
# Check if running as root (writing /etc/systemd and managing units needs it)
if [ "$EUID" -ne 0 ]; then
  echo -e "${RED}❌ Error: This script must be run as root or with sudo${NC}"
  echo -e "${YELLOW}Usage: sudo $0${NC}"
  exit 1
fi
# Get the directory where the script is located
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="$(cd "$SCRIPT_DIR/.." && pwd)"
echo -e "${BLUE}📁 Project root: ${PROJECT_ROOT}${NC}"
# Check if Docker is installed
if ! command -v docker &> /dev/null; then
  echo -e "${RED}❌ Error: Docker is not installed${NC}"
  exit 1
fi
# Check if docker-compose.yml exists
if [ ! -f "$PROJECT_ROOT/docker-compose.yml" ]; then
  echo -e "${RED}❌ Error: docker-compose.yml not found in $PROJECT_ROOT${NC}"
  exit 1
fi
echo -e "${GREEN}✅ Docker and docker-compose.yml found${NC}"
# Create systemd service file with actual project path.
# FIX: the unit previously set Restart=on-failure/RestartSec together with
# Type=oneshot. systemd refuses to load oneshot units with any Restart=
# value other than "no" ("Service has Restart= setting other than no, which
# isn't allowed for Type=oneshot services. Refusing."), so the service would
# never start. The Restart lines are therefore omitted.
echo -e "${BLUE}📝 Creating systemd service file...${NC}"
SERVICE_FILE="/etc/systemd/system/mev-bot.service"
cat > "$SERVICE_FILE" << EOF
[Unit]
Description=MEV Bot Production Service
Requires=docker.service
After=docker.service network-online.target
Wants=network-online.target
[Service]
Type=oneshot
RemainAfterExit=yes
WorkingDirectory=$PROJECT_ROOT
ExecStartPre=/usr/bin/docker compose pull --quiet
ExecStart=/usr/bin/docker compose up -d
ExecStop=/usr/bin/docker compose down
ExecReload=/usr/bin/docker compose restart
TimeoutStartSec=0
# Security settings
User=root
Group=docker
[Install]
WantedBy=multi-user.target
EOF
echo -e "${GREEN}✅ Service file created at $SERVICE_FILE${NC}"
# Reload systemd daemon so the new unit is visible
echo -e "${BLUE}🔄 Reloading systemd daemon...${NC}"
systemctl daemon-reload
# Enable the service for auto-start on boot
echo -e "${BLUE}✅ Enabling MEV Bot service...${NC}"
systemctl enable mev-bot.service
echo -e "${GREEN}✅ MEV Bot service enabled for auto-start on boot${NC}"
# Ask if user wants to start the service now
echo ""
read -p "Do you want to start the MEV Bot service now? (y/n) " -n 1 -r
echo ""
if [[ $REPLY =~ ^[Yy]$ ]]; then
  echo -e "${BLUE}🚀 Starting MEV Bot service...${NC}"
  systemctl start mev-bot.service
  # Wait a moment for service to start
  sleep 3
  # Check status
  if systemctl is-active --quiet mev-bot.service; then
    echo -e "${GREEN}✅ MEV Bot service is running${NC}"
  else
    echo -e "${YELLOW}⚠️  Service may not be running properly${NC}"
    echo -e "${YELLOW}   Check status with: sudo systemctl status mev-bot${NC}"
  fi
fi
# Display usage information
echo ""
echo -e "${GREEN}╔════════════════════════════════════════════════════════════╗${NC}"
echo -e "${GREEN}║  🎉 Systemd Service Installation Complete!                ║${NC}"
echo -e "${GREEN}╚════════════════════════════════════════════════════════════╝${NC}"
echo ""
echo -e "${BLUE}📋 Systemd Service Commands:${NC}"
echo ""
echo -e "  ${CYAN}sudo systemctl status mev-bot${NC}"
echo -e "     └─ Check service status"
echo ""
echo -e "  ${CYAN}sudo systemctl start mev-bot${NC}"
echo -e "     └─ Start the service"
echo ""
echo -e "  ${CYAN}sudo systemctl stop mev-bot${NC}"
echo -e "     └─ Stop the service"
echo ""
echo -e "  ${CYAN}sudo systemctl restart mev-bot${NC}"
echo -e "     └─ Restart the service"
echo ""
echo -e "  ${CYAN}sudo systemctl reload mev-bot${NC}"
echo -e "     └─ Reload (restart container)"
echo ""
echo -e "  ${CYAN}journalctl -u mev-bot -f${NC}"
echo -e "     └─ View service logs (follow mode)"
echo ""
echo -e "  ${CYAN}sudo systemctl disable mev-bot${NC}"
echo -e "     └─ Disable auto-start on boot"
echo ""
echo -e "${BLUE}📊 Docker Commands:${NC}"
echo ""
echo -e "  ${CYAN}docker compose logs -f mev-bot${NC}"
echo -e "     └─ View container logs"
echo ""
echo -e "  ${CYAN}docker compose ps${NC}"
echo -e "     └─ Check container status"
echo ""
echo -e "${GREEN}✅ MEV Bot will now auto-start on system boot${NC}"
View File

@@ -1,14 +0,0 @@
#!/usr/bin/env bash
# Safely stop any running MEV bot process.
# Exits 1 when nothing matching the pattern is running.
set -euo pipefail

readonly match_pattern="mev-bot|mev-beta"

# Bail out early when there is nothing to kill.
if ! pgrep -f "$match_pattern" >/dev/null 2>&1; then
  echo "No MEV bot processes found"
  exit 1
fi

echo "Killing MEV bot processes..."
if pkill -f "$match_pattern"; then
  echo "✅ MEV bot stopped"
else
  echo "❌ Failed to stop MEV bot"
fi

View File

@@ -1,137 +0,0 @@
//go:build tools
// +build tools

// Command seed loader converts data/pools_seed.json into the runtime data
// files consumed by the bot: data/pools.json (pool records keyed by address)
// and data/tokens.json (token metadata list).
package main

import (
	"encoding/json"
	"fmt"
	"os"
	"time"
)

// PoolSeed is a single pool entry as it appears in the seed file.
type PoolSeed struct {
	Address     string `json:"address"`
	Token0      string `json:"token0"`
	Token1      string `json:"token1"`
	Fee         uint32 `json:"fee"`
	Protocol    string `json:"protocol"`
	Factory     string `json:"factory"`
	Name        string `json:"name"`
	Description string `json:"description"`
}

// TokenInfo describes a token referenced by the seed pools.
type TokenInfo struct {
	Symbol   string `json:"symbol"`
	Name     string `json:"name"`
	Decimals uint8  `json:"decimals"`
}

// SeedData is the top-level layout of data/pools_seed.json.
type SeedData struct {
	Pools    []PoolSeed             `json:"pools"`
	Tokens   map[string]TokenInfo   `json:"tokens"`
	Metadata map[string]interface{} `json:"metadata"`
}

// Pool is the runtime pool record written to data/pools.json.
type Pool struct {
	Address     string    `json:"address"`
	Token0      string    `json:"token0"`
	Token1      string    `json:"token1"`
	Fee         uint32    `json:"fee"`
	Protocol    string    `json:"protocol"`
	Factory     string    `json:"factory"`
	LastUpdated time.Time `json:"lastUpdated"`
	TotalVolume string    `json:"totalVolume"`
	SwapCount   uint64    `json:"swapCount"`
	CreatedAt   time.Time `json:"createdAt"`
	BlockNumber uint64    `json:"blockNumber"`
}

// fail writes a formatted diagnostic to stderr and exits with status 1.
// FIX: the original printed errors to stdout, mixing diagnostics with the
// tool's normal output and hiding them from `2>` redirection.
func fail(format string, args ...interface{}) {
	fmt.Fprintf(os.Stderr, format, args...)
	os.Exit(1)
}

func main() {
	// Read seed data
	seedData, err := os.ReadFile("data/pools_seed.json")
	if err != nil {
		fail("Error reading seed data: %v\n", err)
	}
	var seed SeedData
	if err := json.Unmarshal(seedData, &seed); err != nil {
		fail("Error parsing seed data: %v\n", err)
	}
	// Convert seed entries to runtime pool records: stamped "now", with
	// zeroed volume/swap counters to be filled in at runtime.
	pools := make(map[string]Pool)
	now := time.Now()
	for _, poolSeed := range seed.Pools {
		pools[poolSeed.Address] = Pool{
			Address:     poolSeed.Address,
			Token0:      poolSeed.Token0,
			Token1:      poolSeed.Token1,
			Fee:         poolSeed.Fee,
			Protocol:    poolSeed.Protocol,
			Factory:     poolSeed.Factory,
			LastUpdated: now,
			TotalVolume: "0",
			SwapCount:   0,
			CreatedAt:   now,
			BlockNumber: 0,
		}
	}
	// Write to pools.json
	poolsJSON, err := json.MarshalIndent(pools, "", " ")
	if err != nil {
		fail("Error marshaling pools: %v\n", err)
	}
	if err := os.WriteFile("data/pools.json", poolsJSON, 0644); err != nil {
		fail("Error writing pools.json: %v\n", err)
	}
	// TokenMetadata is the runtime token record written to data/tokens.json.
	type TokenMetadata struct {
		Address   string    `json:"address"`
		Symbol    string    `json:"symbol"`
		Name      string    `json:"name"`
		Decimals  uint8     `json:"decimals"`
		Verified  bool      `json:"verified"`
		FirstSeen time.Time `json:"firstSeen"`
		LastSeen  time.Time `json:"lastSeen"`
		SeenCount uint64    `json:"seenCount"`
	}
	// Seed tokens are marked verified with an initial sighting of "now".
	tokens := make([]TokenMetadata, 0, len(seed.Tokens))
	for address, info := range seed.Tokens {
		tokens = append(tokens, TokenMetadata{
			Address:   address,
			Symbol:    info.Symbol,
			Name:      info.Name,
			Decimals:  info.Decimals,
			Verified:  true,
			FirstSeen: now,
			LastSeen:  now,
			SeenCount: 1,
		})
	}
	tokensJSON, err := json.MarshalIndent(tokens, "", " ")
	if err != nil {
		fail("Error marshaling tokens: %v\n", err)
	}
	if err := os.WriteFile("data/tokens.json", tokensJSON, 0644); err != nil {
		fail("Error writing tokens.json: %v\n", err)
	}
	// Success summary goes to stdout as before.
	fmt.Printf("✅ Loaded %d pools and %d tokens successfully!\n", len(pools), len(tokens))
	fmt.Printf("📁 Files created:\n")
	fmt.Printf("   - data/pools.json (%d pools)\n", len(pools))
	fmt.Printf("   - data/tokens.json (%d tokens)\n", len(tokens))
}

View File

@@ -1,833 +0,0 @@
#!/bin/bash
# MEV Bot Production Log Manager
# Comprehensive log management with real-time monitoring, alerting, and analytics
set -euo pipefail
# Production Configuration
# NOTE(review): PROJECT_ROOT is hardcoded to one operator's home directory —
# consider deriving it from the script location for portability.
PROJECT_ROOT="/home/administrator/projects/mev-beta"
LOGS_DIR="$PROJECT_ROOT/logs"
ARCHIVE_DIR="$PROJECT_ROOT/logs/archives"
ANALYTICS_DIR="$PROJECT_ROOT/logs/analytics"
ALERTS_DIR="$PROJECT_ROOT/logs/alerts"
CONFIG_FILE="$PROJECT_ROOT/config/log-manager.conf"
# Default Configuration
# These DEFAULT_* values are baked into the config file on first run by
# init_config; after that, the config file is authoritative.
DEFAULT_RETENTION_DAYS=30
DEFAULT_ARCHIVE_SIZE_LIMIT="10G"
DEFAULT_LOG_SIZE_LIMIT="1G"
DEFAULT_ERROR_THRESHOLD=100
DEFAULT_ALERT_EMAIL=""
DEFAULT_SLACK_WEBHOOK=""
DEFAULT_MONITORING_INTERVAL=60
# Colors and formatting
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
PURPLE='\033[0;35m'
CYAN='\033[0;36m'
BOLD='\033[1m'
NC='\033[0m'
# Performance metrics
# Per-invocation in-memory counters; they reset on every run and are folded
# into archive metadata by advanced_archive.
declare -A METRICS=(
["archives_created"]=0
["logs_rotated"]=0
["alerts_sent"]=0
["errors_detected"]=0
["corruption_found"]=0
["performance_issues"]=0
)
# Initialize configuration
# On first run, write a default config file (the unquoted heredoc expands
# the DEFAULT_* values at creation time, freezing them into the file), then
# source it into the current shell so its keys become globals.
# NOTE(review): the config file is executed as shell code via `source` —
# it must be treated as trusted input.
init_config() {
if [[ ! -f "$CONFIG_FILE" ]]; then
mkdir -p "$(dirname "$CONFIG_FILE")"
cat > "$CONFIG_FILE" << EOF
# MEV Bot Log Manager Configuration
RETENTION_DAYS=${DEFAULT_RETENTION_DAYS}
ARCHIVE_SIZE_LIMIT=${DEFAULT_ARCHIVE_SIZE_LIMIT}
LOG_SIZE_LIMIT=${DEFAULT_LOG_SIZE_LIMIT}
ERROR_THRESHOLD=${DEFAULT_ERROR_THRESHOLD}
ALERT_EMAIL=${DEFAULT_ALERT_EMAIL}
SLACK_WEBHOOK=${DEFAULT_SLACK_WEBHOOK}
MONITORING_INTERVAL=${DEFAULT_MONITORING_INTERVAL}
AUTO_ROTATE=true
AUTO_ANALYZE=true
AUTO_ALERT=true
COMPRESS_LEVEL=9
HEALTH_CHECK_ENABLED=true
PERFORMANCE_TRACKING=true
EOF
# NOTE(review): log() appends under $LOGS_DIR — confirm that directory
# exists before init_config runs (setup_directories may not have run yet).
log "Created default configuration: $CONFIG_FILE"
fi
# Load (and on later runs, override defaults with) the config file.
source "$CONFIG_FILE"
}
# Logging helpers, one per severity level.
# log: emit an INFO-level line to stdout and append it to the manager log.
log() {
  local stamp
  stamp=$(date +'%Y-%m-%d %H:%M:%S')
  echo -e "${GREEN}[${stamp}] INFO:${NC} $1" | tee -a "$LOGS_DIR/log-manager.log"
}
# warn: emit a WARN-level line to stdout and append it to the manager log.
warn() {
  local stamp
  stamp=$(date +'%Y-%m-%d %H:%M:%S')
  echo -e "${YELLOW}[${stamp}] WARN:${NC} $1" | tee -a "$LOGS_DIR/log-manager.log"
}
# error: emit an ERROR-level line to stdout and the manager log, and count it.
# FIX: the metric is bumped with a plain arithmetic assignment instead of
# ((METRICS[...]++)) — (( expr )) returns status 1 when the expression
# evaluates to 0, so under `set -euo pipefail` the post-increment from 0
# aborted the whole script the very first time an error was logged.
error() {
  echo -e "${RED}[$(date +'%Y-%m-%d %H:%M:%S')] ERROR:${NC} $1" | tee -a "$LOGS_DIR/log-manager.log"
  METRICS["errors_detected"]=$(( ${METRICS["errors_detected"]} + 1 ))
}
# success: emit a SUCCESS-level line to stdout and append it to the manager log.
success() {
  local stamp
  stamp=$(date +'%Y-%m-%d %H:%M:%S')
  echo -e "${GREEN}[${stamp}] SUCCESS:${NC} $1" | tee -a "$LOGS_DIR/log-manager.log"
}
# debug: emit a DEBUG-level line, but only when DEBUG=true in the environment.
debug() {
  [[ "${DEBUG:-false}" == "true" ]] || return 0
  local stamp
  stamp=$(date +'%Y-%m-%d %H:%M:%S')
  echo -e "${CYAN}[${stamp}] DEBUG:${NC} $1" | tee -a "$LOGS_DIR/log-manager.log"
}
# Ensure every runtime directory used by the manager exists, creating any
# that are missing (archives, analytics, alerts, rotated logs, health).
setup_directories() {
  local dir
  for dir in "$ARCHIVE_DIR" "$ANALYTICS_DIR" "$ALERTS_DIR" \
             "$LOGS_DIR/rotated" "$LOGS_DIR/health"; do
    [[ -d "$dir" ]] && continue
    mkdir -p "$dir"
    debug "Created directory: $dir"
  done
}
# Enhanced log rotation with size and time-based triggers.
# A top-level *.log is rotated when it exceeds LOG_SIZE_LIMIT or is older
# than 24h: it is moved to $LOGS_DIR/rotated with a timestamp suffix,
# gzipped, and an empty file is recreated in its place.
# Updates METRICS[logs_rotated] with the number of files rotated.
rotate_logs() {
  log "Starting intelligent log rotation..."
  local rotated_count=0
  local timestamp
  timestamp=$(date +"%Y%m%d_%H%M%S")
  # Find logs that need rotation (NUL-delimited to survive odd filenames)
  while IFS= read -r -d '' logfile; do
    local filename=$(basename "$logfile")
    local size=$(stat -c%s "$logfile" 2>/dev/null || echo 0)
    local size_mb=$((size / 1024 / 1024))
    # Check if rotation is needed (size > limit or age > 24h)
    local needs_rotation=false
    if [[ $size -gt $(numfmt --from=iec "${LOG_SIZE_LIMIT}") ]]; then
      needs_rotation=true
      debug "Log $filename needs rotation: size ${size_mb}MB exceeds limit"
    fi
    if [[ $(find "$logfile" -mtime +0 -print 2>/dev/null) ]]; then
      needs_rotation=true
      debug "Log $filename needs rotation: older than 24 hours"
    fi
    if [[ "$needs_rotation" == "true" ]]; then
      local rotated_name="${filename%.log}_${timestamp}.log"
      mv "$logfile" "$LOGS_DIR/rotated/$rotated_name"
      gzip "$LOGS_DIR/rotated/$rotated_name"
      touch "$logfile" # Create fresh log file
      # FIX: plain assignment instead of ((rotated_count++)) — (( )) exits
      # with status 1 when the pre-increment value is 0, which killed the
      # script under `set -e` on the very first rotation.
      rotated_count=$(( rotated_count + 1 ))
      log "Rotated $filename -> ${rotated_name}.gz (${size_mb}MB)"
    fi
  done < <(find "$LOGS_DIR" -maxdepth 1 -name "*.log" -type f -print0)
  METRICS["logs_rotated"]=$rotated_count
  success "Log rotation completed: $rotated_count files rotated"
}
# Real-time log analysis with pattern detection.
# Scans $LOGS_DIR/mev_bot.log, derives health/error/success rates plus
# MEV-specific counters, writes a JSON report into $ANALYTICS_DIR, and
# hands that report to check_alert_conditions. Requires bc for the rate
# math (falls back to 0/100 when absent). Returns 1 if the log is missing.
analyze_logs() {
  log "Starting comprehensive log analysis..."
  local analysis_file="$ANALYTICS_DIR/analysis_$(date +%Y%m%d_%H%M%S).json"
  local main_log="$LOGS_DIR/mev_bot.log"
  if [[ ! -f "$main_log" ]]; then
    warn "Main log file not found: $main_log"
    return 1
  fi
  # FIX: the counters below use `|| true` rather than `|| echo 0`.
  # grep -c already prints "0" (while exiting 1) when nothing matches, so
  # the old fallback produced the two-line value "0\n0" and corrupted the
  # numeric fields of the generated JSON.
  # Performance metrics extraction
  local total_lines=$(wc -l < "$main_log")
  local error_lines=$(grep -c "ERROR" "$main_log" || true)
  local warn_lines=$(grep -c "WARN" "$main_log" || true)
  local success_lines=$(grep -c "SUCCESS\|✅" "$main_log" || true)
  # MEV-specific metrics
  local opportunities=$(grep -c "opportunity" "$main_log" || true)
  local rejections=$(grep -c "REJECTED" "$main_log" || true)
  local parsing_failures=$(grep -c "PARSING FAILED" "$main_log" || true)
  local direct_parsing=$(grep -c "DIRECT PARSING" "$main_log" || true)
  # Transaction processing metrics
  local blocks_processed=$(grep -c "Block.*Processing.*transactions" "$main_log" || true)
  local dex_transactions=$(grep -c "DEX transactions" "$main_log" || true)
  # Error pattern analysis
  local zero_address_issues=$(grep -c "zero.*address" "$main_log" || true)
  local connection_errors=$(grep -c "connection.*failed\|context.*canceled" "$main_log" || true)
  local timeout_errors=$(grep -c "timeout\|deadline exceeded" "$main_log" || true)
  # Performance trending (last 1000 lines for recent activity)
  local recent_errors=$(tail -1000 "$main_log" | grep -c "ERROR" || true)
  local recent_success=$(tail -1000 "$main_log" | grep -c "SUCCESS" || true)
  # Calculate rates and health scores (bc prints nothing on failure, so the
  # single-value `|| echo` fallbacks here are safe)
  local error_rate=$(echo "scale=2; $error_lines * 100 / $total_lines" | bc -l 2>/dev/null || echo 0)
  local success_rate=$(echo "scale=2; $success_lines * 100 / $total_lines" | bc -l 2>/dev/null || echo 0)
  local health_score=$(echo "scale=0; 100 - $error_rate" | bc -l 2>/dev/null || echo 100)
  # Generate comprehensive analysis
  cat > "$analysis_file" << EOF
{
  "analysis_timestamp": "$(date -Iseconds)",
  "log_file": "$main_log",
  "system_info": {
    "hostname": "$(hostname)",
    "uptime": "$(uptime -p 2>/dev/null || echo 'unknown')",
    "load_average": "$(uptime | awk -F'load average:' '{print $2}' | xargs)"
  },
  "log_statistics": {
    "total_lines": $total_lines,
    "file_size_mb": $(echo "scale=2; $(stat -c%s "$main_log") / 1024 / 1024" | bc -l),
    "error_lines": $error_lines,
    "warning_lines": $warn_lines,
    "success_lines": $success_lines,
    "error_rate_percent": $error_rate,
    "success_rate_percent": $success_rate,
    "health_score": $health_score
  },
  "mev_metrics": {
    "opportunities_detected": $opportunities,
    "events_rejected": $rejections,
    "parsing_failures": $parsing_failures,
    "direct_parsing_attempts": $direct_parsing,
    "blocks_processed": $blocks_processed,
    "dex_transactions": $dex_transactions
  },
  "error_patterns": {
    "zero_address_issues": $zero_address_issues,
    "connection_errors": $connection_errors,
    "timeout_errors": $timeout_errors
  },
  "recent_activity": {
    "recent_errors": $recent_errors,
    "recent_success": $recent_success,
    "recent_health_trend": "$([ -n "${recent_errors}" ] && [ "${recent_errors}" -lt 10 ] 2>/dev/null && echo good || echo concerning)"
  },
  "alerts_triggered": []
}
EOF
  # Check for alert conditions
  check_alert_conditions "$analysis_file"
  success "Log analysis completed: $analysis_file"
  echo -e "${BLUE}Health Score: $health_score/100${NC} | Error Rate: ${error_rate}% | Success Rate: ${success_rate}%"
}
# Alert system with multiple notification channels
# Reads the JSON report produced by analyze_logs ($1), fires send_alert for
# each breached threshold, and records the triggered alerts back into the
# report. Requires jq (field extraction) and bc (float comparison); when
# either is missing the defaults resolve to "no alert" values.
check_alert_conditions() {
local analysis_file="$1"
local alerts_triggered=()
# Read analysis data
local error_rate=$(jq -r '.log_statistics.error_rate_percent' "$analysis_file" 2>/dev/null || echo 0)
local health_score=$(jq -r '.log_statistics.health_score' "$analysis_file" 2>/dev/null || echo 100)
local parsing_failures=$(jq -r '.mev_metrics.parsing_failures' "$analysis_file" 2>/dev/null || echo 0)
local zero_address_issues=$(jq -r '.error_patterns.zero_address_issues' "$analysis_file" 2>/dev/null || echo 0)
# Define alert conditions (thresholds are hardcoded here, not read from
# the config file's ERROR_THRESHOLD — NOTE(review): confirm intended)
if (( $(echo "$error_rate > 10" | bc -l) )); then
alerts_triggered+=("HIGH_ERROR_RATE:$error_rate%")
send_alert "High Error Rate" "Error rate is $error_rate%, exceeding 10% threshold"
fi
if (( $(echo "$health_score < 80" | bc -l) )); then
alerts_triggered+=("LOW_HEALTH_SCORE:$health_score")
send_alert "Low Health Score" "System health score is $health_score/100, below 80 threshold"
fi
if (( parsing_failures > 50 )); then
alerts_triggered+=("PARSING_FAILURES:$parsing_failures")
send_alert "High Parsing Failures" "$parsing_failures parsing failures detected"
fi
if (( zero_address_issues > 100 )); then
alerts_triggered+=("ZERO_ADDRESS_CORRUPTION:$zero_address_issues")
send_alert "Address Corruption" "$zero_address_issues zero address issues detected"
fi
# Update analysis file with alerts (rewrite via temp file so a jq failure
# does not truncate the report)
if [[ ${#alerts_triggered[@]} -gt 0 ]]; then
local alerts_json=$(printf '%s\n' "${alerts_triggered[@]}" | jq -R . | jq -s .)
jq ".alerts_triggered = $alerts_json" "$analysis_file" > "${analysis_file}.tmp" && mv "${analysis_file}.tmp" "$analysis_file"
# NOTE(review): alerts_sent is overwritten with this run's count rather
# than accumulated across calls — confirm intended.
METRICS["alerts_sent"]=${#alerts_triggered[@]}
fi
}
# Multi-channel alert delivery
# Persists an alert record as JSON under $ALERTS_DIR, logs it via error(),
# and best-effort delivers it by email (if ALERT_EMAIL is set and `mail`
# exists) and Slack webhook (if SLACK_WEBHOOK is set and `curl` exists).
# Arguments: $1 - alert title, $2 - alert message.
# NOTE(review): $title/$message are interpolated into the JSON and the
# Slack payload unescaped — a double quote in either would produce invalid
# JSON. Confirm callers only pass controlled strings.
send_alert() {
local title="$1"
local message="$2"
local timestamp=$(date -Iseconds)
local alert_file="$ALERTS_DIR/alert_$(date +%Y%m%d_%H%M%S).json"
# Create alert record (severity is fixed at "warning")
cat > "$alert_file" << EOF
{
  "timestamp": "$timestamp",
  "title": "$title",
  "message": "$message",
  "hostname": "$(hostname)",
  "severity": "warning",
  "system_load": "$(uptime | awk -F'load average:' '{print $2}' | xargs)",
  "disk_usage": "$(df -h $LOGS_DIR | tail -1 | awk '{print $5}')"
}
EOF
error "ALERT: $title - $message"
# Email notification (best-effort; skipped when unconfigured)
if [[ -n "${ALERT_EMAIL:-}" ]] && command -v mail >/dev/null 2>&1; then
echo "MEV Bot Alert: $title - $message ($(hostname) at $timestamp)" | mail -s "MEV Bot Alert: $title" "$ALERT_EMAIL"
fi
# Slack notification (best-effort; failures deliberately ignored)
if [[ -n "${SLACK_WEBHOOK:-}" ]] && command -v curl >/dev/null 2>&1; then
curl -X POST -H 'Content-type: application/json' \
--data "{\"text\":\"🚨 MEV Bot Alert: $title\n$message\nHost: $(hostname)\nTime: $timestamp\"}" \
"$SLACK_WEBHOOK" >/dev/null 2>&1 || true
fi
}
# Log corruption detection and health checks.
# Inspects every top-level log for readability, truncation, NUL bytes, and
# encoding problems; checks disk usage and archive integrity; then writes
# a JSON health report under $LOGS_DIR/health and echoes its path.
health_check() {
  log "Running comprehensive health checks..."
  local health_report="$LOGS_DIR/health/health_$(date +%Y%m%d_%H%M%S).json"
  local issues=()
  # Check log file integrity
  while IFS= read -r -d '' logfile; do
    if [[ ! -r "$logfile" ]]; then
      issues+=("UNREADABLE_LOG:$(basename "$logfile")")
      continue
    fi
    # Check for truncated logs (file not ending in a newline)
    if [[ $(tail -c 1 "$logfile" | wc -l) -eq 0 ]]; then
      issues+=("TRUNCATED_LOG:$(basename "$logfile")")
    fi
    # Check for corruption patterns
    if grep -q "\x00" "$logfile" 2>/dev/null; then
      issues+=("NULL_BYTES:$(basename "$logfile")")
      # FIX: plain assignment instead of ((METRICS[...]++)) — (( )) exits
      # with status 1 when the value was 0, aborting under `set -e`.
      METRICS["corruption_found"]=$(( ${METRICS["corruption_found"]} + 1 ))
    fi
    # Check for encoding issues
    if ! file "$logfile" | grep -q "text"; then
      issues+=("ENCODING_ISSUE:$(basename "$logfile")")
    fi
  done < <(find "$LOGS_DIR" -maxdepth 1 -name "*.log" -type f -print0)
  # Check disk space
  local disk_usage=$(df "$LOGS_DIR" | tail -1 | awk '{print $5}' | sed 's/%//')
  if (( disk_usage > 90 )); then
    issues+=("HIGH_DISK_USAGE:${disk_usage}%")
    send_alert "High Disk Usage" "Log directory is ${disk_usage}% full"
  fi
  # Check archive integrity
  while IFS= read -r -d '' archive; do
    if ! tar -tzf "$archive" >/dev/null 2>&1; then
      issues+=("CORRUPTED_ARCHIVE:$(basename "$archive")")
      METRICS["corruption_found"]=$(( ${METRICS["corruption_found"]} + 1 ))
    fi
  done < <(find "$ARCHIVE_DIR" -name "*.tar.gz" -type f -print0 2>/dev/null)
  # Generate health report
  local health_status="healthy"
  if [[ ${#issues[@]} -gt 0 ]]; then
    health_status="issues_detected"
  fi
  # NOTE: expanding "${issues[@]}" while empty requires bash >= 4.4 under
  # set -u; with no issues the "issues" field renders as [""] rather than [].
  cat > "$health_report" << EOF
{
  "timestamp": "$(date -Iseconds)",
  "status": "$health_status",
  "issues_count": ${#issues[@]},
  "issues": $(printf '%s\n' "${issues[@]}" | jq -R . | jq -s . 2>/dev/null || echo '[]'),
  "disk_usage_percent": $disk_usage,
  "log_files_count": $(find "$LOGS_DIR" -maxdepth 1 -name "*.log" -type f | wc -l),
  "archive_files_count": $(find "$ARCHIVE_DIR" -name "*.tar.gz" -type f 2>/dev/null | wc -l),
  "total_log_size_mb": $(du -sm "$LOGS_DIR" | cut -f1),
  "system_load": "$(uptime | awk -F'load average:' '{print $2}' | xargs)"
}
EOF
  if [[ ${#issues[@]} -eq 0 ]]; then
    success "Health check passed: No issues detected"
  else
    warn "Health check found ${#issues[@]} issues: ${issues[*]}"
  fi
  echo "$health_report"
}
# Performance monitoring with trending.
# Samples system CPU/memory/load plus MEV-bot process stats and the recent
# log rate, writes a JSON snapshot into $ANALYTICS_DIR, and raises alerts
# when CPU > 80% or memory > 85%.
monitor_performance() {
  log "Monitoring system performance..."
  local perf_file="$ANALYTICS_DIR/performance_$(date +%Y%m%d_%H%M%S).json"
  # System metrics
  # NOTE(review): this `top` parsing is locale/format dependent — confirm
  # it yields a bare number on the target distro.
  local cpu_usage=$(top -bn1 | grep "Cpu(s)" | awk '{print $2}' | sed 's/%us,//')
  local memory_usage=$(free | grep Mem | awk '{printf("%.1f", $3/$2 * 100.0)}')
  local load_avg=$(uptime | awk -F'load average:' '{print $2}' | awk '{print $1}' | sed 's/,//')
  # MEV bot specific metrics
  local mev_processes=$(pgrep -f mev-bot | wc -l)
  local mev_memory=0
  if [[ $mev_processes -gt 0 ]]; then
    mev_memory=$(pgrep -f mev-bot | xargs ps -o pid,rss --no-headers | awk '{sum+=$2} END {print sum/1024}' 2>/dev/null || echo 0)
  fi
  # Log processing rate: lines stamped with the current minute among the
  # last 100 lines of the main log
  local log_lines_per_min=0
  if [[ -f "$LOGS_DIR/mev_bot.log" ]]; then
    log_lines_per_min=$(tail -100 "$LOGS_DIR/mev_bot.log" | grep "$(date '+%Y/%m/%d %H:%M')" | wc -l || echo 0)
  fi
  cat > "$perf_file" << EOF
{
  "timestamp": "$(date -Iseconds)",
  "system_metrics": {
    "cpu_usage_percent": $cpu_usage,
    "memory_usage_percent": $memory_usage,
    "load_average": $load_avg,
    "uptime_seconds": $(awk '{print int($1)}' /proc/uptime)
  },
  "mev_bot_metrics": {
    "process_count": $mev_processes,
    "memory_usage_mb": $mev_memory,
    "log_rate_lines_per_min": $log_lines_per_min
  },
  "log_metrics": {
    "total_log_size_mb": $(du -sm "$LOGS_DIR" | cut -f1),
    "archive_size_mb": $(du -sm "$ARCHIVE_DIR" 2>/dev/null | cut -f1 || echo 0),
    "active_log_files": $(find "$LOGS_DIR" -maxdepth 1 -name "*.log" -type f | wc -l)
  }
}
EOF
  # Check for performance issues
  if (( $(echo "$cpu_usage > 80" | bc -l) )); then
    # FIX: plain assignment instead of ((METRICS[...]++)) — (( )) exits
    # with status 1 when the value was 0, aborting under `set -e`.
    METRICS["performance_issues"]=$(( ${METRICS["performance_issues"]} + 1 ))
    send_alert "High CPU Usage" "CPU usage is ${cpu_usage}%"
  fi
  if (( $(echo "$memory_usage > 85" | bc -l) )); then
    METRICS["performance_issues"]=$(( ${METRICS["performance_issues"]} + 1 ))
    send_alert "High Memory Usage" "Memory usage is ${memory_usage}%"
  fi
  debug "Performance monitoring completed: $perf_file"
}
# Advanced archiving with compression optimization
# Snapshots all live logs, rotated logs, analytics, and alerts into a
# timestamped, gzip-compressed tarball under $ARCHIVE_DIR, embedding a
# metadata JSON; verifies the archive before keeping it and updates the
# latest_archive.tar.gz symlink.
advanced_archive() {
log "Starting advanced archive process..."
local timestamp=$(date +"%Y%m%d_%H%M%S")
local archive_name="mev_logs_${timestamp}"
local temp_dir="$ARCHIVE_DIR/.tmp_$archive_name"
mkdir -p "$temp_dir"
# Copy logs with metadata preservation (-p keeps mode/ownership/timestamps)
find "$LOGS_DIR" -maxdepth 1 -name "*.log" -type f -exec cp -p {} "$temp_dir/" \;
# Copy rotated logs
if [[ -d "$LOGS_DIR/rotated" ]]; then
cp -r "$LOGS_DIR/rotated" "$temp_dir/"
fi
# Copy analytics and health data
if [[ -d "$ANALYTICS_DIR" ]]; then
cp -r "$ANALYTICS_DIR" "$temp_dir/"
fi
if [[ -d "$ALERTS_DIR" ]]; then
cp -r "$ALERTS_DIR" "$temp_dir/"
fi
# Generate comprehensive metadata
cat > "$temp_dir/archive_metadata.json" << EOF
{
  "archive_info": {
    "timestamp": "$(date -Iseconds)",
    "archive_name": "$archive_name",
    "created_by": "$(whoami)",
    "hostname": "$(hostname)",
    "mev_bot_version": "$(git rev-parse HEAD 2>/dev/null || echo 'unknown')",
    "git_branch": "$(git rev-parse --abbrev-ref HEAD 2>/dev/null || echo 'unknown')",
    "compression_level": ${COMPRESS_LEVEL:-9}
  },
  "system_snapshot": {
    "os": "$(uname -s)",
    "kernel": "$(uname -r)",
    "architecture": "$(uname -m)",
    "uptime": "$(uptime -p 2>/dev/null || echo 'unknown')",
    "load_average": "$(uptime | awk -F'load average:' '{print $2}' | xargs)",
    "memory_total_gb": $(echo "scale=2; $(grep MemTotal /proc/meminfo | awk '{print $2}') / 1024 / 1024" | bc -l),
    "disk_space_logs": "$(df -h $LOGS_DIR | tail -1 | awk '{print $4}')"
  },
  "content_summary": {
    "total_files": $(find "$temp_dir" -type f | wc -l),
    "total_size_bytes": $(find "$temp_dir" -type f -exec stat -c%s {} + | awk '{sum+=$1} END {print sum+0}'),
    "log_files": $(find "$temp_dir" -name "*.log" | wc -l),
    "compressed_files": $(find "$temp_dir" -name "*.gz" | wc -l)
  },
  "metrics": $(echo "${METRICS[@]}" | tr ' ' '\n' | awk -F= '{print "\"" $1 "\":" $2}' | paste -sd, | sed 's/^/{/' | sed 's/$/}/')
}
EOF
# NOTE(review): the "metrics" pipeline above expands ${METRICS[@]} — the
# VALUES only, with no keys or '=' signs — so awk -F= cannot reconstruct
# key:value pairs and the field is not valid JSON. Iterating "${!METRICS[@]}"
# is likely what was intended; confirm and fix.
# Create optimized archive
# NOTE: this cd changes the working directory for the rest of the script.
cd "$ARCHIVE_DIR"
tar -cf "${archive_name}.tar.gz" --use-compress-program="gzip -${COMPRESS_LEVEL:-9}" -C "$(dirname "$temp_dir")" "$(basename "$temp_dir")"
# Verify archive integrity
if tar -tzf "${archive_name}.tar.gz" >/dev/null 2>&1; then
local archive_size=$(stat -c%s "${archive_name}.tar.gz" | numfmt --to=iec)
success "Archive created successfully: ${archive_name}.tar.gz ($archive_size)"
# Update symlink
ln -sf "${archive_name}.tar.gz" "latest_archive.tar.gz"
# Cleanup temp directory
rm -rf "$temp_dir"
# NOTE(review): ((METRICS[...]++)) returns status 1 when the metric is 0,
# which aborts the script here under `set -e` on the first archive —
# confirm and replace with a plain arithmetic assignment.
((METRICS["archives_created"]++))
else
error "Archive verification failed: ${archive_name}.tar.gz"
rm -f "${archive_name}.tar.gz"
return 1
fi
}
# Cleanup with advanced retention policies.
# First deletes archives older than RETENTION_DAYS, then trims the oldest
# archives until the directory fits under ARCHIVE_SIZE_LIMIT (always
# keeping at least one), and finally prunes aged analytics/alert/health
# JSON files.
intelligent_cleanup() {
  log "Starting intelligent cleanup with retention policies..."
  local deleted_archives=0
  local deleted_size=0
  # Archive retention by age
  while IFS= read -r -d '' archive; do
    local size=$(stat -c%s "$archive")
    rm "$archive"
    # FIX: plain assignments instead of ((x++)) — (( )) exits with status 1
    # when the pre-increment value is 0, aborting under `set -e` on the
    # first deletion.
    deleted_archives=$(( deleted_archives + 1 ))
    deleted_size=$(( deleted_size + size ))
    debug "Deleted old archive: $(basename "$archive")"
  done < <(find "$ARCHIVE_DIR" -name "mev_logs_*.tar.gz" -mtime +${RETENTION_DAYS} -print0 2>/dev/null)
  # Size-based cleanup if total exceeds limit
  local total_size=$(du -sb "$ARCHIVE_DIR" 2>/dev/null | cut -f1 || echo 0)
  local size_limit=$(numfmt --from=iec "${ARCHIVE_SIZE_LIMIT}")
  if [[ $total_size -gt $size_limit ]]; then
    warn "Archive directory exceeds size limit, cleaning oldest archives..."
    while [[ $total_size -gt $size_limit ]] && [[ $(find "$ARCHIVE_DIR" -name "mev_logs_*.tar.gz" | wc -l) -gt 1 ]]; do
      local oldest=$(find "$ARCHIVE_DIR" -name "mev_logs_*.tar.gz" -printf '%T+ %p\n' | sort | head -1 | cut -d' ' -f2)
      if [[ -f "$oldest" ]]; then
        local size=$(stat -c%s "$oldest")
        rm "$oldest"
        deleted_archives=$(( deleted_archives + 1 ))
        deleted_size=$(( deleted_size + size ))
        total_size=$(( total_size - size ))
        debug "Deleted for size limit: $(basename "$oldest")"
      fi
    done
  fi
  # Cleanup analytics and alerts older than retention period
  find "$ANALYTICS_DIR" -name "*.json" -mtime +${RETENTION_DAYS} -delete 2>/dev/null || true
  find "$ALERTS_DIR" -name "*.json" -mtime +${RETENTION_DAYS} -delete 2>/dev/null || true
  find "$LOGS_DIR/health" -name "*.json" -mtime +${RETENTION_DAYS} -delete 2>/dev/null || true
  if [[ $deleted_archives -gt 0 ]]; then
    local deleted_size_human=$(echo $deleted_size | numfmt --to=iec)
    success "Cleanup completed: $deleted_archives archives deleted ($deleted_size_human freed)"
  else
    log "Cleanup completed: No files needed deletion"
  fi
}
# Real-time monitoring daemon
# Launches a background loop that, every MONITORING_INTERVAL seconds,
# optionally runs health checks, performance sampling, size-triggered log
# rotation, and log analysis, according to the HEALTH_CHECK_ENABLED /
# PERFORMANCE_TRACKING / AUTO_ROTATE / AUTO_ANALYZE config flags.
# Refuses to start (returns 1) when a live daemon PID is already recorded.
start_monitoring() {
log "Starting real-time monitoring daemon..."
local monitor_pid_file="$LOGS_DIR/.monitor.pid"
if [[ -f "$monitor_pid_file" ]] && kill -0 $(cat "$monitor_pid_file") 2>/dev/null; then
warn "Monitoring daemon already running (PID: $(cat "$monitor_pid_file"))"
return 1
fi
# Background monitoring loop
# NOTE(review): the `echo $$` below writes the PARENT's PID ($$ does not
# change in subshells; BASHPID would) and is immediately overwritten with
# $! after the fork — the inner write appears redundant; confirm and drop.
(
echo $$ > "$monitor_pid_file"
while true; do
sleep "${MONITORING_INTERVAL}"
# Quick health check
if [[ "${HEALTH_CHECK_ENABLED}" == "true" ]]; then
health_check >/dev/null 2>&1
fi
# Performance monitoring
if [[ "${PERFORMANCE_TRACKING}" == "true" ]]; then
monitor_performance >/dev/null 2>&1
fi
# Auto-rotation check (find's -size suffix semantics, not numfmt's)
if [[ "${AUTO_ROTATE}" == "true" ]]; then
local needs_rotation=$(find "$LOGS_DIR" -maxdepth 1 -name "*.log" -size +${LOG_SIZE_LIMIT} | wc -l)
if [[ $needs_rotation -gt 0 ]]; then
rotate_logs >/dev/null 2>&1
fi
fi
# Auto-analysis
if [[ "${AUTO_ANALYZE}" == "true" ]]; then
analyze_logs >/dev/null 2>&1
fi
done
) &
local daemon_pid=$!
echo "$daemon_pid" > "$monitor_pid_file"
success "Monitoring daemon started (PID: $daemon_pid, interval: ${MONITORING_INTERVAL}s)"
}
# Stop the background monitoring daemon.
# Reads the PID recorded by start_monitoring, signals it if still alive, and
# removes the PID file (including stale files left by a crashed daemon).
stop_monitoring() {
    local pid_file="$LOGS_DIR/.monitor.pid"

    # No PID file -> nothing was started (or it was already stopped cleanly).
    if [[ ! -f "$pid_file" ]]; then
        warn "Monitoring daemon not running"
        return
    fi

    local recorded_pid
    recorded_pid=$(cat "$pid_file")

    if kill -0 "$recorded_pid" 2>/dev/null; then
        # Daemon is alive: terminate it and discard its PID file.
        kill "$recorded_pid"
        rm "$pid_file"
        success "Monitoring daemon stopped (PID: $recorded_pid)"
    else
        # PID file left behind by a daemon that already exited.
        warn "Monitoring daemon not running (stale PID file)"
        rm "$pid_file"
    fi
}
# Dashboard generation.
# Renders a static HTML operations dashboard into $ANALYTICS_DIR, combining
# the latest analysis JSON (health score, error rate, MEV opportunities) with
# an HTML-escaped tail of the main log. Prints the generated file path.
generate_dashboard() {
    log "Generating operational dashboard..."
    local dashboard_file
    dashboard_file="$ANALYTICS_DIR/dashboard_$(date +%Y%m%d_%H%M%S).html"
    # Most recent analysis report feeds the metric cards below.
    local latest_analysis
    latest_analysis=$(find "$ANALYTICS_DIR" -name "analysis_*.json" -type f | sort | tail -1)

    # BUG FIX: this heredoc previously used a quoted delimiter ('EOF'), so the
    # "Generated: $(date)" line was written out literally instead of the
    # current timestamp. The template contains no other $ or backtick
    # characters, so an unquoted delimiter is safe and lets $(date) expand.
    cat > "$dashboard_file" << EOF
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>MEV Bot Operations Dashboard</title>
<style>
body { font-family: 'Segoe UI', Arial, sans-serif; margin: 0; padding: 20px; background: #f5f5f5; }
.dashboard { max-width: 1200px; margin: 0 auto; }
.header { background: linear-gradient(135deg, #667eea 0%, #764ba2 100%); color: white; padding: 20px; border-radius: 10px; margin-bottom: 20px; }
.metrics-grid { display: grid; grid-template-columns: repeat(auto-fit, minmax(250px, 1fr)); gap: 20px; margin-bottom: 20px; }
.metric-card { background: white; border-radius: 10px; padding: 20px; box-shadow: 0 2px 10px rgba(0,0,0,0.1); }
.metric-value { font-size: 2em; font-weight: bold; margin: 10px 0; }
.metric-label { color: #666; font-size: 0.9em; }
.status-good { color: #28a745; }
.status-warning { color: #ffc107; }
.status-error { color: #dc3545; }
.chart-container { background: white; border-radius: 10px; padding: 20px; margin-bottom: 20px; }
.log-preview { background: #1e1e1e; color: #fff; padding: 15px; border-radius: 5px; font-family: 'Courier New', monospace; font-size: 0.8em; max-height: 300px; overflow-y: auto; }
.timestamp { color: #888; font-size: 0.8em; }
</style>
</head>
<body>
<div class="dashboard">
<div class="header">
<h1>MEV Bot Operations Dashboard</h1>
<p class="timestamp">Generated: $(date)</p>
</div>
EOF

    # Add metric cards only when an analysis report exists; jq failures (or a
    # missing jq) degrade to 0 rather than aborting dashboard generation.
    if [[ -f "$latest_analysis" ]]; then
        local health_score error_rate opportunities
        health_score=$(jq -r '.log_statistics.health_score' "$latest_analysis" 2>/dev/null || echo 0)
        error_rate=$(jq -r '.log_statistics.error_rate_percent' "$latest_analysis" 2>/dev/null || echo 0)
        opportunities=$(jq -r '.mev_metrics.opportunities_detected' "$latest_analysis" 2>/dev/null || echo 0)
        # NOTE(review): the status colouring relies on `bc` being installed —
        # confirm on the deployment image.
        cat >> "$dashboard_file" << EOF
<div class="metrics-grid">
<div class="metric-card">
<div class="metric-label">System Health Score</div>
<div class="metric-value status-$([ $(echo "$health_score > 80" | bc -l) -eq 1 ] && echo 'good' || echo 'warning')">${health_score}/100</div>
</div>
<div class="metric-card">
<div class="metric-label">Error Rate</div>
<div class="metric-value status-$([ $(echo "$error_rate < 5" | bc -l) -eq 1 ] && echo 'good' || echo 'warning')">${error_rate}%</div>
</div>
<div class="metric-card">
<div class="metric-label">MEV Opportunities</div>
<div class="metric-value status-good">${opportunities}</div>
</div>
</div>
EOF
    fi

    # Recent log entries, HTML-escaped for safe embedding.
    # BUG FIX: the old inline `... | sed ... || echo 'No recent log activity'`
    # fallback could never fire — the pipeline's status is sed's, which
    # succeeds even when the log file is missing. Compute the preview first
    # and substitute the placeholder only when it is genuinely empty.
    local recent_logs
    recent_logs=$(tail -20 "$LOGS_DIR/mev_bot.log" 2>/dev/null | sed 's/&/\&amp;/g; s/</\&lt;/g; s/>/\&gt;/g')
    [[ -n "$recent_logs" ]] || recent_logs='No recent log activity'
    cat >> "$dashboard_file" << EOF
<div class="chart-container">
<h3>Recent Log Activity</h3>
<div class="log-preview">${recent_logs}</div>
</div>
</div>
</body>
</html>
EOF

    success "Dashboard generated: $dashboard_file"
    echo "$dashboard_file"
}
# Main command dispatcher.
# Routes the first CLI argument (default: help) to the matching maintenance
# task. Every task except stop-daemon and the help text first loads the
# config and ensures the working directories exist; `init` deliberately
# creates the directories before writing the initial config.
main() {
    local cmd="${1:-help}"

    case "$cmd" in
        init)         setup_directories; init_config; success "Log manager initialized" ;;
        rotate)       init_config; setup_directories; rotate_logs ;;
        analyze)      init_config; setup_directories; analyze_logs ;;
        archive)      init_config; setup_directories; advanced_archive ;;
        health)       init_config; setup_directories; health_check ;;
        monitor)      init_config; setup_directories; monitor_performance ;;
        cleanup)      init_config; setup_directories; intelligent_cleanup ;;
        start-daemon) init_config; setup_directories; start_monitoring ;;
        stop-daemon)  stop_monitoring ;;
        dashboard)    init_config; setup_directories; generate_dashboard ;;
        full)
            # Complete maintenance cycle, in dependency order.
            init_config
            setup_directories
            rotate_logs
            analyze_logs
            health_check
            monitor_performance
            advanced_archive
            intelligent_cleanup
            generate_dashboard
            ;;
        status)
            # Read-only summary of the current log-management state.
            init_config
            echo -e "${BOLD}MEV Bot Log Manager Status${NC}"
            echo "Configuration: $CONFIG_FILE"
            echo "Monitoring: $([ -f "$LOGS_DIR/.monitor.pid" ] && echo "Running (PID: $(cat "$LOGS_DIR/.monitor.pid"))" || echo "Stopped")"
            echo "Archives: $(find "$ARCHIVE_DIR" -name "*.tar.gz" 2>/dev/null | wc -l) files"
            echo "Total archive size: $(du -sh "$ARCHIVE_DIR" 2>/dev/null | cut -f1 || echo "0")"
            echo "Log directory size: $(du -sh "$LOGS_DIR" | cut -f1)"
            ;;
        *)
            # Usage text — shown for "help" and any unknown command alike.
            cat << EOF
MEV Bot Production Log Manager
USAGE:
$0 <command> [options]
COMMANDS:
init Initialize log manager with directories and config
rotate Rotate large log files
analyze Perform comprehensive log analysis
archive Create compressed archive with metadata
health Run health checks and corruption detection
monitor Generate performance monitoring report
cleanup Clean old archives based on retention policy
start-daemon Start real-time monitoring daemon
stop-daemon Stop monitoring daemon
dashboard Generate HTML operations dashboard
full Run complete log management cycle
status Show current system status
EXAMPLES:
$0 init # First-time setup
$0 full # Complete log management cycle
$0 start-daemon # Start background monitoring
$0 dashboard # Generate operations dashboard
CONFIGURATION:
Edit $CONFIG_FILE to customize behavior
MONITORING:
The daemon provides real-time monitoring with configurable intervals,
automatic rotation, health checks, and alerting via email/Slack.
EOF
            ;;
    esac
}
# Initialize and run
# Run every command from the project root so relative paths (logs/, archives,
# analytics) resolve consistently regardless of the caller's working directory.
cd "$PROJECT_ROOT" 2>/dev/null || { error "Invalid project root: $PROJECT_ROOT"; exit 1; }
main "$@"

View File

@@ -1,27 +0,0 @@
# systemd oneshot unit that runs the repository's auto-update script; it is
# driven periodically by the companion mev-bot-auto-update.timer unit rather
# than being long-running itself.
[Unit]
Description=MEV Bot Auto-Update Service
After=network-online.target docker.service
Wants=network-online.target
[Service]
Type=oneshot
WorkingDirectory=/docker/mev-beta
ExecStart=/docker/mev-beta/scripts/auto-update.sh
# Both streams are appended to the same log file for a single audit trail.
StandardOutput=append:/docker/mev-beta/logs/auto-update.log
StandardError=append:/docker/mev-beta/logs/auto-update.log
# Environment variables (optional)
# Environment="WEBHOOK_URL=https://your-webhook-url"
# Environment="GIT_BRANCH=master"
# Environment="GIT_REMOTE=origin"
# Security settings
# NOTE(review): runs as root with docker group access — confirm the update
# script really needs root, or drop to a dedicated user.
User=root
Group=docker
# Prevent service from failing if update fails
# This ensures the timer continues to run
SuccessExitStatus=0 1
[Install]
WantedBy=multi-user.target

View File

@@ -1,14 +0,0 @@
# Timer that periodically triggers mev-bot-auto-update.service:
# first run 2 minutes after boot, then 5 minutes after each activation.
[Unit]
Description=MEV Bot Auto-Update Timer
Requires=mev-bot-auto-update.service
[Timer]
# Check for updates every 5 minutes
OnBootSec=2min
OnUnitActiveSec=5min
# Run timer even if system was off when it should have run
Persistent=true
[Install]
WantedBy=timers.target

View File

@@ -1,24 +0,0 @@
# Production unit that manages the MEV bot's docker-compose stack.
# Type=oneshot + RemainAfterExit=yes is the usual pattern for wrapping
# `docker compose up -d`: the unit stays "active" after the compose command
# returns, and ExecStop tears the stack down.
[Unit]
Description=MEV Bot Production Service
Requires=docker.service
After=docker.service network-online.target
Wants=network-online.target
[Service]
Type=oneshot
RemainAfterExit=yes
WorkingDirectory=/docker/mev-beta
# Pull fresh images before bringing the stack up.
ExecStartPre=/usr/bin/docker compose pull --quiet
ExecStart=/usr/bin/docker compose up -d
ExecStop=/usr/bin/docker compose down
ExecReload=/usr/bin/docker compose restart
TimeoutStartSec=0
# NOTE(review): systemd rejects Restart= values other than "no" for
# Type=oneshot units — this line likely causes a unit-load warning/error;
# confirm with `systemd-analyze verify`.
Restart=on-failure
RestartSec=10s
# Security settings
User=root
Group=docker
[Install]
WantedBy=multi-user.target

View File

@@ -1,139 +0,0 @@
#!/usr/bin/env bash
set -euo pipefail
# Global MEV Tools Manager
# This script provides tools to manage development environments across multiple projects
# Absolute directory of this script, resolved through symlinked invocations.
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
# Per-user configuration home for the mev-tools suite.
GLOBAL_CONFIG_DIR="$HOME/.config/mev-tools"
# Create global config directory
mkdir -p "$GLOBAL_CONFIG_DIR"
# Print the command summary for this tool on stdout.
# $0 is expanded inside the heredoc so the help always shows the name the
# script was actually invoked as.
usage() {
    cat <<EOF
Usage: $0 [COMMAND]
Global tools for MEV development

Commands:
 install - Install global tools to PATH
 setup-project - Setup a new project with standardized structure
 update-tools - Update all global tools
 check-env - Check if development environment is properly configured
 help - Show this help

Examples:
 $0 install # Install tools to local bin
 $0 setup-project my-project # Create new project
EOF
}
# Install the shared development tools into ~/bin and make sure ~/bin is on
# the user's PATH. Each tool is copied from the repository's scripts/
# directory under a short command name; missing sources are reported but do
# not abort the installation.
install_tools() {
    echo "Installing global tools..."

    mkdir -p "$HOME/bin"

    # source-script:installed-name pairs (the fallback message uses the
    # installed name, matching the historical output).
    local pair src dest
    for pair in \
        "test-runner.sh:test-runner" \
        "build.sh:build-tool" \
        "setup-dev.sh:dev-setup" \
        "performance-profile.sh:perf-profile"; do
        src="${pair%%:*}"
        dest="${pair##*:}"
        cp "$SCRIPT_DIR/../../scripts/$src" "$HOME/bin/$dest" 2>/dev/null || echo "$dest not found in expected location"
    done

    # Mark whichever tools made it across as executable.
    chmod +x "$HOME/bin/test-runner" "$HOME/bin/build-tool" "$HOME/bin/dev-setup" "$HOME/bin/perf-profile" 2>/dev/null || true

    # Append ~/bin to PATH via .bashrc only if it is not already present.
    if [[ ":$PATH:" != *":$HOME/bin:"* ]]; then
        echo "Adding $HOME/bin to PATH..."
        echo 'export PATH="$HOME/bin:$PATH"' >> "$HOME/.bashrc"
        echo "Please run 'source ~/.bashrc' or restart your shell to update PATH"
    fi

    echo "Tools installed successfully!"
    echo "Available tools:"
    echo " test-runner - Universal test runner"
    echo " build-tool - Universal build tool"
    echo " dev-setup - Development environment setup"
    echo " perf-profile - Performance profiling tool"
}
# Create a new project from the standard template.
# $1 - project name (required); exits 1 with a usage hint when omitted.
setup_project() {
    PROJECT_NAME="${1:-}"

    # A name is mandatory — bail out before touching the template script.
    if [[ -z "$PROJECT_NAME" ]]; then
        echo "Usage: $0 setup-project <project-name>"
        exit 1
    fi

    echo "Creating new project: $PROJECT_NAME"
    "$SCRIPT_DIR/../../scripts/create-project-template.sh" "$PROJECT_NAME"
}
# Placeholder updater: prints what a real implementation would do.
update_tools() {
    printf '%s\n' \
        "Updating global tools..." \
        "Note: In a real implementation, this would update tools from a central repository." \
        "For now, please manually update the scripts."
}
# Verify the development environment.
# Hard requirements (Go, Git) abort with exit 1 when missing; the globally
# installed helper tools only produce warnings. All output goes to stdout,
# matching the historical behaviour.
check_env() {
    echo "Checking development environment..."

    # Go is mandatory.
    if ! command -v go &> /dev/null; then
        echo "✗ Go not installed"
        exit 1
    fi
    echo "✓ Go $(go version | cut -d' ' -f3) installed"

    # Git is mandatory.
    if ! command -v git &> /dev/null; then
        echo "✗ Git not installed"
        exit 1
    fi
    echo "✓ Git $(git --version | cut -d' ' -f3) installed"

    # Optional global tools: warn only.
    local tool
    for tool in test-runner build-tool; do
        if command -v "$tool" &> /dev/null; then
            echo "✓ $tool available in PATH"
        else
            echo "⚠ $tool not available in PATH"
        fi
    done

    echo "Environment check completed."
}
# Main command router.
# Dispatches the first CLI argument; anything unrecognised (or a bare
# invocation) prints the usage text. Only an explicit-but-unknown command is
# treated as an error (exit 1); running with no arguments exits 0.
case "${1:-}" in
    install)            install_tools ;;
    setup-project)      setup_project "$2" ;;
    update-tools)       update_tools ;;
    check-env)          check_env ;;
    help | -h | --help) usage ;;
    *)
        usage
        if [ $# -gt 0 ]; then
            exit 1
        fi
        ;;
esac

View File

@@ -1,68 +0,0 @@
#!/bin/bash
# Monitor 24-hour test progress
#
# Reads the PID file written by the 24h test harness, verifies the bot is
# still alive, then summarises activity (blocks, DEX txs, opportunities,
# errors) from the most recent test log.

LOG_DIR="logs/24h_test"
PID_FILE="${LOG_DIR}/mev-bot.pid"

# count_matches PATTERN FILE
# Print the number of lines in FILE matching PATTERN; prints 0 when the file
# is missing or unreadable.
# BUG FIX: the previous `$(grep -c ... || echo "0")` pattern produced the
# two-line string "0\n0" whenever there were no matches — grep -c already
# prints 0 but exits non-zero, so the fallback echo fired as well. That broke
# the numeric comparison on ERRORS below.
count_matches() {
    local n
    n=$(grep -c -- "$1" "$2" 2>/dev/null) || true
    printf '%s\n' "${n:-0}"
}

if [ ! -f "${PID_FILE}" ]; then
    echo "❌ No test running (PID file not found)"
    exit 1
fi

PID=$(cat "${PID_FILE}")
if ! ps -p "${PID}" > /dev/null 2>&1; then
    echo "❌ Bot not running (PID ${PID} not found)"
    exit 1
fi

# Find latest log (ls -t: newest first; test log names contain no whitespace)
LATEST_LOG=$(ls -t "${LOG_DIR}"/test_*.log 2>/dev/null | head -1)
if [ -z "${LATEST_LOG}" ]; then
    echo "❌ No log file found"
    exit 1
fi

echo "📊 MEV Bot 24-Hour Test Monitor"
echo "================================"
echo "PID: ${PID}"
echo "Log: ${LATEST_LOG}"
echo "Running since: $(ps -o lstart= -p "${PID}")"
echo ""

# Stats
echo "📈 Statistics:"
BLOCKS=$(count_matches "Processing.*transactions" "${LATEST_LOG}")
DEX=$(count_matches "DEX Transaction detected" "${LATEST_LOG}")
OPPS=$(count_matches "ARBITRAGE OPPORTUNITY" "${LATEST_LOG}")
PROFITABLE=$(grep "ARBITRAGE OPPORTUNITY" "${LATEST_LOG}" 2>/dev/null | grep -c "isExecutable:true" || true)
PROFITABLE=${PROFITABLE:-0}
echo " Blocks processed: ${BLOCKS}"
echo " DEX transactions: ${DEX}"
echo " Opportunities: ${OPPS}"
echo " Profitable: ${PROFITABLE}"
echo ""

# Recent activity
echo "🔍 Recent Activity (last 10 opportunities):"
grep "ARBITRAGE OPPORTUNITY" "${LATEST_LOG}" 2>/dev/null | tail -10 | while IFS= read -r line; do
    echo " $(grep -o 'netProfitETH:[^ ]*' <<<"$line" || echo 'N/A')"
done
echo ""

# Cache metrics
# BUG FIX: the old `grep ... | tail -1 || echo "..."` fallback could never
# fire — the pipeline's status is tail's, which succeeds even when grep finds
# nothing. Capture the line first and substitute the placeholder when empty.
echo "💾 Cache Metrics:"
CACHE_LINE=$(grep "Reserve cache metrics" "${LATEST_LOG}" 2>/dev/null | tail -1)
echo "${CACHE_LINE:- Not available yet}"
echo ""

# Errors
ERRORS=$(count_matches "\[ERROR\]" "${LATEST_LOG}")
echo "⚠️ Errors: ${ERRORS}"
if [ "${ERRORS}" -gt 0 ]; then
    echo " Recent errors:"
    grep "\[ERROR\]" "${LATEST_LOG}" 2>/dev/null | tail -3 | sed 's/^/ /'
fi
echo ""
echo "📝 Live monitoring:"
echo " tail -f ${LATEST_LOG} | grep -E 'ARBITRAGE|ERROR|Reserve cache'"

View File

@@ -1,191 +0,0 @@
#!/bin/bash
# Monitor Bot Wallet and Auto-Setup Once Funded
# Continuously checks wallet balance and automatically proceeds with setup
#
# Flow: poll the bot wallet on Arbitrum every CHECK_INTERVAL seconds using
# Foundry's `cast`; once the balance reaches MIN_BALANCE ETH, configure the
# encrypted keystore, enable execution mode, restart the bot in production
# mode, and tail its log. Requires: cast, bc, and the sibling scripts under
# scripts/.
set -e
BOT_WALLET="0x40091653f652a259747D86d7Cbe3e2848082a051"
# NOTE(review): hard-coded Alchemy API key embedded in the RPC URL — should be
# read from the environment, and the key rotated if this file was ever shared.
ARBITRUM_RPC="https://arb-mainnet.g.alchemy.com/v2/d6VAHgzkOI3NgLGem6uBMiADT1E9rROB"
MIN_BALANCE="0.001"
CHECK_INTERVAL=10 # seconds between checks
echo "═══════════════════════════════════════════════════════════"
echo "🤖 MEV Bot Auto-Setup Monitor"
echo "═══════════════════════════════════════════════════════════"
echo ""
echo "Monitoring wallet: $BOT_WALLET"
echo "Network: Arbitrum One"
echo "Minimum required: $MIN_BALANCE ETH"
echo "Check interval: ${CHECK_INTERVAL}s"
echo ""
echo "📋 To fund the wallet, use one of these methods:"
echo ""
echo " **Method 1: MetaMask**"
echo " • Open MetaMask on Arbitrum One network"
echo " • Send 0.001 ETH to: $BOT_WALLET"
echo ""
echo " **Method 2: Cast (Command Line)**"
echo " • Run: ./scripts/fund-bot-wallet.sh"
echo " • Follow prompts to use your funded wallet"
echo ""
echo " **Method 3: Bridge**"
echo " • Visit: https://bridge.arbitrum.io/"
echo " • Bridge ETH from Ethereum to Arbitrum"
echo " • Send to: $BOT_WALLET"
echo ""
echo "═══════════════════════════════════════════════════════════"
echo ""
echo "⏳ Monitoring for funds... (Press Ctrl+C to stop)"
echo ""
# Counter for checks
CHECK_COUNT=0
# Poll loop: exits (via `exit 0`) only after a successful funded-setup run.
while true; do
CHECK_COUNT=$((CHECK_COUNT + 1))
# Get balance
# cast failures fall back to "0", so transient RPC errors just look unfunded
# instead of aborting the monitor (important because of `set -e`).
BALANCE_WEI=$(cast balance "$BOT_WALLET" --rpc-url "$ARBITRUM_RPC" 2>/dev/null || echo "0")
BALANCE_ETH=$(cast --to-unit "$BALANCE_WEI" ether 2>/dev/null || echo "0")
# Display status
TIMESTAMP=$(date '+%Y-%m-%d %H:%M:%S')
echo "[$TIMESTAMP] Check #$CHECK_COUNT: Balance = $BALANCE_ETH ETH"
# Check if funded
# bc handles the decimal comparison; (( )) consumes its 0/1 result.
if (( $(echo "$BALANCE_ETH >= $MIN_BALANCE" | bc -l) )); then
echo ""
echo "═══════════════════════════════════════════════════════════"
echo "🎉 FUNDING DETECTED!"
echo "═══════════════════════════════════════════════════════════"
echo ""
echo "✅ Wallet Balance: $BALANCE_ETH ETH"
echo ""
echo "🔗 View on Arbiscan:"
echo " https://arbiscan.io/address/$BOT_WALLET"
echo ""
# Calculate capacity
# Display-only estimates: 0.0005 ETH assumed gas per trade, ETH at $2000 —
# both are rough constants, not live market data.
TRADES=$(echo "$BALANCE_ETH / 0.0005" | bc)
VALUE=$(echo "$BALANCE_ETH * 2000" | bc)
echo "📊 Wallet Capacity:"
echo " • Balance: $BALANCE_ETH ETH"
echo " • Value: ~\$$VALUE USD (at \$2000/ETH)"
echo " • Estimated trades: ~$TRADES transactions"
echo ""
echo "═══════════════════════════════════════════════════════════"
echo "🚀 Proceeding with Automated Setup"
echo "═══════════════════════════════════════════════════════════"
echo ""
# Step 1: Setup Keystore
echo "📝 Step 1/3: Configuring encrypted keystore..."
echo ""
if ./scripts/setup-keystore.sh; then
echo ""
echo "✅ Keystore configured successfully!"
else
echo ""
echo "❌ Keystore setup failed. Please run manually:"
echo " ./scripts/setup-keystore.sh"
exit 1
fi
echo ""
echo "─────────────────────────────────────────────────────────"
echo ""
# Step 2: Enable Execution Mode
echo "📝 Step 2/3: Enabling execution mode..."
echo ""
if ./scripts/enable-execution-mode.sh; then
echo ""
echo "✅ Execution mode enabled!"
else
echo ""
echo "❌ Execution mode setup failed. Please run manually:"
echo " ./scripts/enable-execution-mode.sh"
exit 1
fi
echo ""
echo "─────────────────────────────────────────────────────────"
echo ""
# Step 3: Restart Bot
echo "📝 Step 3/3: Restarting bot in execution mode..."
echo ""
# Kill existing bot
# pgrep guard keeps `set -e` happy when no bot process exists.
if pgrep -f mev-beta > /dev/null; then
echo "🛑 Stopping existing bot..."
pkill -f mev-beta
sleep 2
fi
# Start bot with production config
# NOTE(review): hard-coded deployment path — confirm it matches the host.
echo "🚀 Starting bot with execution enabled..."
cd /home/administrator/projects/mev-beta
GO_ENV=production nohup ./bin/mev-beta start > logs/mev_bot_production.log 2>&1 &
BOT_PID=$!
echo "✅ Bot started with PID: $BOT_PID"
# Wait a moment and verify it's running
sleep 3
if ps -p $BOT_PID > /dev/null; then
echo "✅ Bot is running successfully!"
else
echo "⚠️ Bot may have crashed on startup. Check logs:"
echo " tail -50 logs/mev_bot_production.log"
fi
echo ""
echo "═══════════════════════════════════════════════════════════"
echo "🎉 SETUP COMPLETE - BOT IS LIVE!"
echo "═══════════════════════════════════════════════════════════"
echo ""
echo "✅ Wallet: $BOT_WALLET"
echo "✅ Balance: $BALANCE_ETH ETH"
echo "✅ Keystore: keystore/production/executor_wallet.json"
echo "✅ Execution: ENABLED"
echo "✅ Bot Status: RUNNING (PID $BOT_PID)"
echo ""
echo "📊 Monitor Execution:"
echo " • Live logs:"
echo " tail -f logs/mev_bot.log | grep 'EXECUTION\\|Profit'"
echo ""
echo " • Watch opportunities:"
echo " tail -f logs/mev_bot.log | grep 'Opportunity detected'"
echo ""
echo " • Check wallet balance:"
echo " ./scripts/check-wallet-balance.sh"
echo ""
echo " • View on Arbiscan:"
echo " https://arbiscan.io/address/$BOT_WALLET"
echo ""
echo " • Metrics dashboard:"
echo " http://localhost:9090/metrics"
echo ""
echo "🛑 Emergency Stop:"
echo " pkill -f mev-beta"
echo ""
echo "═══════════════════════════════════════════════════════════"
echo ""
# Tail logs to show activity
# This blocks until the operator presses Ctrl+C (tail -f never returns).
echo "📺 Showing live logs (Press Ctrl+C to stop)..."
echo ""
tail -f logs/mev_bot.log
exit 0
fi
# Wait before next check
sleep $CHECK_INTERVAL
done

View File

@@ -1,401 +0,0 @@
#!/bin/bash
# Script Organization and Cleanup
# Based on SCRIPT_ANALYSIS_REPORT.md recommendations
#
# Moves superseded log-management scripts into scripts/deprecated/ and demo
# scripts into scripts/demos/ (via `git mv`), then writes README indexes for
# each directory. Must be run from the repository root.
set -e
# Colors for output
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
RED='\033[0;31m'
NC='\033[0m' # No Color
echo -e "${BLUE}========================================${NC}"
echo -e "${BLUE}MEV Bot Script Organization${NC}"
echo -e "${BLUE}========================================${NC}"
echo ""
# Check if we're in the right directory
# go.mod is used as a cheap marker for the project root.
if [ ! -f "go.mod" ]; then
echo -e "${RED}❌ Error: Must be run from project root${NC}"
exit 1
fi
# Create status before changes
# Snapshot of `git status` kept in /tmp for the before/after summary at the end.
echo -e "${BLUE}📊 Capturing git status before changes...${NC}"
git status --short > /tmp/mev-bot-git-status-before.txt
echo ""
# Create deprecated directory
echo -e "${BLUE}📁 Creating directories...${NC}"
mkdir -p scripts/deprecated
mkdir -p scripts/demos
echo -e "${GREEN}✅ Created scripts/deprecated/ and scripts/demos/${NC}"
echo ""
# Move redundant log management scripts to deprecated
# NOTE(review): `git mv` aborts the whole run (set -e) if a listed file exists
# but is untracked — confirm all of these are tracked before running.
echo -e "${BLUE}🗂️ Moving redundant log management scripts...${NC}"
DEPRECATED_SCRIPTS=(
"scripts/archive-logs.sh"
"scripts/quick-archive.sh"
"scripts/view-latest-archive.sh"
"scripts/rotate-logs.sh"
"scripts/setup-log-rotation.sh"
)
for script in "${DEPRECATED_SCRIPTS[@]}"; do
if [ -f "$script" ]; then
echo -e " Moving: $script → scripts/deprecated/"
git mv "$script" "scripts/deprecated/$(basename "$script")"
else
echo -e "${YELLOW} Skipped: $script (not found)${NC}"
fi
done
echo -e "${GREEN}✅ Moved ${#DEPRECATED_SCRIPTS[@]} scripts to deprecated/${NC}"
echo ""
# Move demo scripts
echo -e "${BLUE}🎬 Moving demo scripts...${NC}"
DEMO_SCRIPTS=(
"scripts/demo-production-logs.sh"
)
for script in "${DEMO_SCRIPTS[@]}"; do
if [ -f "$script" ]; then
echo -e " Moving: $script → scripts/demos/"
git mv "$script" "scripts/demos/$(basename "$script")"
else
echo -e "${YELLOW} Skipped: $script (not found)${NC}"
fi
done
echo -e "${GREEN}✅ Moved ${#DEMO_SCRIPTS[@]} scripts to demos/${NC}"
echo ""
# Create README for deprecated scripts
# The heredoc delimiter is quoted ('EOF') so the markdown (including its
# backticks and $ examples) is written verbatim with no shell expansion.
echo -e "${BLUE}📝 Creating README for deprecated scripts...${NC}"
cat > scripts/deprecated/README.md << 'EOF'
# Deprecated Scripts
These scripts have been moved here because their functionality is now handled by more comprehensive tools.
## Log Management Scripts (Superseded by log-manager.sh)
All of these scripts have been replaced by `scripts/log-manager.sh`, which provides:
- Real-time analysis with health scoring
- Performance monitoring with MEV-specific metrics
- Corruption detection and integrity validation
- Multi-channel alerting (email, Slack)
- Background monitoring daemon
- Operations dashboard generation
- Intelligent rotation (size and time-based)
- Advanced archiving with metadata
### Deprecated Scripts
1. **archive-logs.sh** → Use `./scripts/log-manager.sh archive`
2. **quick-archive.sh** → Use `./scripts/log-manager.sh full`
3. **view-latest-archive.sh** → Use `./scripts/log-manager.sh status`
4. **rotate-logs.sh** → Use `./scripts/log-manager.sh rotate`
5. **setup-log-rotation.sh** → Use `./scripts/log-manager.sh init`
## Migration Guide
**Instead of:**
```bash
./scripts/archive-logs.sh
```
**Use:**
```bash
./scripts/log-manager.sh archive
```
**Instead of:**
```bash
./scripts/quick-archive.sh --clear-logs
```
**Use:**
```bash
./scripts/log-manager.sh full
```
## Why These Were Deprecated
The individual log management scripts were created before the comprehensive `log-manager.sh` system was implemented. The new system provides:
- **Unified Interface**: Single command with multiple subcommands
- **Production Grade**: Health monitoring, alerting, and metrics
- **Better Maintenance**: One script to maintain instead of five
- **More Features**: Dashboard generation, daemon mode, performance tracking
- **Safer Operations**: Validation and corruption detection
## Can I Still Use These?
Yes, these scripts still work and are kept for backwards compatibility. However, it's recommended to migrate to `log-manager.sh` for better functionality and ongoing support.
## When Will These Be Removed?
These scripts will be kept for at least one major version release to allow for migration. They may be removed in a future version once all users have migrated to `log-manager.sh`.
---
**See:** `docs/SCRIPT_ANALYSIS_REPORT.md` for the full analysis
EOF
echo -e "${GREEN}✅ Created README for deprecated scripts${NC}"
echo ""
# Create README for demos
# Quoted heredoc delimiter: markdown content is written verbatim.
echo -e "${BLUE}📝 Creating README for demo scripts...${NC}"
cat > scripts/demos/README.md << 'EOF'
# Demo & Example Scripts
These scripts are for demonstration and testing purposes only. They should not be used in production environments.
## Available Demos
### demo-production-logs.sh
Demonstrates the production log management system capabilities.
**Purpose:** Show how the log-manager.sh system works
**Usage:**
```bash
./scripts/demos/demo-production-logs.sh
```
**What it does:**
- Generates sample log entries
- Runs log analysis
- Shows health checks
- Demonstrates alerting
- Creates performance reports
- Generates operations dashboard
**Note:** This is a demonstration script. For production log management, use `./scripts/log-manager.sh`
---
**See:** `docs/SCRIPT_ANALYSIS_REPORT.md` for more information
EOF
echo -e "${GREEN}✅ Created README for demo scripts${NC}"
echo ""
# Create index README for scripts directory
# Quoted heredoc delimiter: the full scripts/ index is written verbatim.
echo -e "${BLUE}📝 Creating scripts directory index...${NC}"
cat > scripts/README.md << 'EOF'
# MEV Bot Scripts Directory
This directory contains all operational, utility, and development scripts for the MEV Bot project.
## Core Scripts
### Build & Runtime
- **build.sh** - Universal Go build script with configurable options
- **run.sh** - Main MEV bot execution script with production environment loading
- **test.sh** - Basic test runner
### Log Management
- **log-manager.sh** ⭐ Production-grade log management system
- Real-time analysis and health monitoring
- Performance tracking with MEV metrics
- Corruption detection and alerting
- Background daemon and dashboard generation
- See: `./scripts/log-manager.sh --help`
## CI/CD & Quality Assurance
### Primary CI Pipeline
- **../harness/local-ci-pipeline.sh** - Comprehensive CI/CD pipeline
- **ci-precommit.sh** - Fast pre-commit validation (10-30s)
- **ci-quick.sh** - Quick CI pipeline (30-60s)
- **ci-dev.sh** - Development CI pipeline (1-2min)
- **ci-full.sh** - Full CI pipeline (3-5min)
- **ci-container.sh** - Containerized CI execution
- **ci-watch.sh** - Watch mode for continuous validation
### Testing
- **test-runner.sh** - Configurable test execution (levels: basic, unit, integration, comprehensive, audit)
- **run_audit_suite.sh** - Mathematical correctness audit
- **security-validation.sh** - Comprehensive security validation
- **quick-test.sh** - Quick fix validation (30s)
- **run-stress-tests.sh** - Stress testing
- **run-fork-tests.sh** - Blockchain fork testing
## Deployment & Production
### Contract Deployment
- **deploy-contracts.sh** - Deploy smart contracts to Arbitrum
- **verify-contracts.sh** - Verify contracts on Arbiscan
- **deploy-staging.sh** - Staging environment deployment
- **deploy-production.sh** - Full production deployment with Docker Compose
### Production Operations
- **production-start.sh** - Start production MEV bot
- **production-validation.sh** - Pre-deployment validation checks
- **pre-run-validation.sh** - Environment validation before startup
## Wallet Management
- **setup-keystore.sh** - Encrypt and securely store private keys (AES-256-CBC)
- **fund-bot-wallet.sh** - Fund MEV bot wallet using Foundry cast
- **check-wallet-balance.sh** - Check wallet balance on Arbitrum One
## Monitoring & Analysis
- **watch-live.sh** - Real-time MEV bot activity monitor
- **analyze.sh** - Comprehensive system analysis (tests, benchmarks, coverage, static analysis)
- **performance-profile.sh** - Performance profiling with pprof
## Development Utilities
### Environment Setup
- **setup-env.sh** - Environment variable setup
- **setup-dev.sh** - Development environment setup
- **fix-rpc-config.sh** - Fix RPC configuration issues
### Git Workflow
- **git-hooks-setup.sh** - Install git hooks
- **git-enhanced.sh** - Enhanced git workflow commands
- **git-local-server.sh** - Local git server simulation
### Data & Code Generation
- **fetch_arbiscan_tx.sh** - Fetch transaction data from Arbiscan
- **extract_multicall_fixture.sh** - Extract multicall fixtures for testing
- **refresh-mev-datasets.sh** - Update MEV research datasets
- **generate-bindings.sh** - Generate Go bindings for smart contracts
### Other Utilities
- **kill-bot.sh** - Stop running MEV bot processes
- **dependency-scan.sh** - Scan for dependency vulnerabilities
- **verify-organization.sh** - Verify project organization
- **24h-validation-test.sh** - 24-hour validation test
## Special Directories
### deprecated/
Contains scripts that have been superseded by better alternatives. See `deprecated/README.md` for migration guide.
**Replaced by log-manager.sh:**
- archive-logs.sh
- quick-archive.sh
- view-latest-archive.sh
- rotate-logs.sh
- setup-log-rotation.sh
### demos/
Contains demonstration and example scripts for testing purposes only. Not for production use.
## Quick Reference
### Development Workflow
```bash
# Setup
./scripts/setup-dev.sh
# Quick validation
./scripts/ci-precommit.sh
# Run tests
./scripts/test-runner.sh --level comprehensive --coverage
# Security check
./scripts/security-validation.sh
# Math audit
./scripts/run_audit_suite.sh
```
### Production Deployment
```bash
# Validate environment
./scripts/production-validation.sh
# Deploy contracts
./scripts/deploy-contracts.sh
# Setup wallet
./scripts/setup-keystore.sh
./scripts/check-wallet-balance.sh
# Deploy and start
./scripts/deploy-production.sh
./scripts/run.sh
```
### Monitoring
```bash
# Live activity monitor
./scripts/watch-live.sh
# Log management
./scripts/log-manager.sh analyze
./scripts/log-manager.sh health
./scripts/log-manager.sh dashboard
# Performance profiling
./scripts/performance-profile.sh
```
## Documentation
For detailed script analysis and recommendations, see:
- **docs/SCRIPT_ANALYSIS_REPORT.md** - Comprehensive script analysis
- **Makefile** - Build automation targets and workflows
## Contributing
When adding new scripts:
1. Make scripts executable: `chmod +x script-name.sh`
2. Add shebang: `#!/bin/bash` or `#!/usr/bin/env bash`
3. Use set -e for error handling
4. Add descriptive comments
5. Update this README
6. Add help text (use --help flag)
---
**Total Scripts:** 80+
**Active Scripts:** 50+
**Deprecated Scripts:** 5
**Demo Scripts:** 1
EOF
echo -e "${GREEN}✅ Created scripts directory index${NC}"
echo ""
# Capture status after changes
echo -e "${BLUE}📊 Capturing git status after changes...${NC}"
git status --short > /tmp/mev-bot-git-status-after.txt
echo ""
# Show summary
echo -e "${BLUE}========================================${NC}"
echo -e "${GREEN}✅ Script Organization Complete!${NC}"
echo -e "${BLUE}========================================${NC}"
echo ""
echo -e "${BLUE}📝 Summary:${NC}"
echo -e " • Created: scripts/deprecated/"
echo -e " • Created: scripts/demos/"
echo -e " • Moved: ${#DEPRECATED_SCRIPTS[@]} scripts to deprecated/"
echo -e " • Moved: ${#DEMO_SCRIPTS[@]} scripts to demos/"
echo -e " • Created: 3 README files"
echo ""
# Before/after git status so the operator can eyeball what moved.
echo -e "${BLUE}📊 Git Status Changes:${NC}"
echo -e "${YELLOW}Before:${NC}"
cat /tmp/mev-bot-git-status-before.txt | head -10
echo ""
echo -e "${YELLOW}After:${NC}"
git status --short | head -20
echo ""
echo -e "${BLUE}⏭️ Next Steps:${NC}"
echo -e " 1. Review changes: ${YELLOW}git diff --cached${NC}"
echo -e " 2. Commit changes: ${YELLOW}git commit -m \"refactor: organize scripts into logical directories\"${NC}"
echo -e " 3. Read report: ${YELLOW}docs/SCRIPT_ANALYSIS_REPORT.md${NC}"
echo ""

View File

@@ -1,46 +0,0 @@
#!/usr/bin/env bash
set -euo pipefail
# Performance profiling script
#
# Builds the bot (if needed), runs it with the development config, grabs
# CPU/heap profiles from its pprof HTTP endpoint on :6060, then runs Go
# benchmarks. All reports land in $REPORT_DIR.

# Set default values (all overridable via the environment)
REPORT_DIR="${REPORT_DIR:-reports/performance}"
BINARY_DIR="${BINARY_DIR:-bin}"
BINARY_NAME="${BINARY_NAME:-mev-bot}"
# NOTE(review): PROFILE_TYPES is currently informational only — nothing below
# consumes it; confirm whether it should gate the pprof calls.
PROFILE_TYPES="${PROFILE_TYPES:-cpu,mem,block,mutex}"
TIMEOUT="${TIMEOUT:-30s}"

echo "Starting performance profile..."

# Create report directory
mkdir -p "$REPORT_DIR"

# Build the application if it doesn't exist
if [ ! -f "$BINARY_DIR/$BINARY_NAME" ]; then
    echo "Building application..."
    make build
fi

# Run the application with profiling enabled
echo "Running application with profiling for $TIMEOUT..."
"$BINARY_DIR/$BINARY_NAME" --config config/development.yaml &
APP_PID=$!

# BUG FIX: the profiled application was previously left running after the
# script finished (or died mid-way under set -e). Always terminate it on
# exit, whatever the exit path.
cleanup() { kill "$APP_PID" 2>/dev/null || true; }
trap cleanup EXIT

# Wait for the app to start
sleep 2

# Profile the application
if command -v go &> /dev/null; then
    # ${TIMEOUT%?} strips the trailing unit character ("30s" -> "30") because
    # pprof's ?seconds= parameter wants a bare number. Assumes a one-char unit.
    echo "Profiling CPU for $TIMEOUT..."
    go tool pprof -text -top -nodecount=10 "http://localhost:6060/debug/pprof/profile?seconds=${TIMEOUT%?}" > "$REPORT_DIR/cpu-profile.txt" 2>/dev/null || echo "Could not connect to pprof endpoint, make sure your app has debug/pprof enabled"
    echo "Profiling memory..."
    go tool pprof -text -top -nodecount=10 "http://localhost:6060/debug/pprof/heap" > "$REPORT_DIR/mem-profile.txt" 2>/dev/null || echo "Could not connect to heap profile endpoint"
fi

# Alternative: run a benchmark test if the application doesn't support pprof
echo "Running benchmark tests..."
go test -bench=. -benchmem -run=^$ ./pkg/... ./cmd/... > "$REPORT_DIR/benchmark-results.txt"

echo "Performance profiling complete. Reports saved to $REPORT_DIR"

View File

@@ -1,76 +0,0 @@
#!/bin/bash
# Pre-Run Validation Script
# Validates the environment before starting the MEV bot:
#   1. RPC endpoint is set
#   2. endpoint uses wss:// or https://
#   3. log directory exists (and today's event log looks sane)
#   4. bot binary exists
#   5. metrics/dashboard ports are free (warn only)
# Exits 0 when safe to start, 1 otherwise.
set -e

echo "========================================="
echo "MEV Bot Pre-Run Validation"
echo "========================================="

ERRORS=0

# [1/5] RPC endpoint must be configured.
echo "[1/5] Checking RPC endpoints..."
if [ -z "${ARBITRUM_RPC_ENDPOINT:-}" ]; then
  echo "❌ ARBITRUM_RPC_ENDPOINT not set"
  ERRORS=$((ERRORS + 1))
else
  echo "✅ ARBITRUM_RPC_ENDPOINT: $ARBITRUM_RPC_ENDPOINT"
fi

# [2/5] Only secure schemes are accepted.
echo "[2/5] Validating endpoint format..."
if [[ "${ARBITRUM_RPC_ENDPOINT:-}" == wss://* ]] || [[ "${ARBITRUM_RPC_ENDPOINT:-}" == https://* ]]; then
  echo "✅ Endpoint format valid"
else
  echo "❌ Endpoint must start with wss:// or https://"
  ERRORS=$((ERRORS + 1))
fi

# [3/5] Log directory; warn on suspicious zero-address counts in today's log.
echo "[3/5] Checking log directory..."
if [ -d "logs" ]; then
  echo "✅ Log directory exists"
  EVENTS_FILE="logs/liquidity_events_$(date +%Y-%m-%d).jsonl"
  if [ -f "$EVENTS_FILE" ]; then
    # BUG FIX: `grep -c` prints "0" AND exits non-zero when nothing matches,
    # so the original `$(grep -c ... || echo 0)` produced "0\n0" and broke
    # the -gt comparison below. Capture the count, default on failure.
    ZERO_COUNT=$(grep -c "0x0000000000000000000000000000000000000000" "$EVENTS_FILE" 2>/dev/null) || ZERO_COUNT=0
    echo "Zero addresses in today's events: $ZERO_COUNT"
    if [ "$ZERO_COUNT" -gt 10 ]; then
      echo "⚠️ WARNING: High zero address count detected"
    fi
  fi
else
  mkdir -p logs
  echo "✅ Created log directory"
fi

# [4/5] The bot binary must exist (either location is accepted).
echo "[4/5] Checking binary..."
if [ -f "./mev-bot" ] || [ -f "./bin/mev-bot" ]; then
  echo "✅ MEV bot binary found"
else
  echo "❌ MEV bot binary not found. Run 'make build' first"
  ERRORS=$((ERRORS + 1))
fi

# [5/5] Warn (do not fail) if our ports are already bound.
echo "[5/5] Checking for port conflicts..."
if lsof -Pi :9090 -sTCP:LISTEN -t >/dev/null 2>&1; then
  echo "⚠️ WARNING: Port 9090 (metrics) already in use"
fi
if lsof -Pi :8080 -sTCP:LISTEN -t >/dev/null 2>&1; then
  echo "⚠️ WARNING: Port 8080 (dashboard) already in use"
fi

echo ""
echo "========================================="
if [ $ERRORS -eq 0 ]; then
  echo "✅ Validation PASSED - Safe to start"
  exit 0
else
  echo "❌ Validation FAILED - $ERRORS error(s) found"
  exit 1
fi

View File

@@ -1,320 +0,0 @@
#!/usr/bin/env bash
# 🚀 MEV BOT PRODUCTION STARTUP SCRIPT - IMMEDIATE PROFIT MODE
set -euo pipefail
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="$(cd "$SCRIPT_DIR/.." && pwd)"
cd "$PROJECT_ROOT"
# Colors
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
RED='\033[0;31m'
BLUE='\033[0;34m'
BOLD='\033[1m'
NC='\033[0m'
# --- Console output helpers -------------------------------------------------
# Each prints one colour-tagged line; printf '%b\n' expands the ANSI escape
# sequences stored in the colour variables (equivalent to `echo -e`).

log() {
    printf '%b\n' "${GREEN}[PRODUCTION-START]${NC} $*"
}

warn() {
    printf '%b\n' "${YELLOW}[WARNING]${NC} $*"
}

error() {
    printf '%b\n' "${RED}[ERROR]${NC} $*"
}

info() {
    printf '%b\n' "${BLUE}[INFO]${NC} $*"
}

# Banner printed at the top of every deployment run.
header() {
    local rule="${BOLD}${GREEN}===============================================${NC}"
    printf '%b\n' "$rule"
    printf '%b\n' "${BOLD}${GREEN} 🚀 MEV BOT PRODUCTION DEPLOYMENT - PROFIT MODE${NC}"
    printf '%b\n' "$rule"
    echo ""
}
# Pre-flight checks before production deployment.
# Verifies the binary exists (building it when necessary), validates its
# format, reports the Go toolchain, creates runtime directories and confirms
# the production environment file is present. Returns non-zero (aborting the
# script under `set -e`) when the environment file is missing.
preflight_checks() {
    log "Running pre-flight checks..."

    # Build the binary when missing, then re-verify it actually appeared —
    # the original assumed `make build` always produced bin/mev-bot.
    if [[ ! -f "bin/mev-bot" ]]; then
        error "MEV bot binary not found. Building..."
        make build
        if [[ ! -f "bin/mev-bot" ]]; then
            error "❌ Build completed but bin/mev-bot is still missing"
            exit 1
        fi
    fi

    # Check if binary is executable
    if [[ ! -x "bin/mev-bot" ]]; then
        chmod +x bin/mev-bot
    fi

    # Sanity-check the binary format before trusting it.
    local binary_info
    binary_info=$(file bin/mev-bot)
    if [[ "$binary_info" =~ "ELF 64-bit" ]]; then
        log "✅ Binary verified: $(ls -lh bin/mev-bot | awk '{print $5}')"
    else
        error "❌ Invalid binary format"
        exit 1
    fi

    # Report the Go toolchain version in use.
    local go_version
    go_version=$(go version | grep -o 'go[0-9.]*' | head -1)
    log "✅ Go version: $go_version"

    # Runtime directories the bot expects.
    mkdir -p keystore/production logs backups/production
    log "✅ Production directories created"

    # The production env file is mandatory; report it clearly instead of the
    # original's warn-then-silent `set -e` exit.
    if [[ -f ".env.production.secure" ]]; then
        log "✅ Production environment configuration found"
    else
        error "Production environment not configured (.env.production.secure missing)"
        return 1
    fi
}
# Validate the production configuration.
# Sources .env.production.secure and asserts the settings the bot cannot run
# without: a sufficiently long encryption key and an RPC endpoint.
validate_config() {
    log "Validating production configuration..."

    local env_file=".env.production.secure"
    if [[ ! -f "$env_file" ]]; then
        error "Production environment file not found"
        exit 1
    fi
    # shellcheck disable=SC1090
    source "$env_file"

    # Encryption key: must be present and at least 32 characters long.
    local key="${MEV_BOT_ENCRYPTION_KEY:-}"
    if [[ -z "$key" ]]; then
        error "MEV_BOT_ENCRYPTION_KEY not set"
        exit 1
    fi
    if (( ${#key} < 32 )); then
        error "MEV_BOT_ENCRYPTION_KEY too short (minimum 32 characters)"
        exit 1
    fi

    # RPC endpoint: must be present.
    if [[ -z "${ARBITRUM_RPC_ENDPOINT:-}" ]]; then
        error "ARBITRUM_RPC_ENDPOINT not set"
        exit 1
    fi

    log "✅ Configuration validated"
}
# Start optional monitoring side-cars (Prometheus / Grafana) when installed.
# PID files are written under monitoring/ so the stop script can reap them.
start_monitoring() {
    log "Starting monitoring and metrics..."

    # The pid files below live in monitoring/; ensure the directory exists —
    # the original died under `set -e` when it was missing.
    mkdir -p monitoring

    # Start Prometheus (if available)
    if command -v prometheus >/dev/null 2>&1; then
        log "Starting Prometheus..."
        prometheus --config.file=monitoring/prometheus.yml --storage.tsdb.path=monitoring/data &
        echo $! > monitoring/prometheus.pid
    fi

    # Start Grafana (if available)
    if command -v grafana-server >/dev/null 2>&1; then
        log "Starting Grafana..."
        grafana-server --config=monitoring/grafana.ini &
        echo $! > monitoring/grafana.pid
    fi

    log "✅ Monitoring started (check ports 9090, 3000)"
}
# Start the MEV bot in production mode.
# Loads the secure environment (auto-exported), applies runtime tuning,
# launches the bot with output teed to a timestamped log file, records its
# PID and verifies it survived startup.
start_mev_bot() {
    log "🚀 Starting MEV Bot in PRODUCTION mode..."

    # Set production environment safely (avoid code injection)
    if [[ -f ".env.production.secure" ]]; then
        set -a
        source .env.production.secure
        set +a
    fi

    # Additional production environment
    export GO_ENV=production
    export DEBUG=false
    export LOG_LEVEL=info

    # Performance tuning
    export GOMAXPROCS=4
    export GOGC=100

    log "Starting MEV bot process..."

    # Create log file with timestamp
    local log_file="logs/production-$(date +%Y%m%d-%H%M%S).log"

    # BUG FIX: the original backgrounded `mev-bot | tee` and captured $!,
    # which is the PID of *tee* (the last command of the pipeline), not the
    # bot — so the pid file and every liveness check tracked the wrong
    # process. Process substitution keeps the bot as the direct child so $!
    # is its real PID while output still reaches both console and log file.
    ./bin/mev-bot start > >(tee "$log_file") 2>&1 &
    local bot_pid=$!
    echo "$bot_pid" > mev-bot.pid

    log "✅ MEV Bot started with PID: $bot_pid"
    log "📊 Logs: $log_file"
    log "🔍 Monitor with: tail -f $log_file"

    # Give the process a moment, then confirm it is still alive.
    sleep 3
    if kill -0 "$bot_pid" 2>/dev/null; then
        log "🎉 MEV Bot is running successfully!"
        log "💰 Profit tracking active - monitor logs for opportunities"
    else
        error "❌ MEV Bot failed to start"
        exit 1
    fi
}
# Print the current status of the bot and monitoring side-cars, followed by a
# short operator cheat-sheet. Reads the pid files written by the start steps.
show_status() {
    echo ""
    log "🎯 PRODUCTION STATUS:"
    echo ""

    # True when the given pid file exists and its recorded process is alive.
    _pid_file_alive() {
        [[ -f "$1" ]] && kill -0 "$(cat "$1")" 2>/dev/null
    }

    # MEV bot status.
    if [[ ! -f "mev-bot.pid" ]]; then
        warn "❌ MEV Bot: NOT STARTED"
    elif _pid_file_alive "mev-bot.pid"; then
        info "✅ MEV Bot: RUNNING (PID: $(cat mev-bot.pid))"
    else
        warn "❌ MEV Bot: STOPPED"
    fi

    # Monitoring side-cars are only reported when alive.
    if _pid_file_alive "monitoring/prometheus.pid"; then
        info "✅ Prometheus: RUNNING (PID: $(cat monitoring/prometheus.pid)) - http://localhost:9090"
    fi
    if _pid_file_alive "monitoring/grafana.pid"; then
        info "✅ Grafana: RUNNING (PID: $(cat monitoring/grafana.pid)) - http://localhost:3000"
    fi

    echo ""
    log "💡 Quick Commands:"
    log " View logs: tail -f logs/production-*.log"
    log " Stop bot: ./scripts/production-stop.sh"
    log " Monitor: watch -n 1 'ps aux | grep mev-bot'"
    echo ""
    log "🎯 READY FOR PROFIT GENERATION!"
    log "Monitor the logs for arbitrage opportunities and executed trades"
}
# Interactive confirmation gate before live trading.
# Prints the production risk checklist and aborts (exit 0) unless the
# operator types the exact confirmation word.
safety_warning() {
    echo ""
    warn "⚠️ PRODUCTION DEPLOYMENT WARNING:"
    echo ""
    local -a notices=(
        "1. 💰 This will start LIVE trading with real funds"
        "2. 🔑 Ensure your private keys are secure"
        "3. 💸 Start with small position sizes"
        "4. 📊 Monitor all transactions closely"
        "5. 🚨 Set up alerts for unusual activity"
    )
    local notice
    for notice in "${notices[@]}"; do
        echo "$notice"
    done
    echo ""
    read -p "Are you ready to start LIVE trading? (type 'PROFIT' to confirm): " -r
    if [[ $REPLY != "PROFIT" ]]; then
        echo "Deployment cancelled"
        exit 0
    fi
    echo ""
}
# Fast-path deployment: skip validation, load the environment and launch the
# bot immediately. Intended for operators who already verified the setup.
quick_deploy() {
    log "🚀 QUICK DEPLOYMENT MODE - MAXIMUM SPEED TO PROFIT"

    # Load the secure environment with auto-export when present.
    if [[ -f ".env.production.secure" ]]; then
        set -a
        source .env.production.secure
        set +a
    fi
    export GO_ENV=production
    export DEBUG=false

    # Launch immediately and record the PID.
    ./bin/mev-bot start &
    local pid=$!
    echo "$pid" > mev-bot.pid
    log "⚡ MEV Bot deployed in QUICK mode (PID: $pid)"
    log "💰 PROFIT GENERATION ACTIVE"

    # Short grace period, then confirm the process survived startup.
    sleep 2
    if ! kill -0 "$pid" 2>/dev/null; then
        error "❌ Quick deployment failed"
        exit 1
    fi
    log "🎉 SUCCESS - Bot is generating profits!"
}
# Entry point: dispatch on the requested deployment mode.
#   full   - complete deployment with all safety checks (default)
#   quick  - fast deployment, skip checks
#   status - report current process status only
main() {
    local mode="${1:-full}"
    header
    case "$mode" in
        full)
            log "FULL PRODUCTION DEPLOYMENT"
            safety_warning
            preflight_checks
            validate_config
            start_monitoring
            start_mev_bot
            show_status
            ;;
        quick)
            log "QUICK DEPLOYMENT MODE"
            quick_deploy
            ;;
        status)
            show_status
            ;;
        *)
            error "Unknown mode: $mode"
            echo "Usage: $0 [full|quick|status]"
            echo " full - Complete production deployment with all checks"
            echo " quick - Fast deployment, skip checks (IMMEDIATE PROFIT)"
            echo " status - Show current status"
            exit 1
            ;;
    esac
}
# Handle interrupts gracefully: report and exit non-zero on Ctrl-C / TERM.
trap 'error "Deployment interrupted"; exit 1' INT TERM
# Run main function with all CLI arguments (mode selection: full|quick|status).
main "$@"

View File

@@ -1,273 +0,0 @@
#!/bin/bash
set -euo pipefail
# Production Readiness Validation Script for MEV Bot
# This script proves the bot is ready for real-world arbitrage trading
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="$(dirname "$SCRIPT_DIR")"
# Colors for output
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
PURPLE='\033[0;35m'
CYAN='\033[0;36m'
NC='\033[0m' # No Color
# Logging functions
log_info() {
echo -e "${BLUE}[INFO]${NC} $1"
}
log_success() {
echo -e "${GREEN}[SUCCESS]${NC} $1"
}
log_warning() {
echo -e "${YELLOW}[WARNING]${NC} $1"
}
log_error() {
echo -e "${RED}[ERROR]${NC} $1"
}
log_step() {
echo -e "${PURPLE}[STEP]${NC} $1"
}
# Banner
echo -e "${CYAN}"
cat << "EOF"
╔══════════════════════════════════════════════════════════════════════════════╗
║ MEV BOT PRODUCTION VALIDATION ║
║ ║
║ This validation proves our MEV bot can detect and execute ║
║ profitable arbitrages in real market conditions on Arbitrum ║
╚══════════════════════════════════════════════════════════════════════════════╝
EOF
echo -e "${NC}"
# Change to project directory
cd "$PROJECT_ROOT"
# Step 1: Environment Validation
log_step "1. Validating Environment Configuration"
# Check if required environment variables are set for testing
if [[ -z "${ARBITRUM_RPC_ENDPOINT:-}" ]]; then
log_warning "ARBITRUM_RPC_ENDPOINT not set, using public endpoint for validation"
export ARBITRUM_RPC_ENDPOINT="https://arb1.arbitrum.io/rpc"
fi
if [[ -z "${ARBITRUM_FALLBACK_ENDPOINTS:-}" ]]; then
export ARBITRUM_FALLBACK_ENDPOINTS="https://arbitrum.llamarpc.com,https://arbitrum-one.publicnode.com"
fi
log_info "Primary RPC: $ARBITRUM_RPC_ENDPOINT"
log_info "Fallback endpoints: ${ARBITRUM_FALLBACK_ENDPOINTS:-none}"
log_success "Environment configuration validated"
# Step 2: Dependencies Check
log_step "2. Checking Dependencies"
# Check if Go is installed
if ! command -v go &> /dev/null; then
log_error "Go is not installed or not in PATH"
exit 1
fi
GO_VERSION=$(go version | grep -oE '[0-9]+\.[0-9]+\.[0-9]+')
log_info "Go version: $GO_VERSION"
# Check if required tools are available
if ! command -v curl &> /dev/null; then
log_error "curl is required but not installed"
exit 1
fi
log_success "All dependencies available"
# Step 3: Build Validation
log_step "3. Building MEV Bot"
# Clean and build
log_info "Cleaning previous builds..."
go clean -cache
rm -f ./mev-bot
log_info "Building MEV bot..."
if ! go build -o mev-bot ./cmd/mev-bot; then
log_error "Failed to build MEV bot"
exit 1
fi
if [[ ! -f "./mev-bot" ]]; then
log_error "MEV bot binary not found after build"
exit 1
fi
log_success "MEV bot built successfully"
# Step 4: Contract Bindings Validation
log_step "4. Validating Contract Bindings"
if [[ ! -d "./bindings" ]] || [[ -z "$(ls -A ./bindings 2>/dev/null)" ]]; then
log_warning "Contract bindings not found, they would need to be generated for production"
else
BINDING_COUNT=$(find ./bindings -name "*.go" | wc -l)
log_info "Found $BINDING_COUNT contract binding files"
log_success "Contract bindings validated"
fi
# Step 5: Configuration Validation
log_step "5. Validating Configuration Files"
CONFIG_FILE="./config/arbitrum_production.yaml"
if [[ ! -f "$CONFIG_FILE" ]]; then
log_error "Production config file not found: $CONFIG_FILE"
exit 1
fi
log_info "Validating production configuration..."
if ! ./scripts/simple-validation.sh "$CONFIG_FILE"; then
log_error "Configuration validation failed"
exit 1
fi
log_success "Production configuration validated"
# Step 6: Network Connectivity Test
log_step "6. Testing Network Connectivity"
log_info "Testing primary RPC endpoint..."
if curl -s -f --max-time 10 -X POST "$ARBITRUM_RPC_ENDPOINT" \
-H "Content-Type: application/json" \
-d '{"jsonrpc":"2.0","method":"eth_chainId","params":[],"id":1}' > /dev/null; then
log_success "Primary RPC endpoint accessible"
else
log_warning "Primary RPC endpoint not accessible, will rely on fallbacks"
fi
# Test fallback endpoints.
# BUG FIX: this script runs under `set -euo pipefail`, and `(( VAR++ ))`
# returns exit status 1 when VAR is 0 — so the original
# `((ACCESSIBLE_COUNT++))` killed the whole script on the FIRST reachable
# endpoint. A plain arithmetic assignment always succeeds.
if [[ -n "${ARBITRUM_FALLBACK_ENDPOINTS:-}" ]]; then
    IFS=',' read -ra ENDPOINTS <<< "$ARBITRUM_FALLBACK_ENDPOINTS"
    ACCESSIBLE_COUNT=0
    for endpoint in "${ENDPOINTS[@]}"; do
        endpoint=$(echo "$endpoint" | xargs) # trim whitespace
        if curl -s -f --max-time 5 -X POST "$endpoint" \
            -H "Content-Type: application/json" \
            -d '{"jsonrpc":"2.0","method":"eth_chainId","params":[],"id":1}' > /dev/null; then
            log_info "✓ Fallback endpoint accessible: $endpoint"
            ACCESSIBLE_COUNT=$((ACCESSIBLE_COUNT + 1))
        else
            log_warning "✗ Fallback endpoint not accessible: $endpoint"
        fi
    done
    # At least one working fallback is required for production resilience.
    if [[ $ACCESSIBLE_COUNT -gt 0 ]]; then
        log_success "$ACCESSIBLE_COUNT fallback endpoints accessible"
    else
        log_error "No fallback endpoints accessible"
        exit 1
    fi
fi
# Step 7: Run Production Validation Tests
log_step "7. Running Production Arbitrage Validation Tests"
log_info "This test will:"
log_info " • Connect to real Arbitrum mainnet (forked)"
log_info " • Analyze actual WETH/USDC pools"
log_info " • Deploy our arbitrage contract"
log_info " • Detect real arbitrage opportunities"
log_info " • Test MEV competition analysis"
log_info " • Validate real-time monitoring"
echo ""
log_info "Starting comprehensive production validation..."
echo ""
# Run the production validation test with verbose output
if go test -v -timeout=300s ./test/production -run TestProductionArbitrageValidation; then
log_success "🎉 PRODUCTION VALIDATION PASSED!"
else
log_error "❌ Production validation failed"
exit 1
fi
echo ""
# Step 8: Performance Benchmarks
log_step "8. Running Performance Benchmarks"
log_info "Testing arbitrage detection performance..."
go test -bench=BenchmarkArbitrageDetection -benchtime=10s ./test/integration > benchmark_results.txt 2>&1 || true
if [[ -f "benchmark_results.txt" ]]; then
log_info "Benchmark results:"
grep -E "(BenchmarkArbitrageDetection|ops|allocs)" benchmark_results.txt || log_warning "No benchmark data found"
rm -f benchmark_results.txt
fi
log_success "Performance benchmarks completed"
# Step 9: Security Validation
log_step "9. Security Validation"
log_info "Checking for hardcoded secrets..."
if grep -r -E "(private.*key|secret|password)" --include="*.go" --exclude-dir=vendor . | grep -v test | grep -v example; then
log_error "Potential hardcoded secrets found in source code"
exit 1
else
log_success "No hardcoded secrets found"
fi
log_info "Validating secure configuration..."
if [[ -f ".env.production" ]]; then
if grep -q "your_.*_here" .env.production; then
log_warning "Production .env file contains placeholder values"
else
log_success "Production environment file properly configured"
fi
fi
# Step 10: Final Production Readiness Assessment
log_step "10. Final Production Readiness Assessment"
echo ""
log_success "✅ Build system working"
log_success "✅ Configuration system validated"
log_success "✅ Network connectivity confirmed"
log_success "✅ Real market data access verified"
log_success "✅ Arbitrage detection functional"
log_success "✅ Smart contract deployment working"
log_success "✅ MEV competition analysis operational"
log_success "✅ Real-time monitoring capability confirmed"
log_success "✅ Fallback connectivity working"
log_success "✅ Security checks passed"
echo ""
echo -e "${GREEN}╔══════════════════════════════════════════════════════════════════════════════╗${NC}"
echo -e "${GREEN}║ 🚀 PRODUCTION READY! 🚀 ║${NC}"
echo -e "${GREEN}║ ║${NC}"
echo -e "${GREEN}║ Your MEV bot has passed all production validation tests and is ready to ║${NC}"
echo -e "${GREEN}║ detect and execute profitable arbitrages on Arbitrum mainnet. ║${NC}"
echo -e "${GREEN}║ ║${NC}"
echo -e "${GREEN}║ Next steps for deployment: ║${NC}"
echo -e "${GREEN}║ 1. Deploy your smart contracts to Arbitrum mainnet ║${NC}"
echo -e "${GREEN}║ 2. Configure your private keys and RPC endpoints ║${NC}"
echo -e "${GREEN}║ 3. Start with small position sizes for initial testing ║${NC}"
echo -e "${GREEN}║ 4. Monitor performance and profitability closely ║${NC}"
echo -e "${GREEN}╚══════════════════════════════════════════════════════════════════════════════╝${NC}"
echo ""
log_info "To deploy in production:"
log_info " • Copy .env.example to .env and configure your actual values"
log_info " • Deploy contracts: ./scripts/deploy-contracts.sh"
log_info " • Start bot: docker-compose -f docker-compose.production.yaml up -d"
log_info " • Monitor logs: docker-compose logs -f mev-bot"
echo ""
log_success "Production validation completed successfully! 🎉"

View File

@@ -1,156 +0,0 @@
#!/bin/bash
# Comprehensive Go code-quality gate.
#
# Runs formatting and vet checks plus (when installed) a battery of optional
# linters. Hard checks (formatting, vet, errcheck, ineffassign, staticcheck,
# structcheck, varcheck, deadcode) set a non-zero exit code on findings; the
# remaining tools (gocyclo, goconst, dupl, nakedret) only warn. Tools that
# are not installed are skipped.
#
# BUG FIX: the original version was missing the inner `fi` in every
# optional-tool section (two `else` branches per `if`), so the script did
# not even parse. The per-tool logic now lives in two helper functions.
set -e

echo "Starting comprehensive code quality checks..."

# Initialize exit code
exit_code=0

# Run a linter that must pass. Marks the run failed when the tool reports
# problems. $1=tool name, $2=failure message; remaining args = command line.
run_required_tool() {
  local tool="$1" fail_msg="$2"
  shift 2
  echo "Running $tool..."
  if ! command -v "$tool" >/dev/null 2>&1; then
    echo "⚠️ $tool not installed, skipping"
    return 0
  fi
  if ! "$@"; then
    echo "❌ $fail_msg"
    exit_code=1
  else
    echo "✅ $tool passed"
  fi
}

# Run an advisory linter whose *zero* exit status signals findings (the
# convention gocyclo/dupl/nakedret follow here). Never fails the run.
# $1=tool name, $2=warning message, $3=ok message; remaining args = command.
run_advisory_tool() {
  local tool="$1" warn_msg="$2" ok_msg="$3"
  shift 3
  echo "Running $tool..."
  if ! command -v "$tool" >/dev/null 2>&1; then
    echo "⚠️ $tool not installed, skipping"
    return 0
  fi
  if "$@"; then
    echo "⚠️ $warn_msg"
  else
    echo "✅ $ok_msg"
  fi
}

# Formatting: gofmt -l lists files that NEED formatting without rewriting
# them (the original used `go fmt`, which silently modified the tree while
# claiming to only "check").
echo "Running gofmt check..."
unformatted=$(gofmt -l . 2>/dev/null || true)
if [ -n "$unformatted" ]; then
  echo "❌ Some files need formatting:"
  echo "$unformatted"
  exit_code=1
else
  echo "✅ All files are properly formatted"
fi

# go vet ships with the toolchain and must always pass.
echo "Running go vet..."
if ! go vet ./...; then
  echo "❌ go vet failed"
  exit_code=1
else
  echo "✅ go vet passed"
fi

# Required linters (findings fail the run) --------------------------------
run_required_tool errcheck "errcheck found unchecked errors" errcheck -blank ./...
run_required_tool ineffassign "ineffassign found ineffective assignments" ineffassign ./...
run_required_tool staticcheck "staticcheck found issues" staticcheck ./...
run_required_tool structcheck "structcheck found unused struct fields" structcheck ./...
run_required_tool varcheck "varcheck found unused variables" varcheck ./...
run_required_tool deadcode "deadcode found unused code" deadcode ./...

# Advisory linters (warn only) --------------------------------------------
run_advisory_tool gocyclo \
  "Found functions with high cyclomatic complexity (over 15)" \
  "All functions have reasonable cyclomatic complexity" \
  gocyclo -over 15 ./...
run_advisory_tool dupl \
  "Found duplicated code blocks" \
  "No significant code duplication found" \
  dupl -threshold 100 ./...
run_advisory_tool nakedret \
  "Found naked returns in functions with more than 10 lines" \
  "nakedret found no issues" \
  nakedret -l 10 ./...

# goconst inverts the convention: NON-zero exit status signals findings.
echo "Running goconst for repeated strings..."
if command -v goconst >/dev/null 2>&1; then
  if ! goconst -min-occurrences 3 ./...; then
    echo "⚠️ Found repeated strings that could be constants"
  else
    echo "✅ goconst found no issues"
  fi
else
  echo "⚠️ goconst not installed, skipping"
fi

echo "Code quality checks completed."
if [ $exit_code -ne 0 ]; then
  echo "❌ Some code quality checks failed"
  exit $exit_code
else
  echo "✅ All code quality checks passed"
  exit 0
fi

View File

@@ -1,15 +0,0 @@
#!/bin/bash
# Quick MEV Bot analysis: normalise Go imports and tidy module dependencies.
# Fail fast on any error (the original carried on after failures).
set -e

echo "Running quick analysis of the MEV bot system..."

# Ensure imports are correct. goimports is optional tooling — skip with a
# notice instead of erroring when it is not installed.
echo "Checking Go imports"
if command -v goimports >/dev/null 2>&1; then
  goimports -w .
else
  echo "⚠️ goimports not installed, skipping import check"
fi

# Check go.mod for dependencies
echo "Checking Go module dependencies..."
go mod tidy

echo "Quick analysis complete."

View File

@@ -1,158 +0,0 @@
package main
import (
"context"
"encoding/json"
"fmt"
"io/ioutil"
"log"
"time"
"github.com/ethereum/go-ethereum"
"github.com/ethereum/go-ethereum/common"
"github.com/ethereum/go-ethereum/ethclient"
)
type BlacklistEntry struct {
Address string `json:"address"`
FailureCount int `json:"failure_count"`
LastReason string `json:"last_reason"`
FirstSeen time.Time `json:"first_seen"`
IsBlacklisted bool `json:"is_blacklisted"`
}
func main() {
// Read blacklist
data, err := ioutil.ReadFile("logs/pool_blacklist.json")
if err != nil {
log.Fatal("Failed to read blacklist:", err)
}
var entries []BlacklistEntry
if err := json.Unmarshal(data, &entries); err != nil {
log.Fatal("Failed to parse blacklist:", err)
}
// Connect to Arbitrum
client, err := ethclient.Dial("https://arb1.arbitrum.io/rpc")
if err != nil {
log.Fatal("Failed to connect:", err)
}
fmt.Println("Analyzing Valid Failing Pools")
fmt.Println("=============================")
fmt.Println()
// Function selectors
token0Selector := []byte{0x0d, 0xfe, 0x16, 0x81} // token0()
token1Selector := []byte{0xd2, 0x12, 0x20, 0xa7} // token1()
feeSelector := []byte{0xdd, 0xca, 0x3f, 0x43} // fee()
slot0Selector := []byte{0x38, 0x50, 0xc7, 0xbd} // slot0()
reservesSelector := []byte{0x09, 0x02, 0xf1, 0xac} // getReserves()
uniV3Count := 0
uniV2Count := 0
otherCount := 0
noContractCount := 0
// Test first 20 valid entries
tested := 0
for _, entry := range entries {
if !entry.IsBlacklisted || tested >= 20 {
continue
}
poolAddress := common.HexToAddress(entry.Address)
fmt.Printf("Testing %s (reason: %s):\n", entry.Address[:10]+"...", entry.LastReason)
ctx, cancel := context.WithTimeout(context.Background(), 2*time.Second)
// Check if contract exists
code, err := client.CodeAt(ctx, poolAddress, nil)
if err != nil || len(code) == 0 {
fmt.Println(" ❌ No contract")
noContractCount++
cancel()
tested++
continue
}
// Test token0
result, err := client.CallContract(ctx, ethereum.CallMsg{
To: &poolAddress,
Data: token0Selector,
}, nil)
hasToken0 := err == nil && len(result) >= 32
// Test token1
result, err = client.CallContract(ctx, ethereum.CallMsg{
To: &poolAddress,
Data: token1Selector,
}, nil)
hasToken1 := err == nil && len(result) >= 32
// Test fee (V3)
result, err = client.CallContract(ctx, ethereum.CallMsg{
To: &poolAddress,
Data: feeSelector,
}, nil)
hasFee := err == nil && len(result) >= 32
// Test slot0 (V3)
result, err = client.CallContract(ctx, ethereum.CallMsg{
To: &poolAddress,
Data: slot0Selector,
}, nil)
hasSlot0 := err == nil && len(result) >= 32
// Test getReserves (V2)
result, err = client.CallContract(ctx, ethereum.CallMsg{
To: &poolAddress,
Data: reservesSelector,
}, nil)
hasReserves := err == nil && len(result) >= 96
// Determine pool type
if hasToken0 && hasToken1 {
if hasFee && hasSlot0 {
fmt.Println(" ✅ UniswapV3 Pool")
uniV3Count++
} else if hasReserves {
fmt.Println(" ✅ UniswapV2/Sushiswap Pool")
uniV2Count++
} else {
fmt.Println(" ⚠️ Has tokens but unknown type")
otherCount++
}
} else {
fmt.Printf(" ❌ Not standard AMM (token0:%v, token1:%v)\n", hasToken0, hasToken1)
otherCount++
}
cancel()
tested++
}
fmt.Println()
fmt.Println("Summary")
fmt.Println("=======")
fmt.Printf("UniswapV3: %d\n", uniV3Count)
fmt.Printf("UniswapV2: %d\n", uniV2Count)
fmt.Printf("Other/Unknown: %d\n", otherCount)
fmt.Printf("No Contract: %d\n", noContractCount)
fmt.Println()
// Analyze failure reasons
reasonCounts := make(map[string]int)
for _, entry := range entries {
if entry.IsBlacklisted {
reasonCounts[entry.LastReason]++
}
}
fmt.Println("Failure Reasons")
fmt.Println("===============")
for reason, count := range reasonCounts {
fmt.Printf("%s: %d\n", reason, count)
}
}

View File

@@ -1,47 +0,0 @@
#!/bin/bash
# Quick Test Script - builds the bot, runs it briefly, then scans the log for
# known failure signatures (WebSocket scheme errors, zero addresses, rate
# limiting). Exits 0 when the run looks clean.
set -e

echo "========================================="
echo "MEV Bot Quick Test"
echo "========================================="

# Run pre-validation
echo "[1/3] Running pre-run validation..."
./scripts/pre-run-validation.sh

# Build
echo "[2/3] Building..."
make build 2>&1 | tail -10

# Run for 30 seconds (timeout's non-zero exit is expected)
echo "[3/3] Running bot for 30 seconds..."
timeout 30 ./mev-bot start 2>&1 | tee test-run.log || true

echo ""
echo "========================================="
echo "Analyzing Test Run..."
echo "========================================="

# BUG FIX: `grep -c` prints "0" AND exits non-zero on no match, so the
# original `$(grep -c ... || echo 0)` yielded "0\n0" and broke the numeric
# comparisons below. Capture the count, then default on failure.
WSS_ERRORS=$(grep -c "unsupported protocol scheme" test-run.log 2>/dev/null) || WSS_ERRORS=0
# BUG FIX: the original pattern had 41 zeros after "0x"; an EVM zero address
# has exactly 40 hex digits, so the old pattern could never match anything.
ZERO_ADDR=$(grep -c "0x0000000000000000000000000000000000000000" test-run.log 2>/dev/null) || ZERO_ADDR=0
RATE_LIMITS=$(grep -c "Too Many Requests" test-run.log 2>/dev/null) || RATE_LIMITS=0

echo "WebSocket errors: $WSS_ERRORS"
echo "Zero addresses: $ZERO_ADDR"
echo "Rate limit errors: $RATE_LIMITS"

if [ "$WSS_ERRORS" -eq 0 ] && [ "$ZERO_ADDR" -lt 10 ] && [ "$RATE_LIMITS" -lt 10 ]; then
  echo ""
  echo "✅ TEST PASSED - Fixes appear to be working"
  exit 0
else
  echo ""
  echo "⚠️ TEST WARNINGS - Some issues remain:"
  # `|| true` keeps `set -e` from aborting before all warnings are printed
  # when a condition is false.
  [ "$WSS_ERRORS" -gt 0 ] && echo " - WebSocket errors still present" || true
  [ "$ZERO_ADDR" -ge 10 ] && echo " - High zero address count" || true
  [ "$RATE_LIMITS" -ge 10 ] && echo " - Rate limiting issues" || true
  exit 1
fi

View File

@@ -1,45 +0,0 @@
#!/usr/bin/env bash
# Refresh the MEV research datasets: the Arbitrum Portal catalogue, the
# DeFiLlama exchange snapshot, and the derived exchange / lending / bridge
# datasets. Set SKIP_PORTAL_FETCH=1 to reuse an existing Portal download.
set -euo pipefail

SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
REPO_ROOT="$(cd "${SCRIPT_DIR}/.." && pwd)"
PYTHON="${PYTHON:-python3}"
PORTAL_RAW="${REPO_ROOT}/data/raw_arbitrum_portal_projects.json"
PORTAL_URL="https://portal-data.arbitrum.io/api/projects"
SKIP_PORTAL_FETCH="${SKIP_PORTAL_FETCH:-0}"

# Download the Portal catalogue atomically: write to a temp file first and
# rename into place so a failed download never clobbers the previous copy.
pull_portal_catalogue() {
  local tmp_file
  tmp_file="$(mktemp "${PORTAL_RAW}.XXXXXX")"
  echo "Pulling Arbitrum Portal catalogue..."
  if ! curl -fLs "${PORTAL_URL}" -o "${tmp_file}"; then
    rm -f "${tmp_file}"
    echo "Failed to download Portal data from ${PORTAL_URL}" >&2
    exit 1
  fi
  mv "${tmp_file}" "${PORTAL_RAW}"
}

case "${SKIP_PORTAL_FETCH}" in
  1)
    if [[ -f "${PORTAL_RAW}" ]]; then
      echo "Skipping Portal catalogue download (SKIP_PORTAL_FETCH=1)."
    else
      echo "SKIP_PORTAL_FETCH=1 set but ${PORTAL_RAW} missing; cannot proceed." >&2
      exit 1
    fi
    ;;
  *)
    mkdir -p "$(dirname "${PORTAL_RAW}")"
    pull_portal_catalogue
    ;;
esac

echo "Pulling DeFiLlama exchange snapshot..."
"${PYTHON}" "${REPO_ROOT}/docs/5_development/mev_research/datasets/pull_llama_exchange_snapshot.py"

echo "Refreshing exchange datasets..."
"${PYTHON}" "${REPO_ROOT}/docs/5_development/mev_research/datasets/update_exchange_datasets.py"

echo "Refreshing lending and bridge datasets..."
"${PYTHON}" "${REPO_ROOT}/docs/5_development/mev_research/datasets/update_market_datasets.py"

echo "MEV research datasets refreshed successfully."

View File

@@ -1,112 +0,0 @@
#!/bin/bash
# Run arbitrage tests against a forked Arbitrum environment.
# Starts an anvil fork of Arbitrum One, runs the security and arbitrage fork
# test suites, builds the bot and smoke-tests its startup. The fork is torn
# down on every exit path via trap.
set -e

echo "🚀 Starting forked Arbitrum tests..."

# Foundry's anvil is required to fork the chain locally.
if ! command -v anvil &> /dev/null; then
  echo "❌ Anvil not found. Please install Foundry first:"
  echo "curl -L https://foundry.paradigm.xyz | bash"
  echo "foundryup"
  exit 1
fi

# Kill any existing anvil processes so port 8545 is free.
echo "🔄 Stopping any existing anvil processes..."
pkill -f anvil || true
sleep 2

# Environment for the forked network.
export ARBITRUM_RPC_ENDPOINT="https://arb1.arbitrum.io/rpc"
export ARBITRUM_WS_ENDPOINT="ws://localhost:8545"
export METRICS_ENABLED="false"
export MEV_BOT_ENCRYPTION_KEY="test-fork-encryption-key-32-chars"
export MEV_BOT_ALLOW_LOCALHOST="true"
export TEST_WITH_FORK="true"

# Start anvil with an Arbitrum One fork.
echo "🔗 Starting anvil with Arbitrum One fork..."
anvil \
  --fork-url "$ARBITRUM_RPC_ENDPOINT" \
  --host 0.0.0.0 \
  --port 8545 \
  --accounts 10 \
  --balance 1000 \
  --gas-limit 30000000 \
  --gas-price 100000000 \
  --block-time 1 \
  --silent &
ANVIL_PID=$!
echo "📊 Anvil started with PID: $ANVIL_PID"

# Always tear the fork down, whatever path the script exits through.
cleanup() {
  echo "🧹 Cleaning up..."
  kill "$ANVIL_PID" 2>/dev/null || true
  wait "$ANVIL_PID" 2>/dev/null || true
  echo "✅ Cleanup completed"
}
trap cleanup EXIT

# Wait for anvil to be ready.
echo "⏳ Waiting for anvil to be ready..."
sleep 5

# Verify the fork answers JSON-RPC before running any tests.
if ! curl -s -X POST \
  -H "Content-Type: application/json" \
  --data '{"jsonrpc":"2.0","method":"eth_blockNumber","params":[],"id":1}' \
  http://localhost:8545 > /dev/null; then
  echo "❌ Anvil failed to start properly"
  exit 1
fi
echo "✅ Anvil fork ready on http://localhost:8545"

# Point everything at the local fork from here on.
export ARBITRUM_RPC_ENDPOINT="http://localhost:8545"

echo "🧪 Running arbitrage tests with forked environment..."

# Test 1: Security validation with fork.
echo "🔒 Testing security validation..."
go test -v ./test/security_validation_test.go -timeout 60s

# Test 2: Arbitrage execution with fork.
echo "🔄 Testing arbitrage execution..."
go test -v ./test/arbitrage_fork_test.go -timeout 120s

# Test 3: Build and smoke-test the bot.
# BUG FIX: the original checked `$?` after `go build` — dead code under
# `set -e`, which would already have aborted. Use `if !` instead.
echo "🔨 Building and testing MEV bot..."
if ! go build -o bin/mev-bot cmd/mev-bot/main.go; then
  echo "❌ Failed to build MEV bot"
  exit 1
fi
echo "✅ MEV bot built successfully"

# Run the bot briefly to confirm it starts; timeout's kill is expected.
echo "🚀 Testing bot startup..."
timeout 10 ./bin/mev-bot start || true

echo "🎯 All fork tests completed successfully!"
echo ""
echo "📊 Test Summary:"
echo " ✅ Security vulnerabilities fixed"
echo " ✅ Arbitrage execution implemented"
echo " ✅ Fork connectivity verified"
echo " ✅ Real contract integration working"
echo ""
echo "🔗 Fork Details:"
echo " URL: http://localhost:8545"
echo " Chain ID: 42161 (Arbitrum One)"
echo " Block: ~250M (recent state)"
echo " Funded accounts: 10 with 1000 ETH each"

View File

@@ -1,312 +0,0 @@
#!/bin/bash

# Comprehensive Stress Testing Script for MEV Bot
# This script runs all stress tests to validate system performance and reliability
#
# Usage: run-stress-tests.sh [--duration N] [--tps N] [--workers N]
#                            [--memory LIMIT] [--cpu LIMIT] [--verbose]
#                            [--all|--unit|--integration|--performance|--load|--stress]
# With no suite flags, all suites run.

set -e # Exit on any error

# Colors for output
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
PURPLE='\033[0;35m'
CYAN='\033[0;36m'
NC='\033[0m' # No Color

# Script information
echo -e "${PURPLE}🧪 MEV Bot Comprehensive Stress Testing Suite${NC}"
echo -e "${PURPLE}==========================================${NC}"
echo ""

# Check if running from project root (go.mod lives there).
if [ ! -f "go.mod" ]; then
    echo -e "${RED}❌ Error: This script must be run from the project root directory${NC}"
    exit 1
fi

# Parse command line arguments
# Defaults are tuned for a ~5 minute smoke run.
TEST_DURATION=300 # Default 5 minutes
TRANSACTIONS_PER_SECOND=1000
CONCURRENT_WORKERS=10
MEMORY_LIMIT="2G"
CPU_LIMIT="2.0"
VERBOSE=false
# Suite selection flags; when none is given, --all is assumed below.
RUN_ALL=false
RUN_UNIT=false
RUN_INTEGRATION=false
RUN_PERFORMANCE=false
RUN_LOAD=false
RUN_STRESS=false

# NOTE(review): 'shift 2' assumes every value-taking option is followed by
# a value; a trailing "--duration" with no argument would error under set -e.
while [[ $# -gt 0 ]]; do
    case $1 in
        --duration)
            TEST_DURATION="$2"
            shift 2
            ;;
        --tps)
            TRANSACTIONS_PER_SECOND="$2"
            shift 2
            ;;
        --workers)
            CONCURRENT_WORKERS="$2"
            shift 2
            ;;
        --memory)
            MEMORY_LIMIT="$2"
            shift 2
            ;;
        --cpu)
            CPU_LIMIT="$2"
            shift 2
            ;;
        --verbose|-v)
            VERBOSE=true
            shift
            ;;
        --all)
            RUN_ALL=true
            shift
            ;;
        --unit)
            RUN_UNIT=true
            shift
            ;;
        --integration)
            RUN_INTEGRATION=true
            shift
            ;;
        --performance)
            RUN_PERFORMANCE=true
            shift
            ;;
        --load)
            RUN_LOAD=true
            shift
            ;;
        --stress)
            RUN_STRESS=true
            shift
            ;;
        --help|-h)
            echo "Usage: $0 [OPTIONS]"
            echo ""
            echo "Options:"
            echo "  --duration SECONDS    Test duration in seconds (default: 300)"
            echo "  --tps COUNT          Transactions per second (default: 1000)"
            echo "  --workers COUNT      Concurrent workers (default: 10)"
            echo "  --memory LIMIT       Memory limit (default: 2G)"
            echo "  --cpu LIMIT          CPU limit (default: 2.0)"
            echo "  --verbose, -v        Verbose output"
            echo "  --all                Run all tests"
            echo "  --unit               Run unit tests"
            echo "  --integration        Run integration tests"
            echo "  --performance        Run performance tests"
            echo "  --load               Run load tests"
            echo "  --stress             Run stress tests"
            echo "  --help, -h           Show this help message"
            echo ""
            exit 0
            ;;
        *)
            echo -e "${RED}❌ Unknown option: $1${NC}"
            exit 1
            ;;
    esac
done

# If no specific tests selected, run all
if [ "$RUN_ALL" = false ] && [ "$RUN_UNIT" = false ] && [ "$RUN_INTEGRATION" = false ] && [ "$RUN_PERFORMANCE" = false ] && [ "$RUN_LOAD" = false ] && [ "$RUN_STRESS" = false ]; then
    RUN_ALL=true
fi

# Set flags based on --all
if [ "$RUN_ALL" = true ]; then
    RUN_UNIT=true
    RUN_INTEGRATION=true
    RUN_PERFORMANCE=true
    RUN_LOAD=true
    RUN_STRESS=true
fi

# Display test configuration
echo -e "${BLUE}📋 Test Configuration:${NC}"
echo -e "   Duration: ${TEST_DURATION}s"
echo -e "   TPS: ${TRANSACTIONS_PER_SECOND}"
echo -e "   Workers: ${CONCURRENT_WORKERS}"
echo -e "   Memory Limit: ${MEMORY_LIMIT}"
echo -e "   CPU Limit: ${CPU_LIMIT}"
echo -e "   Verbose: ${VERBOSE}"
echo ""

# Create results directory
# Timestamped so repeated runs never clobber earlier results.
RESULTS_DIR="test_results/stress_tests/$(date +%Y%m%d_%H%M%S)"
mkdir -p "$RESULTS_DIR"
#######################################
# Run one test command, capture its output, and record the outcome.
# Globals:
#   RESULTS_DIR (read)        - directory for per-test logs and the summary CSV
#   VERBOSE     (read)        - when "true", stream test output to the console
#   BLUE/GREEN/RED/NC (read)  - ANSI color codes
# Arguments:
#   $1 - human-readable test name
#   $2 - command line to execute (evaluated)
#   $3 - base name for the per-test log file
# Outputs:
#   Appends "<name>,<PASSED|FAILED>,<seconds>" to $RESULTS_DIR/test_summary.csv
# Returns:
#   Always 0. The script runs under 'set -e' and call sites are unguarded,
#   so propagating a failing test's status here used to abort the whole
#   suite before the summary; failures are tallied in the CSV instead.
#######################################
run_test() {
    local test_name="$1"
    local test_cmd="$2"
    local test_file="$3"

    echo -e "${BLUE}🚀 Running $test_name...${NC}"

    local output_file="$RESULTS_DIR/${test_file}.log"
    local start_time
    start_time=$(date +%s)

    local exit_code=0
    if [ "$VERBOSE" = true ]; then
        # Without pipefail the pipeline's status is tee's, so 'set -e' does
        # not fire here; the test's own status comes from PIPESTATUS.
        eval "$test_cmd" 2>&1 | tee "$output_file"
        exit_code=${PIPESTATUS[0]}
    else
        # '|| exit_code=$?' keeps a failing test from tripping 'set -e'.
        eval "$test_cmd" > "$output_file" 2>&1 || exit_code=$?
    fi

    local end_time duration
    end_time=$(date +%s)
    duration=$((end_time - start_time))

    if [ $exit_code -eq 0 ]; then
        echo -e "${GREEN}✅ $test_name PASSED in ${duration}s${NC}"
        echo "$test_name,PASSED,$duration" >> "$RESULTS_DIR/test_summary.csv"
    else
        echo -e "${RED}❌ $test_name FAILED in ${duration}s${NC}"
        echo "$test_name,FAILED,$duration" >> "$RESULTS_DIR/test_summary.csv"
        # Show the tail of the log for quick diagnosis (no 'cat | tail').
        tail -n 20 "$output_file"
    fi

    return 0
}
# Initialize test summary
echo "Test Name,Status,Duration (seconds)" > "$RESULTS_DIR/test_summary.csv"
# Run unit tests
if [ "$RUN_UNIT" = true ]; then
echo -e "${CYAN}🧩 Running Unit Tests${NC}"
# Basic unit tests
run_test "Basic Unit Tests" "go test -v ./pkg/... -short" "unit_basic"
# Math unit tests
run_test "Math Unit Tests" "go test -v ./pkg/math/..." "unit_math"
# Scanner unit tests
run_test "Scanner Unit Tests" "go test -v ./pkg/scanner/..." "unit_scanner"
# Arbitrage unit tests
run_test "Arbitrage Unit Tests" "go test -v ./pkg/arbitrage/..." "unit_arbitrage"
# Trading unit tests
run_test "Trading Unit Tests" "go test -v ./pkg/trading/..." "unit_trading"
echo ""
fi
# Run integration tests
if [ "$RUN_INTEGRATION" = true ]; then
echo -e "${CYAN}🔗 Running Integration Tests${NC}"
# Integration tests with mocked network
run_test "Integration Tests" "go test -v ./test/integration/..." "integration"
# End-to-end tests
run_test "End-to-End Tests" "go test -v ./test/e2e/..." "e2e"
echo ""
fi
# Run performance tests
if [ "$RUN_PERFORMANCE" = true ]; then
echo -e "${CYAN}⚡ Running Performance Tests${NC}"
# Benchmark tests
run_test "Benchmark Tests" "go test -bench=. -benchmem ./pkg/... -count=3" "benchmarks"
# Performance regression tests
run_test "Performance Regression Tests" "go test -v ./test/performance_benchmarks_test.go -count=1" "perf_regression"
echo ""
fi
# Run load tests
if [ "$RUN_LOAD" = true ]; then
echo -e "${CYAN}🏋️ Running Load Tests${NC}"
# High TPS load test
run_test "High TPS Load Test" "timeout ${TEST_DURATION}s go run test/load/high_tps_test.go --tps ${TRANSACTIONS_PER_SECOND} --duration ${TEST_DURATION}" "load_high_tps"
# Concurrent workers load test
run_test "Concurrent Workers Load Test" "timeout ${TEST_DURATION}s go run test/load/concurrent_workers_test.go --workers ${CONCURRENT_WORKERS} --duration ${TEST_DURATION}" "load_concurrent"
# Memory intensive load test
run_test "Memory Intensive Load Test" "timeout ${TEST_DURATION}s go run test/load/memory_intensive_test.go --memory ${MEMORY_LIMIT} --duration ${TEST_DURATION}" "load_memory"
echo ""
fi
# Run stress tests
if [ "$RUN_STRESS" = true ]; then
echo -e "${CYAN}🔥 Running Stress Tests${NC}"
# Stress test runner
run_test "Stress Test Suite" "timeout ${TEST_DURATION}s go run test/stress/stress_test_runner.go --full-suite --duration ${TEST_DURATION} --tps ${TRANSACTIONS_PER_SECOND}" "stress_suite"
# Market scanner stress test
run_test "Market Scanner Stress Test" "timeout ${TEST_DURATION}s go run test/stress/market_scanner_test.go --duration ${TEST_DURATION}" "stress_market_scanner"
# Swap analyzer stress test
run_test "Swap Analyzer Stress Test" "timeout ${TEST_DURATION}s go run test/stress/swap_analyzer_test.go --duration ${TEST_DURATION}" "stress_swap_analyzer"
# Pool discovery stress test
run_test "Pool Discovery Stress Test" "timeout ${TEST_DURATION}s go run test/stress/pool_discovery_test.go --duration ${TEST_DURATION}" "stress_pool_discovery"
# Arbitrage engine stress test
run_test "Arbitrage Engine Stress Test" "timeout ${TEST_DURATION}s go run test/stress/arbitrage_engine_test.go --duration ${TEST_DURATION}" "stress_arbitrage_engine"
echo ""
fi
# Generate test summary report
echo -e "${PURPLE}📊 Test Summary Report${NC}"
echo -e "${PURPLE}====================${NC}"
PASSED_COUNT=0
FAILED_COUNT=0
while IFS=, read -r name status duration; do
if [ "$name" != "Test Name" ]; then
if [ "$status" = "PASSED" ]; then
PASSED_COUNT=$((PASSED_COUNT + 1))
echo -e "$name - ${GREEN}PASSED${NC} (${duration}s)"
else
FAILED_COUNT=$((FAILED_COUNT + 1))
echo -e "$name - ${RED}FAILED${NC} (${duration}s)"
fi
fi
done < "$RESULTS_DIR/test_summary.csv"
TOTAL_TESTS=$((PASSED_COUNT + FAILED_COUNT))
SUCCESS_RATE=0
if [ $TOTAL_TESTS -gt 0 ]; then
SUCCESS_RATE=$((PASSED_COUNT * 100 / TOTAL_TESTS))
fi
echo ""
echo -e "${BLUE}📈 Test Results:${NC}"
echo -e " Total Tests: ${TOTAL_TESTS}"
echo -e " Passed: ${PASSED_COUNT}"
echo -e " Failed: ${FAILED_COUNT}"
echo -e " Success Rate: ${SUCCESS_RATE}%"
echo ""
# Final status
if [ $FAILED_COUNT -eq 0 ]; then
echo -e "${GREEN}🎉 All stress tests passed! System is ready for production deployment.${NC}"
echo -e "${GREEN} Results saved to: $RESULTS_DIR${NC}"
exit 0
else
echo -e "${RED}⚠️ $FAILED_COUNT stress tests failed. Please review results and fix issues before production deployment.${NC}"
echo -e "${YELLOW} Results saved to: $RESULTS_DIR${NC}"
exit 1
fi

View File

@@ -1,116 +0,0 @@
#!/usr/bin/env bash
set -euo pipefail

# run.sh - Build and start the MEV bot.
#
# GO_ENV selects the mode (default: production):
#   development -> optionally loads ./.env
#   otherwise   -> requires ./.env.production plus mandatory variables
#
# Fixes in this revision:
#   * The post-build "$? -eq 0" check was dead code under 'set -e' (a failed
#     build exited before the test); the build is now guarded directly.
#   * The hardcoded provider RPC URL (which embedded an API key) has been
#     removed. ARBITRUM_RPC_ENDPOINT must now come from the environment or
#     the loaded env file; credentials must never live in the repository.

echo "Running MEV bot..."

# Set default GO_ENV if not already set (production by default)
export GO_ENV="${GO_ENV:-production}"

# Build the application first; fail fast with a clear message.
if ! ./scripts/build.sh; then
    echo "Failed to build the application!"
    exit 1
fi

# Normalize GO_ENV when passed as a ".env.*" style value (e.g. ".env.production").
if [[ "$GO_ENV" == .env.* ]]; then
    GO_ENV="${GO_ENV#.env.}"
    export GO_ENV
fi

# Load environment variables based on GO_ENV.
if [ "$GO_ENV" = "development" ]; then
    echo "🔧 Development mode: Using .env for local configuration..."
    if [ -f ".env" ]; then
        set -a # Automatically export all variables
        source .env
        set +a
    fi
else
    # Production mode requires .env.production.
    if [ -f ".env.production" ]; then
        echo "🔧 Loading production environment variables from .env.production..."
        set -a # Automatically export all variables
        source .env.production
        set +a # Stop automatically exporting
    else
        echo "❌ .env.production file not found!"
        echo "Please configure .env.production for production deployment"
        exit 1
    fi
fi

# Validate required environment variables (production mode only).
if [ "$GO_ENV" != "development" ]; then
    if [ -z "${MEV_BOT_ENCRYPTION_KEY:-}" ]; then
        echo "❌ MEV_BOT_ENCRYPTION_KEY not found in .env.production"
        echo "Please set this variable for secure operations"
        exit 1
    fi
    if [ -z "${CONTRACT_ARBITRAGE_EXECUTOR:-}" ]; then
        echo "❌ CONTRACT_ARBITRAGE_EXECUTOR not found in .env.production"
        echo "Please set the deployed arbitrage executor contract address"
        exit 1
    fi
fi

# The RPC endpoint is always required. There is deliberately no default:
# provider URLs carry API keys and must not be hardcoded here.
if [ -z "${ARBITRUM_RPC_ENDPOINT:-}" ]; then
    echo "❌ ARBITRUM_RPC_ENDPOINT is not set"
    echo "Set it in the environment, .env, or .env.production"
    exit 1
fi
export ARBITRUM_RPC_ENDPOINT
export ARBITRUM_WS_ENDPOINT="${ARBITRUM_WS_ENDPOINT:-$ARBITRUM_RPC_ENDPOINT}"
export METRICS_ENABLED="${METRICS_ENABLED:-true}"
export METRICS_PORT="${METRICS_PORT:-9090}"
export MEV_BOT_KEYSTORE_PATH="${MEV_BOT_KEYSTORE_PATH:-keystore/production}"
export MEV_BOT_AUDIT_LOG="${MEV_BOT_AUDIT_LOG:-logs/production_audit.log}"
export MEV_BOT_BACKUP_PATH="${MEV_BOT_BACKUP_PATH:-backups/production}"

# Runtime directories must exist before the bot starts.
mkdir -p "$MEV_BOT_KEYSTORE_PATH"
mkdir -p "$MEV_BOT_BACKUP_PATH"
mkdir -p "$(dirname "$MEV_BOT_AUDIT_LOG")"

echo "Keystore path: $MEV_BOT_KEYSTORE_PATH"
env | grep MEV_BOT_KEYSTORE_PATH
echo ""

if [ "$GO_ENV" = "development" ]; then
    echo "🚀 DEVELOPMENT MEV BOT STARTUP"
    echo "==============================="
else
    echo "🚀 PRODUCTION MEV BOT STARTUP"
    echo "=============================="
fi
echo "Environment: $GO_ENV"
echo ""
echo "📡 Network Configuration:"
echo "   RPC: ${ARBITRUM_RPC_ENDPOINT:-not set}"
echo "   WS: ${ARBITRUM_WS_ENDPOINT:-not set}"
echo "   Metrics Port: ${METRICS_PORT:-9090}"
echo ""

if [ "$GO_ENV" != "development" ] && [ -n "${CONTRACT_ARBITRAGE_EXECUTOR:-}" ]; then
    echo "📝 Deployed Contracts:"
    echo "   ArbitrageExecutor: $CONTRACT_ARBITRAGE_EXECUTOR"
    echo "   FlashSwapper: ${CONTRACT_FLASH_SWAPPER:-not set}"
    echo "   DataFetcher: ${CONTRACT_DATA_FETCHER:-not set}"
    echo ""
fi

if [ -n "${MEV_BOT_ENCRYPTION_KEY:-}" ]; then
    # Print only a short prefix so logs never leak the secret.
    echo "🔐 Security:"
    echo "   Encryption Key: ${MEV_BOT_ENCRYPTION_KEY:0:8}...***"
    echo ""
fi

# Set provider config path if not already set.
export PROVIDER_CONFIG_PATH="${PROVIDER_CONFIG_PATH:-$PWD/config/providers_runtime.yaml}"
echo "📋 Provider Config: $PROVIDER_CONFIG_PATH"
echo ""

# Run the application.
./bin/mev-beta start

View File

@@ -1,10 +0,0 @@
#!/usr/bin/env bash
set -euo pipefail

# Execute the math audit against the default vector set and write the
# report under reports/math/latest (or the directory given as $1).

repo_root="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)"
report_dir="${1:-$repo_root/reports/math/latest}"

echo "Running math audit using vectors: default"

# Scope the Go cache settings to this single invocation.
env \
  GOCACHE="${GOCACHE:-$repo_root/.gocache}" \
  GOFLAGS="${GOFLAGS:-}" \
  go run ./tools/math-audit --vectors default --report "$report_dir"

View File

@@ -1,14 +0,0 @@
#!/usr/bin/env bash
set -euo pipefail

# Run the simulation harness and write its summary report.
# Usage: simulate.sh [report-dir]
#   report-dir defaults to <repo>/reports/simulation/latest
#   SIMULATION_VECTORS overrides the input vector file.

repo_root="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)"
report_dir="${1:-$repo_root/reports/simulation/latest}"
vectors="${SIMULATION_VECTORS:-$repo_root/tools/simulation/vectors/default.json}"

mkdir -p "$report_dir"

# Scope the Go cache settings to this single invocation.
env \
  GOCACHE="${GOCACHE:-$repo_root/.gocache}" \
  GOFLAGS="${GOFLAGS:-}" \
  go run ./tools/simulation --vectors "$vectors" --report "$report_dir"

echo "Simulation summary written to $report_dir"

View File

@@ -1,223 +0,0 @@
#!/bin/bash

# MEV Bot Security Validation Script
# This script validates all security implementations and configurations by
# running a battery of env/grep/build checks and printing a pass/fail summary.
set -e

echo "🔒 MEV Bot Security Validation"
echo "=============================="

# Color codes for output
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
NC='\033[0m' # No Color

# Counters
# Aggregated across every check; reported in the summary at the end.
TOTAL_CHECKS=0
PASSED_CHECKS=0
FAILED_CHECKS=0
WARNINGS=0
#######################################
# Run one hard validation check and record the outcome.
# Globals:
#   TOTAL_CHECKS, PASSED_CHECKS, FAILED_CHECKS (read/write)
#   BLUE, GREEN, RED, NC (read) - ANSI color codes
# Arguments:
#   $1 - short check name
#   $2 - shell command to evaluate; its exit status decides pass/fail
#   $3 - one-line description shown to the operator
# Returns:
#   Always 0. This script runs under 'set -e' and call sites are unguarded,
#   so returning non-zero used to abort the whole report on the first failed
#   check; failures are tallied in FAILED_CHECKS and surfaced in the summary.
#######################################
run_check() {
    local check_name="$1"
    local command="$2"
    local description="$3"

    TOTAL_CHECKS=$((TOTAL_CHECKS + 1))
    echo -e "${BLUE}🔍 $check_name${NC}: $description"

    if eval "$command" > /dev/null 2>&1; then
        echo -e "   ${GREEN}✅ PASSED${NC}"
        PASSED_CHECKS=$((PASSED_CHECKS + 1))
    else
        echo -e "   ${RED}❌ FAILED${NC}"
        FAILED_CHECKS=$((FAILED_CHECKS + 1))
    fi
    return 0
}
# Soft variant of run_check: a failing command counts only as a warning,
# never as a hard failure. Updates TOTAL_CHECKS / PASSED_CHECKS / WARNINGS.
run_warning() {
    local name="$1"
    local cmd="$2"
    local desc="$3"

    (( TOTAL_CHECKS += 1 ))
    echo -e "${BLUE}🔍 $name${NC}: $desc"

    if eval "$cmd" > /dev/null 2>&1; then
        PASSED_CHECKS=$((PASSED_CHECKS + 1))
        echo -e "   ${GREEN}✅ PASSED${NC}"
    else
        WARNINGS=$((WARNINGS + 1))
        echo -e "   ${YELLOW}⚠️  WARNING${NC}"
    fi
}
echo
echo "1. Environment Security Checks"
echo "=============================="

# Check for required environment variables
run_check "Encryption Key" "test -n \"\$MEV_BOT_ENCRYPTION_KEY\"" "Check if encryption key is set"
run_check "RPC Endpoints" "test -n \"\$ARBITRUM_RPC_ENDPOINT\"" "Check if RPC endpoint is configured"
run_warning "WS Endpoints" "test -n \"\$ARBITRUM_WS_ENDPOINT\"" "Check if WebSocket endpoint is configured"

# Check encryption key strength
# Decodes the base64 key and counts raw bytes; '|| echo 0' covers a key
# that is not valid base64.
if [ -n "$MEV_BOT_ENCRYPTION_KEY" ]; then
    KEY_LENGTH=$(echo -n "$MEV_BOT_ENCRYPTION_KEY" | base64 -d 2>/dev/null | wc -c || echo "0")
    run_check "Key Strength" "test $KEY_LENGTH -eq 32" "Verify encryption key is 256-bit (32 bytes)"
else
    echo -e "   ${RED}❌ Cannot validate key strength - key not set${NC}"
    FAILED_CHECKS=$((FAILED_CHECKS + 1))
fi

# Check for hardcoded secrets in code
# All code checks below are grep heuristics over pkg/ — they catch the
# obvious cases, not every possible leak.
echo
echo "2. Code Security Analysis"
echo "========================"

run_check "No Hardcoded Secrets" "! grep -r 'wss://.*\.com.*[a-f0-9]\\{40\\}' pkg/ --include='*.go'" "Check for hardcoded API keys in RPC URLs"
run_check "No Hardcoded Passwords" "! grep -r 'password.*=' pkg/ --include='*.go' | grep -v '_test.go'" "Check for hardcoded passwords"
run_check "No Hardcoded Keys" "! grep -r 'private.*key.*=' pkg/ --include='*.go' | grep -v '_test.go'" "Check for hardcoded private keys"

# Check for security imports
run_check "Crypto/Rand Usage" "grep -r 'crypto/rand' pkg/ --include='*.go' > /dev/null" "Verify crypto/rand is used for randomness"
run_check "SafeMath Implementation" "test -f pkg/security/safemath.go" "Check if SafeMath is implemented"
run_check "Input Validation" "test -f pkg/security/input_validator.go" "Check if input validation is implemented"

echo
echo "3. Integer Overflow Protection"
echo "============================="

# Check for unsafe integer conversions
run_check "Safe Uint32 Conversion" "grep -r 'security\\.SafeUint32' pkg/ --include='*.go' > /dev/null" "Check if safe uint32 conversions are used"
run_check "Safe Uint8 Conversion" "grep -r 'security\\.SafeUint64FromBigInt' pkg/ --include='*.go' > /dev/null" "Check if safe big.Int conversions are used"
run_check "No Direct uint32 Cast" "! grep -r 'uint32(' pkg/ --include='*.go' | grep -v 'SafeUint32' | grep -v '_test.go'" "Check for direct uint32 casts"

echo
echo "4. Configuration Security"
echo "========================"

run_check "Secure Config" "test -f pkg/security/config.go" "Check if secure configuration is implemented"
run_check "No Hardcoded Endpoints" "! grep -r 'wss://.*chainstack.*53c30e7a941160679fdcc396c894fc57' pkg/ --include='*.go'" "Check that hardcoded endpoints are removed"

# Check configuration validation (only meaningful if the file exists).
if [ -f pkg/security/config.go ]; then
    run_check "Endpoint Validation" "grep -q 'validateEndpoint' pkg/security/config.go" "Check if endpoint validation is implemented"
    run_check "Encryption Support" "grep -q 'Encrypt.*string' pkg/security/config.go" "Check if configuration encryption is supported"
fi

echo
echo "5. Transaction Security"
echo "======================"

run_check "Transaction Security" "test -f pkg/security/transaction_security.go" "Check if transaction security is implemented"
run_check "Front-running Protection" "grep -q 'frontRunningProtection' pkg/security/transaction_security.go 2>/dev/null" "Check if front-running protection exists"
run_check "Gas Validation" "grep -q 'gasValidation' pkg/security/transaction_security.go 2>/dev/null" "Check if gas validation exists"
run_check "Profit Validation" "grep -q 'profitValidation' pkg/security/transaction_security.go 2>/dev/null" "Check if profit validation exists"

echo
echo "6. Rate Limiting and DDoS Protection"
echo "===================================="

run_check "Rate Limiter" "test -f pkg/security/rate_limiter.go" "Check if rate limiter is implemented"
run_check "DDoS Detection" "grep -q 'DDoSDetector' pkg/security/rate_limiter.go 2>/dev/null" "Check if DDoS detection exists"
run_check "Token Bucket" "grep -q 'TokenBucket' pkg/security/rate_limiter.go 2>/dev/null" "Check if token bucket algorithm is implemented"

echo
echo "7. Monitoring and Alerting"
echo "=========================="

run_check "Security Monitor" "test -f pkg/security/monitor.go" "Check if security monitoring is implemented"
run_check "Alert System" "grep -q 'SecurityAlert' pkg/security/monitor.go 2>/dev/null" "Check if alert system exists"
run_check "Metrics Collection" "grep -q 'SecurityMetrics' pkg/security/monitor.go 2>/dev/null" "Check if metrics collection exists"

echo
echo "8. Build and Compilation Tests"
echo "=============================="

# Test core package compilation (excluding problematic ones)
run_check "SafeMath Compilation" "go build pkg/security/safemath.go" "Test SafeMath package compilation"
run_check "Config Compilation" "go build pkg/security/config.go" "Test secure config compilation"
run_check "Input Validator Compilation" "go build pkg/security/input_validator.go pkg/security/safemath.go" "Test input validator compilation"

# Clean up build artifacts left behind by the file-mode 'go build' above.
rm -f safemath config input_validator 2>/dev/null

echo
echo "9. Security Best Practices"
echo "=========================="

# Check for security best practices
run_check "Error Wrapping" "grep -r 'fmt\\.Errorf.*%w' pkg/ --include='*.go' > /dev/null" "Check if errors are properly wrapped"
run_check "Context Usage" "grep -r 'context\\.Context' pkg/ --include='*.go' > /dev/null" "Check if context is used for cancellation"
run_check "Mutex Usage" "grep -r 'sync\\..*Mutex' pkg/ --include='*.go' > /dev/null" "Check if mutexes are used for thread safety"

echo
echo "10. Static Security Analysis"
echo "============================"

# Run gosec if available (counts HIGH/MEDIUM findings).
if command -v gosec &> /dev/null; then
    GOSEC_OUTPUT=$(gosec -quiet ./... 2>&1 | grep -E "(HIGH|MEDIUM)" | wc -l)
    if [ "$GOSEC_OUTPUT" -eq 0 ]; then
        echo -e "${BLUE}🔍 Gosec Analysis${NC}: Run static security analysis"
        echo -e "   ${GREEN}✅ PASSED${NC} - No high/medium severity issues found"
        PASSED_CHECKS=$((PASSED_CHECKS + 1))
    else
        echo -e "${BLUE}🔍 Gosec Analysis${NC}: Run static security analysis"
        echo -e "   ${RED}❌ FAILED${NC} - Found $GOSEC_OUTPUT high/medium severity issues"
        FAILED_CHECKS=$((FAILED_CHECKS + 1))
    fi
    TOTAL_CHECKS=$((TOTAL_CHECKS + 1))
else
    # NOTE(review): this install path disagrees with scripts/setup-dev.sh,
    # which uses github.com/securego/gosec — confirm the canonical module path.
    echo -e "${YELLOW}⚠️  Gosec not available - install with: go install github.com/securecodewarrior/gosec/v2/cmd/gosec@latest${NC}"
fi

echo
echo "11. Production Readiness Checks"
echo "==============================="

run_check "No Debug Code" "! grep -r 'fmt\\.Print' pkg/ --include='*.go'" "Check for debug print statements"
run_check "No Test Code in Prod" "! grep -r 'testing\\.T' pkg/ --include='*.go' | grep -v '_test.go'" "Check for test code in production files"
run_warning "Logging Configuration" "grep -r 'logger\\.' pkg/ --include='*.go' > /dev/null" "Check if proper logging is used"

echo
echo "SECURITY VALIDATION SUMMARY"
echo "==========================="
echo -e "Total Checks: ${BLUE}$TOTAL_CHECKS${NC}"
echo -e "Passed: ${GREEN}$PASSED_CHECKS${NC}"
echo -e "Failed: ${RED}$FAILED_CHECKS${NC}"
echo -e "Warnings: ${YELLOW}$WARNINGS${NC}"

# Calculate percentage (integer division, truncated).
if [ $TOTAL_CHECKS -gt 0 ]; then
    PASS_PERCENTAGE=$(( (PASSED_CHECKS * 100) / TOTAL_CHECKS ))
    echo -e "Pass Rate: ${BLUE}$PASS_PERCENTAGE%${NC}"
fi

echo
# Exit code signals production readiness to callers/CI.
if [ $FAILED_CHECKS -eq 0 ]; then
    echo -e "${GREEN}🎉 SECURITY VALIDATION PASSED!${NC}"
    echo -e "The MEV bot meets all critical security requirements."
    if [ $WARNINGS -gt 0 ]; then
        echo -e "${YELLOW}⚠️  Note: $WARNINGS warnings found - consider addressing them for enhanced security.${NC}"
    fi
    echo
    echo "✅ PRODUCTION READY - Security validation successful"
    exit 0
else
    echo -e "${RED}🚨 SECURITY VALIDATION FAILED!${NC}"
    echo -e "Found $FAILED_CHECKS critical security issues that must be resolved before production deployment."
    echo
    echo "❌ NOT PRODUCTION READY - Address all failed checks before deploying"
    exit 1
fi

View File

@@ -1,149 +0,0 @@
#!/bin/bash

# Setup Auto-Update System for MEV Bot
# This script installs git hooks and optionally systemd timers for automatic updates.
# Without root only the git hooks are installed; with root a systemd timer is
# added that periodically pulls and rebuilds.
set -e

PROJECT_DIR=$(cd "$(dirname "$0")/.." && pwd)

# Color codes
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
RED='\033[0;31m'
BLUE='\033[0;34m'
NC='\033[0m' # No Color

echo -e "${BLUE}========================================="
echo "MEV Bot Auto-Update Setup"
echo "=========================================${NC}"
echo ""

# Check if running as root
# Root is needed to install unit files under /etc/systemd and manage units.
IS_ROOT=false
if [ "$EUID" -eq 0 ]; then
    IS_ROOT=true
    echo -e "${YELLOW}Running as root - will setup systemd timer${NC}"
else
    echo -e "${YELLOW}Not running as root - will skip systemd timer${NC}"
    echo -e "${YELLOW}Run with sudo to enable automatic periodic updates${NC}"
fi
echo ""

# Step 1: Install git hooks (post-merge hook triggers rebuild after git pull).
echo -e "${BLUE}Step 1: Installing Git Hooks${NC}"
echo "-------------------------------------------"
cd "$PROJECT_DIR"
if [ -f "$PROJECT_DIR/scripts/install-git-hooks.sh" ]; then
    "$PROJECT_DIR/scripts/install-git-hooks.sh"
    echo -e "${GREEN}✓ Git hooks installed${NC}"
else
    echo -e "${RED}✗ Git hooks installation script not found${NC}"
    exit 1
fi
echo ""

# Step 2: Setup systemd timer (if root)
if [ "$IS_ROOT" = true ]; then
    echo -e "${BLUE}Step 2: Setting up Systemd Timer${NC}"
    echo "-------------------------------------------"
    # Update WorkingDirectory in service files
    # Two-pass sed: first fix WorkingDirectory, then rewrite any remaining
    # hardcoded /docker/mev-beta paths to this checkout's location.
    sed "s|WorkingDirectory=/docker/mev-beta|WorkingDirectory=$PROJECT_DIR|g" \
        "$PROJECT_DIR/scripts/mev-bot-auto-update.service" > /tmp/mev-bot-auto-update.service
    sed "s|/docker/mev-beta|$PROJECT_DIR|g" \
        /tmp/mev-bot-auto-update.service > /tmp/mev-bot-auto-update.service.tmp
    mv /tmp/mev-bot-auto-update.service.tmp /tmp/mev-bot-auto-update.service
    # Copy service and timer files
    cp /tmp/mev-bot-auto-update.service /etc/systemd/system/
    cp "$PROJECT_DIR/scripts/mev-bot-auto-update.timer" /etc/systemd/system/
    # Reload systemd
    systemctl daemon-reload
    # Enable and start the timer
    systemctl enable mev-bot-auto-update.timer
    systemctl start mev-bot-auto-update.timer
    echo -e "${GREEN}✓ Systemd timer enabled and started${NC}"
    echo ""
    # Show timer status
    echo -e "${YELLOW}Timer status:${NC}"
    systemctl status mev-bot-auto-update.timer --no-pager | head -10
    echo ""
    # Show next scheduled run
    echo -e "${YELLOW}Next scheduled update check:${NC}"
    systemctl list-timers mev-bot-auto-update.timer --no-pager
    echo ""
else
    echo -e "${BLUE}Step 2: Systemd Timer (Skipped)${NC}"
    echo "-------------------------------------------"
    echo -e "${YELLOW}Run 'sudo ./scripts/setup-auto-update.sh' to enable automatic updates${NC}"
    echo ""
fi

# Step 3: Create logs directory (auto-update.log is written here).
echo -e "${BLUE}Step 3: Creating Logs Directory${NC}"
echo "-------------------------------------------"
mkdir -p "$PROJECT_DIR/logs"
echo -e "${GREEN}✓ Logs directory created${NC}"
echo ""

# Summary
echo -e "${GREEN}========================================="
echo "Auto-Update Setup Complete!"
echo "=========================================${NC}"
echo ""
echo "What's been configured:"
echo ""
echo "1. Git Hooks:"
echo "   ✓ post-merge hook installed"
echo "   → Triggers auto-rebuild after 'git pull'"
echo ""
if [ "$IS_ROOT" = true ]; then
    echo "2. Systemd Timer:"
    echo "   ✓ Checks for updates every 5 minutes"
    echo "   ✓ Starts automatically on boot"
    echo "   ✓ Pulls and rebuilds when updates detected"
    echo ""
    echo "Manage the timer:"
    echo "   sudo systemctl status mev-bot-auto-update.timer"
    echo "   sudo systemctl stop mev-bot-auto-update.timer"
    echo "   sudo systemctl start mev-bot-auto-update.timer"
    echo "   sudo journalctl -u mev-bot-auto-update -f"
    echo ""
else
    echo "2. Manual Updates:"
    echo "   Run: ./scripts/auto-update.sh"
    echo "   Or: git pull (hooks will auto-rebuild)"
    echo ""
fi
echo "3. Logs:"
echo "   Auto-update log: tail -f logs/auto-update.log"
echo "   Container log: docker compose logs -f mev-bot"
echo ""
echo -e "${BLUE}Optional: Webhook Receiver${NC}"
echo "-------------------------------------------"
echo "For immediate updates via GitHub/GitLab webhooks:"
echo ""
echo "1. Configure webhook in your Git provider:"
echo "   URL: http://your-server:9000/webhook"
echo "   Events: Push events (master branch)"
echo ""
echo "2. Start the webhook receiver:"
echo "   ./scripts/webhook-receiver.sh"
echo ""
echo "Or run as systemd service (advanced setup needed)"
echo ""
echo -e "${GREEN}Your MEV bot will now auto-update! 🚀${NC}"
echo ""

View File

@@ -1,62 +0,0 @@
#!/usr/bin/env bash
set -euo pipefail

# Development environment setup script
# Creates the working directories, verifies Go is installed, installs the
# standard lint/security/debug tooling, seeds a dev config, and tidies modules.
echo "Setting up development environment..."

# Create directories if they don't exist
mkdir -p logs
mkdir -p reports
mkdir -p reports/coverage
mkdir -p reports/test-results
mkdir -p reports/augments
mkdir -p storage
mkdir -p storage/keystore
mkdir -p storage/cache
mkdir -p .gocache

# Check if Go is installed
if ! command -v go &> /dev/null; then
    echo "Error: Go is not installed" >&2
    exit 1
fi

# Check if required tools are installed
# Each install below is skipped when the binary is already on PATH.
echo "Checking for required tools..."

# Install golangci-lint if not present
if ! command -v golangci-lint &> /dev/null; then
    echo "Installing golangci-lint..."
    go install github.com/golangci/golangci-lint/cmd/golangci-lint@latest
fi

# Install gosec if not present
if ! command -v gosec &> /dev/null; then
    echo "Installing gosec..."
    go install github.com/securego/gosec/v2/cmd/gosec@latest
fi

# Install govulncheck if not present
if ! command -v govulncheck &> /dev/null; then
    echo "Installing govulncheck..."
    go install golang.org/x/vuln/cmd/govulncheck@latest
fi

# Install delve if not present
if ! command -v dlv &> /dev/null; then
    echo "Installing delve..."
    go install github.com/go-delve/delve/cmd/dlv@latest
fi

# Copy example config if not exists
# NOTE(review): the fallback message claims to create a basic config, but
# nothing is actually written when the example file is missing.
if [ ! -f config/development.yaml ]; then
    echo "Creating development config..."
    cp config/development.yaml.example config/development.yaml 2>/dev/null || echo "Config example not found, creating basic config..."
fi

# Verify Go modules
echo "Verifying Go modules..."
go mod tidy

echo "Development environment setup complete!"

View File

@@ -1,30 +0,0 @@
#!/usr/bin/env bash
set -euo pipefail

# Setup script for MEV Bot environment.
# Copies the prepared .env.fixed into place (when present), creates the
# runtime directories, and locks down permissions on the env file.
echo "Setting up MEV Bot environment..."

# Copy the fixed .env file if it exists
if [[ -f ".env.fixed" ]]; then
    cp .env.fixed .env
    echo "✅ Copied .env.fixed to .env"
else
    echo "⚠️  Warning: .env.fixed not found, skipping copy"
fi

# Create required directories
mkdir -p keystore backups logs

# Restrict the env file to the owner only — but only if it exists.
# Previously an unconditional 'chmod 600 .env' aborted the script under
# 'set -e' whenever neither .env nor .env.fixed was present, despite the
# warning above saying the copy was skipped.
if [[ -f ".env" ]]; then
    chmod 600 .env
fi

echo "Environment setup complete!"
echo ""
echo "IMPORTANT: Please update the following variables in .env:"
echo "  - ETHEREUM_PRIVATE_KEY: Your actual private key"
echo "  - ETHEREUM_ACCOUNT_ADDRESS: Your Ethereum account address"
echo "  - CONTRACT_ARBITRAGE_EXECUTOR: Your deployed arbitrage contract address"
echo "  - CONTRACT_FLASH_SWAPPER: Your deployed flash swapper contract address"
echo ""
echo "You can generate a new encryption key with:"
echo "  openssl rand -base64 32"

View File

@@ -1,151 +0,0 @@
#!/usr/bin/env bash
# MEV Bot Keystore Setup Script
# Encrypts and stores the private key securely:
#   1. reads the raw key from /tmp/wallet_key.txt,
#   2. derives the wallet address (cast, python3, or manual entry),
#   3. writes an AES-256-CBC encrypted keystore JSON,
#   4. round-trips the ciphertext to verify it decrypts back to the key.
set -euo pipefail

KEYSTORE_DIR="keystore/production"
PRIVATE_KEY_FILE="/tmp/wallet_key.txt"
# Name (not value) of the env var that holds the encryption passphrase.
ENCRYPTION_KEY_ENV="MEV_BOT_ENCRYPTION_KEY"

echo "═══════════════════════════════════════════════════════════"
echo "🔐 MEV Bot Keystore Configuration"
echo "═══════════════════════════════════════════════════════════"
echo ""

# Check if private key file exists
if [ ! -f "$PRIVATE_KEY_FILE" ]; then
    echo "❌ Error: Private key file not found at $PRIVATE_KEY_FILE"
    echo "   Please ensure the wallet has been generated first."
    exit 1
fi

# Check if encryption key is set; generate and persist one if not.
ENCRYPTION_KEY="${MEV_BOT_ENCRYPTION_KEY:-}"
if [ -z "$ENCRYPTION_KEY" ]; then
    echo "⚠️  Warning: $ENCRYPTION_KEY_ENV not set in environment"
    echo ""
    echo "📝 Setting up encryption key..."
    # Generate a secure encryption key
    ENCRYPTION_KEY=$(openssl rand -base64 32)
    # NOTE(review): this appends the secret in plaintext to .env.production;
    # make sure that file is git-ignored and permission-restricted.
    echo "export $ENCRYPTION_KEY_ENV=\"$ENCRYPTION_KEY\"" >> .env.production
    export MEV_BOT_ENCRYPTION_KEY="$ENCRYPTION_KEY"
    echo "✅ Generated and saved encryption key to .env.production"
fi

# Create keystore directory
mkdir -p "$KEYSTORE_DIR"

# Read private key
PRIVATE_KEY=$(cat "$PRIVATE_KEY_FILE")

# Derive wallet address from private key (using cast if available, otherwise Python)
echo ""
echo "🔍 Deriving wallet address from private key..."
if command -v cast &> /dev/null; then
    # Use Foundry's cast tool
    WALLET_ADDRESS=$(cast wallet address "$PRIVATE_KEY")
    echo "✅ Wallet Address: $WALLET_ADDRESS"
else
    # Use Python with eth_account (if available); prompt as a last resort.
    if command -v python3 &> /dev/null; then
        WALLET_ADDRESS=$(python3 << EOF
from eth_account import Account
import sys
try:
    private_key = "$PRIVATE_KEY"
    if private_key.startswith('0x'):
        private_key = private_key[2:]
    account = Account.from_key(bytes.fromhex(private_key))
    print(account.address)
except Exception as e:
    print(f"Error: {e}", file=sys.stderr)
    sys.exit(1)
EOF
)
        # NOTE(review): under 'set -e' a failing python3 above exits during
        # the assignment, so this $? check likely never sees a non-zero
        # status — confirm the manual-entry fallback is reachable.
        if [ $? -eq 0 ]; then
            echo "✅ Wallet Address: $WALLET_ADDRESS"
        else
            echo "⚠️  Warning: Could not derive address automatically"
            echo "   Please provide your wallet address manually."
            read -p "Enter wallet address (from MetaMask): " WALLET_ADDRESS
        fi
    else
        echo "⚠️  Neither cast nor python3 available"
        echo "   Please provide your wallet address manually."
        read -p "Enter wallet address (from MetaMask): " WALLET_ADDRESS
    fi
fi

# Create keystore file with encryption
KEYSTORE_FILE="$KEYSTORE_DIR/executor_wallet.json"

echo ""
echo "🔐 Creating encrypted keystore..."

# Encrypt private key with AES-256-CBC
# NOTE(review): no -pbkdf2/-iter, so openssl uses its legacy key derivation;
# adding it would strengthen the KDF but changes the ciphertext format.
ENCRYPTED_KEY=$(echo "$PRIVATE_KEY" | openssl enc -aes-256-cbc -a -salt -pass pass:"${MEV_BOT_ENCRYPTION_KEY}")

# Create keystore JSON (custom format, not the standard Ethereum keystore v3).
cat > "$KEYSTORE_FILE" << EOF
{
  "version": 1,
  "address": "$WALLET_ADDRESS",
  "crypto": {
    "cipher": "aes-256-cbc",
    "ciphertext": "$ENCRYPTED_KEY"
  },
  "id": "$(uuidgen 2>/dev/null || echo "mev-executor-$(date +%s)")",
  "metadata": {
    "created": "$(date -u +"%Y-%m-%dT%H:%M:%SZ")",
    "purpose": "MEV Bot Executor Wallet",
    "network": "Arbitrum One (Chain ID: 42161)"
  }
}
EOF

chmod 600 "$KEYSTORE_FILE"
echo "✅ Encrypted keystore created: $KEYSTORE_FILE"
echo ""

# Verify keystore
echo "🔍 Verifying keystore integrity..."

# Test decryption: round-trip the ciphertext and compare with the original.
DECRYPTED_KEY=$(echo "$ENCRYPTED_KEY" | openssl enc -aes-256-cbc -d -a -pass pass:"${MEV_BOT_ENCRYPTION_KEY}")
if [ "$DECRYPTED_KEY" = "$PRIVATE_KEY" ]; then
    echo "✅ Keystore verification successful!"
else
    echo "❌ Error: Keystore verification failed!"
    echo "   Encryption/decryption mismatch detected."
    exit 1
fi

echo ""
echo "═══════════════════════════════════════════════════════════"
echo "✅ Keystore Configuration Complete!"
echo "═══════════════════════════════════════════════════════════"
echo ""
echo "📋 Summary:"
echo "   • Wallet Address: $WALLET_ADDRESS"
echo "   • Keystore File: $KEYSTORE_FILE"
echo "   • Encryption: AES-256-CBC"
echo "   • Network: Arbitrum One"
echo ""
echo "🔒 Security:"
echo "   • Private key encrypted with MEV_BOT_ENCRYPTION_KEY"
echo "   • Keystore file permissions: 600 (owner read/write only)"
echo "   • Original key file: $PRIVATE_KEY_FILE (keep secure!)"
echo ""
echo "⏭️  Next Steps:"
echo "   1. Verify wallet is funded (use check-wallet-balance.sh)"
echo "   2. Enable execution mode in bot config"
echo "   3. Restart bot to begin live trading"
echo ""

View File

@@ -1,179 +0,0 @@
#!/usr/bin/env bash
# Local staging pipeline: lint, test, audit, build and (optionally) deploy
# the MEV bot using containerized toolchains.  Every knob is overridable via
# LOCAL_STAGING_* environment variables.
set -euo pipefail
# Resolve the repo root relative to this script so it works from any CWD.
ROOT_DIR=$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)
cd "$ROOT_DIR"
LOG_DIR="${LOCAL_STAGING_LOG_DIR:-$ROOT_DIR/reports/ci/local-staging}"
mkdir -p "$LOG_DIR"
# Go caches live inside the repo so containerized runs can share them.
export GOCACHE="${LOCAL_STAGING_GOCACHE:-$ROOT_DIR/.gocache}"
export GOMODCACHE="${LOCAL_STAGING_GOMODCACHE:-$ROOT_DIR/.gomodcache}"
mkdir -p "$GOCACHE" "$GOMODCACHE"
BRANCH="${LOCAL_STAGING_BRANCH:-$(git rev-parse --abbrev-ref HEAD)}"
# Tool images (pinned versions; override via env for upgrades).
GO_IMAGE="${LOCAL_STAGING_GO_IMAGE:-docker.io/library/golang:1.24}"
GOLANGCI_IMAGE="${LOCAL_STAGING_GOLANGCI_IMAGE:-docker.io/golangci/golangci-lint:v1.60.1}"
HELM_IMAGE="${LOCAL_STAGING_HELM_IMAGE:-docker.io/alpine/helm:3.15.2}"
KUBECTL_IMAGE="${LOCAL_STAGING_KUBECTL_IMAGE:-docker.io/bitnami/kubectl:1.30.1}"
IMAGE_NAME="${LOCAL_STAGING_IMAGE_NAME:-mev-bot}"
IMAGE_TAG="${LOCAL_STAGING_IMAGE_TAG:-staging-local}"
IMAGE_REF="${IMAGE_NAME}:${IMAGE_TAG}"
IMAGE_TAR="${LOCAL_STAGING_IMAGE_TAR:-$ROOT_DIR/${IMAGE_NAME}-${IMAGE_TAG}.tar}"
HELM_RELEASE="${LOCAL_STAGING_HELM_RELEASE:-mev-bot}"
HELM_NAMESPACE="${LOCAL_STAGING_HELM_NAMESPACE:-mev-bot-staging}"
HELM_CHART="${LOCAL_STAGING_HELM_CHART:-charts/mev-bot}"
HELM_DRY_RUN="${LOCAL_STAGING_HELM_DRY_RUN:-true}"
KUBECONFIG_PATH="${LOCAL_STAGING_KUBECONFIG:-$HOME/.kube/config}"
SKIP_DOCKER="${LOCAL_STAGING_SKIP_DOCKER:-false}"
SKIP_DEPLOY="${LOCAL_STAGING_SKIP_DEPLOY:-false}"
# Pick a container runtime: explicit override wins, otherwise prefer podman,
# then docker; fail fast if neither exists.
CONTAINER_RUNTIME="${LOCAL_STAGING_RUNTIME:-}"
if [[ -z "$CONTAINER_RUNTIME" ]]; then
  if command -v podman >/dev/null 2>&1; then
    CONTAINER_RUNTIME=podman
  elif command -v docker >/dev/null 2>&1; then
    CONTAINER_RUNTIME=docker
  else
    echo "ERROR: Neither podman nor docker is available. Install one or set LOCAL_STAGING_RUNTIME." >&2
    exit 1
  fi
fi
if ! command -v "$CONTAINER_RUNTIME" >/dev/null 2>&1; then
  echo "ERROR: Container runtime '$CONTAINER_RUNTIME' not found in PATH" >&2
  exit 1
fi
# Base invocation for the runtime; podman talks to its service via --remote.
CONTAINER_CMD=("$CONTAINER_RUNTIME")
if [[ "$CONTAINER_RUNTIME" == "podman" ]]; then
  CONTAINER_CMD+=("--remote")
fi
# Run containers as the invoking user so cache/artifact files stay owned by
# the developer, with the repo and Go caches mounted read-write at /work.
CONTAINER_USER="$(id -u):$(id -g)"
RUNTIME_ARGS=(-u "$CONTAINER_USER" -v "$ROOT_DIR":/work -w /work -v "$GOCACHE":/gocache -v "$GOMODCACHE":/gomodcache -e GOCACHE=/gocache -e GOMODCACHE=/gomodcache)
if [[ "$CONTAINER_RUNTIME" == "podman" ]]; then
  # Disable SELinux labeling for podman volume mounts.
  RUNTIME_ARGS+=(--security-opt label=disable)
fi
# Run one named pipeline step, teeing its combined output to
# $LOG_DIR/<name>.log.  Aborts the whole pipeline on failure.
run_step() {
  local step_name=$1
  shift
  local step_log="$LOG_DIR/${step_name}.log"
  printf '[%s] Starting %s\n' "$(date '+%Y-%m-%d %H:%M:%S')" "$step_name"
  # `|&` captures stderr as well as stdout into the step log.
  if ! "$@" |& tee "$step_log"; then
    printf '[%s] Failed %s; see %s\n' "$(date '+%Y-%m-%d %H:%M:%S')" "$step_name" "$step_log"
    exit 1
  fi
  printf '[%s] Completed %s\n' "$(date '+%Y-%m-%d %H:%M:%S')" "$step_name"
}
# Timestamped log line: "[YYYY-mm-dd HH:MM:SS] message".
log() {
  local stamp
  stamp=$(date '+%Y-%m-%d %H:%M:%S')
  printf '[%s] %s\n' "$stamp" "$*"
}
# Abort the script unless command $1 is available in PATH.
require_cmd() {
  command -v "$1" >/dev/null 2>&1 && return 0
  echo "ERROR: Required command '$1' not found" >&2
  exit 1
}
# Convenience wrapper: run a shell command inside a tool image as one
# logged pipeline step.  Usage: run_container_step <step> <image> <cmd...>
run_container_step() {
  local step=$1 image=$2
  shift 2
  # Remaining args are joined into a single bash -lc command string.
  run_step "$step" "${CONTAINER_CMD[@]}" run --rm "${RUNTIME_ARGS[@]}" "$image" bash -lc "$*"
}
# Like run_container_step, but leading --env=NAME=VALUE arguments become
# extra -e flags for the container before the command string.
run_container_step_env() {
  local step=$1 image=$2
  shift 2
  local ctr_args=("${RUNTIME_ARGS[@]}")
  # Consume every leading --env=... argument into -e pairs.
  while [[ $# -gt 0 && $1 == --env=* ]]; do
    ctr_args+=(-e "${1#--env=}")
    shift
  done
  run_step "$step" "${CONTAINER_CMD[@]}" run --rm "${ctr_args[@]}" "$image" bash -lc "$*"
}
require_cmd git
log "Running local staging pipeline for branch ${BRANCH} using ${CONTAINER_CMD[*]}"
log "Logs: $LOG_DIR"
# Reuse the host Go toolchain inside containers when available: mount GOROOT
# read-only and put its bin dir first on PATH.  Otherwise rely on the image's
# own /usr/local/go install.
HOST_GOROOT="${LOCAL_STAGING_GOROOT:-}"
if [[ -z "$HOST_GOROOT" ]] && command -v go >/dev/null 2>&1; then
  HOST_GOROOT=$(go env GOROOT 2>/dev/null || true)
fi
if [[ -n "$HOST_GOROOT" && -d "$HOST_GOROOT" ]]; then
  RUNTIME_ARGS+=(-v "$HOST_GOROOT":/goroot:ro -e GOROOT=/goroot)
  GO_BIN_PATH="/goroot/bin:/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"
else
  GO_BIN_PATH="/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"
fi
# --- Verification stages (any failure aborts via run_step) ---
run_container_step_env setup-dependencies "$GO_IMAGE" "export PATH=$GO_BIN_PATH; go mod download && go mod verify"
run_container_step_env lint "$GOLANGCI_IMAGE" --env=GOLANGCI_LINT_CACHE=/tmp/golangci-lint --env=PATH="$GO_BIN_PATH" "golangci-lint run --timeout=10m"
run_container_step_env unit-tests "$GO_IMAGE" "export PATH=$GO_BIN_PATH; go test -race -coverprofile=coverage.out ./..."
# Archive the coverage profile alongside the step logs.
if [[ -f coverage.out ]]; then
  mv coverage.out "$LOG_DIR/coverage.out"
fi
run_container_step_env math-audit "$GO_IMAGE" "export PATH=$GO_BIN_PATH; go run ./tools/math-audit --vectors default --report reports/math/latest"
run_container_step_env profit-simulation "$GO_IMAGE" "export PATH=$GO_BIN_PATH; ./scripts/run_profit_simulation.sh"
# --- Image build stage ---
if [[ "$SKIP_DOCKER" != "true" ]]; then
  run_step docker-build "${CONTAINER_CMD[@]}" build -t "$IMAGE_REF" .
  run_step docker-save "${CONTAINER_CMD[@]}" save "$IMAGE_REF" -o "$IMAGE_TAR"
else
  log "Skipping Docker/Podman build and save (LOCAL_STAGING_SKIP_DOCKER=true)"
fi
# --- Deploy stage (helm upgrade; dry-run by default) ---
if [[ "$SKIP_DEPLOY" != "true" ]]; then
  HELM_RUN_ARGS=("${RUNTIME_ARGS[@]}")
  # A kubeconfig is only mandatory for a real (non-dry-run) deploy.
  if [[ -f "$KUBECONFIG_PATH" ]]; then
    HELM_RUN_ARGS+=(-v "$KUBECONFIG_PATH":/kubeconfig:ro -e KUBECONFIG=/kubeconfig)
  elif [[ "$HELM_DRY_RUN" == "false" ]]; then
    echo "ERROR: kubeconfig not found at $KUBECONFIG_PATH" >&2
    exit 1
  fi
  HELM_CMD=(helm upgrade --install "$HELM_RELEASE" "$HELM_CHART" --set "image.tag=${IMAGE_TAG}" --namespace "$HELM_NAMESPACE")
  if [[ "$HELM_DRY_RUN" != "false" ]]; then
    HELM_CMD+=('--dry-run')
  fi
  run_step helm-upgrade "${CONTAINER_CMD[@]}" run --rm "${HELM_RUN_ARGS[@]}" "$HELM_IMAGE" bash -lc "${HELM_CMD[*]}"
  # Rollout verification: use host kubectl if present, else a kubectl image.
  if command -v kubectl >/dev/null 2>&1 && [[ "$HELM_DRY_RUN" == "false" ]]; then
    run_step rollout-status kubectl rollout status "deploy/${HELM_RELEASE}" -n "$HELM_NAMESPACE" --timeout=120s
    run_step rollout-logs kubectl logs "deploy/${HELM_RELEASE}" -n "$HELM_NAMESPACE" --tail=100
  elif [[ "$HELM_DRY_RUN" == "false" ]]; then
    KUBE_RUN_ARGS=("${RUNTIME_ARGS[@]}")
    if [[ -f "$KUBECONFIG_PATH" ]]; then
      KUBE_RUN_ARGS+=(-v "$KUBECONFIG_PATH":/kubeconfig:ro -e KUBECONFIG=/kubeconfig)
    fi
    run_step rollout-status "${CONTAINER_CMD[@]}" run --rm "${KUBE_RUN_ARGS[@]}" "$KUBECTL_IMAGE" bash -lc "kubectl rollout status deploy/${HELM_RELEASE} -n ${HELM_NAMESPACE} --timeout=120s"
    run_step rollout-logs "${CONTAINER_CMD[@]}" run --rm "${KUBE_RUN_ARGS[@]}" "$KUBECTL_IMAGE" bash -lc "kubectl logs deploy/${HELM_RELEASE} -n ${HELM_NAMESPACE} --tail=100"
  else
    log "Skipping rollout status/log tail (dry run or kube tooling unavailable)"
  fi
else
  log "Skipping deploy stage (LOCAL_STAGING_SKIP_DEPLOY=true)"
fi
log "Local staging pipeline completed"

View File

@@ -1,195 +0,0 @@
#!/bin/bash
# 24-Hour MEV Bot Validation Test
# Starts bot in background with comprehensive logging
set -e
echo "🚀 Starting 24-Hour MEV Bot Validation Test"
echo "Time: $(date)"
echo "============================================"
# Configuration
LOG_DIR="logs/24h_test"
MAIN_LOG="${LOG_DIR}/test_$(date +%Y%m%d_%H%M%S).log"
PID_FILE="${LOG_DIR}/mev-bot.pid"
MONITOR_LOG="${LOG_DIR}/monitor.log"
# Create log directory
mkdir -p "${LOG_DIR}"
# Check if already running
# A live PID aborts the start; a stale PID file (process gone) is removed.
if [ -f "${PID_FILE}" ]; then
    PID=$(cat "${PID_FILE}")
    if ps -p "${PID}" > /dev/null 2>&1; then
        echo "❌ MEV bot already running with PID ${PID}"
        echo "Stop it first with: kill ${PID}"
        exit 1
    else
        echo "⚠️ Removing stale PID file"
        rm -f "${PID_FILE}"
    fi
fi
# Start MEV bot in background
# nohup + '&' detaches the bot; stdout and stderr both go to MAIN_LOG.
echo "📊 Starting MEV bot..."
PROVIDER_CONFIG_PATH=$PWD/config/providers_runtime.yaml \
nohup ./bin/mev-bot start > "${MAIN_LOG}" 2>&1 &
BOT_PID=$!
echo ${BOT_PID} > "${PID_FILE}"
# Wait a moment for startup
sleep 3
# Check if still running
# Crash-on-boot detection: if the process died within 3s, show the log tail.
if ! ps -p ${BOT_PID} > /dev/null 2>&1; then
    echo "❌ Bot failed to start. Check logs:"
    tail -50 "${MAIN_LOG}"
    rm -f "${PID_FILE}"
    exit 1
fi
echo "✅ MEV bot started successfully"
echo " PID: ${BOT_PID}"
echo " Log: ${MAIN_LOG}"
echo ""
echo "📊 Test will run for 24 hours"
echo " Started: $(date)"
# GNU date (-d) first, BSD/macOS date (-v) as fallback, then a plain string.
echo " Expected end: $(date -d '+24 hours' 2>/dev/null || date -v +24H 2>/dev/null || echo 'in 24 hours')"
echo ""
echo "📝 Monitor with:"
echo " tail -f ${MAIN_LOG}"
echo " ./scripts/monitor-24h-test.sh"
echo ""
echo "🛑 Stop with:"
echo " kill ${BOT_PID}"
echo " # or"
echo " ./scripts/stop-24h-test.sh"
echo ""
# Create monitoring script.
# Quoted delimiter ('EOF') => the generated script is written literally,
# with no expansion at generation time.
cat > ./scripts/monitor-24h-test.sh << 'EOF'
#!/bin/bash
# Monitor 24-hour test progress
LOG_DIR="logs/24h_test"
PID_FILE="${LOG_DIR}/mev-bot.pid"
if [ ! -f "${PID_FILE}" ]; then
    echo "❌ No test running (PID file not found)"
    exit 1
fi
PID=$(cat "${PID_FILE}")
if ! ps -p "${PID}" > /dev/null 2>&1; then
    echo "❌ Bot not running (PID ${PID} not found)"
    exit 1
fi
# Find latest log
LATEST_LOG=$(ls -t ${LOG_DIR}/test_*.log 2>/dev/null | head -1)
if [ -z "${LATEST_LOG}" ]; then
    echo "❌ No log file found"
    exit 1
fi
echo "📊 MEV Bot 24-Hour Test Monitor"
echo "================================"
echo "PID: ${PID}"
echo "Log: ${LATEST_LOG}"
echo "Running since: $(ps -o lstart= -p ${PID})"
echo ""
# Stats
# FIX: `grep -c` prints "0" AND exits 1 when nothing matches, so the old
# `$(grep -c ... || echo "0")` produced the two-line value "0<newline>0",
# which broke the numeric comparison below.  Capture first, default after.
echo "📈 Statistics:"
BLOCKS=$(grep -c "Processing.*transactions" "${LATEST_LOG}" 2>/dev/null || true)
BLOCKS=${BLOCKS:-0}
DEX=$(grep -c "DEX Transaction detected" "${LATEST_LOG}" 2>/dev/null || true)
DEX=${DEX:-0}
OPPS=$(grep -c "ARBITRAGE OPPORTUNITY" "${LATEST_LOG}" 2>/dev/null || true)
OPPS=${OPPS:-0}
PROFITABLE=$(grep "ARBITRAGE OPPORTUNITY" "${LATEST_LOG}" 2>/dev/null | grep -c "isExecutable:true" || true)
PROFITABLE=${PROFITABLE:-0}
echo " Blocks processed: ${BLOCKS}"
echo " DEX transactions: ${DEX}"
echo " Opportunities: ${OPPS}"
echo " Profitable: ${PROFITABLE}"
echo ""
# Recent activity
echo "🔍 Recent Activity (last 10 opportunities):"
grep "ARBITRAGE OPPORTUNITY" "${LATEST_LOG}" 2>/dev/null | tail -10 | while read line; do
    echo " $(echo $line | grep -o 'netProfitETH:[^ ]*' || echo 'N/A')"
done
echo ""
# Cache metrics
echo "💾 Cache Metrics:"
grep "Reserve cache metrics" "${LATEST_LOG}" 2>/dev/null | tail -1 || echo " Not available yet"
echo ""
# Errors (same grep -c capture/default pattern as above)
ERRORS=$(grep -c "\[ERROR\]" "${LATEST_LOG}" 2>/dev/null || true)
ERRORS=${ERRORS:-0}
echo "⚠️ Errors: ${ERRORS}"
if [ "${ERRORS}" -gt "0" ]; then
    echo " Recent errors:"
    grep "\[ERROR\]" "${LATEST_LOG}" 2>/dev/null | tail -3 | sed 's/^/ /'
fi
echo ""
echo "📝 Live monitoring:"
echo " tail -f ${LATEST_LOG} | grep -E 'ARBITRAGE|ERROR|Reserve cache'"
EOF
chmod +x ./scripts/monitor-24h-test.sh
# Create stop script
# Quoted delimiter ('EOF'): the generated script below is written literally,
# with no expansion at generation time.
cat > ./scripts/stop-24h-test.sh << 'EOF'
#!/bin/bash
# Stop 24-hour test
LOG_DIR="logs/24h_test"
PID_FILE="${LOG_DIR}/mev-bot.pid"
if [ ! -f "${PID_FILE}" ]; then
    echo "❌ No test running (PID file not found)"
    exit 1
fi
PID=$(cat "${PID_FILE}")
echo "🛑 Stopping MEV bot (PID ${PID})..."
if ps -p "${PID}" > /dev/null 2>&1; then
    kill "${PID}"
    echo " Waiting for graceful shutdown..."
    # Wait up to 10 seconds
    for i in {1..10}; do
        if ! ps -p "${PID}" > /dev/null 2>&1; then
            echo "✅ Bot stopped successfully"
            rm -f "${PID_FILE}"
            exit 0
        fi
        sleep 1
    done
    # Force kill if still running
    echo "⚠️ Forcing shutdown..."
    kill -9 "${PID}" 2>/dev/null
    rm -f "${PID_FILE}"
    echo "✅ Bot forcefully stopped"
else
    echo "⚠️ Bot not running, cleaning up PID file"
    rm -f "${PID_FILE}"
fi
# Generate final report
echo ""
echo "📊 Generating final report..."
./scripts/generate-test-report.sh
EOF
chmod +x ./scripts/stop-24h-test.sh
echo "✅ 24-hour test started successfully!"
echo ""
echo "🎯 Next: Run monitoring script to track progress"
echo " ./scripts/monitor-24h-test.sh"

View File

@@ -1,42 +0,0 @@
#!/bin/bash
# Stop 24-hour test
# Sends SIGTERM to the bot named in the PID file, escalates to SIGKILL after
# a 10s grace period, then generates the final report.
LOG_DIR="logs/24h_test"
PID_FILE="${LOG_DIR}/mev-bot.pid"
if [ ! -f "${PID_FILE}" ]; then
    echo "❌ No test running (PID file not found)"
    exit 1
fi
PID=$(cat "${PID_FILE}")
echo "🛑 Stopping MEV bot (PID ${PID})..."
if ps -p "${PID}" > /dev/null 2>&1; then
    # Graceful shutdown first (SIGTERM).
    kill "${PID}"
    echo " Waiting for graceful shutdown..."
    # Wait up to 10 seconds
    for i in {1..10}; do
        if ! ps -p "${PID}" > /dev/null 2>&1; then
            echo "✅ Bot stopped successfully"
            rm -f "${PID_FILE}"
            exit 0
        fi
        sleep 1
    done
    # Force kill if still running
    echo "⚠️ Forcing shutdown..."
    kill -9 "${PID}" 2>/dev/null
    rm -f "${PID_FILE}"
    echo "✅ Bot forcefully stopped"
else
    echo "⚠️ Bot not running, cleaning up PID file"
    rm -f "${PID_FILE}"
fi
# Generate final report
# NOTE(review): assumes scripts/generate-test-report.sh exists -- confirm.
echo ""
echo "📊 Generating final report..."
./scripts/generate-test-report.sh

View File

@@ -1,200 +0,0 @@
#!/bin/bash
# Test Calculations Framework - Extract and validate arbitrage calculations from logs
# Usage: test-calculations.sh [log-file]   (defaults to /tmp/mev_full_logs.txt)
set -e
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="$(dirname "$SCRIPT_DIR")"
TEST_DIR="$PROJECT_ROOT/tests/calculation-validation"
LOG_FILE="${1:-/tmp/mev_full_logs.txt}"
# Colors for output
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
RED='\033[0;31m'
BLUE='\033[0;34m'
NC='\033[0m' # No Color
echo -e "${BLUE}═══════════════════════════════════════════════════════════════════════${NC}"
echo -e "${BLUE} MEV Bot - Arbitrage Calculation Validation Framework${NC}"
echo -e "${BLUE}═══════════════════════════════════════════════════════════════════════${NC}"
echo ""
# Create test directory
mkdir -p "$TEST_DIR/extracted"
mkdir -p "$TEST_DIR/reports"
echo -e "${YELLOW}Step 1: Extracting arbitrage opportunities from logs...${NC}"
# Check if log file exists; fall back to pulling logs from the container.
if [ ! -f "$LOG_FILE" ]; then
    echo -e "${YELLOW}Log file not found at $LOG_FILE${NC}"
    echo -e "${YELLOW}Extracting from running container...${NC}"
    # FIX: the original `2>&1 > "$LOG_FILE"` pointed stderr at the terminal
    # *before* redirecting stdout, so stderr never reached the file.  The
    # order below captures both streams as intended.
    podman logs mev-bot-dev-master-dev > "$LOG_FILE" 2>&1
fi
# Extract executable opportunities.  `|| true` keeps set -e happy when a
# pattern has no matches (grep exits 1 in that case).
echo -e "${GREEN}Extracting executable opportunities...${NC}"
grep "EXECUTABLE OPPORTUNITY" "$LOG_FILE" > "$TEST_DIR/extracted/executable_opportunities.log" || true
EXEC_COUNT=$(wc -l < "$TEST_DIR/extracted/executable_opportunities.log" || echo "0")
echo -e " ✓ Found $EXEC_COUNT executable opportunities"
# Extract opportunity details (each match plus 20 trailing context lines).
echo -e "${GREEN}Extracting opportunity details...${NC}"
grep "ARBITRAGE OPPORTUNITY DETECTED" "$LOG_FILE" -A 20 > "$TEST_DIR/extracted/opportunity_details.log" || true
# FIX: `grep -c` prints "0" AND exits 1 on zero matches, so the original
# `$(grep -c ... || echo "0")` could evaluate to "0<newline>0" and corrupt
# the report and JSON that embed this count.  Capture first, default after.
DETAIL_COUNT=$(grep -c "ARBITRAGE OPPORTUNITY DETECTED" "$TEST_DIR/extracted/opportunity_details.log" || true)
DETAIL_COUNT=${DETAIL_COUNT:-0}
echo -e " ✓ Found $DETAIL_COUNT opportunity records"
# Extract V3 swap calculations
echo -e "${GREEN}Extracting V3 swap calculations...${NC}"
grep "V3 calculation:" "$LOG_FILE" > "$TEST_DIR/extracted/v3_calculations.log" || true
V3_COUNT=$(wc -l < "$TEST_DIR/extracted/v3_calculations.log" || echo "0")
echo -e " ✓ Found $V3_COUNT V3 swap calculations"
# Extract profit threshold checks
echo -e "${GREEN}Extracting profit threshold checks...${NC}"
grep "Profit threshold check:" "$LOG_FILE" > "$TEST_DIR/extracted/threshold_checks.log" || true
THRESHOLD_COUNT=$(wc -l < "$TEST_DIR/extracted/threshold_checks.log" || echo "0")
echo -e " ✓ Found $THRESHOLD_COUNT threshold validation checks"
# Extract rejected opportunities (skip lines whose reason is empty).
echo -e "${GREEN}Extracting rejected opportunities...${NC}"
grep "rejectReason:" "$LOG_FILE" | grep -v "rejectReason: " | grep -v "rejectReason:$" > "$TEST_DIR/extracted/rejections.log" || true
REJECT_COUNT=$(wc -l < "$TEST_DIR/extracted/rejections.log" || echo "0")
echo -e " ✓ Found $REJECT_COUNT rejections"
echo ""
echo -e "${YELLOW}Step 2: Generating summary report...${NC}"
REPORT_FILE="$TEST_DIR/reports/validation_report_$(date +%Y%m%d_%H%M%S).md"
cat > "$REPORT_FILE" <<EOF
# Arbitrage Calculation Validation Report
**Generated:** $(date)
**Log File:** $LOG_FILE
## Summary Statistics
- **Executable Opportunities:** $EXEC_COUNT
- **Total Opportunity Records:** $DETAIL_COUNT
- **V3 Calculations:** $V3_COUNT
- **Threshold Checks:** $THRESHOLD_COUNT
- **Rejections:** $REJECT_COUNT
## Executable Opportunities Analysis
EOF
if [ $EXEC_COUNT -gt 0 ]; then
echo "### Top 10 Executable Opportunities" >> "$REPORT_FILE"
echo '```' >> "$REPORT_FILE"
head -10 "$TEST_DIR/extracted/executable_opportunities.log" >> "$REPORT_FILE"
echo '```' >> "$REPORT_FILE"
echo "" >> "$REPORT_FILE"
fi
echo "## Profit Calculation Validation" >> "$REPORT_FILE"
echo "" >> "$REPORT_FILE"
if [ $THRESHOLD_COUNT -gt 0 ]; then
echo "### Sample Threshold Checks (First 5)" >> "$REPORT_FILE"
echo '```' >> "$REPORT_FILE"
head -5 "$TEST_DIR/extracted/threshold_checks.log" >> "$REPORT_FILE"
echo '```' >> "$REPORT_FILE"
echo "" >> "$REPORT_FILE"
fi
echo "## Rejection Analysis" >> "$REPORT_FILE"
echo "" >> "$REPORT_FILE"
if [ $REJECT_COUNT -gt 0 ]; then
echo "### Rejection Reasons Breakdown" >> "$REPORT_FILE"
echo '```' >> "$REPORT_FILE"
grep -oP 'rejectReason:\K[^,}]+' "$TEST_DIR/extracted/rejections.log" | sort | uniq -c | sort -rn >> "$REPORT_FILE"
echo '```' >> "$REPORT_FILE"
echo "" >> "$REPORT_FILE"
fi
echo "## V3 Calculation Samples" >> "$REPORT_FILE"
echo "" >> "$REPORT_FILE"
if [ $V3_COUNT -gt 0 ]; then
echo "### Recent V3 Calculations (Last 10)" >> "$REPORT_FILE"
echo '```' >> "$REPORT_FILE"
tail -10 "$TEST_DIR/extracted/v3_calculations.log" >> "$REPORT_FILE"
echo '```' >> "$REPORT_FILE"
echo "" >> "$REPORT_FILE"
fi
echo "---" >> "$REPORT_FILE"
echo "**Report saved to:** $REPORT_FILE" >> "$REPORT_FILE"
echo -e "${GREEN}✓ Report generated: $REPORT_FILE${NC}"
echo ""
echo -e "${YELLOW}Step 3: Analyzing calculation accuracy...${NC}"
# Parse and validate calculations
if [ $EXEC_COUNT -gt 0 ]; then
echo -e "${GREEN}Validating executable opportunity calculations...${NC}"
# Extract profit values
grep -oP 'Profit=\K[0-9.]+' "$TEST_DIR/extracted/executable_opportunities.log" > "$TEST_DIR/extracted/profit_values.txt" || true
# Calculate statistics
if [ -f "$TEST_DIR/extracted/profit_values.txt" ] && [ -s "$TEST_DIR/extracted/profit_values.txt" ]; then
TOTAL_PROFIT=$(awk '{sum+=$1} END {print sum}' "$TEST_DIR/extracted/profit_values.txt")
AVG_PROFIT=$(awk '{sum+=$1} END {print sum/NR}' "$TEST_DIR/extracted/profit_values.txt")
MAX_PROFIT=$(sort -n "$TEST_DIR/extracted/profit_values.txt" | tail -1)
MIN_PROFIT=$(sort -n "$TEST_DIR/extracted/profit_values.txt" | head -1)
echo -e " ${BLUE}Total Profit Detected:${NC} $TOTAL_PROFIT ETH"
echo -e " ${BLUE}Average Profit:${NC} $AVG_PROFIT ETH"
echo -e " ${BLUE}Max Profit:${NC} $MAX_PROFIT ETH"
echo -e " ${BLUE}Min Profit:${NC} $MIN_PROFIT ETH"
fi
fi
echo ""
echo -e "${YELLOW}Step 4: Extracting test data for replay...${NC}"
# Create JSON test data for replay
TEST_DATA_FILE="$TEST_DIR/extracted/test_data.json"
echo "{" > "$TEST_DATA_FILE"
echo " \"timestamp\": \"$(date -Iseconds)\"," >> "$TEST_DATA_FILE"
echo " \"executableOpportunities\": $EXEC_COUNT," >> "$TEST_DATA_FILE"
echo " \"totalOpportunities\": $DETAIL_COUNT," >> "$TEST_DATA_FILE"
echo " \"v3Calculations\": $V3_COUNT," >> "$TEST_DATA_FILE"
echo " \"thresholdChecks\": $THRESHOLD_COUNT," >> "$TEST_DATA_FILE"
echo " \"rejections\": $REJECT_COUNT," >> "$TEST_DATA_FILE"
echo " \"logFile\": \"$LOG_FILE\"," >> "$TEST_DATA_FILE"
echo " \"extractedFiles\": {" >> "$TEST_DATA_FILE"
echo " \"executable\": \"$TEST_DIR/extracted/executable_opportunities.log\"," >> "$TEST_DATA_FILE"
echo " \"details\": \"$TEST_DIR/extracted/opportunity_details.log\"," >> "$TEST_DATA_FILE"
echo " \"v3Calcs\": \"$TEST_DIR/extracted/v3_calculations.log\"," >> "$TEST_DATA_FILE"
echo " \"thresholds\": \"$TEST_DIR/extracted/threshold_checks.log\"," >> "$TEST_DATA_FILE"
echo " \"rejections\": \"$TEST_DIR/extracted/rejections.log\"" >> "$TEST_DATA_FILE"
echo " }" >> "$TEST_DATA_FILE"
echo "}" >> "$TEST_DATA_FILE"
echo -e "${GREEN}✓ Test data saved: $TEST_DATA_FILE${NC}"
echo ""
echo -e "${GREEN}═══════════════════════════════════════════════════════════════════════${NC}"
echo -e "${GREEN} Extraction Complete!${NC}"
echo -e "${GREEN}═══════════════════════════════════════════════════════════════════════${NC}"
echo ""
echo -e "${BLUE}Next Steps:${NC}"
echo -e " 1. Review report: cat $REPORT_FILE"
echo -e " 2. Run Go validation tests: go test ./tests/calculation-validation/..."
echo -e " 3. Replay calculations: go run ./tests/calculation-validation/replay.go"
echo ""
echo -e "${YELLOW}Files Created:${NC}"
echo -e " - Executable opportunities: $TEST_DIR/extracted/executable_opportunities.log"
echo -e " - Opportunity details: $TEST_DIR/extracted/opportunity_details.log"
echo -e " - V3 calculations: $TEST_DIR/extracted/v3_calculations.log"
echo -e " - Threshold checks: $TEST_DIR/extracted/threshold_checks.log"
echo -e " - Rejections: $TEST_DIR/extracted/rejections.log"
echo -e " - Test data JSON: $TEST_DATA_FILE"
echo -e " - Validation report: $REPORT_FILE"
echo ""

View File

@@ -1,201 +0,0 @@
#!/bin/bash
# Docker-based testing script for MEV Bot
# Runs all tests in isolated Docker containers
# Usage: docker-test.sh [unit|integration|race|build|coverage|security|lint|all|clean]
set -e
# Colors
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
NC='\033[0m'
# Colored log helpers used throughout the script.
log_info() { echo -e "${BLUE}[INFO]${NC} $1"; }
log_success() { echo -e "${GREEN}[SUCCESS]${NC} $1"; }
log_error() { echo -e "${RED}[ERROR]${NC} $1"; }
log_warning() { echo -e "${YELLOW}[WARNING]${NC} $1"; }
# Create coverage directory (mounted into the test containers).
mkdir -p coverage
echo -e "${BLUE}================================${NC}"
echo -e "${BLUE}MEV Bot Docker Test Suite${NC}"
echo -e "${BLUE}================================${NC}"
echo ""
# Parse command line arguments
# First positional argument selects the suite; defaults to "all".
TEST_TYPE="${1:-all}"
# Each runner below executes one docker-compose test service.
# FIX: the original pattern `docker-compose ...; if [ $? -eq 0 ]` is broken
# under the script's `set -e`: when the compose command failed and the
# function was invoked bare (e.g. the single-suite case arms), the shell
# exited *before* the status check, so the error branch was unreachable.
# Testing the command directly in `if` keeps set -e from firing and makes
# the failure branches real.

# Run the unit-test container; returns 1 on failure.
run_unit_tests() {
    log_info "Running unit tests in Docker..."
    if docker-compose -f docker-compose.test.yml run --rm test-unit; then
        log_success "Unit tests passed!"
    else
        log_error "Unit tests failed!"
        return 1
    fi
}
# Run the integration-test container; returns 1 on failure.
run_integration_tests() {
    log_info "Running integration tests in Docker..."
    if docker-compose -f docker-compose.test.yml run --rm test-integration; then
        log_success "Integration tests passed!"
    else
        log_error "Integration tests failed!"
        return 1
    fi
}
# Run the race-detector container; returns 1 on failure.
run_race_tests() {
    log_info "Running race detector tests in Docker..."
    if docker-compose -f docker-compose.test.yml run --rm test-race; then
        log_success "Race detector tests passed (0 race conditions)!"
    else
        log_error "Race detector tests failed!"
        return 1
    fi
}
# Verify the project builds inside the container; returns 1 on failure.
run_build_test() {
    log_info "Running build verification in Docker..."
    if docker-compose -f docker-compose.test.yml run --rm test-build; then
        log_success "Build verification passed!"
    else
        log_error "Build verification failed!"
        return 1
    fi
}
# Generate the coverage report; returns 1 on failure.
run_coverage_test() {
    log_info "Generating coverage report in Docker..."
    if docker-compose -f docker-compose.test.yml run --rm test-coverage; then
        log_success "Coverage report generated!"
        if [ -f coverage/coverage.html ]; then
            log_info "Coverage report: coverage/coverage.html"
        fi
    else
        log_error "Coverage generation failed!"
        return 1
    fi
}
# Security scan is advisory: warn but do not fail the suite.
run_security_scan() {
    log_info "Running security scan in Docker..."
    if docker-compose -f docker-compose.test.yml run --rm test-security; then
        log_success "Security scan passed!"
    else
        log_warning "Security scan found issues (check coverage/gosec-report.json)"
    fi
}
# Linting is advisory: warn but do not fail the suite.
run_lint() {
    log_info "Running linters in Docker..."
    if docker-compose -f docker-compose.test.yml run --rm test-lint; then
        log_success "Linting passed!"
    else
        log_warning "Linting found issues"
    fi
}
# Tear down all compose resources created by the test services.
cleanup() {
    log_info "Cleaning up Docker resources..."
    docker-compose -f docker-compose.test.yml down --volumes --remove-orphans
    log_success "Cleanup complete!"
}
# Main execution
# Dispatch on the requested suite name.
case "$TEST_TYPE" in
unit)
    run_unit_tests
    ;;
integration)
    run_integration_tests
    ;;
race)
    run_race_tests
    ;;
build)
    run_build_test
    ;;
coverage)
    run_coverage_test
    ;;
security)
    run_security_scan
    ;;
lint)
    run_lint
    ;;
all)
    log_info "Running complete test suite..."
    echo ""
    # Run in sequence; lint and security are advisory (no exit on failure).
    run_build_test || exit 1
    echo ""
    run_unit_tests || exit 1
    echo ""
    run_race_tests || exit 1
    echo ""
    run_integration_tests || exit 1
    echo ""
    run_coverage_test || exit 1
    echo ""
    run_lint
    echo ""
    run_security_scan
    echo ""
    log_success "All tests passed!"
    ;;
clean)
    cleanup
    exit 0
    ;;
*)
    log_error "Unknown test type: $TEST_TYPE"
    echo ""
    echo "Usage: $0 [test-type]"
    echo ""
    echo "Test types:"
    echo " all - Run all tests (default)"
    echo " unit - Run unit tests only"
    echo " integration - Run integration tests only"
    echo " race - Run race detector tests only"
    echo " build - Run build verification only"
    echo " coverage - Generate coverage report"
    echo " security - Run security scan"
    echo " lint - Run linters"
    echo " clean - Clean up Docker resources"
    exit 1
    ;;
esac
# Cleanup on success
# NOTE(review): this trap is registered only after the case block completes,
# so the `exit 1` paths above skip cleanup entirely -- confirm whether
# leaving containers/volumes behind on failure is intentional.
trap cleanup EXIT
echo ""
echo -e "${GREEN}================================${NC}"
echo -e "${GREEN}Test Suite Complete!${NC}"
echo -e "${GREEN}================================${NC}"
echo ""
echo -e "${BLUE}Test Results:${NC}"
echo -e " ${GREEN}${NC} All tests passed in isolated Docker environment"
echo -e " ${GREEN}${NC} Code coverage report: coverage/coverage.html"
echo -e " ${GREEN}${NC} Security report: coverage/gosec-report.json"
echo ""

View File

@@ -1,70 +0,0 @@
#!/bin/bash
# Test script for MEV Bot fixes
# Verifies that the environment, config files, and key config values
# (rate limiting, market-scan range) are sane after running setup-env.sh.
echo "Testing MEV Bot fixes..."
# Run the setup script
./setup-env.sh
# Test environment variables
echo "Testing environment variables..."
if [ -f .env ]; then
    source .env
    echo "✓ .env file exists"
else
    echo "✗ .env file missing"
    exit 1
fi
# Check required variables (indirect expansion ${!var} reads each by name).
required_vars=("ARBITRUM_RPC_ENDPOINT" "MEV_BOT_ENCRYPTION_KEY")
missing_vars=()
for var in "${required_vars[@]}"; do
    if [ -z "${!var}" ]; then
        missing_vars+=("$var")
    fi
done
if [ ${#missing_vars[@]} -eq 0 ]; then
    echo "✓ All required environment variables are set"
else
    echo "✗ Missing environment variables: ${missing_vars[*]}"
    exit 1
fi
# Test configuration files
echo "Testing configuration files..."
if [ -f config/config.yaml ]; then
    echo "✓ config.yaml exists"
else
    echo "✗ config.yaml missing"
    exit 1
fi
if [ -f config/initial_markets.yaml ]; then
    echo "✓ initial_markets.yaml exists"
else
    echo "✗ initial_markets.yaml missing"
    exit 1
fi
# Test rate limiting configuration
echo "Checking rate limiting configuration..."
requests_per_second=$(grep "requests_per_second:" config/config.yaml | awk '{print $2}' | head -1)
# FIX: default to 0 when the key is absent -- the original passed an empty
# string to `[ -le ]`, which errors out ("integer expression expected").
requests_per_second=${requests_per_second:-0}
if [ "$requests_per_second" -le 5 ]; then
    echo "✓ Rate limiting is properly configured (RPS: $requests_per_second)"
else
    echo "✗ Rate limiting may be too high (RPS: $requests_per_second)"
fi
# Test market scan configuration
echo "Checking market scan configuration..."
max_blocks_back=$(grep "max_blocks_back:" config/initial_markets.yaml | awk '{print $2}')
# Same empty-value guard as above.
max_blocks_back=${max_blocks_back:-0}
if [ "$max_blocks_back" -le 1000 ]; then
    echo "✓ Market scan block range is properly configured ($max_blocks_back blocks)"
else
    echo "✗ Market scan block range may be too high ($max_blocks_back blocks)"
fi
echo "All tests completed!"

View File

@@ -1,113 +0,0 @@
#!/bin/bash
# Test script for forked Arbitrum environment
# Boots an anvil fork of Arbitrum One and runs the MEV bot against it.
set -e
echo "🚀 Setting up forked Arbitrum environment for MEV bot testing..."
# Check if anvil is available
if ! command -v anvil &> /dev/null; then
    echo "❌ Anvil not found. Please install Foundry first:"
    echo "curl -L https://foundry.paradigm.xyz | bash"
    echo "foundryup"
    exit 1
fi
# Kill any existing anvil processes
# NOTE(review): `pkill -f anvil` matches the pattern anywhere in any
# command line, so it can kill unrelated processes -- confirm this is safe
# on shared machines.
echo "🔄 Stopping any existing anvil processes..."
pkill -f anvil || true
sleep 2
# Set up environment variables for forked network
# SECURITY NOTE(review): this RPC URL embeds a provider API key in the
# script; it should come from an environment variable or secret store, and
# the committed key should be rotated.
export ARBITRUM_RPC_ENDPOINT="https://arbitrum-mainnet.core.chainstack.com/53c30e7a941160679fdcc396c894fc57"
export ARBITRUM_WS_ENDPOINT="ws://localhost:8545"
export METRICS_ENABLED="false"
export MEV_BOT_ENCRYPTION_KEY="test-fork-encryption-key-32-chars"
export MEV_BOT_ALLOW_LOCALHOST="true"
# Start anvil with Arbitrum fork (pinned block for reproducibility,
# 10 prefunded accounts, 1s block time, detached in the background).
echo "🔗 Starting anvil with Arbitrum One fork..."
anvil \
    --fork-url "$ARBITRUM_RPC_ENDPOINT" \
    --fork-block-number 250000000 \
    --host 0.0.0.0 \
    --port 8545 \
    --accounts 10 \
    --balance 1000 \
    --gas-limit 30000000 \
    --gas-price 100000000 \
    --block-time 1 \
    --silent &
ANVIL_PID=$!
echo "📊 Anvil started with PID: $ANVIL_PID"
# Wait for anvil to be ready
echo "⏳ Waiting for anvil to be ready..."
sleep 5
# Verify anvil is running
# Health probe: a JSON-RPC eth_blockNumber call against the local fork.
if ! curl -s -X POST \
    -H "Content-Type: application/json" \
    --data '{"jsonrpc":"2.0","method":"eth_blockNumber","params":[],"id":1}' \
    http://localhost:8545 > /dev/null; then
    echo "❌ Anvil failed to start properly"
    kill $ANVIL_PID 2>/dev/null || true
    exit 1
fi
echo "✅ Anvil fork ready on http://localhost:8545"
# Update RPC endpoint to use local fork
export ARBITRUM_RPC_ENDPOINT="http://localhost:8545"
# Cleanup function -- registered *before* the build/test phase so the anvil
# fork is reaped on every exit path.  (The original registered the trap only
# after the test run, and its `go build; if [ $? -ne 0 ]` check was dead
# code under `set -e`: a failed build aborted the script before the check,
# leaving anvil running.)
cleanup() {
    echo "🧹 Cleaning up..."
    kill $ANVIL_PID 2>/dev/null || true
    wait $ANVIL_PID 2>/dev/null || true
    echo "✅ Cleanup completed"
}
# Set up trap for cleanup
trap cleanup EXIT
# Build the MEV bot (test the command directly so set -e cannot bypass the
# error branch).
echo "🔨 Building MEV bot..."
if ! go build -o bin/mev-bot cmd/mev-bot/main.go; then
    echo "❌ Failed to build MEV bot"
    exit 1
fi
echo "✅ MEV bot built successfully"
# Test the bot with fork
echo "🧪 Testing MEV bot with forked environment..."
# Run bot for 30 seconds to test
timeout 30 ./bin/mev-bot start || true
echo "🎯 Fork test completed"
# Keep anvil running if requested (${1:-} guards against unset $1).
if [ "${1:-}" = "--keep-running" ]; then
    echo "🔄 Keeping anvil running. Press Ctrl+C to stop."
    echo "📍 Fork URL: http://localhost:8545"
    echo "🔗 Chain ID: 42161 (Arbitrum One)"
    echo "💰 Test accounts funded with 1000 ETH each"
    echo ""
    echo "To test manually:"
    echo "export ARBITRUM_RPC_ENDPOINT=\"http://localhost:8545\""
    echo "export ARBITRUM_WS_ENDPOINT=\"ws://localhost:8545\""
    echo "export MEV_BOT_ENCRYPTION_KEY=\"test-fork-encryption-key-32-chars\""
    echo "export MEV_BOT_ALLOW_LOCALHOST=\"true\""
    echo "./bin/mev-bot start"
    # Wait for user interrupt
    wait $ANVIL_PID
else
    echo "🏁 Test completed. Use --keep-running to keep the fork active."
fi

View File

@@ -1,104 +0,0 @@
#!/bin/bash
# Test extraction logic with real log data
# Parses the most recent "OPPORTUNITY DETECTED" line out of logs/mev_bot.log
# and pretty-prints each extracted field.
# NOTE(review): relies on GNU grep (-P / \K) and `bc` -- not portable to
# BSD/macOS userland without GNU tools installed.
# Color codes
GREEN='\033[0;32m'
RED='\033[0;31m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
CYAN='\033[0;36m'
NC='\033[0m'
# Get the last opportunity from logs
line=$(grep "OPPORTUNITY DETECTED" logs/mev_bot.log | tail -1)
echo "════════════════════════════════════════════════════════════"
echo "Testing Extraction Logic with Real Log Data"
echo "════════════════════════════════════════════════════════════"
echo ""
# Extract timestamp (first two whitespace-separated fields of the log line).
TIMESTAMP=$(echo "$line" | awk '{print $1, $2}')
echo "Timestamp: $TIMESTAMP"
# Check if executable
if echo "$line" | grep -q "isExecutable:true"; then
    echo "Status: EXECUTABLE ✅"
else
    echo "Status: NOT EXECUTABLE ⚠️"
fi
echo ""
echo "Extracted Fields:"
echo "----------------"
# Token pair (FIXED)
TOKEN_IN=$(echo "$line" | grep -oP 'tokenIn:[^ \]]+' | cut -d: -f2)
TOKEN_OUT=$(echo "$line" | grep -oP 'tokenOut:[^ \]]+' | cut -d: -f2)
echo "Token In: $TOKEN_IN"
echo "Token Out: $TOKEN_OUT"
# Amounts
AMOUNT_IN=$(echo "$line" | grep -oP 'Amount In: [0-9.]+' | grep -oP '[0-9.]+')
AMOUNT_OUT=$(echo "$line" | grep -oP 'Amount Out: [0-9.]+' | grep -oP '[0-9.]+')
echo "Amount In: $AMOUNT_IN"
echo "Amount Out: $AMOUNT_OUT"
# Financial metrics (patterns allow scientific notation, e.g. 1.2e-05)
ESTIMATED_PROFIT=$(echo "$line" | grep -oP 'estimatedProfitETH:[0-9.]+' | cut -d: -f2)
GAS_COST=$(echo "$line" | grep -oP 'gasCostETH:[0-9.eE+-]+' | cut -d: -f2)
NET_PROFIT=$(echo "$line" | grep -oP 'netProfitETH:-?[0-9.eE+-]+' | cut -d: -f2)
echo "Estimated Profit: $ESTIMATED_PROFIT ETH"
echo "Gas Cost: $GAS_COST ETH"
echo "Net Profit: $NET_PROFIT ETH"
# Price impact & margin
PRICE_IMPACT=$(echo "$line" | grep -oP 'priceImpact:[0-9.eE+-]+' | cut -d: -f2)
PROFIT_MARGIN=$(echo "$line" | grep -oP 'profitMargin:-?[0-9.eE+-]+' | cut -d: -f2)
echo "Price Impact: $PRICE_IMPACT"
echo "Profit Margin: $PROFIT_MARGIN"
# Confidence
CONFIDENCE=$(echo "$line" | grep -oP 'confidence:[0-9.]+' | cut -d: -f2)
echo "Confidence: $CONFIDENCE"
# Reject reason (FIXED)
# Grab up to six words after "rejectReason:" ...
REASON=$(echo "$line" | grep -oP 'rejectReason:[^ ]+ [^ ]+ [^ ]+ [^ ]+ [^ ]+ [^ ]+' | cut -d: -f2)
# Trim to just the meaningful part (remove trailing field names)
REASON=$(echo "$REASON" | sed 's/ token[0-9].*$//' | sed 's/ protocol.*$//' | sed 's/ poolAddress.*$//')
echo "Reject Reason: $REASON"
echo ""
echo "════════════════════════════════════════════════════════════"
echo "Now showing formatted output:"
echo "════════════════════════════════════════════════════════════"
echo ""
# Show formatted output
echo -e "${YELLOW}[$TIMESTAMP] 🎯 Opportunity (not executable)${NC}"
echo -e "${CYAN} 🔄 Pair: ${TOKEN_IN}${TOKEN_OUT}${NC}"
echo -e "${CYAN} 📊 Amounts: ${AMOUNT_IN}${AMOUNT_OUT}${NC}"
echo -e "${YELLOW} 💰 Estimated Profit: ${ESTIMATED_PROFIT} ETH${NC}"
echo -e "${RED} ⛽ Gas Cost: ${GAS_COST} ETH${NC}"
echo -e "${RED} 📉 Net After Gas: ${NET_PROFIT} ETH${NC}"
# Price impact as percentage
# Scale via bc (handles decimals); if bc fails, fall back to the raw value.
if [[ -n "$PRICE_IMPACT" ]]; then
    PRICE_IMPACT_PCT=$(echo "$PRICE_IMPACT * 100" | bc -l 2>/dev/null || echo "$PRICE_IMPACT")
    echo -e "${YELLOW} 📊 Price Impact: $(printf "%.6f" $PRICE_IMPACT_PCT 2>/dev/null || echo "$PRICE_IMPACT")%${NC}"
fi
# Profit margin as percentage
if [[ -n "$PROFIT_MARGIN" ]]; then
    PROFIT_MARGIN_PCT=$(echo "$PROFIT_MARGIN * 100" | bc -l 2>/dev/null || echo "$PROFIT_MARGIN")
    echo -e "${RED} 📊 Profit Margin: $(printf "%.6f" $PROFIT_MARGIN_PCT 2>/dev/null || echo "$PROFIT_MARGIN")%${NC}"
fi
# Confidence as percentage
if [[ -n "$CONFIDENCE" ]]; then
    CONFIDENCE_PCT=$(echo "$CONFIDENCE * 100" | bc -l 2>/dev/null || echo "$CONFIDENCE")
    echo -e "${YELLOW} 🎯 Confidence: $(printf "%.1f" $CONFIDENCE_PCT 2>/dev/null || echo "$CONFIDENCE")%${NC}"
fi
echo -e "${RED} ❌ Reason: $REASON${NC}"
echo ""

View File

@@ -1,81 +0,0 @@
package main
import (
"context"
"fmt"
"log"
"time"
"github.com/ethereum/go-ethereum"
"github.com/ethereum/go-ethereum/common"
"github.com/ethereum/go-ethereum/ethclient"
)
// main probes a set of Arbitrum V3 pools with raw token0()/token1() eth_calls
// to diagnose why pool-metadata lookups were failing. It deliberately uses the
// token1 selector found in the bot code (which differs from the canonical V3
// selector) so the failure can be reproduced, then prints both selectors for
// comparison at the end.
func main() {
	// Connect to the public Arbitrum One RPC endpoint.
	client, err := ethclient.Dial("https://arb1.arbitrum.io/rpc")
	if err != nil {
		log.Fatal("Failed to connect:", err)
	}

	// Pools that were reported as "failing" metadata lookups.
	pools := []string{
		"0x6f38e884725a116C9C7fBF208e79FE8828a2595F",
		"0x2f5e87C9312fa29aed5c179E456625D79015299c",
		"0xB1026b8e7276e7AC75410F1fcbbe21796e8f7526",
	}

	// 4-byte function selectors.
	token0Selector := []byte{0x0d, 0xfe, 0x16, 0x81} // token0()
	// NOTE: this is NOT the canonical V3 token1() selector (0xd21220a7); it is
	// the selector the bot code was using, kept here to reproduce the bug.
	token1Selector := []byte{0xd2, 0x1c, 0xec, 0xd4}

	for _, poolHex := range pools {
		poolAddress := common.HexToAddress(poolHex)
		fmt.Printf("\nTesting pool: %s\n", poolHex)
		fmt.Println("=========")

		// Scope the timeout context in a closure so cancel is released via
		// defer on every exit path (the original only called cancel at the
		// end of the loop body, leaking the context on an early panic).
		func() {
			ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
			defer cancel()
			probeSelector(ctx, client, poolAddress, "token0", token0Selector)
			probeSelector(ctx, client, poolAddress, "token1", token1Selector)
		}()
	}

	// Surface the selector mismatch explicitly for whoever reads the output.
	fmt.Println("\nChecking token1() selector:")
	fmt.Printf("  Correct V3 selector: 0xd21220a7\n")
	fmt.Printf("  Our code selector: 0x%x\n", token1Selector)
}

// probeSelector performs a raw eth_call against pool with the given 4-byte
// selector and reports whether the result decodes as an ABI-encoded address.
func probeSelector(ctx context.Context, client *ethclient.Client, pool common.Address, name string, selector []byte) {
	data, err := client.CallContract(ctx, ethereum.CallMsg{
		To:   &pool,
		Data: selector,
	}, nil)
	if err != nil {
		fmt.Printf("  %s() ERROR: %v\n", name, err)
		return
	}
	if len(data) >= 32 {
		// An ABI-encoded address is right-aligned in a 32-byte word.
		addr := common.BytesToAddress(data[12:32])
		fmt.Printf("  %s() SUCCESS: %s\n", name, addr.Hex())
	} else {
		fmt.Printf("  %s() INVALID DATA: %x\n", name, data)
	}
}

View File

@@ -1,130 +0,0 @@
#!/usr/bin/env bash
set -euo pipefail

# Reusable, agnostic Go test runner.
# Can be used in any Go project by adjusting the configuration below or by
# overriding the variables via the environment / command line.

# Configuration variables (environment overrides respected)
TEST_LEVEL="${TEST_LEVEL:-basic}" # basic, unit, integration, comprehensive, audit
COVERAGE="${COVERAGE:-false}"
OUTPUT_DIR="${OUTPUT_DIR:-test-results}"
PACKAGE_FILTER="${PACKAGE_FILTER:-./...}"
TIMEOUT="${TIMEOUT:-10m}"
JUNIT_OUTPUT="${JUNIT_OUTPUT:-false}"

# Parse command line arguments
while [[ $# -gt 0 ]]; do
  case "$1" in
    -l|--level)
      TEST_LEVEL="$2"
      shift 2
      ;;
    -c|--coverage)
      COVERAGE=true
      shift
      ;;
    -o|--output)
      OUTPUT_DIR="$2"
      shift 2
      ;;
    -p|--package)
      PACKAGE_FILTER="$2"
      shift 2
      ;;
    --junit)
      JUNIT_OUTPUT=true
      shift
      ;;
    -t|--timeout)
      TIMEOUT="$2"
      shift 2
      ;;
    -h|--help)
      echo "Usage: $0 [OPTIONS]"
      echo "Run tests with different levels of coverage"
      echo ""
      echo "Options:"
      echo " -l, --level LEVEL Test level: basic, unit, integration, comprehensive, audit (default: basic)"
      echo " -c, --coverage Generate coverage report"
      echo " -o, --output DIR Output directory (default: test-results)"
      echo " -p, --package PKG Package filter (default: ./...)"
      echo " --junit Generate JUnit output"
      echo " -t, --timeout DURATION Timeout duration (default: 10m)"
      echo " -h, --help Show this help"
      exit 0
      ;;
    *)
      # Diagnostics belong on stderr.
      echo "Unknown option: $1" >&2
      exit 1
      ;;
  esac
done

echo "Starting test execution (level: $TEST_LEVEL, coverage: $COVERAGE)"

# Create output directory
mkdir -p "$OUTPUT_DIR"

#######################################
# Run `go test` over the given package patterns, producing an HTML coverage
# report when COVERAGE=true.
# FIX: the original duplicated this logic in every case arm and never applied
# the parsed TIMEOUT value to any go test invocation.
# Globals:   COVERAGE, TIMEOUT, OUTPUT_DIR (read)
# Arguments: extra go test flags and package patterns
#######################################
run_go_tests() {
  if [ "$COVERAGE" = "true" ]; then
    go test -v -timeout "$TIMEOUT" -coverprofile="$OUTPUT_DIR/coverage.out" -covermode=atomic "$@"
    go tool cover -html="$OUTPUT_DIR/coverage.out" -o "$OUTPUT_DIR/coverage.html"
  else
    go test -v -timeout "$TIMEOUT" "$@"
  fi
}

# Run tests based on level
case "$TEST_LEVEL" in
  "basic")
    echo "Running basic tests..."
    run_go_tests -short "$PACKAGE_FILTER"
    ;;
  "unit")
    echo "Running unit tests..."
    run_go_tests ./test/unit/...
    ;;
  "integration")
    echo "Running integration tests..."
    run_go_tests ./test/integration/...
    ;;
  "comprehensive")
    echo "Running comprehensive tests..."
    run_go_tests ./test/unit/... ./test/integration/... ./test/e2e/...
    ;;
  "audit")
    echo "Running audit tests..."
    run_go_tests ./test/unit/... ./test/integration/... ./test/e2e/... ./test/property/... ./test/fuzzing/... ./test/security/...
    ;;
  *)
    echo "Invalid test level: $TEST_LEVEL. Use: basic, unit, integration, comprehensive, audit" >&2
    exit 1
    ;;
esac

# Generate JUnit output if requested.
# NOTE(review): this re-runs the tests over PACKAGE_FILTER regardless of the
# selected level — confirm that is intended before relying on the XML.
if [ "$JUNIT_OUTPUT" = "true" ]; then
  if command -v go-junit-report &> /dev/null; then
    go test -v "$PACKAGE_FILTER" 2>&1 | go-junit-report > "$OUTPUT_DIR/test-results.xml"
  else
    echo "go-junit-report not found. Install with: go install github.com/jstemmer/go-junit-report@latest" >&2
  fi
fi

echo "Test execution completed. Results in $OUTPUT_DIR"
View File

@@ -1,82 +0,0 @@
#!/bin/bash
# MEV Bot Testing Environment Setup
# Prepares a forked-Arbitrum (Anvil) testing layout: directory tree, Foundry
# toolchain check, and a foundry.toml pointing at the fork endpoints.
set -e

echo "🚀 Setting up MEV Bot testing environment..."

# Configuration
ARBITRUM_RPC_URL="https://arb1.arbitrum.io/rpc"
FORK_BLOCK_NUMBER="latest"
ANVIL_PORT="8545"
ANVIL_CHAIN_ID="31337"

# Directories
TEST_DIR="./tests"
CONTRACTS_DIR="./tests/contracts"
SCENARIOS_DIR="./tests/scenarios"
LOGS_DIR="./tests/logs"

# Create the whole test directory tree in one call.
mkdir -p "$TEST_DIR" "$CONTRACTS_DIR" "$SCENARIOS_DIR" "$LOGS_DIR"

echo "📁 Created test directory structure"

# Verify the Foundry toolchain (forge + anvil) is installed before going on.
if ! command -v forge >/dev/null 2>&1; then
    echo "❌ Foundry not found. Please install Foundry first:"
    echo " curl -L https://foundry.paradigm.xyz | bash"
    echo " foundryup"
    exit 1
fi
if ! command -v anvil >/dev/null 2>&1; then
    echo "❌ Anvil not found. Please install Foundry first:"
    exit 1
fi

echo "✅ Foundry installation verified"

# Write the Foundry configuration; $ARBITRUM_RPC_URL and $ANVIL_PORT are
# expanded into the file on purpose (unquoted heredoc delimiter).
cat > foundry.toml << EOF
[profile.default]
src = "tests/contracts"
out = "tests/out"
libs = ["lib"]
test = "tests"
cache_path = "tests/cache"
force = false
[profile.default.optimizer]
enabled = true
runs = 200
[profile.default.fmt]
line_length = 120
tab_width = 4
[rpc_endpoints]
arbitrum = "$ARBITRUM_RPC_URL"
local = "http://localhost:$ANVIL_PORT"
EOF

echo "⚙️ Created foundry.toml configuration"
echo "✅ Test environment setup complete!"
echo ""
echo "🎯 Next steps:"
echo "1. Install Foundry: curl -L https://foundry.paradigm.xyz | bash && foundryup"
echo "2. Run: source tests/setup_env.sh"
echo "3. Run: cd tests/scenarios && ./run_tests.sh"
echo ""
echo "📊 The testing environment includes:"
echo " - Forked Arbitrum network with Anvil"
echo " - Solidity contracts for realistic scenarios"
echo " - Go integration tests"
echo " - Automated test runner"
echo " - Comprehensive logging"
View File

@@ -1,93 +0,0 @@
#!/bin/bash
# This script runs comprehensive automated tests: unit, race, coverage,
# benchmarks, plus integration/property/fuzz suites when present.
set -e

echo "Starting comprehensive automated tests..."

# Overall result; any failing suite flips this to 1 but does not abort the run
# (commands inside `if` conditions are exempt from set -e).
exit_code=0

# Run unit tests
echo "Running unit tests..."
if go test -v ./... -timeout=30s; then
    echo "✅ Unit tests passed"
else
    echo "❌ Unit tests failed"
    exit_code=1
fi

# Run tests with race detection
echo "Running race condition tests..."
if go test -race -v ./... -timeout=60s; then
    echo "✅ Race condition tests passed"
else
    echo "❌ Race condition tests failed"
    exit_code=1
fi

# Run coverage test
echo "Running coverage tests..."
if go test -v -coverprofile=coverage.out ./... -timeout=30s; then
    echo "✅ Coverage tests passed"
    # Show coverage summary
    go tool cover -func=coverage.out | tail -n 1
else
    echo "❌ Coverage tests failed"
    exit_code=1
fi

# Run benchmarks (to make sure they don't panic)
echo "Running benchmarks..."
if go test -bench=. -run=^$ ./...; then
    echo "✅ Benchmarks passed"
else
    echo "❌ Benchmarks failed"
    exit_code=1
fi

# Run integration tests (if they exist)
echo "Running integration tests..."
if [ -z "$(find . -name "*_integration_test.go" -print -quit)" ]; then
    echo " No integration tests found"
elif go test -tags=integration -v ./... -timeout=60s; then
    echo "✅ Integration tests passed"
else
    echo "❌ Integration tests failed"
    exit_code=1
fi

# Run property-based tests (if they exist)
echo "Running property-based tests..."
if [ -z "$(find . -name "*_property_test.go" -print -quit)" ]; then
    echo " No property-based tests found"
elif go test -tags=property -v ./... -timeout=60s; then
    echo "✅ Property-based tests passed"
else
    echo "❌ Property-based tests failed"
    exit_code=1
fi

# Run fuzz tests (if they exist)
echo "Running fuzz tests..."
if [ -n "$(find . -name "*_fuzz_test.go" -print -quit)" ]; then
    # Run a quick fuzz test to ensure they work
    go test -fuzz=Fuzz -fuzztime=10s ./pkg/math/ 2>/dev/null || echo "No fuzz tests found in math package"
else
    echo " No fuzz tests found"
fi

echo "Comprehensive automated tests completed."
if [ "$exit_code" -eq 0 ]; then
    echo "✅ All tests passed"
    exit 0
else
    echo "❌ Some tests failed"
    exit "$exit_code"
fi

Some files were not shown because too many files have changed in this diff Show More