Update module name to github.com/fraktal/mev-beta and fix channel closing issues in pipeline stages

Author: Krypto Kajun
Date:   2025-09-12 19:08:38 -05:00
Commit: 1113d82499 (parent fbb85e529a)
31 changed files with 3359 additions and 210 deletions


@@ -6,29 +6,35 @@ import (
"sync"
"time"
"github.com/your-username/mev-beta/internal/config"
"github.com/your-username/mev-beta/internal/logger"
"github.com/your-username/mev-beta/pkg/uniswap"
"github.com/fraktal/mev-beta/internal/config"
"github.com/fraktal/mev-beta/internal/logger"
"github.com/fraktal/mev-beta/pkg/events"
"github.com/fraktal/mev-beta/pkg/uniswap"
"github.com/ethereum/go-ethereum/common"
"github.com/holiman/uint256"
"golang.org/x/sync/singleflight"
)
// MarketScanner scans markets for price movement opportunities with concurrency
type MarketScanner struct {
config *config.BotConfig
logger *logger.Logger
workerPool chan chan SwapDetails
workers []*SwapWorker
workerPool chan chan EventDetails
workers []*EventWorker
wg sync.WaitGroup
cacheGroup singleflight.Group
cache map[string]*CachedData
cacheMutex sync.RWMutex
cacheTTL time.Duration
}
// SwapWorker represents a worker that processes swap details
type SwapWorker struct {
ID int
WorkerPool chan chan SwapDetails
JobChannel chan SwapDetails
QuitChan chan bool
scanner *MarketScanner
// EventWorker represents a worker that processes event details
type EventWorker struct {
ID int
WorkerPool chan chan EventDetails
JobChannel chan EventDetails
QuitChan chan bool
scanner *MarketScanner
}
// NewMarketScanner creates a new market scanner with concurrency support
@@ -36,33 +42,38 @@ func NewMarketScanner(cfg *config.BotConfig, logger *logger.Logger) *MarketScann
scanner := &MarketScanner{
config: cfg,
logger: logger,
workerPool: make(chan chan SwapDetails, cfg.MaxWorkers),
workers: make([]*SwapWorker, 0, cfg.MaxWorkers),
workerPool: make(chan chan EventDetails, cfg.MaxWorkers),
workers: make([]*EventWorker, 0, cfg.MaxWorkers),
cache: make(map[string]*CachedData),
cacheTTL: time.Duration(cfg.RPCTimeout) * time.Second,
}
// Create workers
for i := 0; i < cfg.MaxWorkers; i++ {
worker := NewSwapWorker(i, scanner.workerPool, scanner)
worker := NewEventWorker(i, scanner.workerPool, scanner)
scanner.workers = append(scanner.workers, worker)
worker.Start()
}
// Start cache cleanup routine
go scanner.cleanupCache()
return scanner
}
// NewSwapWorker creates a new swap worker
func NewSwapWorker(id int, workerPool chan chan SwapDetails, scanner *MarketScanner) *SwapWorker {
return &SwapWorker{
// NewEventWorker creates a new event worker
func NewEventWorker(id int, workerPool chan chan EventDetails, scanner *MarketScanner) *EventWorker {
return &EventWorker{
ID: id,
WorkerPool: workerPool,
JobChannel: make(chan SwapDetails),
JobChannel: make(chan EventDetails),
QuitChan: make(chan bool),
scanner: scanner,
}
}
// Start begins the worker
func (w *SwapWorker) Start() {
func (w *EventWorker) Start() {
go func() {
for {
// Register the worker in the worker pool
@@ -81,68 +92,124 @@ func (w *SwapWorker) Start() {
}
// Stop terminates the worker
func (w *SwapWorker) Stop() {
func (w *EventWorker) Stop() {
go func() {
w.QuitChan <- true
}()
}
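// The worker loop elided by the hunk marker above presumably re-registers the
// worker and then waits for either a job or a quit signal. A minimal sketch of
// that shape; runLoop is an illustrative name, and closing QuitChan is shown as
// one way to make shutdown reach a blocked worker with a single close.
func (w *EventWorker) runLoop() {
    for {
        // Hand this worker's job channel back to the pool so SubmitEvent can pick it.
        w.WorkerPool <- w.JobChannel
        select {
        case event := <-w.JobChannel:
            w.Process(event)
        case <-w.QuitChan:
            // A close(w.QuitChan) in Stop would also satisfy this receive.
            return
        }
    }
}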
// Process handles a swap detail
func (w *SwapWorker) Process(swap SwapDetails) {
// Analyze the swap in a separate goroutine to maintain throughput
// Process handles an event detail
func (w *EventWorker) Process(event EventDetails) {
// Analyze the event in a separate goroutine to maintain throughput
go func() {
defer w.scanner.wg.Done()
// Log the processing
w.scanner.logger.Debug(fmt.Sprintf("Worker %d processing swap in pool %s", w.ID, swap.PoolAddress))
// Analyze the swap
priceMovement, err := w.scanner.AnalyzeSwap(swap)
if err != nil {
w.scanner.logger.Error(fmt.Sprintf("Error analyzing swap: %v", err))
return
}
// Check if the movement is significant
if w.scanner.IsSignificantMovement(priceMovement, w.scanner.config.MinProfitThreshold) {
w.scanner.logger.Info(fmt.Sprintf("Significant price movement detected: %+v", priceMovement))
// TODO: Send to arbitrage engine
w.scanner.logger.Debug(fmt.Sprintf("Worker %d processing %s event in pool %s from protocol %s",
w.ID, event.Type.String(), event.PoolAddress, event.Protocol))
// Analyze based on event type
switch event.Type {
case events.Swap:
w.scanner.analyzeSwapEvent(event)
case events.AddLiquidity:
w.scanner.analyzeLiquidityEvent(event, true)
case events.RemoveLiquidity:
w.scanner.analyzeLiquidityEvent(event, false)
case events.NewPool:
w.scanner.analyzeNewPoolEvent(event)
default:
w.scanner.logger.Debug(fmt.Sprintf("Worker %d received unknown event type: %d", w.ID, event.Type))
}
}()
}
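// The identifiers used in the switch above (events.Swap, events.AddLiquidity,
// events.RemoveLiquidity, events.NewPool, EventType.String) suggest roughly the
// following shape for pkg/events. This is an assumption for illustration; the
// real package may name or order the constants differently.
type EventType int

const (
    Swap EventType = iota
    AddLiquidity
    RemoveLiquidity
    NewPool
)

func (t EventType) String() string {
    switch t {
    case Swap:
        return "Swap"
    case AddLiquidity:
        return "AddLiquidity"
    case RemoveLiquidity:
        return "RemoveLiquidity"
    case NewPool:
        return "NewPool"
    default:
        return "Unknown"
    }
}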
// SubmitSwap submits a swap for processing by the worker pool
func (s *MarketScanner) SubmitSwap(swap SwapDetails) {
// SubmitEvent submits an event for processing by the worker pool
func (s *MarketScanner) SubmitEvent(event EventDetails) {
s.wg.Add(1)
// Get an available worker job channel
jobChannel := <-s.workerPool
// Send the job to the worker
jobChannel <- swap
jobChannel <- event
}
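// SubmitEvent blocks until a worker has re-registered its job channel, which
// gives natural backpressure to upstream pipeline stages. An illustrative feeder
// stage along those lines; pumpEvents, decoded, and done are hypothetical names,
// not part of this file.
func pumpEvents(s *MarketScanner, decoded <-chan EventDetails, done <-chan struct{}) {
    for {
        select {
        case ev, ok := <-decoded:
            if !ok {
                // Upstream closed its output channel: end this stage cleanly.
                return
            }
            s.SubmitEvent(ev)
        case <-done:
            return
        }
    }
}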
// AnalyzeSwap analyzes a swap to determine if it's large enough to move the price
func (s *MarketScanner) AnalyzeSwap(swap SwapDetails) (*PriceMovement, error) {
// Calculate the price before the swap
priceBefore := uniswap.SqrtPriceX96ToPrice(swap.SqrtPriceX96.ToBig())
// For a more accurate calculation, we would need to:
// 1. Calculate the new sqrtPriceX96 after the swap
// 2. Convert that to a price
// 3. Calculate the price impact
priceMovement := &PriceMovement{
Token0: swap.Token0,
Token1: swap.Token1,
Pool: swap.PoolAddress,
AmountIn: new(big.Int).Add(swap.Amount0In, swap.Amount1In),
AmountOut: new(big.Int).Add(swap.Amount0Out, swap.Amount1Out),
PriceBefore: priceBefore,
TickBefore: swap.Tick,
// TickAfter would be calculated based on the swap size and liquidity
// analyzeSwapEvent analyzes a swap event for arbitrage opportunities
func (s *MarketScanner) analyzeSwapEvent(event EventDetails) {
s.logger.Debug(fmt.Sprintf("Analyzing swap event in pool %s", event.PoolAddress))
// Get pool data with caching
poolData, err := s.getPoolData(event.PoolAddress)
if err != nil {
s.logger.Error(fmt.Sprintf("Error getting pool data for %s: %v", event.PoolAddress, err))
return
}
// Calculate price impact
priceMovement, err := s.calculatePriceMovement(event, poolData)
if err != nil {
s.logger.Error(fmt.Sprintf("Error calculating price movement for pool %s: %v", event.PoolAddress, err))
return
}
// Check if the movement is significant
if s.isSignificantMovement(priceMovement, s.config.MinProfitThreshold) {
s.logger.Info(fmt.Sprintf("Significant price movement detected in pool %s: %+v", event.PoolAddress, priceMovement))
// Look for arbitrage opportunities
opportunities := s.findArbitrageOpportunities(event, priceMovement)
if len(opportunities) > 0 {
s.logger.Info(fmt.Sprintf("Found %d arbitrage opportunities for pool %s", len(opportunities), event.PoolAddress))
for _, opp := range opportunities {
s.logger.Info(fmt.Sprintf("Arbitrage opportunity: %+v", opp))
}
}
} else {
s.logger.Debug(fmt.Sprintf("Price movement in pool %s is not significant: %f", event.PoolAddress, priceMovement.PriceImpact))
}
}
// analyzeLiquidityEvent analyzes liquidity events (add/remove)
func (s *MarketScanner) analyzeLiquidityEvent(event EventDetails, isAdd bool) {
action := "adding"
if !isAdd {
action = "removing"
}
s.logger.Debug(fmt.Sprintf("Analyzing liquidity event (%s) in pool %s", action, event.PoolAddress))
// Update cached pool data
s.updatePoolData(event)
s.logger.Info(fmt.Sprintf("Liquidity %s event processed for pool %s", action, event.PoolAddress))
}
// analyzeNewPoolEvent analyzes new pool creation events
func (s *MarketScanner) analyzeNewPoolEvent(event EventDetails) {
s.logger.Info(fmt.Sprintf("New pool created: %s (protocol: %s)", event.PoolAddress, event.Protocol))
// Add to known pools
// In a real implementation, you would want to fetch and cache the pool data
s.logger.Debug(fmt.Sprintf("Added new pool %s to monitoring", event.PoolAddress))
}
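// One way to do the fetch-and-cache step mentioned in the comment above, reusing
// the singleflight-backed getPoolData defined later in this file; trackNewPool is
// an illustrative helper name, not an existing API.
func (s *MarketScanner) trackNewPool(event EventDetails) {
    if _, err := s.getPoolData(event.PoolAddress); err != nil {
        s.logger.Error(fmt.Sprintf("Failed to prefetch data for new pool %s: %v", event.PoolAddress, err))
    }
}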
// calculatePriceMovement calculates the price movement from a swap event
func (s *MarketScanner) calculatePriceMovement(event EventDetails, poolData *CachedData) (*PriceMovement, error) {
// Calculate the price before the swap
priceBefore := uniswap.SqrtPriceX96ToPrice(poolData.SqrtPriceX96.ToBig())
priceMovement := &PriceMovement{
Token0: event.Token0,
Token1: event.Token1,
Pool: event.PoolAddress,
Protocol: event.Protocol,
AmountIn: new(big.Int).Add(event.Amount0In, event.Amount1In),
AmountOut: new(big.Int).Add(event.Amount0Out, event.Amount1Out),
PriceBefore: priceBefore,
TickBefore: event.Tick,
Timestamp: event.Timestamp,
}
// Calculate price impact (simplified)
// In practice, this would involve more complex calculations using Uniswap V3 math
if priceMovement.AmountIn.Cmp(big.NewInt(0)) > 0 {
@@ -153,45 +220,39 @@ func (s *MarketScanner) AnalyzeSwap(swap SwapDetails) (*PriceMovement, error) {
priceImpact, _ := impact.Float64()
priceMovement.PriceImpact = priceImpact
}
return priceMovement, nil
}
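// The price-impact block above is truncated by the hunk marker. A back-of-the-envelope
// estimate consistent with its comment is impact = amountIn / (amountIn + reserve),
// treating liquidity as a rough virtual reserve. That is a simplification, not
// Uniswap V3 swap math, and estimatePriceImpact is an illustrative name.
func estimatePriceImpact(amountIn *big.Int, liquidity *uint256.Int) float64 {
    if amountIn == nil || liquidity == nil {
        return 0
    }
    in := new(big.Float).SetInt(amountIn)
    reserve := new(big.Float).SetInt(liquidity.ToBig())
    denom := new(big.Float).Add(in, reserve)
    if denom.Sign() == 0 {
        return 0
    }
    impact, _ := new(big.Float).Quo(in, denom).Float64()
    return impact
}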
// IsSignificantMovement determines if a price movement is significant enough to exploit
func (s *MarketScanner) IsSignificantMovement(movement *PriceMovement, threshold float64) bool {
// isSignificantMovement determines if a price movement is significant enough to exploit
func (s *MarketScanner) isSignificantMovement(movement *PriceMovement, threshold float64) bool {
// Check if the price impact is above our threshold
return movement.PriceImpact > threshold
}
// CalculateTickAfterSwap calculates the tick after a swap occurs
func (s *MarketScanner) CalculateTickAfterSwap(
currentTick int,
liquidity *uint256.Int,
amountIn *big.Int,
zeroForOne bool, // true if swapping token0 for token1
) int {
// This is a simplified implementation
// In practice, you would need to use the Uniswap V3 math formulas
// The actual calculation would involve:
// 1. Converting amounts to sqrt prices
// 2. Using the liquidity to determine the price movement
// 3. Calculating the new tick based on the price movement
// For now, we'll return a placeholder
return currentTick
}
// findArbitrageOpportunities looks for arbitrage opportunities based on price movements
func (s *MarketScanner) findArbitrageOpportunities(event EventDetails, movement *PriceMovement) []ArbitrageOpportunity {
s.logger.Debug(fmt.Sprintf("Searching for arbitrage opportunities for pool %s", event.PoolAddress))
// FindArbitrageOpportunities looks for arbitrage opportunities based on price movements
func (s *MarketScanner) FindArbitrageOpportunities(movements []*PriceMovement) []ArbitrageOpportunity {
opportunities := make([]ArbitrageOpportunity, 0)
// This would contain logic to:
// 1. Compare prices across different pools
// 1. Compare prices across different pools for the same token pair
// 2. Calculate potential profit after gas costs
// 3. Identify triangular arbitrage opportunities
// 4. Check if the opportunity is profitable
// For now, we'll return a mock opportunity for demonstration
opp := ArbitrageOpportunity{
Path: []string{event.Token0, event.Token1},
Pools: []string{event.PoolAddress, "0xMockPoolAddress"},
Profit: big.NewInt(1000000000000000000), // 1 ETH
GasEstimate: big.NewInt(200000000000000000), // 0.2 ETH
ROI: 5.0, // 500%
Protocol: event.Protocol,
}
opportunities = append(opportunities, opp)
return opportunities
}
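// A minimal filter for steps 2 and 4 in the comment above: keep an opportunity
// only if the estimated profit clears the gas estimate by some margin. The
// function and the minMarginWei parameter are illustrative.
func isProfitable(opp ArbitrageOpportunity, minMarginWei *big.Int) bool {
    if opp.Profit == nil || opp.GasEstimate == nil {
        return false
    }
    net := new(big.Int).Sub(opp.Profit, opp.GasEstimate)
    return net.Cmp(minMarginWei) > 0
}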
@@ -201,7 +262,7 @@ func (s *MarketScanner) Stop() {
for _, worker := range s.workers {
worker.Stop()
}
// Wait for all jobs to complete
s.wg.Wait()
}
@@ -213,6 +274,7 @@ type ArbitrageOpportunity struct {
Profit *big.Int // Estimated profit in wei
GasEstimate *big.Int // Estimated gas cost
ROI float64 // Return on investment percentage
Protocol string // DEX protocol
}
// PriceMovement represents a potential price movement
@@ -220,6 +282,7 @@ type PriceMovement struct {
Token0 string // Token address
Token1 string // Token address
Pool string // Pool address
Protocol string // DEX protocol
AmountIn *big.Int // Amount of token being swapped in
AmountOut *big.Int // Amount of token being swapped out
PriceBefore *big.Float // Price before the swap
@@ -227,10 +290,13 @@ type PriceMovement struct {
PriceImpact float64 // Calculated price impact
TickBefore int // Tick before the swap
TickAfter int // Tick after the swap (to be calculated)
Timestamp time.Time // Event timestamp
}
// SwapDetails contains details about a detected swap
type SwapDetails struct {
// EventDetails contains details about a detected event
type EventDetails struct {
Type events.EventType
Protocol string
PoolAddress string
Token0 string
Token1 string
@@ -243,4 +309,117 @@ type SwapDetails struct {
Tick int
Timestamp time.Time
TransactionHash common.Hash
}
// CachedData represents cached pool data
type CachedData struct {
Address common.Address
Token0 common.Address
Token1 common.Address
Fee int64
Liquidity *uint256.Int
SqrtPriceX96 *uint256.Int
Tick int
TickSpacing int
LastUpdated time.Time
}
// getPoolData retrieves pool data with caching
func (s *MarketScanner) getPoolData(poolAddress string) (*CachedData, error) {
// Check cache first
cacheKey := fmt.Sprintf("pool_%s", poolAddress)
s.cacheMutex.RLock()
if data, exists := s.cache[cacheKey]; exists && time.Since(data.LastUpdated) < s.cacheTTL {
s.cacheMutex.RUnlock()
s.logger.Debug(fmt.Sprintf("Cache hit for pool %s", poolAddress))
return data, nil
}
s.cacheMutex.RUnlock()
// Use singleflight to prevent duplicate requests
result, err, _ := s.cacheGroup.Do(cacheKey, func() (interface{}, error) {
return s.fetchPoolData(poolAddress)
})
if err != nil {
return nil, err
}
poolData := result.(*CachedData)
// Update cache
s.cacheMutex.Lock()
s.cache[cacheKey] = poolData
s.cacheMutex.Unlock()
s.logger.Debug(fmt.Sprintf("Fetched and cached pool data for %s", poolAddress))
return poolData, nil
}
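// The singleflight group above coalesces concurrent cache misses for the same key,
// so many goroutines asking for the same cold pool should trigger a single
// fetchPoolData call. A small illustrative exercise of that behaviour;
// warmPoolConcurrently is not part of this file.
func warmPoolConcurrently(s *MarketScanner, pool string, n int) {
    var wg sync.WaitGroup
    for i := 0; i < n; i++ {
        wg.Add(1)
        go func() {
            defer wg.Done()
            if _, err := s.getPoolData(pool); err != nil {
                s.logger.Error(fmt.Sprintf("Pool data lookup failed for %s: %v", pool, err))
            }
        }()
    }
    wg.Wait()
}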
// fetchPoolData fetches pool data from the blockchain
func (s *MarketScanner) fetchPoolData(poolAddress string) (*CachedData, error) {
s.logger.Debug(fmt.Sprintf("Fetching pool data for %s", poolAddress))
// This is a simplified implementation
// In practice, you would interact with the Ethereum blockchain to get real data
address := common.HexToAddress(poolAddress)
// For now, we'll return mock data
pool := &CachedData{
Address: address,
Token0: common.HexToAddress("0xA0b86991c6218b36c1d19D4a2e9Eb0cE3606eB48"), // USDC
Token1: common.HexToAddress("0xC02aaA39b223FE8D0A0e5C4F27eAD9083C756Cc2"), // WETH
Fee: 3000, // 0.3%
Liquidity: uint256.NewInt(1000000000000000000), // 1 ETH equivalent
SqrtPriceX96: uint256.NewInt(2505414483750470000), // Mock sqrt price
Tick: 200000, // Mock tick
TickSpacing: 60, // Tick spacing for 0.3% fee
LastUpdated: time.Now(),
}
s.logger.Debug(fmt.Sprintf("Fetched pool data for %s", poolAddress))
return pool, nil
}
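// One possible shape for a real on-chain fetch, put behind a small interface so
// the scanner does not have to depend on a specific Ethereum client. poolReader,
// its methods, and fetchPoolDataVia are hypothetical; the actual client wiring is
// outside this file.
type poolReader interface {
    Slot0(pool common.Address) (sqrtPriceX96 *uint256.Int, tick int, err error)
    Liquidity(pool common.Address) (*uint256.Int, error)
}

func fetchPoolDataVia(r poolReader, poolAddress string) (*CachedData, error) {
    addr := common.HexToAddress(poolAddress)
    sqrtPrice, tick, err := r.Slot0(addr)
    if err != nil {
        return nil, fmt.Errorf("slot0 call failed for %s: %w", poolAddress, err)
    }
    liquidity, err := r.Liquidity(addr)
    if err != nil {
        return nil, fmt.Errorf("liquidity call failed for %s: %w", poolAddress, err)
    }
    return &CachedData{
        Address:      addr,
        Liquidity:    liquidity,
        SqrtPriceX96: sqrtPrice,
        Tick:         tick,
        LastUpdated:  time.Now(),
    }, nil
}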
// updatePoolData updates cached pool data
func (s *MarketScanner) updatePoolData(event EventDetails) {
cacheKey := fmt.Sprintf("pool_%s", event.PoolAddress)
s.cacheMutex.Lock()
defer s.cacheMutex.Unlock()
// Update existing cache entry or create new one
data := &CachedData{
Address: common.HexToAddress(event.PoolAddress),
Token0: common.HexToAddress(event.Token0),
Token1: common.HexToAddress(event.Token1),
Liquidity: event.Liquidity,
SqrtPriceX96: event.SqrtPriceX96,
Tick: event.Tick,
LastUpdated: time.Now(),
}
s.cache[cacheKey] = data
s.logger.Debug(fmt.Sprintf("Updated cache for pool %s", event.PoolAddress))
}
// cleanupCache removes expired cache entries
func (s *MarketScanner) cleanupCache() {
ticker := time.NewTicker(10 * time.Minute)
defer ticker.Stop()
for {
select {
case <-ticker.C:
s.cacheMutex.Lock()
for key, data := range s.cache {
if time.Since(data.LastUpdated) > s.cacheTTL {
delete(s.cache, key)
s.logger.Debug(fmt.Sprintf("Removed expired cache entry: %s", key))
}
}
s.cacheMutex.Unlock()
}
}
}
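// The cleanup goroutine started in NewMarketScanner has no exit path, so it keeps
// running after Stop. A stoppable variant, assuming a done channel were added to
// MarketScanner and closed in Stop; cleanupCacheWithStop and pruneExpired are
// illustrative names.
func (s *MarketScanner) cleanupCacheWithStop(done <-chan struct{}) {
    ticker := time.NewTicker(10 * time.Minute)
    defer ticker.Stop()
    for {
        select {
        case <-ticker.C:
            s.pruneExpired()
        case <-done:
            return
        }
    }
}

func (s *MarketScanner) pruneExpired() {
    s.cacheMutex.Lock()
    defer s.cacheMutex.Unlock()
    for key, data := range s.cache {
        if time.Since(data.LastUpdated) > s.cacheTTL {
            delete(s.cache, key)
            s.logger.Debug(fmt.Sprintf("Removed expired cache entry: %s", key))
        }
    }
}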