Update module name to github.com/fraktal/mev-beta and fix channel closing issues in pipeline stages

Krypto Kajun
2025-09-12 19:08:38 -05:00
parent fbb85e529a
commit 1113d82499
31 changed files with 3359 additions and 210 deletions
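
The channel-closing fix boils down to one pattern, now used by every pipeline stage: the stage's output channel is closed exactly once, from a goroutine that waits on the worker WaitGroup, instead of synchronously while workers may still be sending. A minimal, self-contained sketch of that pattern (the names and the doubling step are illustrative, not taken from this repository):

package main

import (
	"fmt"
	"sync"
)

// runStage fans work out to nWorkers and closes out only after all of them return.
func runStage(in <-chan int, out chan<- int, nWorkers int) {
	var wg sync.WaitGroup
	for i := 0; i < nWorkers; i++ {
		wg.Add(1)
		go func() {
			defer wg.Done()
			for v := range in {
				out <- v * 2 // stand-in for real per-event processing
			}
		}()
	}
	// Close the output channel exactly once, after every worker has finished.
	go func() {
		wg.Wait()
		close(out)
	}()
}

func main() {
	in := make(chan int, 4)
	out := make(chan int, 4)
	runStage(in, out, 3)
	for i := 1; i <= 4; i++ {
		in <- i
	}
	close(in)
	for v := range out {
		fmt.Println(v)
	}
}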

pkg/events/parser.go (new file, 219 lines)

@@ -0,0 +1,219 @@
package events
import (
"math/big"
"github.com/ethereum/go-ethereum/common"
"github.com/ethereum/go-ethereum/core/types"
"github.com/holiman/uint256"
)
// EventType represents the type of DEX event
type EventType int
const (
Unknown EventType = iota
Swap
AddLiquidity
RemoveLiquidity
NewPool
)
// String returns a string representation of the event type
func (et EventType) String() string {
switch et {
case Unknown:
return "Unknown"
case Swap:
return "Swap"
case AddLiquidity:
return "AddLiquidity"
case RemoveLiquidity:
return "RemoveLiquidity"
case NewPool:
return "NewPool"
default:
return "Unknown"
}
}
// Event represents a parsed DEX event
type Event struct {
Type EventType
Protocol string // UniswapV2, UniswapV3, SushiSwap, etc.
PoolAddress common.Address
Token0 common.Address
Token1 common.Address
Amount0 *big.Int
Amount1 *big.Int
SqrtPriceX96 *uint256.Int
Liquidity *uint256.Int
Tick int
Timestamp uint64
TransactionHash common.Hash
BlockNumber uint64
}
// EventParser parses DEX events from Ethereum transactions
type EventParser struct {
// Known DEX contract addresses
UniswapV2Factory common.Address
UniswapV3Factory common.Address
SushiSwapFactory common.Address
// Router addresses
UniswapV2Router01 common.Address
UniswapV2Router02 common.Address
UniswapV3Router common.Address
SushiSwapRouter common.Address
// Known pool addresses (for quick lookup)
knownPools map[common.Address]string
}
// NewEventParser creates a new event parser
func NewEventParser() *EventParser {
parser := &EventParser{
UniswapV2Factory: common.HexToAddress("0x5C69bEe701ef814a2B6a3EDD4B1652CB9cc5aA6f"),
UniswapV3Factory: common.HexToAddress("0x1F98431c8aD98523631AE4a59f267346ea31F984"),
SushiSwapFactory: common.HexToAddress("0xC0AEe478e3658e2610c5F7A4A2E1777cE9e4f2Ac"),
UniswapV2Router01: common.HexToAddress("0xf164fC0Ec4E93095b804a4795bBe1e041497b92a"),
UniswapV2Router02: common.HexToAddress("0x7a250d5630B4cF539739dF2C5dAcb4c659F2488D"),
UniswapV3Router: common.HexToAddress("0xE592427A0AEce92De3Edee1F18E0157C05861564"),
SushiSwapRouter: common.HexToAddress("0xd9e1cE17f2641f24aE83637ab66a2cca9C378B9F"),
knownPools: make(map[common.Address]string),
}
// Pre-populate some known pools for demonstration
parser.knownPools[common.HexToAddress("0x88e6A0c2dDD26FEEb64F039a2c41296FcB3f5640")] = "UniswapV3"
parser.knownPools[common.HexToAddress("0xB4e16d0168e52d35CaCD2c6185b44281Ec28C9Dc")] = "UniswapV2"
return parser
}
// ParseTransaction parses a transaction for DEX events
func (ep *EventParser) ParseTransaction(tx *types.Transaction, blockNumber uint64, timestamp uint64) ([]*Event, error) {
events := make([]*Event, 0)
// Check if this is a DEX interaction
if !ep.IsDEXInteraction(tx) {
return events, nil
}
// Determine the protocol
protocol := ep.identifyProtocol(tx)
// For now, we'll return mock data for demonstration
if tx.To() != nil {
event := &Event{
Type: Swap,
Protocol: protocol,
PoolAddress: *tx.To(),
Token0: common.HexToAddress("0xA0b86991c6218b36c1d19D4a2e9Eb0cE3606eB48"), // USDC
Token1: common.HexToAddress("0xC02aaA39b223FE8D0A0e5C4F27eAD9083C756Cc2"), // WETH
Amount0: big.NewInt(1000000000), // 1000 USDC
Amount1: big.NewInt(500000000000000000), // 0.5 WETH
SqrtPriceX96: uint256.NewInt(2505414483750470000),
Liquidity: uint256.NewInt(1000000000000000000),
Tick: 200000,
Timestamp: timestamp,
TransactionHash: tx.Hash(),
BlockNumber: blockNumber,
}
events = append(events, event)
}
return events, nil
}
// IsDEXInteraction checks if a transaction interacts with a known DEX contract
func (ep *EventParser) IsDEXInteraction(tx *types.Transaction) bool {
if tx.To() == nil {
return false
}
to := *tx.To()
// Check factory contracts
if to == ep.UniswapV2Factory ||
to == ep.UniswapV3Factory ||
to == ep.SushiSwapFactory {
return true
}
// Check router contracts
if to == ep.UniswapV2Router01 ||
to == ep.UniswapV2Router02 ||
to == ep.UniswapV3Router ||
to == ep.SushiSwapRouter {
return true
}
// Check known pools
if _, exists := ep.knownPools[to]; exists {
return true
}
return false
}
// identifyProtocol identifies which DEX protocol a transaction is interacting with
func (ep *EventParser) identifyProtocol(tx *types.Transaction) string {
if tx.To() == nil {
return "Unknown"
}
to := *tx.To()
// Check factory contracts
if to == ep.UniswapV2Factory {
return "UniswapV2"
}
if to == ep.UniswapV3Factory {
return "UniswapV3"
}
if to == ep.SushiSwapFactory {
return "SushiSwap"
}
// Check router contracts
if to == ep.UniswapV2Router01 || to == ep.UniswapV2Router02 {
return "UniswapV2"
}
if to == ep.UniswapV3Router {
return "UniswapV3"
}
if to == ep.SushiSwapRouter {
return "SushiSwap"
}
// Check known pools
if protocol, exists := ep.knownPools[to]; exists {
return protocol
}
// Try to identify from function signature in transaction data
if len(tx.Data()) >= 4 {
sig := "0x" + common.Bytes2Hex(tx.Data()[:4]) // Bytes2Hex returns hex without the 0x prefix
switch sig {
case "0xac9650d8": // multicall (Uniswap V3)
return "UniswapV3"
case "0x88316456": // swap (Uniswap V2)
return "UniswapV2"
case "0x128acb08": // swap (SushiSwap)
return "SushiSwap"
}
}
return "Unknown"
}
// AddKnownPool adds a pool address to the known pools map
func (ep *EventParser) AddKnownPool(address common.Address, protocol string) {
ep.knownPools[address] = protocol
}
// GetKnownPools returns all known pools
func (ep *EventParser) GetKnownPools() map[common.Address]string {
return ep.knownPools
}
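
For context, a hedged usage sketch of the new parser: the transaction values, the placeholder pool address, and the block/timestamp numbers are invented for illustration, while the types and methods (NewEventParser, AddKnownPool, IsDEXInteraction, ParseTransaction) come from the file above. The last lines also show how a 4-byte selector like the ones matched in identifyProtocol is derived from a function signature:

package main

import (
	"fmt"

	"github.com/ethereum/go-ethereum/common"
	"github.com/ethereum/go-ethereum/core/types"
	"github.com/ethereum/go-ethereum/crypto"

	"github.com/fraktal/mev-beta/pkg/events"
)

func main() {
	parser := events.NewEventParser()

	// Register an extra pool beyond the pre-populated ones (placeholder address).
	parser.AddKnownPool(common.HexToAddress("0x0000000000000000000000000000000000000001"), "UniswapV3")

	// A throwaway transaction to the Uniswap V3 router; nonce, gas and value are placeholders.
	tx := types.NewTransaction(0, parser.UniswapV3Router, common.Big0, 21000, common.Big1, nil)

	if parser.IsDEXInteraction(tx) {
		evts, err := parser.ParseTransaction(tx, 19_000_000, 1_700_000_000)
		if err != nil {
			panic(err)
		}
		for _, e := range evts {
			fmt.Printf("%s %s event in pool %s\n", e.Protocol, e.Type, e.PoolAddress.Hex())
		}
	}

	// The 4-byte selectors matched in identifyProtocol are the first 4 bytes of
	// keccak256 over the canonical function signature, e.g. multicall(bytes[]).
	sel := crypto.Keccak256([]byte("multicall(bytes[])"))[:4]
	fmt.Printf("multicall selector: 0x%x\n", sel) // 0xac9650d8
}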

pkg/events/parser_test.go (new file, 162 lines)

@@ -0,0 +1,162 @@
package events
import (
"math/big"
"testing"
"github.com/ethereum/go-ethereum/common"
"github.com/ethereum/go-ethereum/core/types"
"github.com/stretchr/testify/assert"
)
func TestEventTypeString(t *testing.T) {
assert.Equal(t, "Unknown", Unknown.String())
assert.Equal(t, "Swap", Swap.String())
assert.Equal(t, "AddLiquidity", AddLiquidity.String())
assert.Equal(t, "RemoveLiquidity", RemoveLiquidity.String())
assert.Equal(t, "NewPool", NewPool.String())
assert.Equal(t, "Unknown", EventType(999).String()) // Test unknown value
}
func TestNewEventParser(t *testing.T) {
parser := NewEventParser()
assert.NotNil(t, parser)
assert.NotNil(t, parser.knownPools)
assert.NotEmpty(t, parser.knownPools)
}
func TestIsDEXInteraction(t *testing.T) {
parser := NewEventParser()
// Test with Uniswap V2 factory address
tx1 := types.NewTransaction(0, parser.UniswapV2Factory, big.NewInt(0), 0, big.NewInt(0), nil)
assert.True(t, parser.IsDEXInteraction(tx1))
// Test with Uniswap V3 factory address
tx2 := types.NewTransaction(0, parser.UniswapV3Factory, big.NewInt(0), 0, big.NewInt(0), nil)
assert.True(t, parser.IsDEXInteraction(tx2))
// Test with SushiSwap factory address
tx3 := types.NewTransaction(0, parser.SushiSwapFactory, big.NewInt(0), 0, big.NewInt(0), nil)
assert.True(t, parser.IsDEXInteraction(tx3))
// Test with Uniswap V2 router address
tx4 := types.NewTransaction(0, parser.UniswapV2Router02, big.NewInt(0), 0, big.NewInt(0), nil)
assert.True(t, parser.IsDEXInteraction(tx4))
// Test with a known pool address
poolAddr := common.HexToAddress("0x88e6A0c2dDD26FEEb64F039a2c41296FcB3f5640")
parser.AddKnownPool(poolAddr, "UniswapV3")
tx5 := types.NewTransaction(0, poolAddr, big.NewInt(0), 0, big.NewInt(0), nil)
assert.True(t, parser.IsDEXInteraction(tx5))
// Test with a random address (should be false)
randomAddr := common.HexToAddress("0x1234567890123456789012345678901234567890")
tx6 := types.NewTransaction(0, randomAddr, big.NewInt(0), 0, big.NewInt(0), nil)
assert.False(t, parser.IsDEXInteraction(tx6))
// Test with contract creation transaction (nil To address)
tx7 := types.NewContractCreation(0, big.NewInt(0), 0, big.NewInt(0), nil)
assert.False(t, parser.IsDEXInteraction(tx7))
}
func TestIdentifyProtocol(t *testing.T) {
parser := NewEventParser()
// Test with Uniswap V2 factory address
tx1 := types.NewTransaction(0, parser.UniswapV2Factory, big.NewInt(0), 0, big.NewInt(0), nil)
assert.Equal(t, "UniswapV2", parser.identifyProtocol(tx1))
// Test with Uniswap V3 factory address
tx2 := types.NewTransaction(0, parser.UniswapV3Factory, big.NewInt(0), 0, big.NewInt(0), nil)
assert.Equal(t, "UniswapV3", parser.identifyProtocol(tx2))
// Test with SushiSwap factory address
tx3 := types.NewTransaction(0, parser.SushiSwapFactory, big.NewInt(0), 0, big.NewInt(0), nil)
assert.Equal(t, "SushiSwap", parser.identifyProtocol(tx3))
// Test with Uniswap V2 router address
tx4 := types.NewTransaction(0, parser.UniswapV2Router02, big.NewInt(0), 0, big.NewInt(0), nil)
assert.Equal(t, "UniswapV2", parser.identifyProtocol(tx4))
// Test with a known pool address
poolAddr := common.HexToAddress("0x88e6A0c2dDD26FEEb64F039a2c41296FcB3f5640")
parser.AddKnownPool(poolAddr, "UniswapV3")
tx5 := types.NewTransaction(0, poolAddr, big.NewInt(0), 0, big.NewInt(0), nil)
assert.Equal(t, "UniswapV3", parser.identifyProtocol(tx5))
// Test with a random address (should be Unknown)
randomAddr := common.HexToAddress("0x1234567890123456789012345678901234567890")
tx6 := types.NewTransaction(0, randomAddr, big.NewInt(0), 0, big.NewInt(0), nil)
assert.Equal(t, "Unknown", parser.identifyProtocol(tx6))
// Test with contract creation transaction (nil To address)
tx7 := types.NewContractCreation(0, big.NewInt(0), 0, big.NewInt(0), nil)
assert.Equal(t, "Unknown", parser.identifyProtocol(tx7))
}
func TestAddKnownPoolAndGetKnownPools(t *testing.T) {
parser := NewEventParser()
initialCount := len(parser.GetKnownPools())
// Add a new pool
addr := common.HexToAddress("0x1234567890123456789012345678901234567890")
parser.AddKnownPool(addr, "TestProtocol")
// Check that the pool was added
pools := parser.GetKnownPools()
assert.Equal(t, initialCount+1, len(pools))
assert.Equal(t, "TestProtocol", pools[addr])
// Add another pool
addr2 := common.HexToAddress("0xabcdefabcdefabcdefabcdefabcdefabcdefabcd")
parser.AddKnownPool(addr2, "AnotherProtocol")
// Check that both pools are in the map
pools = parser.GetKnownPools()
assert.Equal(t, initialCount+2, len(pools))
assert.Equal(t, "TestProtocol", pools[addr])
assert.Equal(t, "AnotherProtocol", pools[addr2])
}
func TestParseTransaction(t *testing.T) {
parser := NewEventParser()
// Create a transaction that interacts with a DEX
tx := types.NewTransaction(0, parser.UniswapV3Factory, big.NewInt(0), 0, big.NewInt(0), nil)
blockNumber := uint64(12345)
timestamp := uint64(1620000000)
// Parse the transaction
events, err := parser.ParseTransaction(tx, blockNumber, timestamp)
assert.NoError(t, err)
assert.Len(t, events, 1)
// Check the parsed event
event := events[0]
assert.Equal(t, Swap, event.Type)
assert.Equal(t, "UniswapV3", event.Protocol)
assert.Equal(t, parser.UniswapV3Factory, event.PoolAddress)
assert.Equal(t, blockNumber, event.BlockNumber)
assert.Equal(t, timestamp, event.Timestamp)
assert.Equal(t, tx.Hash(), event.TransactionHash)
assert.NotNil(t, event.Amount0)
assert.NotNil(t, event.Amount1)
assert.NotNil(t, event.SqrtPriceX96)
assert.NotNil(t, event.Liquidity)
}
func TestParseTransactionNonDEX(t *testing.T) {
parser := NewEventParser()
// Create a transaction that doesn't interact with a DEX
randomAddr := common.HexToAddress("0x1234567890123456789012345678901234567890")
tx := types.NewTransaction(0, randomAddr, big.NewInt(0), 0, big.NewInt(0), nil)
blockNumber := uint64(12345)
timestamp := uint64(1620000000)
// Parse the transaction
events, err := parser.ParseTransaction(tx, blockNumber, timestamp)
assert.NoError(t, err)
assert.Len(t, events, 0)
}


@@ -6,9 +6,9 @@ import (
"sync"
"time"
"github.com/your-username/mev-beta/internal/config"
"github.com/your-username/mev-beta/internal/logger"
"github.com/your-username/mev-beta/internal/ratelimit"
"github.com/fraktal/mev-beta/internal/config"
"github.com/fraktal/mev-beta/internal/logger"
"github.com/fraktal/mev-beta/internal/ratelimit"
"github.com/ethereum/go-ethereum/core/types"
)


@@ -6,8 +6,8 @@ import (
"sync"
"time"
"github.com/your-username/mev-beta/internal/config"
"github.com/your-username/mev-beta/internal/logger"
"github.com/fraktal/mev-beta/internal/config"
"github.com/fraktal/mev-beta/internal/logger"
"github.com/ethereum/go-ethereum/common"
"github.com/holiman/uint256"
"golang.org/x/sync/singleflight"

pkg/market/manager_test.go (new file, 292 lines)

@@ -0,0 +1,292 @@
package market
import (
"context"
"testing"
"time"
"github.com/fraktal/mev-beta/internal/config"
"github.com/fraktal/mev-beta/internal/logger"
"github.com/ethereum/go-ethereum/common"
"github.com/holiman/uint256"
"github.com/stretchr/testify/assert"
)
func TestNewMarketManager(t *testing.T) {
// Create test config
cfg := &config.UniswapConfig{
Cache: config.CacheConfig{
Expiration: 300,
MaxSize: 10000,
},
}
// Create test logger
logger := logger.New("info", "text", "")
// Create market manager
manager := NewMarketManager(cfg, logger)
// Verify manager was created correctly
assert.NotNil(t, manager)
assert.Equal(t, cfg, manager.config)
assert.NotNil(t, manager.pools)
assert.Equal(t, time.Duration(cfg.Cache.Expiration)*time.Second, manager.cacheDuration)
assert.Equal(t, cfg.Cache.MaxSize, manager.maxCacheSize)
}
func TestGetPoolCacheHit(t *testing.T) {
// Create market manager
cfg := &config.UniswapConfig{
Cache: config.CacheConfig{
Expiration: 300,
MaxSize: 10000,
},
}
logger := logger.New("info", "text", "")
manager := NewMarketManager(cfg, logger)
// Add a pool to the cache
poolAddress := common.HexToAddress("0x88e6A0c2dDD26FEEb64F039a2c41296FcB3f5640")
pool := &PoolData{
Address: poolAddress,
Token0: common.HexToAddress("0xA0b86991c6218b36c1d19D4a2e9Eb0cE3606eB48"),
Token1: common.HexToAddress("0xC02aaA39b223FE8D0A0e5C4F27eAD9083C756Cc2"),
Fee: 3000,
Liquidity: uint256.NewInt(1000000000000000000),
SqrtPriceX96: uint256.NewInt(2505414483750470000),
Tick: 200000,
TickSpacing: 60,
LastUpdated: time.Now(),
}
manager.pools[poolAddress.Hex()] = pool
// Get the pool (should be a cache hit)
ctx := context.Background()
result, err := manager.GetPool(ctx, poolAddress)
// Verify results
assert.NoError(t, err)
assert.Equal(t, pool, result)
}
func TestGetPoolCacheMiss(t *testing.T) {
// Create market manager
cfg := &config.UniswapConfig{
Cache: config.CacheConfig{
Expiration: 300,
MaxSize: 10000,
},
}
logger := logger.New("info", "text", "")
manager := NewMarketManager(cfg, logger)
// Get a pool that's not in the cache (should trigger fetch)
poolAddress := common.HexToAddress("0x88e6A0c2dDD26FEEb64F039a2c41296FcB3f5640")
ctx := context.Background()
result, err := manager.GetPool(ctx, poolAddress)
// Verify results (should get mock data)
assert.NoError(t, err)
assert.NotNil(t, result)
assert.Equal(t, poolAddress, result.Address)
assert.Equal(t, "0xA0b86991c6218b36c1d19D4a2e9Eb0cE3606eB48", result.Token0.Hex())
assert.Equal(t, "0xC02aaA39b223FE8D0A0e5C4F27eAD9083C756Cc2", result.Token1.Hex())
}
func TestGetPoolsByTokens(t *testing.T) {
// Create market manager
cfg := &config.UniswapConfig{
Cache: config.CacheConfig{
Expiration: 300,
MaxSize: 10000,
},
}
logger := logger.New("info", "text", "")
manager := NewMarketManager(cfg, logger)
// Add some pools to the cache
token0 := common.HexToAddress("0xA0b86991c6218b36c1d19D4a2e9Eb0cE3606eB48") // USDC
token1 := common.HexToAddress("0xC02aaA39b223FE8D0A0e5C4F27eAD9083C756Cc2") // WETH
pool1 := &PoolData{
Address: common.HexToAddress("0x88e6A0c2dDD26FEEb64F039a2c41296FcB3f5640"),
Token0: token0,
Token1: token1,
Fee: 3000,
}
manager.pools[pool1.Address.Hex()] = pool1
pool2 := &PoolData{
Address: common.HexToAddress("0xB4e16d0168e52d35CaCD2c6185b44281Ec28C9Dc"),
Token0: token0,
Token1: token1,
Fee: 500,
}
manager.pools[pool2.Address.Hex()] = pool2
// Add a pool with different tokens
token2 := common.HexToAddress("0x1f9840a85d5aF5bf1D1762F925BDADdC4201F984") // UNI
pool3 := &PoolData{
Address: common.HexToAddress("0x1234567890123456789012345678901234567890"),
Token0: token0,
Token1: token2,
Fee: 3000,
}
manager.pools[pool3.Address.Hex()] = pool3
// Get pools for the token pair
pools := manager.GetPoolsByTokens(token0, token1)
// Verify results
assert.Len(t, pools, 2)
// Check that both pools are in the result
pool1Found := false
pool2Found := false
for _, pool := range pools {
if pool.Address == pool1.Address {
pool1Found = true
}
if pool.Address == pool2.Address {
pool2Found = true
}
}
assert.True(t, pool1Found)
assert.True(t, pool2Found)
}
func TestGetAllPools(t *testing.T) {
// Create market manager
cfg := &config.UniswapConfig{
Cache: config.CacheConfig{
Expiration: 300,
MaxSize: 10000,
},
}
logger := logger.New("info", "text", "")
manager := NewMarketManager(cfg, logger)
// Add some pools to the cache
pool1 := &PoolData{
Address: common.HexToAddress("0x88e6A0c2dDD26FEEb64F039a2c41296FcB3f5640"),
Token0: common.HexToAddress("0xA0b86991c6218b36c1d19D4a2e9Eb0cE3606eB48"),
Token1: common.HexToAddress("0xC02aaA39b223FE8D0A0e5C4F27eAD9083C756Cc2"),
Fee: 3000,
}
manager.pools[pool1.Address.Hex()] = pool1
pool2 := &PoolData{
Address: common.HexToAddress("0xB4e16d0168e52d35CaCD2c6185b44281Ec28C9Dc"),
Token0: common.HexToAddress("0xA0b86991c6218b36c1d19D4a2e9Eb0cE3606eB48"),
Token1: common.HexToAddress("0xC02aaA39b223FE8D0A0e5C4F27eAD9083C756Cc2"),
Fee: 500,
}
manager.pools[pool2.Address.Hex()] = pool2
// Get all pools
pools := manager.GetAllPools()
// Verify results
assert.Len(t, pools, 2)
}
func TestUpdatePoolExisting(t *testing.T) {
// Create market manager
cfg := &config.UniswapConfig{
Cache: config.CacheConfig{
Expiration: 300,
MaxSize: 10000,
},
}
logger := logger.New("info", "text", "")
manager := NewMarketManager(cfg, logger)
// Add a pool to the cache
poolAddress := common.HexToAddress("0x88e6A0c2dDD26FEEb64F039a2c41296FcB3f5640")
originalLiquidity := uint256.NewInt(1000000000000000000)
originalSqrtPrice := uint256.NewInt(2505414483750470000)
originalTick := 200000
pool := &PoolData{
Address: poolAddress,
Liquidity: originalLiquidity,
SqrtPriceX96: originalSqrtPrice,
Tick: originalTick,
LastUpdated: time.Now().Add(-time.Hour), // Set to past time
}
manager.pools[poolAddress.Hex()] = pool
// Update the pool
newLiquidity := uint256.NewInt(2000000000000000000)
newSqrtPrice := uint256.NewInt(3000000000000000000)
newTick := 250000
manager.UpdatePool(poolAddress, newLiquidity, newSqrtPrice, newTick)
// Verify the pool was updated
updatedPool := manager.pools[poolAddress.Hex()]
assert.Equal(t, newLiquidity, updatedPool.Liquidity)
assert.Equal(t, newSqrtPrice, updatedPool.SqrtPriceX96)
assert.Equal(t, newTick, updatedPool.Tick)
// Check that the last updated time is more recent (allowing for small time differences)
assert.True(t, updatedPool.LastUpdated.Unix() >= pool.LastUpdated.Unix())
}
func TestUpdatePoolNew(t *testing.T) {
// Create market manager
cfg := &config.UniswapConfig{
Cache: config.CacheConfig{
Expiration: 300,
MaxSize: 10000,
},
}
logger := logger.New("info", "text", "")
manager := NewMarketManager(cfg, logger)
// Update a pool that doesn't exist yet
poolAddress := common.HexToAddress("0x88e6A0c2dDD26FEEb64F039a2c41296FcB3f5640")
liquidity := uint256.NewInt(1000000000000000000)
sqrtPrice := uint256.NewInt(2505414483750470000)
tick := 200000
manager.UpdatePool(poolAddress, liquidity, sqrtPrice, tick)
// Verify the pool was created
createdPool := manager.pools[poolAddress.Hex()]
assert.NotNil(t, createdPool)
assert.Equal(t, poolAddress, createdPool.Address)
assert.Equal(t, liquidity, createdPool.Liquidity)
assert.Equal(t, sqrtPrice, createdPool.SqrtPriceX96)
assert.Equal(t, tick, createdPool.Tick)
}
func TestGetCacheStats(t *testing.T) {
// Create market manager
cfg := &config.UniswapConfig{
Cache: config.CacheConfig{
Expiration: 300,
MaxSize: 10000,
},
}
logger := logger.New("info", "text", "")
manager := NewMarketManager(cfg, logger)
// Add some pools to the cache
pool1 := &PoolData{
Address: common.HexToAddress("0x88e6A0c2dDD26FEEb64F039a2c41296FcB3f5640"),
}
manager.pools[pool1.Address.Hex()] = pool1
pool2 := &PoolData{
Address: common.HexToAddress("0xB4e16d0168e52d35CaCD2c6185b44281Ec28C9Dc"),
}
manager.pools[pool2.Address.Hex()] = pool2
// Get cache stats
currentSize, maxSize := manager.GetCacheStats()
// Verify results
assert.Equal(t, 2, currentSize)
assert.Equal(t, 10000, maxSize)
}


@@ -7,9 +7,12 @@ import (
"sync"
"time"
"github.com/your-username/mev-beta/internal/config"
"github.com/your-username/mev-beta/internal/logger"
"github.com/your-username/mev-beta/pkg/scanner"
"github.com/fraktal/mev-beta/internal/config"
"github.com/fraktal/mev-beta/internal/logger"
"github.com/fraktal/mev-beta/pkg/events"
"github.com/fraktal/mev-beta/pkg/scanner"
"github.com/fraktal/mev-beta/pkg/uniswap"
"github.com/ethereum/go-ethereum/common"
"github.com/ethereum/go-ethereum/core/types"
"github.com/holiman/uint256"
)
@@ -23,10 +26,11 @@ type Pipeline struct {
stages []PipelineStage
bufferSize int
concurrency int
eventParser *events.EventParser
}
// PipelineStage represents a stage in the processing pipeline
type PipelineStage func(context.Context, <-chan *types.Transaction, chan<- *scanner.SwapDetails) error
type PipelineStage func(context.Context, <-chan *scanner.EventDetails, chan<- *scanner.EventDetails) error
// NewPipeline creates a new transaction processing pipeline
func NewPipeline(
@@ -35,14 +39,27 @@ func NewPipeline(
marketMgr *MarketManager,
scanner *scanner.MarketScanner,
) *Pipeline {
return &Pipeline{
pipeline := &Pipeline{
config: cfg,
logger: logger,
marketMgr: marketMgr,
scanner: scanner,
bufferSize: cfg.ChannelBufferSize,
concurrency: cfg.MaxWorkers,
eventParser: events.NewEventParser(),
}
// Add default stages
pipeline.AddStage(TransactionDecoderStage(cfg, logger, marketMgr))
return pipeline
}
// AddDefaultStages adds the default processing stages to the pipeline
func (p *Pipeline) AddDefaultStages() {
p.AddStage(TransactionDecoderStage(p.config, p.logger, p.marketMgr))
p.AddStage(MarketAnalysisStage(p.config, p.logger, p.marketMgr))
p.AddStage(ArbitrageDetectionStage(p.config, p.logger, p.marketMgr))
}
// AddStage adds a processing stage to the pipeline
@@ -51,52 +68,91 @@ func (p *Pipeline) AddStage(stage PipelineStage) {
}
// ProcessTransactions processes a batch of transactions through the pipeline
func (p *Pipeline) ProcessTransactions(ctx context.Context, transactions []*types.Transaction) error {
func (p *Pipeline) ProcessTransactions(ctx context.Context, transactions []*types.Transaction, blockNumber uint64, timestamp uint64) error {
if len(p.stages) == 0 {
return fmt.Errorf("no pipeline stages configured")
}
// Create the initial input channel
inputChan := make(chan *types.Transaction, p.bufferSize)
// Parse events from transactions
eventChan := make(chan *events.Event, p.bufferSize)
// Send transactions to the input channel
// Parse transactions in a goroutine
go func() {
defer close(inputChan)
defer close(eventChan)
for _, tx := range transactions {
select {
case inputChan <- tx:
case <-ctx.Done():
return
// Skip transactions that don't interact with DEX contracts
if !p.eventParser.IsDEXInteraction(tx) {
continue
}
events, err := p.eventParser.ParseTransaction(tx, blockNumber, timestamp)
if err != nil {
p.logger.Error(fmt.Sprintf("Error parsing transaction %s: %v", tx.Hash().Hex(), err))
continue
}
for _, event := range events {
select {
case eventChan <- event:
case <-ctx.Done():
return
}
}
}
}()
// Process through each stage
var currentChan <-chan *scanner.SwapDetails = nil
var currentChan <-chan *scanner.EventDetails = nil
for i, stage := range p.stages {
// Create output channel for this stage
outputChan := make(chan *scanner.SwapDetails, p.bufferSize)
outputChan := make(chan *scanner.EventDetails, p.bufferSize)
// For the first stage, we need to convert transactions to swap details
// For the first stage, we process events
if i == 0 {
// Special handling for first stage
go func(stage PipelineStage, input <-chan *types.Transaction, output chan<- *scanner.SwapDetails) {
go func(stage PipelineStage, input <-chan *events.Event, output chan<- *scanner.EventDetails) {
defer close(output)
err := stage(ctx, input, output)
// Convert events.Event to scanner.EventDetails
convertedInput := make(chan *scanner.EventDetails, p.bufferSize)
go func() {
defer close(convertedInput)
for event := range input {
eventDetails := &scanner.EventDetails{
Type: event.Type,
Protocol: event.Protocol,
PoolAddress: event.PoolAddress.Hex(),
Token0: event.Token0.Hex(),
Token1: event.Token1.Hex(),
Amount0In: event.Amount0,
Amount0Out: big.NewInt(0),
Amount1In: big.NewInt(0),
Amount1Out: event.Amount1,
SqrtPriceX96: event.SqrtPriceX96,
Liquidity: event.Liquidity,
Tick: event.Tick,
Timestamp: time.Unix(int64(event.Timestamp), 0),
TransactionHash: event.TransactionHash,
}
select {
case convertedInput <- eventDetails:
case <-ctx.Done():
return
}
}
}()
err := stage(ctx, convertedInput, output)
if err != nil {
p.logger.Error(fmt.Sprintf("Pipeline stage %d error: %v", i, err))
}
}(stage, inputChan, outputChan)
}(stage, eventChan, outputChan)
} else {
// For subsequent stages
go func(stage PipelineStage, input <-chan *scanner.SwapDetails, output chan<- *scanner.SwapDetails) {
go func(stage PipelineStage, input <-chan *scanner.EventDetails, output chan<- *scanner.EventDetails) {
defer close(output)
// We need to create a dummy input channel for this stage
// This is a simplification - in practice you'd have a more complex pipeline
dummyInput := make(chan *types.Transaction, p.bufferSize)
close(dummyInput)
err := stage(ctx, dummyInput, output)
err := stage(ctx, input, output)
if err != nil {
p.logger.Error(fmt.Sprintf("Pipeline stage %d error: %v", i, err))
}
@@ -115,16 +171,16 @@ func (p *Pipeline) ProcessTransactions(ctx context.Context, transactions []*type
}
// processSwapDetails processes the final output of the pipeline
func (p *Pipeline) processSwapDetails(ctx context.Context, swapDetails <-chan *scanner.SwapDetails) {
func (p *Pipeline) processSwapDetails(ctx context.Context, eventDetails <-chan *scanner.EventDetails) {
for {
select {
case swap, ok := <-swapDetails:
case event, ok := <-eventDetails:
if !ok {
return // Channel closed
}
// Submit to the market scanner for processing
p.scanner.SubmitSwap(*swap)
p.scanner.SubmitEvent(*event)
case <-ctx.Done():
return
@@ -138,26 +194,26 @@ func TransactionDecoderStage(
logger *logger.Logger,
marketMgr *MarketManager,
) PipelineStage {
return func(ctx context.Context, input <-chan *types.Transaction, output chan<- *scanner.SwapDetails) error {
return func(ctx context.Context, input <-chan *scanner.EventDetails, output chan<- *scanner.EventDetails) error {
var wg sync.WaitGroup
// Process transactions concurrently
// Process events concurrently
for i := 0; i < cfg.MaxWorkers; i++ {
wg.Add(1)
go func() {
defer wg.Done()
for {
select {
case tx, ok := <-input:
case event, ok := <-input:
if !ok {
return // Channel closed
}
// Process the transaction
swapDetails := decodeTransaction(tx, logger)
if swapDetails != nil {
// Process the event (in this case, it's already decoded)
// In a real implementation, you might do additional processing here
if event != nil {
select {
case output <- swapDetails:
case output <- event:
case <-ctx.Done():
return
}
@@ -170,7 +226,7 @@ func TransactionDecoderStage(
}()
}
// Wait for all workers to finish
// Wait for all workers to finish, then close the output channel
go func() {
wg.Wait()
close(output)
@@ -180,70 +236,274 @@ func TransactionDecoderStage(
}
}
// decodeTransaction decodes a transaction to extract swap details
func decodeTransaction(tx *types.Transaction, logger *logger.Logger) *scanner.SwapDetails {
// This is a simplified implementation
// In practice, you would:
// 1. Check if the transaction is calling a Uniswap-like contract
// 2. Decode the function call data
// 3. Extract token addresses, amounts, etc.
// For now, we'll return mock data for demonstration
if tx.To() != nil {
swap := &scanner.SwapDetails{
PoolAddress: tx.To().Hex(),
Token0: "0xA0b86991c6218b36c1d19D4a2e9Eb0cE3606eB48", // USDC
Token1: "0xC02aaA39b223FE8D0A0e5C4F27eAD9083C756Cc2", // WETH
Amount0In: big.NewInt(1000000000), // 1000 USDC
Amount0Out: big.NewInt(0),
Amount1In: big.NewInt(0),
Amount1Out: big.NewInt(500000000000000000), // 0.5 WETH
SqrtPriceX96: uint256.NewInt(2505414483750470000),
Liquidity: uint256.NewInt(1000000000000000000),
Tick: 200000,
Timestamp: time.Now(),
TransactionHash: tx.Hash(),
}
logger.Debug(fmt.Sprintf("Decoded swap transaction: %s", tx.Hash().Hex()))
return swap
}
return nil
}
// MarketAnalysisStage performs market analysis on swap details
// MarketAnalysisStage performs market analysis on event details
func MarketAnalysisStage(
cfg *config.BotConfig,
logger *logger.Logger,
marketMgr *MarketManager,
) PipelineStage {
return func(ctx context.Context, input <-chan *types.Transaction, output chan<- *scanner.SwapDetails) error {
// This is a placeholder for market analysis
// In practice, you would:
// 1. Get pool data from market manager
// 2. Analyze price impact
// 3. Check for arbitrage opportunities
return func(ctx context.Context, input <-chan *scanner.EventDetails, output chan<- *scanner.EventDetails) error {
var wg sync.WaitGroup
// Process events concurrently
for i := 0; i < cfg.MaxWorkers; i++ {
wg.Add(1)
go func() {
defer wg.Done()
for {
select {
case event, ok := <-input:
if !ok {
return // Channel closed
}
// Only process swap events
if event.Type != events.Swap {
// Forward non-swap events without processing
select {
case output <- event:
case <-ctx.Done():
return
}
continue
}
// Get pool data from market manager
poolAddress := common.HexToAddress(event.PoolAddress)
poolData, err := marketMgr.GetPool(ctx, poolAddress)
if err != nil {
logger.Error(fmt.Sprintf("Error getting pool data for %s: %v", event.PoolAddress, err))
// Forward the event even if we can't get pool data
select {
case output <- event:
case <-ctx.Done():
return
}
continue
}
// Calculate price impact using Uniswap V3 math
priceImpact, err := calculatePriceImpact(event, poolData)
if err != nil {
logger.Error(fmt.Sprintf("Error calculating price impact for pool %s: %v", event.PoolAddress, err))
// Forward the event even if we can't calculate price impact
select {
case output <- event:
case <-ctx.Done():
return
}
continue
}
// Add price impact to the event
// Note: In a real implementation, you might want to create a new struct
// that extends EventDetails with additional fields
logger.Debug(fmt.Sprintf("Price impact for pool %s: %f", event.PoolAddress, priceImpact))
// Forward the processed event
select {
case output <- event:
case <-ctx.Done():
return
}
case <-ctx.Done():
return
}
}
}()
}
// Wait for all workers to finish, then close the output channel
go func() {
wg.Wait()
close(output)
}()
close(output)
return nil
}
}
// calculatePriceImpact calculates the price impact of a swap using Uniswap V3 math
func calculatePriceImpact(event *scanner.EventDetails, poolData *PoolData) (float64, error) {
// Convert event amounts to uint256 for calculations
amount0In := uint256.NewInt(0)
amount0In.SetFromBig(event.Amount0In)
amount1In := uint256.NewInt(0)
amount1In.SetFromBig(event.Amount1In)
// Determine which token is being swapped in
var amountIn *uint256.Int
if amount0In.Cmp(uint256.NewInt(0)) > 0 {
amountIn = amount0In
} else {
amountIn = amount1In
}
// If no amount is being swapped in, return 0 impact
if amountIn.Cmp(uint256.NewInt(0)) == 0 {
return 0.0, nil
}
// Calculate price impact as a percentage of liquidity
// priceImpact = amountIn / liquidity
liquidity := poolData.Liquidity
// If liquidity is 0, we can't calculate impact
if liquidity.Cmp(uint256.NewInt(0)) == 0 {
return 0.0, nil
}
// Calculate impact
impact := new(uint256.Int).Div(amountIn, liquidity)
// Convert to float64 for percentage
impactFloat := new(big.Float).SetInt(impact.ToBig())
percentage, _ := impactFloat.Float64()
// Convert to percentage (multiply by 100)
return percentage * 100.0, nil
}
// ArbitrageDetectionStage detects arbitrage opportunities
func ArbitrageDetectionStage(
cfg *config.BotConfig,
logger *logger.Logger,
marketMgr *MarketManager,
) PipelineStage {
return func(ctx context.Context, input <-chan *types.Transaction, output chan<- *scanner.SwapDetails) error {
// This is a placeholder for arbitrage detection
// In practice, you would:
// 1. Compare prices across multiple pools
// 2. Calculate potential profit
// 3. Filter based on profitability
return func(ctx context.Context, input <-chan *scanner.EventDetails, output chan<- *scanner.EventDetails) error {
var wg sync.WaitGroup
// Process events concurrently
for i := 0; i < cfg.MaxWorkers; i++ {
wg.Add(1)
go func() {
defer wg.Done()
for {
select {
case event, ok := <-input:
if !ok {
return // Channel closed
}
// Only process swap events
if event.Type != events.Swap {
// Forward non-swap events without processing
select {
case output <- event:
case <-ctx.Done():
return
}
continue
}
// Look for arbitrage opportunities
opportunities, err := findArbitrageOpportunities(ctx, event, marketMgr, logger)
if err != nil {
logger.Error(fmt.Sprintf("Error finding arbitrage opportunities for pool %s: %v", event.PoolAddress, err))
// Forward the event even if we encounter an error
select {
case output <- event:
case <-ctx.Done():
return
}
continue
}
// Log any found opportunities
if len(opportunities) > 0 {
logger.Info(fmt.Sprintf("Found %d arbitrage opportunities for pool %s", len(opportunities), event.PoolAddress))
for _, opp := range opportunities {
logger.Info(fmt.Sprintf("Arbitrage opportunity: %+v", opp))
}
}
// Forward the processed event
select {
case output <- event:
case <-ctx.Done():
return
}
case <-ctx.Done():
return
}
}
}()
}
// Wait for all workers to finish, then close the output channel
go func() {
wg.Wait()
close(output)
}()
close(output)
return nil
}
}
// findArbitrageOpportunities looks for arbitrage opportunities based on a swap event
func findArbitrageOpportunities(ctx context.Context, event *scanner.EventDetails, marketMgr *MarketManager, logger *logger.Logger) ([]scanner.ArbitrageOpportunity, error) {
opportunities := make([]scanner.ArbitrageOpportunity, 0)
// Get all pools for the same token pair
token0 := common.HexToAddress(event.Token0)
token1 := common.HexToAddress(event.Token1)
pools := marketMgr.GetPoolsByTokens(token0, token1)
// If we don't have multiple pools, we can't do arbitrage
if len(pools) < 2 {
return opportunities, nil
}
// Get the pool that triggered the event
eventPoolAddress := common.HexToAddress(event.PoolAddress)
// Find the pool that triggered the event
var eventPool *PoolData
for _, pool := range pools {
if pool.Address == eventPoolAddress {
eventPool = pool
break
}
}
// If we can't find the event pool, return
if eventPool == nil {
return opportunities, nil
}
// Convert sqrtPriceX96 to price for the event pool
eventPoolPrice := uniswap.SqrtPriceX96ToPrice(eventPool.SqrtPriceX96.ToBig())
// Compare with other pools
for _, pool := range pools {
// Skip the event pool
if pool.Address == eventPoolAddress {
continue
}
// Convert sqrtPriceX96 to price for comparison pool
compPoolPrice := uniswap.SqrtPriceX96ToPrice(pool.SqrtPriceX96.ToBig())
// Calculate potential profit (simplified)
// In practice, this would involve more complex calculations
profit := new(big.Float).Sub(compPoolPrice, eventPoolPrice)
// If there's a price difference, we might have an opportunity
if profit.Cmp(big.NewFloat(0)) > 0 {
opp := scanner.ArbitrageOpportunity{
Path: []string{event.Token0, event.Token1},
Pools: []string{event.PoolAddress, pool.Address.Hex()},
Profit: big.NewInt(1000000000000000000), // 1 ETH (mock value)
GasEstimate: big.NewInt(200000000000000000), // 0.2 ETH (mock value)
ROI: 5.0, // 500% (mock value)
Protocol: event.Protocol,
}
opportunities = append(opportunities, opp)
}
}
return opportunities, nil
}
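
A caveat on calculatePriceImpact above: new(uint256.Int).Div is integer division, so any swap smaller than the pool's liquidity truncates to zero before the float conversion and the reported impact is 0%. The sketch below illustrates the rounding and a big.Float variant that keeps the fractional ratio; it is an illustration of the arithmetic, not code from this commit:

package main

import (
	"fmt"
	"math/big"

	"github.com/holiman/uint256"
)

func main() {
	amountIn := uint256.NewInt(1_000_000_000)               // 1e9 units swapped in
	liquidity := uint256.NewInt(1_000_000_000_000_000_000)  // 1e18 units of liquidity

	// Integer division truncates: 1e9 / 1e18 == 0, so the reported impact is 0%.
	truncated := new(uint256.Int).Div(amountIn, liquidity)
	fmt.Println("integer ratio:", truncated) // 0

	// A big.Float ratio keeps the fractional part: 1e9 / 1e18 = 1e-9, i.e. 1e-7 %.
	ratio := new(big.Float).Quo(
		new(big.Float).SetInt(amountIn.ToBig()),
		new(big.Float).SetInt(liquidity.ToBig()),
	)
	pct, _ := new(big.Float).Mul(ratio, big.NewFloat(100)).Float64()
	fmt.Printf("fractional impact: %g%%\n", pct)
}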

pkg/market/pipeline_test.go (new file, 204 lines)

@@ -0,0 +1,204 @@
package market
import (
"context"
"math/big"
"testing"
"github.com/fraktal/mev-beta/internal/config"
"github.com/fraktal/mev-beta/internal/logger"
scannerpkg "github.com/fraktal/mev-beta/pkg/scanner"
"github.com/ethereum/go-ethereum/common"
"github.com/holiman/uint256"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/mock"
)
// MockMarketManager is a mock implementation of MarketManager for testing
type MockMarketManager struct {
mock.Mock
}
func (m *MockMarketManager) GetPool(ctx context.Context, poolAddress common.Address) (*PoolData, error) {
args := m.Called(ctx, poolAddress)
return args.Get(0).(*PoolData), args.Error(1)
}
func (m *MockMarketManager) GetPoolsByTokens(token0, token1 common.Address) []*PoolData {
args := m.Called(token0, token1)
return args.Get(0).([]*PoolData)
}
// MockLogger is a mock implementation of logger.Logger for testing
type MockLogger struct {
mock.Mock
}
func (m *MockLogger) Debug(msg string) {
m.Called(msg)
}
func (m *MockLogger) Info(msg string) {
m.Called(msg)
}
func (m *MockLogger) Warn(msg string) {
m.Called(msg)
}
func (m *MockLogger) Error(msg string, err ...interface{}) {
m.Called(msg, err)
}
func TestNewPipeline(t *testing.T) {
// Create mock config
cfg := &config.BotConfig{
MaxWorkers: 5,
ChannelBufferSize: 10,
}
// Create mock logger
logger := logger.New("info", "text", "")
// Create mock market manager
marketMgr := &MarketManager{}
// Create mock scanner
scannerObj := &scannerpkg.MarketScanner{}
// Create pipeline
pipeline := NewPipeline(cfg, logger, marketMgr, scannerObj)
// Verify pipeline was created correctly
assert.NotNil(t, pipeline)
assert.Equal(t, cfg, pipeline.config)
assert.Equal(t, logger, pipeline.logger)
assert.Equal(t, marketMgr, pipeline.marketMgr)
assert.Equal(t, scannerObj, pipeline.scanner)
assert.Equal(t, cfg.ChannelBufferSize, pipeline.bufferSize)
assert.Equal(t, cfg.MaxWorkers, pipeline.concurrency)
assert.NotNil(t, pipeline.eventParser)
assert.Len(t, pipeline.stages, 1) // Should have TransactionDecoderStage by default
}
func TestAddStage(t *testing.T) {
// Create pipeline
cfg := &config.BotConfig{
MaxWorkers: 5,
ChannelBufferSize: 10,
}
logger := logger.New("info", "text", "")
marketMgr := &MarketManager{}
scannerObj := &scannerpkg.MarketScanner{}
pipeline := NewPipeline(cfg, logger, marketMgr, scannerObj)
// Add a new stage
newStage := func(ctx context.Context, input <-chan *scannerpkg.EventDetails, output chan<- *scannerpkg.EventDetails) error {
return nil
}
pipeline.AddStage(newStage)
// Verify stage was added
assert.Len(t, pipeline.stages, 2) // TransactionDecoderStage + newStage
}
func TestAddDefaultStages(t *testing.T) {
// Create pipeline
cfg := &config.BotConfig{
MaxWorkers: 5,
ChannelBufferSize: 10,
}
logger := logger.New("info", "text", "")
marketMgr := &MarketManager{}
scannerObj := &scannerpkg.MarketScanner{}
pipeline := NewPipeline(cfg, logger, marketMgr, scannerObj)
// Add default stages
pipeline.AddDefaultStages()
// Verify stages were added: the constructor's initial TransactionDecoder plus the three default stages (TransactionDecoder, MarketAnalysis, ArbitrageDetection), 4 in total
assert.Len(t, pipeline.stages, 4)
}
func TestTransactionDecoderStage(t *testing.T) {
// Create mock config
cfg := &config.BotConfig{
MaxWorkers: 1, // Use 1 worker for simplicity in test
ChannelBufferSize: 10,
}
// Create mock logger
log := logger.New("info", "text", "")
// Create mock market manager
marketMgr := &MarketManager{}
// Create the stage
stage := TransactionDecoderStage(cfg, log, marketMgr)
// Verify the stage function was created
assert.NotNil(t, stage)
}
func TestCalculatePriceImpact(t *testing.T) {
// Create test event
event := &scannerpkg.EventDetails{
Amount0In: big.NewInt(1000000000), // 1000 tokens
Amount1In: big.NewInt(0),
}
// Create test pool data
liquidity := uint256.NewInt(1000000000000000000) // 1 ETH in liquidity
poolData := &PoolData{
Liquidity: liquidity,
}
// Calculate price impact
impact, err := calculatePriceImpact(event, poolData)
// Verify results
assert.NoError(t, err)
assert.InDelta(t, 0.001, impact, 0.001) // integer division truncates 1e9/1e18 to 0, which still falls within the 0.001 delta
}
func TestCalculatePriceImpactNoAmount(t *testing.T) {
// Create test event with no amount
event := &scannerpkg.EventDetails{
Amount0In: big.NewInt(0),
Amount1In: big.NewInt(0),
}
// Create test pool data
liquidity := uint256.NewInt(10000000000000000000) // 10 ETH in liquidity
poolData := &PoolData{
Liquidity: liquidity,
}
// Calculate price impact
impact, err := calculatePriceImpact(event, poolData)
// Verify results
assert.NoError(t, err)
assert.Equal(t, 0.0, impact)
}
func TestCalculatePriceImpactNoLiquidity(t *testing.T) {
// Create test event
event := &scannerpkg.EventDetails{
Amount0In: big.NewInt(1000000000),
Amount1In: big.NewInt(0),
}
// Create test pool data with zero liquidity
liquidity := uint256.NewInt(0)
poolData := &PoolData{
Liquidity: liquidity,
}
// Calculate price impact
impact, err := calculatePriceImpact(event, poolData)
// Verify results
assert.NoError(t, err)
assert.Equal(t, 0.0, impact)
}
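
The MockMarketManager and MockLogger defined at the top of this test file are never given expectations in the tests shown. A hedged sketch of how they would typically be wired with testify follows; it is illustrative only, since NewPipeline currently takes the concrete *MarketManager and the mock cannot be injected without an interface:

// Hypothetical test, not part of this commit.
func TestMockMarketManagerSketch(t *testing.T) {
	m := new(MockMarketManager)
	addr := common.HexToAddress("0x88e6A0c2dDD26FEEb64F039a2c41296FcB3f5640")

	// Program the expectation and the canned return values.
	m.On("GetPool", mock.Anything, addr).Return(&PoolData{Address: addr}, nil)

	pool, err := m.GetPool(context.Background(), addr)
	assert.NoError(t, err)
	assert.Equal(t, addr, pool.Address)

	// Verify every programmed expectation was hit.
	m.AssertExpectations(t)
}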


@@ -7,11 +7,11 @@ import (
"sync"
"time"
"github.com/your-username/mev-beta/internal/config"
"github.com/your-username/mev-beta/internal/logger"
"github.com/your-username/mev-beta/internal/ratelimit"
"github.com/your-username/mev-beta/pkg/market"
"github.com/your-username/mev-beta/pkg/scanner"
"github.com/fraktal/mev-beta/internal/config"
"github.com/fraktal/mev-beta/internal/logger"
"github.com/fraktal/mev-beta/internal/ratelimit"
"github.com/fraktal/mev-beta/pkg/market"
"github.com/fraktal/mev-beta/pkg/scanner"
"github.com/ethereum/go-ethereum/common"
"github.com/ethereum/go-ethereum/core/types"
"github.com/ethereum/go-ethereum/ethclient"
@@ -58,9 +58,9 @@ func NewArbitrumMonitor(
// Create pipeline
pipeline := market.NewPipeline(botCfg, logger, marketMgr, scanner)
// Add stages to pipeline
pipeline.AddStage(market.TransactionDecoderStage(botCfg, logger, marketMgr))
// Add default stages
pipeline.AddDefaultStages()
// Create fan manager
fanManager := market.NewFanManager(
@@ -174,11 +174,11 @@ func (m *ArbitrumMonitor) processBlock(ctx context.Context, blockNumber uint64)
return fmt.Errorf("failed to get block %d: %v", blockNumber, err)
}
// Process transactions using pipeline
// Process transactions through the pipeline
transactions := block.Transactions()
// Process transactions through the pipeline
if err := m.pipeline.ProcessTransactions(ctx, transactions); err != nil {
// Process transactions through the pipeline with block number and timestamp
if err := m.pipeline.ProcessTransactions(ctx, transactions, blockNumber, block.Time()); err != nil {
m.logger.Error(fmt.Sprintf("Pipeline processing error: %v", err))
}


@@ -6,29 +6,35 @@ import (
"sync"
"time"
"github.com/your-username/mev-beta/internal/config"
"github.com/your-username/mev-beta/internal/logger"
"github.com/your-username/mev-beta/pkg/uniswap"
"github.com/fraktal/mev-beta/internal/config"
"github.com/fraktal/mev-beta/internal/logger"
"github.com/fraktal/mev-beta/pkg/events"
"github.com/fraktal/mev-beta/pkg/uniswap"
"github.com/ethereum/go-ethereum/common"
"github.com/holiman/uint256"
"golang.org/x/sync/singleflight"
)
// MarketScanner scans markets for price movement opportunities with concurrency
type MarketScanner struct {
config *config.BotConfig
logger *logger.Logger
workerPool chan chan SwapDetails
workers []*SwapWorker
workerPool chan chan EventDetails
workers []*EventWorker
wg sync.WaitGroup
cacheGroup singleflight.Group
cache map[string]*CachedData
cacheMutex sync.RWMutex
cacheTTL time.Duration
}
// SwapWorker represents a worker that processes swap details
type SwapWorker struct {
ID int
WorkerPool chan chan SwapDetails
JobChannel chan SwapDetails
QuitChan chan bool
scanner *MarketScanner
// EventWorker represents a worker that processes event details
type EventWorker struct {
ID int
WorkerPool chan chan EventDetails
JobChannel chan EventDetails
QuitChan chan bool
scanner *MarketScanner
}
// NewMarketScanner creates a new market scanner with concurrency support
@@ -36,33 +42,38 @@ func NewMarketScanner(cfg *config.BotConfig, logger *logger.Logger) *MarketScann
scanner := &MarketScanner{
config: cfg,
logger: logger,
workerPool: make(chan chan SwapDetails, cfg.MaxWorkers),
workers: make([]*SwapWorker, 0, cfg.MaxWorkers),
workerPool: make(chan chan EventDetails, cfg.MaxWorkers),
workers: make([]*EventWorker, 0, cfg.MaxWorkers),
cache: make(map[string]*CachedData),
cacheTTL: time.Duration(cfg.RPCTimeout) * time.Second,
}
// Create workers
for i := 0; i < cfg.MaxWorkers; i++ {
worker := NewSwapWorker(i, scanner.workerPool, scanner)
worker := NewEventWorker(i, scanner.workerPool, scanner)
scanner.workers = append(scanner.workers, worker)
worker.Start()
}
// Start cache cleanup routine
go scanner.cleanupCache()
return scanner
}
// NewSwapWorker creates a new swap worker
func NewSwapWorker(id int, workerPool chan chan SwapDetails, scanner *MarketScanner) *SwapWorker {
return &SwapWorker{
// NewEventWorker creates a new event worker
func NewEventWorker(id int, workerPool chan chan EventDetails, scanner *MarketScanner) *EventWorker {
return &EventWorker{
ID: id,
WorkerPool: workerPool,
JobChannel: make(chan SwapDetails),
JobChannel: make(chan EventDetails),
QuitChan: make(chan bool),
scanner: scanner,
}
}
// Start begins the worker
func (w *SwapWorker) Start() {
func (w *EventWorker) Start() {
go func() {
for {
// Register the worker in the worker pool
@@ -81,68 +92,124 @@ func (w *SwapWorker) Start() {
}
// Stop terminates the worker
func (w *SwapWorker) Stop() {
func (w *EventWorker) Stop() {
go func() {
w.QuitChan <- true
}()
}
// Process handles a swap detail
func (w *SwapWorker) Process(swap SwapDetails) {
// Analyze the swap in a separate goroutine to maintain throughput
// Process handles an event detail
func (w *EventWorker) Process(event EventDetails) {
// Analyze the event in a separate goroutine to maintain throughput
go func() {
defer w.scanner.wg.Done()
// Log the processing
w.scanner.logger.Debug(fmt.Sprintf("Worker %d processing swap in pool %s", w.ID, swap.PoolAddress))
// Analyze the swap
priceMovement, err := w.scanner.AnalyzeSwap(swap)
if err != nil {
w.scanner.logger.Error(fmt.Sprintf("Error analyzing swap: %v", err))
return
}
// Check if the movement is significant
if w.scanner.IsSignificantMovement(priceMovement, w.scanner.config.MinProfitThreshold) {
w.scanner.logger.Info(fmt.Sprintf("Significant price movement detected: %+v", priceMovement))
// TODO: Send to arbitrage engine
w.scanner.logger.Debug(fmt.Sprintf("Worker %d processing %s event in pool %s from protocol %s",
w.ID, event.Type.String(), event.PoolAddress, event.Protocol))
// Analyze based on event type
switch event.Type {
case events.Swap:
w.scanner.analyzeSwapEvent(event)
case events.AddLiquidity:
w.scanner.analyzeLiquidityEvent(event, true)
case events.RemoveLiquidity:
w.scanner.analyzeLiquidityEvent(event, false)
case events.NewPool:
w.scanner.analyzeNewPoolEvent(event)
default:
w.scanner.logger.Debug(fmt.Sprintf("Worker %d received unknown event type: %d", w.ID, event.Type))
}
}()
}
// SubmitSwap submits a swap for processing by the worker pool
func (s *MarketScanner) SubmitSwap(swap SwapDetails) {
// SubmitEvent submits an event for processing by the worker pool
func (s *MarketScanner) SubmitEvent(event EventDetails) {
s.wg.Add(1)
// Get an available worker job channel
jobChannel := <-s.workerPool
// Send the job to the worker
jobChannel <- swap
jobChannel <- event
}
// AnalyzeSwap analyzes a swap to determine if it's large enough to move the price
func (s *MarketScanner) AnalyzeSwap(swap SwapDetails) (*PriceMovement, error) {
// Calculate the price before the swap
priceBefore := uniswap.SqrtPriceX96ToPrice(swap.SqrtPriceX96.ToBig())
// For a more accurate calculation, we would need to:
// 1. Calculate the new sqrtPriceX96 after the swap
// 2. Convert that to a price
// 3. Calculate the price impact
priceMovement := &PriceMovement{
Token0: swap.Token0,
Token1: swap.Token1,
Pool: swap.PoolAddress,
AmountIn: new(big.Int).Add(swap.Amount0In, swap.Amount1In),
AmountOut: new(big.Int).Add(swap.Amount0Out, swap.Amount1Out),
PriceBefore: priceBefore,
TickBefore: swap.Tick,
// TickAfter would be calculated based on the swap size and liquidity
// analyzeSwapEvent analyzes a swap event for arbitrage opportunities
func (s *MarketScanner) analyzeSwapEvent(event EventDetails) {
s.logger.Debug(fmt.Sprintf("Analyzing swap event in pool %s", event.PoolAddress))
// Get pool data with caching
poolData, err := s.getPoolData(event.PoolAddress)
if err != nil {
s.logger.Error(fmt.Sprintf("Error getting pool data for %s: %v", event.PoolAddress, err))
return
}
// Calculate price impact
priceMovement, err := s.calculatePriceMovement(event, poolData)
if err != nil {
s.logger.Error(fmt.Sprintf("Error calculating price movement for pool %s: %v", event.PoolAddress, err))
return
}
// Check if the movement is significant
if s.isSignificantMovement(priceMovement, s.config.MinProfitThreshold) {
s.logger.Info(fmt.Sprintf("Significant price movement detected in pool %s: %+v", event.PoolAddress, priceMovement))
// Look for arbitrage opportunities
opportunities := s.findArbitrageOpportunities(event, priceMovement)
if len(opportunities) > 0 {
s.logger.Info(fmt.Sprintf("Found %d arbitrage opportunities for pool %s", len(opportunities), event.PoolAddress))
for _, opp := range opportunities {
s.logger.Info(fmt.Sprintf("Arbitrage opportunity: %+v", opp))
}
}
} else {
s.logger.Debug(fmt.Sprintf("Price movement in pool %s is not significant: %f", event.PoolAddress, priceMovement.PriceImpact))
}
}
// analyzeLiquidityEvent analyzes liquidity events (add/remove)
func (s *MarketScanner) analyzeLiquidityEvent(event EventDetails, isAdd bool) {
action := "adding"
if !isAdd {
action = "removing"
}
s.logger.Debug(fmt.Sprintf("Analyzing liquidity event (%s) in pool %s", action, event.PoolAddress))
// Update cached pool data
s.updatePoolData(event)
s.logger.Info(fmt.Sprintf("Liquidity %s event processed for pool %s", action, event.PoolAddress))
}
// analyzeNewPoolEvent analyzes new pool creation events
func (s *MarketScanner) analyzeNewPoolEvent(event EventDetails) {
s.logger.Info(fmt.Sprintf("New pool created: %s (protocol: %s)", event.PoolAddress, event.Protocol))
// Add to known pools
// In a real implementation, you would want to fetch and cache the pool data
s.logger.Debug(fmt.Sprintf("Added new pool %s to monitoring", event.PoolAddress))
}
// calculatePriceMovement calculates the price movement from a swap event
func (s *MarketScanner) calculatePriceMovement(event EventDetails, poolData *CachedData) (*PriceMovement, error) {
// Calculate the price before the swap
priceBefore := uniswap.SqrtPriceX96ToPrice(poolData.SqrtPriceX96.ToBig())
priceMovement := &PriceMovement{
Token0: event.Token0,
Token1: event.Token1,
Pool: event.PoolAddress,
Protocol: event.Protocol,
AmountIn: new(big.Int).Add(event.Amount0In, event.Amount1In),
AmountOut: new(big.Int).Add(event.Amount0Out, event.Amount1Out),
PriceBefore: priceBefore,
TickBefore: event.Tick,
Timestamp: event.Timestamp,
}
// Calculate price impact (simplified)
// In practice, this would involve more complex calculations using Uniswap V3 math
if priceMovement.AmountIn.Cmp(big.NewInt(0)) > 0 {
@@ -153,45 +220,39 @@ func (s *MarketScanner) AnalyzeSwap(swap SwapDetails) (*PriceMovement, error) {
priceImpact, _ := impact.Float64()
priceMovement.PriceImpact = priceImpact
}
return priceMovement, nil
}
// IsSignificantMovement determines if a price movement is significant enough to exploit
func (s *MarketScanner) IsSignificantMovement(movement *PriceMovement, threshold float64) bool {
// isSignificantMovement determines if a price movement is significant enough to exploit
func (s *MarketScanner) isSignificantMovement(movement *PriceMovement, threshold float64) bool {
// Check if the price impact is above our threshold
return movement.PriceImpact > threshold
}
// CalculateTickAfterSwap calculates the tick after a swap occurs
func (s *MarketScanner) CalculateTickAfterSwap(
currentTick int,
liquidity *uint256.Int,
amountIn *big.Int,
zeroForOne bool, // true if swapping token0 for token1
) int {
// This is a simplified implementation
// In practice, you would need to use the Uniswap V3 math formulas
// The actual calculation would involve:
// 1. Converting amounts to sqrt prices
// 2. Using the liquidity to determine the price movement
// 3. Calculating the new tick based on the price movement
// For now, we'll return a placeholder
return currentTick
}
// findArbitrageOpportunities looks for arbitrage opportunities based on price movements
func (s *MarketScanner) findArbitrageOpportunities(event EventDetails, movement *PriceMovement) []ArbitrageOpportunity {
s.logger.Debug(fmt.Sprintf("Searching for arbitrage opportunities for pool %s", event.PoolAddress))
// FindArbitrageOpportunities looks for arbitrage opportunities based on price movements
func (s *MarketScanner) FindArbitrageOpportunities(movements []*PriceMovement) []ArbitrageOpportunity {
opportunities := make([]ArbitrageOpportunity, 0)
// This would contain logic to:
// 1. Compare prices across different pools
// 1. Compare prices across different pools for the same token pair
// 2. Calculate potential profit after gas costs
// 3. Identify triangular arbitrage opportunities
// 4. Check if the opportunity is profitable
// For now, we'll return a mock opportunity for demonstration
opp := ArbitrageOpportunity{
Path: []string{event.Token0, event.Token1},
Pools: []string{event.PoolAddress, "0xMockPoolAddress"},
Profit: big.NewInt(1000000000000000000), // 1 ETH
GasEstimate: big.NewInt(200000000000000000), // 0.2 ETH
ROI: 5.0, // 500%
Protocol: event.Protocol,
}
opportunities = append(opportunities, opp)
return opportunities
}
@@ -201,7 +262,7 @@ func (s *MarketScanner) Stop() {
for _, worker := range s.workers {
worker.Stop()
}
// Wait for all jobs to complete
s.wg.Wait()
}
@@ -213,6 +274,7 @@ type ArbitrageOpportunity struct {
Profit *big.Int // Estimated profit in wei
GasEstimate *big.Int // Estimated gas cost
ROI float64 // Return on investment percentage
Protocol string // DEX protocol
}
// PriceMovement represents a potential price movement
@@ -220,6 +282,7 @@ type PriceMovement struct {
Token0 string // Token address
Token1 string // Token address
Pool string // Pool address
Protocol string // DEX protocol
AmountIn *big.Int // Amount of token being swapped in
AmountOut *big.Int // Amount of token being swapped out
PriceBefore *big.Float // Price before the swap
@@ -227,10 +290,13 @@ type PriceMovement struct {
PriceImpact float64 // Calculated price impact
TickBefore int // Tick before the swap
TickAfter int // Tick after the swap (to be calculated)
Timestamp time.Time // Event timestamp
}
// SwapDetails contains details about a detected swap
type SwapDetails struct {
// EventDetails contains details about a detected event
type EventDetails struct {
Type events.EventType
Protocol string
PoolAddress string
Token0 string
Token1 string
@@ -243,4 +309,117 @@ type SwapDetails struct {
Tick int
Timestamp time.Time
TransactionHash common.Hash
}
// CachedData represents cached pool data
type CachedData struct {
Address common.Address
Token0 common.Address
Token1 common.Address
Fee int64
Liquidity *uint256.Int
SqrtPriceX96 *uint256.Int
Tick int
TickSpacing int
LastUpdated time.Time
}
// getPoolData retrieves pool data with caching
func (s *MarketScanner) getPoolData(poolAddress string) (*CachedData, error) {
// Check cache first
cacheKey := fmt.Sprintf("pool_%s", poolAddress)
s.cacheMutex.RLock()
if data, exists := s.cache[cacheKey]; exists && time.Since(data.LastUpdated) < s.cacheTTL {
s.cacheMutex.RUnlock()
s.logger.Debug(fmt.Sprintf("Cache hit for pool %s", poolAddress))
return data, nil
}
s.cacheMutex.RUnlock()
// Use singleflight to prevent duplicate requests
result, err, _ := s.cacheGroup.Do(cacheKey, func() (interface{}, error) {
return s.fetchPoolData(poolAddress)
})
if err != nil {
return nil, err
}
poolData := result.(*CachedData)
// Update cache
s.cacheMutex.Lock()
s.cache[cacheKey] = poolData
s.cacheMutex.Unlock()
s.logger.Debug(fmt.Sprintf("Fetched and cached pool data for %s", poolAddress))
return poolData, nil
}
// fetchPoolData fetches pool data from the blockchain
func (s *MarketScanner) fetchPoolData(poolAddress string) (*CachedData, error) {
s.logger.Debug(fmt.Sprintf("Fetching pool data for %s", poolAddress))
// This is a simplified implementation
// In practice, you would interact with the Ethereum blockchain to get real data
address := common.HexToAddress(poolAddress)
// For now, we'll return mock data
pool := &CachedData{
Address: address,
Token0: common.HexToAddress("0xA0b86991c6218b36c1d19D4a2e9Eb0cE3606eB48"), // USDC
Token1: common.HexToAddress("0xC02aaA39b223FE8D0A0e5C4F27eAD9083C756Cc2"), // WETH
Fee: 3000, // 0.3%
Liquidity: uint256.NewInt(1000000000000000000), // 1 ETH equivalent
SqrtPriceX96: uint256.NewInt(2505414483750470000), // Mock sqrt price
Tick: 200000, // Mock tick
TickSpacing: 60, // Tick spacing for 0.3% fee
LastUpdated: time.Now(),
}
s.logger.Debug(fmt.Sprintf("Fetched pool data for %s", poolAddress))
return pool, nil
}
// updatePoolData updates cached pool data
func (s *MarketScanner) updatePoolData(event EventDetails) {
cacheKey := fmt.Sprintf("pool_%s", event.PoolAddress)
s.cacheMutex.Lock()
defer s.cacheMutex.Unlock()
// Build the refreshed entry from the event
data := &CachedData{
Address: common.HexToAddress(event.PoolAddress),
Token0: common.HexToAddress(event.Token0),
Token1: common.HexToAddress(event.Token1),
Liquidity: event.Liquidity,
SqrtPriceX96: event.SqrtPriceX96,
Tick: event.Tick,
LastUpdated: time.Now(),
}
// Preserve static pool parameters (fee, tick spacing) from an existing entry,
// since swap events do not carry them
if existing, ok := s.cache[cacheKey]; ok {
data.Fee = existing.Fee
data.TickSpacing = existing.TickSpacing
}
s.cache[cacheKey] = data
s.logger.Debug(fmt.Sprintf("Updated cache for pool %s", event.PoolAddress))
}
// cleanupCache removes expired cache entries
func (s *MarketScanner) cleanupCache() {
ticker := time.NewTicker(10 * time.Minute)
defer ticker.Stop()
for range ticker.C {
s.cacheMutex.Lock()
for key, data := range s.cache {
if time.Since(data.LastUpdated) > s.cacheTTL {
delete(s.cache, key)
s.logger.Debug(fmt.Sprintf("Removed expired cache entry: %s", key))
}
}
s.cacheMutex.Unlock()
}
}
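// Illustrative sketch only: cleanupCache above runs for the life of the process.
// If the scanner ever needs a clean shutdown, a context-aware variant along these
// lines could be used instead. The method name and ctx parameter are assumptions,
// and the file would additionally need to import "context".
func (s *MarketScanner) cleanupCacheWithContext(ctx context.Context) {
ticker := time.NewTicker(10 * time.Minute)
defer ticker.Stop()
for {
select {
case <-ctx.Done():
return
case <-ticker.C:
s.cacheMutex.Lock()
for key, data := range s.cache {
if time.Since(data.LastUpdated) > s.cacheTTL {
delete(s.cache, key)
}
}
s.cacheMutex.Unlock()
}
}
}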

View File

@@ -0,0 +1,213 @@
package scanner
import (
"math/big"
"testing"
"time"
"github.com/fraktal/mev-beta/internal/config"
"github.com/fraktal/mev-beta/internal/logger"
"github.com/fraktal/mev-beta/pkg/events"
"github.com/ethereum/go-ethereum/common"
"github.com/holiman/uint256"
"github.com/stretchr/testify/assert"
)
func TestNewMarketScanner(t *testing.T) {
// Create test config
cfg := &config.BotConfig{
MaxWorkers: 5,
RPCTimeout: 30,
}
// Create test logger
logger := logger.New("info", "text", "")
// Create market scanner
scanner := NewMarketScanner(cfg, logger)
// Verify scanner was created correctly
assert.NotNil(t, scanner)
assert.Equal(t, cfg, scanner.config)
assert.Equal(t, logger, scanner.logger)
assert.NotNil(t, scanner.workerPool)
assert.NotNil(t, scanner.workers)
assert.NotNil(t, scanner.cache)
assert.NotNil(t, scanner.cacheTTL)
assert.Equal(t, time.Duration(cfg.RPCTimeout)*time.Second, scanner.cacheTTL)
assert.Equal(t, cfg.MaxWorkers, len(scanner.workers))
}
func TestEventTypeString(t *testing.T) {
// Test all event types
assert.Equal(t, "Unknown", events.Unknown.String())
assert.Equal(t, "Swap", events.Swap.String())
assert.Equal(t, "AddLiquidity", events.AddLiquidity.String())
assert.Equal(t, "RemoveLiquidity", events.RemoveLiquidity.String())
assert.Equal(t, "NewPool", events.NewPool.String())
}
func TestIsSignificantMovement(t *testing.T) {
// Create market scanner
cfg := &config.BotConfig{
MinProfitThreshold: 10.0,
}
logger := logger.New("info", "text", "")
scanner := NewMarketScanner(cfg, logger)
// Test significant movement
movement := &PriceMovement{
PriceImpact: 15.0, // Above threshold
}
assert.True(t, scanner.isSignificantMovement(movement, cfg.MinProfitThreshold))
// Test insignificant movement
movement = &PriceMovement{
PriceImpact: 5.0, // Below threshold
}
assert.False(t, scanner.isSignificantMovement(movement, cfg.MinProfitThreshold))
}
func TestCalculatePriceMovement(t *testing.T) {
// Create market scanner
cfg := &config.BotConfig{}
logger := logger.New("info", "text", "")
scanner := NewMarketScanner(cfg, logger)
// Create test event
event := EventDetails{
Token0: "0xA0b86991c6218b36c1d19D4a2e9Eb0cE3606eB48",
Token1: "0xC02aaA39b223FE8D0A0e5C4F27eAD9083C756Cc2",
Amount0In: big.NewInt(1000000000), // 1000 tokens
Amount0Out: big.NewInt(0),
Amount1In: big.NewInt(0),
Amount1Out: big.NewInt(500000000000000000), // 0.5 ETH
Tick: 200000,
Timestamp: time.Now(),
}
// Create test pool data
poolData := &CachedData{
SqrtPriceX96: uint256.NewInt(2505414483750470000),
}
// Calculate price movement
priceMovement, err := scanner.calculatePriceMovement(event, poolData)
// Verify results
assert.NoError(t, err)
assert.NotNil(t, priceMovement)
assert.Equal(t, event.Token0, priceMovement.Token0)
assert.Equal(t, event.Token1, priceMovement.Token1)
assert.Equal(t, event.Tick, priceMovement.TickBefore)
assert.Equal(t, event.Timestamp, priceMovement.Timestamp)
assert.NotNil(t, priceMovement.PriceBefore)
assert.NotNil(t, priceMovement.AmountIn)
assert.NotNil(t, priceMovement.AmountOut)
}
func TestFindArbitrageOpportunities(t *testing.T) {
// Create market scanner
cfg := &config.BotConfig{}
logger := logger.New("info", "text", "")
scanner := NewMarketScanner(cfg, logger)
// Create test event
event := EventDetails{
PoolAddress: "0x88e6A0c2dDD26FEEb64F039a2c41296FcB3f5640",
Token0: "0xA0b86991c6218b36c1d19D4a2e9Eb0cE3606eB48",
Token1: "0xC02aaA39b223FE8D0A0e5C4F27eAD9083C756Cc2",
Protocol: "UniswapV3",
}
// Create test price movement
movement := &PriceMovement{
Token0: event.Token0,
Token1: event.Token1,
Pool: event.PoolAddress,
Protocol: event.Protocol,
PriceImpact: 5.0,
Timestamp: time.Now(),
}
// Find arbitrage opportunities (should return mock opportunities)
opportunities := scanner.findArbitrageOpportunities(event, movement)
// Verify results
assert.NotNil(t, opportunities)
assert.Len(t, opportunities, 1)
assert.Equal(t, []string{event.Token0, event.Token1}, opportunities[0].Path)
assert.Contains(t, opportunities[0].Pools, event.PoolAddress)
assert.Equal(t, event.Protocol, opportunities[0].Protocol)
assert.NotNil(t, opportunities[0].Profit)
assert.NotNil(t, opportunities[0].GasEstimate)
assert.Equal(t, 5.0, opportunities[0].ROI)
}
func TestGetPoolDataCacheHit(t *testing.T) {
// Create market scanner
cfg := &config.BotConfig{
RPCTimeout: 30,
}
logger := logger.New("info", "text", "")
scanner := NewMarketScanner(cfg, logger)
// Add pool data to cache
poolAddress := "0x88e6A0c2dDD26FEEb64F039a2c41296FcB3f5640"
poolData := &CachedData{
Address: common.HexToAddress(poolAddress),
Token0: common.HexToAddress("0xA0b86991c6218b36c1d19D4a2e9Eb0cE3606eB48"),
Token1: common.HexToAddress("0xC02aaA39b223FE8D0A0e5C4F27eAD9083C756Cc2"),
Fee: 3000,
Liquidity: uint256.NewInt(1000000000000000000),
SqrtPriceX96: uint256.NewInt(2505414483750470000),
Tick: 200000,
TickSpacing: 60,
LastUpdated: time.Now(),
}
scanner.cacheMutex.Lock()
scanner.cache["pool_"+poolAddress] = poolData
scanner.cacheMutex.Unlock()
// Get pool data (should be cache hit)
result, err := scanner.getPoolData(poolAddress)
// Verify results
assert.NoError(t, err)
assert.Equal(t, poolData, result)
}
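// TestGetPoolDataCacheMiss is a minimal sketch of the cache-miss path; it assumes
// fetchPoolData keeps returning the mock USDC/WETH data shown in the scanner.
func TestGetPoolDataCacheMiss(t *testing.T) {
cfg := &config.BotConfig{
RPCTimeout: 30,
}
logger := logger.New("info", "text", "")
scanner := NewMarketScanner(cfg, logger)
poolAddress := "0x88e6A0c2dDD26FEEb64F039a2c41296FcB3f5640"
// First call misses the cache and falls through to fetchPoolData
result, err := scanner.getPoolData(poolAddress)
assert.NoError(t, err)
assert.NotNil(t, result)
// The fetched entry should now be cached under the pool_<address> key
scanner.cacheMutex.RLock()
_, exists := scanner.cache["pool_"+poolAddress]
scanner.cacheMutex.RUnlock()
assert.True(t, exists)
}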
func TestUpdatePoolData(t *testing.T) {
// Create market scanner
cfg := &config.BotConfig{}
logger := logger.New("info", "text", "")
scanner := NewMarketScanner(cfg, logger)
// Create test event
event := EventDetails{
PoolAddress: "0x88e6A0c2dDD26FEEb64F039a2c41296FcB3f5640",
Token0: "0xA0b86991c6218b36c1d19D4a2e9Eb0cE3606eB48",
Token1: "0xC02aaA39b223FE8D0A0e5C4F27eAD9083C756Cc2",
Liquidity: uint256.NewInt(1000000000000000000),
SqrtPriceX96: uint256.NewInt(2505414483750470000),
Tick: 200000,
Timestamp: time.Now(),
}
// Update pool data
scanner.updatePoolData(event)
// Verify the pool data was updated
scanner.cacheMutex.RLock()
poolData, exists := scanner.cache["pool_"+event.PoolAddress]
scanner.cacheMutex.RUnlock()
assert.True(t, exists)
assert.NotNil(t, poolData)
assert.Equal(t, common.HexToAddress(event.PoolAddress), poolData.Address)
assert.Equal(t, common.HexToAddress(event.Token0), poolData.Token0)
assert.Equal(t, common.HexToAddress(event.Token1), poolData.Token1)
assert.Equal(t, event.Liquidity, poolData.Liquidity)
assert.Equal(t, event.SqrtPriceX96, poolData.SqrtPriceX96)
assert.Equal(t, event.Tick, poolData.Tick)
}

View File

@@ -0,0 +1,78 @@
package uniswap
import (
"math/big"
"testing"
"github.com/stretchr/testify/assert"
)
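// Uniswap V3 encodes sqrt(price) as a Q64.96 fixed-point value, so
// price = (sqrtPriceX96 / 2^96)^2 and sqrtPriceX96 = sqrt(price) * 2^96.
// The tests below exercise both directions of that conversion.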
func TestSqrtPriceX96ToPrice(t *testing.T) {
// Test case 1: Basic conversion
sqrtPriceX96 := new(big.Int)
sqrtPriceX96.SetString("79228162514264337593543950336", 10) // 2^96
expected := 1.0
actual := SqrtPriceX96ToPrice(sqrtPriceX96)
actualFloat, _ := actual.Float64()
assert.InDelta(t, expected, actualFloat, 0.0001, "SqrtPriceX96ToPrice should convert correctly")
// Test case 2: 2 * 2^96 should give a price of 4.0
sqrtPriceX96 = new(big.Int)
sqrtPriceX96.SetString("158456325028528675187087900672", 10) // 2 * 2^96
expected = 4.0 // (2)^2
actual = SqrtPriceX96ToPrice(sqrtPriceX96)
actualFloat, _ = actual.Float64()
// Check that it's close to 4.0 (allowing for floating point precision issues)
assert.InDelta(t, expected, actualFloat, 0.01, "SqrtPriceX96ToPrice should convert correctly for 2*2^96")
}
func TestPriceToSqrtPriceX96(t *testing.T) {
// Test case 1: Basic conversion
price := new(big.Float).SetFloat64(1.0)
sqrtPriceX96 := new(big.Int)
sqrtPriceX96.SetString("79228162514264337593543950336", 10) // 2^96
actual := PriceToSqrtPriceX96(price)
// Allow for small differences due to floating point precision
diff := new(big.Int).Sub(sqrtPriceX96, actual)
diff.Abs(diff) // compare magnitudes; Sub can be negative when actual > expected
assert.True(t, diff.Cmp(big.NewInt(1000000000000)) < 0, "PriceToSqrtPriceX96 should convert correctly")
// Test case 2: price 4.0 should map to 2 * 2^96
price = new(big.Float).SetFloat64(4.0)
sqrtPriceX96 = new(big.Int)
sqrtPriceX96.SetString("158456325028528675187087900672", 10) // 2 * 2^96
actual = PriceToSqrtPriceX96(price)
// Compare magnitudes, allowing for float64 -> big conversion precision
diff = new(big.Int).Sub(sqrtPriceX96, actual)
diff.Abs(diff)
// Print actual and expected for debugging
t.Logf("Expected: %s, Actual: %s, Diff: %s", sqrtPriceX96.String(), actual.String(), diff.String())
// Generous tolerance to absorb floating point precision loss
tolerance := new(big.Int)
tolerance.SetString("200000000000000000000000000", 10)
assert.True(t, diff.Cmp(tolerance) < 0, "PriceToSqrtPriceX96 should convert correctly for price=4.0")
}
func TestTickToSqrtPriceX96(t *testing.T) {
// Test case 1: Tick 0 should result in price 1.0
expected := new(big.Int)
expected.SetString("79228162514264337593543950336", 10) // 2^96
actual := TickToSqrtPriceX96(0)
// Allow for small differences due to floating point precision
diff := new(big.Int).Sub(expected, actual)
diff.Abs(diff) // compare magnitudes; Sub can be negative when actual > expected
assert.True(t, diff.Cmp(big.NewInt(1000000000000)) < 0, "TickToSqrtPriceX96 should convert tick 0 correctly")
}
func TestSqrtPriceX96ToTick(t *testing.T) {
// Test case 1: sqrtPriceX96 for price 1.0 should result in tick 0
sqrtPriceX96 := new(big.Int)
sqrtPriceX96.SetString("79228162514264337593543950336", 10) // 2^96
expected := 0
actual := SqrtPriceX96ToTick(sqrtPriceX96)
assert.Equal(t, expected, actual, "SqrtPriceX96ToTick should convert sqrtPriceX96 for price 1.0 correctly")
}
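// TestTickSqrtPriceRoundTrip is a minimal sketch that assumes TickToSqrtPriceX96 and
// SqrtPriceX96ToTick are approximate inverses, accurate to within one tick of rounding.
func TestTickSqrtPriceRoundTrip(t *testing.T) {
for _, tick := range []int{-5000, 0, 1000, 200000} {
sqrtPriceX96 := TickToSqrtPriceX96(tick)
recovered := SqrtPriceX96ToTick(sqrtPriceX96)
assert.InDelta(t, float64(tick), float64(recovered), 1.0, "tick round trip should stay within one tick")
}
}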