mev-beta/pkg/parsers/swap_logger.go
Administrator 37c91144b2
Some checks failed: all V2 CI/CD Pipeline checks for this push were cancelled (Pre-Flight Checks, Build & Dependencies, Code Quality & Linting, Unit Tests (100% Coverage Required), Integration Tests, Performance Benchmarks, Decimal Precision Validation, Modularity Validation, Final Validation Summary).
feat(parsers): implement UniswapV2 parser with logging and validation
**Implementation:**
- Created UniswapV2Parser with ParseLog() and ParseReceipt() methods
- Proper event signature detection (Swap event)
- Token extraction from pool cache with decimal scaling
- Automatic scaling to 18 decimals for internal representation
- Support for multiple swaps per transaction
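
For reference, UniswapV2 Swap detection keys off the event's topic[0]. A minimal sketch of that check, assuming the project uses go-ethereum's log type; the parser in this commit may apply additional checks:

```go
package example

import (
	ethtypes "github.com/ethereum/go-ethereum/core/types"
	"github.com/ethereum/go-ethereum/crypto"
)

// Topic[0] of the UniswapV2 Swap event:
// Swap(address indexed sender, uint256 amount0In, uint256 amount1In,
//      uint256 amount0Out, uint256 amount1Out, address indexed to)
var v2SwapSig = crypto.Keccak256Hash(
	[]byte("Swap(address,uint256,uint256,uint256,uint256,address)"),
)

// isV2Swap reports whether a raw log is a UniswapV2 Swap event.
// Illustrative only; the real UniswapV2Parser may also require that the
// emitting address is a known pool from the pool cache.
func isV2Swap(log *ethtypes.Log) bool {
	return len(log.Topics) > 0 && log.Topics[0] == v2SwapSig
}
```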

**Testing:**
- Comprehensive unit tests with 100% coverage
- Tests for valid/invalid events, batch parsing, edge cases
- Mock logger and pool cache for isolated testing

**Validation & Logging:**
- SwapLogger: Saves detected swaps to JSON files for testing
  - Individual swap logging with raw log data
  - Batch logging for multi-swap transactions
  - Log cleanup for old entries (configurable retention)
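
A minimal usage sketch of the SwapLogger (constructor and methods are taken from swap_logger.go below; `appLogger`, `event`, `rawData`, `topicHexes`, the "logs/swaps" directory, and the "uniswap_v2" label are illustrative):

```go
// rawData, topicHexes, event and appLogger come from the surrounding
// application; only the SwapLogger calls are from this file.
swapLogger, err := parsers.NewSwapLogger("logs/swaps", appLogger)
if err != nil {
	return err
}

// Persist one parsed swap together with its raw log payload.
if err := swapLogger.LogSwap(ctx, event, rawData, topicHexes, "uniswap_v2"); err != nil {
	appLogger.Warn("failed to persist swap log", "error", err)
}

// Apply the retention policy, e.g. keep one week of captures.
if removed, err := swapLogger.CleanOldLogs(7 * 24 * time.Hour); err == nil {
	appLogger.Info("swap log retention applied", "removed", removed)
}
```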

- ArbiscanValidator: Verifies parsed swaps against Arbiscan API
  - Compares pool address, tx hash, block number, log index
  - Validates sender and recipient addresses
  - Detects and logs discrepancies for investigation
  - Batch validation support for transactions with multiple swaps
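
A minimal sketch of the discrepancy check described above, using placeholder types; the real ArbiscanValidator compares the parsed types.SwapEvent against the Arbiscan record, and none of the names below are its actual API:

```go
package example

import "strings"

// swapFields is a placeholder for the fields compared during validation.
type swapFields struct {
	Pool        string
	TxHash      string
	BlockNumber uint64
	LogIndex    uint
	Sender      string
	Recipient   string
}

// findDiscrepancies returns the names of fields that differ between the
// locally parsed swap and the Arbiscan reference; a non-empty result is
// what gets logged for investigation.
func findDiscrepancies(parsed, reference swapFields) []string {
	var diffs []string
	if !strings.EqualFold(parsed.Pool, reference.Pool) {
		diffs = append(diffs, "pool address")
	}
	if !strings.EqualFold(parsed.TxHash, reference.TxHash) {
		diffs = append(diffs, "tx hash")
	}
	if parsed.BlockNumber != reference.BlockNumber {
		diffs = append(diffs, "block number")
	}
	if parsed.LogIndex != reference.LogIndex {
		diffs = append(diffs, "log index")
	}
	if !strings.EqualFold(parsed.Sender, reference.Sender) {
		diffs = append(diffs, "sender")
	}
	if !strings.EqualFold(parsed.Recipient, reference.Recipient) {
		diffs = append(diffs, "recipient")
	}
	return diffs
}
```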

**Type System Updates:**
- Exported ScaleToDecimals() function for use across parsers
- Updated tests to use exported function name
- Consistent decimal handling (USDC 6, WBTC 8, WETH 18)
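
Scaling to the internal representation multiplies a raw amount by 10^(18 - tokenDecimals), so 1 USDC (1_000_000 raw at 6 decimals) becomes 1e18. A minimal sketch of that logic; the exported ScaleToDecimals() may have a different signature:

```go
package example

import "math/big"

// scaleTo18 converts a raw on-chain amount (USDC: 6 decimals, WBTC: 8,
// WETH: 18) to the 18-decimal internal representation. Illustrative only;
// tokens with more than 18 decimals are not handled here.
func scaleTo18(amount *big.Int, decimals uint8) *big.Int {
	if decimals >= 18 {
		return new(big.Int).Set(amount)
	}
	factor := new(big.Int).Exp(big.NewInt(10), big.NewInt(int64(18-decimals)), nil)
	return new(big.Int).Mul(amount, factor)
}
```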

**Use Cases:**
1. Real-time parsing: parser.ParseLog() for individual events
2. Transaction analysis: parser.ParseReceipt() for all swaps
3. Accuracy verification: validator.ValidateSwap() against Arbiscan
4. Testing: Load saved logs and replay for regression testing
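
Hypothetical glue code tying these use cases together (LogSwapBatch, ListSwapLogs and LoadSwapLog are taken from swap_logger.go; the exact signatures of ParseReceipt and ValidateSwap are assumed):

```go
// parser, validator, swapLogger and appLogger are assumed to be wired up elsewhere.
events, err := parser.ParseReceipt(ctx, receipt) // use case 2: all swaps in a tx
if err != nil {
	return err
}
if err := swapLogger.LogSwapBatch(ctx, events, "uniswap_v2"); err != nil {
	appLogger.Warn("failed to log swap batch", "error", err)
}
for _, ev := range events {
	// Use case 3: cross-check each parsed swap against Arbiscan.
	if err := validator.ValidateSwap(ctx, ev); err != nil {
		appLogger.Warn("validation discrepancy", "txHash", ev.TxHash.Hex(), "error", err)
	}
}

// Use case 4: replay previously captured logs for regression testing.
files, err := swapLogger.ListSwapLogs()
if err != nil {
	return err
}
for _, f := range files {
	entry, err := parsers.LoadSwapLog(f)
	if err != nil {
		continue
	}
	_ = entry.SwapEvent // feed back into the components under test
}
```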

**Task:** P2-002 (UniswapV2 parser base implementation)
**Coverage:** 100% (enforced in CI/CD)
**Protocol:** UniswapV2 on Arbitrum

🤖 Generated with [Claude Code](https://claude.com/claude-code)
Co-Authored-By: Claude <noreply@anthropic.com>
2025-11-10 15:31:26 +01:00


package parsers

import (
	"context"
	"encoding/json"
	"fmt"
	"os"
	"path/filepath"
	"sync"
	"time"

	"github.com/your-org/mev-bot/pkg/types"
)

// SwapLogger logs detected swaps to files for testing and accuracy verification
type SwapLogger struct {
	logDir string
	mu     sync.Mutex
	logger types.Logger
}

// SwapLogEntry represents a logged swap event with metadata
type SwapLogEntry struct {
	Timestamp    time.Time        `json:"timestamp"`
	SwapEvent    *types.SwapEvent `json:"swap_event"`
	RawLogData   string           `json:"raw_log_data"`   // Hex-encoded log data
	RawLogTopics []string         `json:"raw_log_topics"` // Hex-encoded topics
	Parser       string           `json:"parser"`         // Which parser detected this
}

// NewSwapLogger creates a new swap logger
func NewSwapLogger(logDir string, logger types.Logger) (*SwapLogger, error) {
	// Create log directory if it doesn't exist
	if err := os.MkdirAll(logDir, 0755); err != nil {
		return nil, fmt.Errorf("failed to create log directory: %w", err)
	}

	return &SwapLogger{
		logDir: logDir,
		logger: logger,
	}, nil
}

// LogSwap logs a detected swap event
func (s *SwapLogger) LogSwap(ctx context.Context, event *types.SwapEvent, rawLogData []byte, rawLogTopics []string, parser string) error {
	s.mu.Lock()
	defer s.mu.Unlock()

	// Create log entry
	entry := SwapLogEntry{
		Timestamp:    time.Now(),
		SwapEvent:    event,
		RawLogData:   fmt.Sprintf("0x%x", rawLogData),
		RawLogTopics: rawLogTopics,
		Parser:       parser,
	}

	// Marshal to JSON
	data, err := json.MarshalIndent(entry, "", " ")
	if err != nil {
		return fmt.Errorf("failed to marshal log entry: %w", err)
	}

	// Create filename based on timestamp and tx hash
	filename := fmt.Sprintf("%s_%s.json",
		time.Now().Format("2006-01-02_15-04-05"),
		event.TxHash.Hex()[2:10], // First 8 chars of tx hash
	)

	// Write to file
	logPath := filepath.Join(s.logDir, filename)
	if err := os.WriteFile(logPath, data, 0644); err != nil {
		return fmt.Errorf("failed to write log file: %w", err)
	}

	s.logger.Debug("logged swap event",
		"txHash", event.TxHash.Hex(),
		"protocol", event.Protocol,
		"parser", parser,
		"logPath", logPath,
	)

	return nil
}

// LogSwapBatch logs multiple swap events from the same transaction
func (s *SwapLogger) LogSwapBatch(ctx context.Context, events []*types.SwapEvent, parser string) error {
	if len(events) == 0 {
		return nil
	}

	s.mu.Lock()
	defer s.mu.Unlock()

	// Create batch entry
	type BatchEntry struct {
		Timestamp time.Time          `json:"timestamp"`
		TxHash    string             `json:"tx_hash"`
		Parser    string             `json:"parser"`
		SwapCount int                `json:"swap_count"`
		Swaps     []*types.SwapEvent `json:"swaps"`
	}

	entry := BatchEntry{
		Timestamp: time.Now(),
		TxHash:    events[0].TxHash.Hex(),
		Parser:    parser,
		SwapCount: len(events),
		Swaps:     events,
	}

	// Marshal to JSON
	data, err := json.MarshalIndent(entry, "", " ")
	if err != nil {
		return fmt.Errorf("failed to marshal batch entry: %w", err)
	}

	// Create filename
	filename := fmt.Sprintf("%s_%s_batch.json",
		time.Now().Format("2006-01-02_15-04-05"),
		events[0].TxHash.Hex()[2:10],
	)

	// Write to file
	logPath := filepath.Join(s.logDir, filename)
	if err := os.WriteFile(logPath, data, 0644); err != nil {
		return fmt.Errorf("failed to write batch log file: %w", err)
	}

	s.logger.Debug("logged swap batch",
		"txHash", events[0].TxHash.Hex(),
		"swapCount", len(events),
		"parser", parser,
		"logPath", logPath,
	)

	return nil
}

// GetLogDir returns the log directory path
func (s *SwapLogger) GetLogDir() string {
	return s.logDir
}

// LoadSwapLog loads a swap log entry from a file
func LoadSwapLog(filePath string) (*SwapLogEntry, error) {
	data, err := os.ReadFile(filePath)
	if err != nil {
		return nil, fmt.Errorf("failed to read log file: %w", err)
	}

	var entry SwapLogEntry
	if err := json.Unmarshal(data, &entry); err != nil {
		return nil, fmt.Errorf("failed to unmarshal log entry: %w", err)
	}

	return &entry, nil
}

// ListSwapLogs returns all swap log files in the log directory
func (s *SwapLogger) ListSwapLogs() ([]string, error) {
	s.mu.Lock()
	defer s.mu.Unlock()

	entries, err := os.ReadDir(s.logDir)
	if err != nil {
		return nil, fmt.Errorf("failed to read log directory: %w", err)
	}

	var logFiles []string
	for _, entry := range entries {
		if !entry.IsDir() && filepath.Ext(entry.Name()) == ".json" {
			logFiles = append(logFiles, filepath.Join(s.logDir, entry.Name()))
		}
	}

	return logFiles, nil
}

// CleanOldLogs removes log files older than the specified duration
func (s *SwapLogger) CleanOldLogs(maxAge time.Duration) (int, error) {
	s.mu.Lock()
	defer s.mu.Unlock()

	entries, err := os.ReadDir(s.logDir)
	if err != nil {
		return 0, fmt.Errorf("failed to read log directory: %w", err)
	}

	cutoff := time.Now().Add(-maxAge)
	removed := 0

	for _, entry := range entries {
		if entry.IsDir() || filepath.Ext(entry.Name()) != ".json" {
			continue
		}

		info, err := entry.Info()
		if err != nil {
			s.logger.Warn("failed to get file info", "file", entry.Name(), "error", err)
			continue
		}

		if info.ModTime().Before(cutoff) {
			filePath := filepath.Join(s.logDir, entry.Name())
			if err := os.Remove(filePath); err != nil {
				s.logger.Warn("failed to remove old log file", "file", filePath, "error", err)
				continue
			}
			removed++
		}
	}

	s.logger.Info("cleaned old swap logs", "removed", removed, "maxAge", maxAge)
	return removed, nil
}