Compare commits

..

2 Commits

Author SHA1 Message Date
Administrator
aec2ed2558 refactor(logging): standardize to go-ethereum/log package
Removed slog dependency and standardized all logging to use go-ethereum/log
for consistency with Ethereum ecosystem tooling.

## Changes Made

### pkg/sequencer/reader.go
- Removed import: log/slog
- Changed logger type: *slog.Logger → log.Logger
- Updated NewReader parameter: log.Logger instead of *slog.Logger
- Changed logger creation: logger.With() → logger.New()
- Removed loggerAdapter function (no longer needed)

## Benefits
- Consistent with go-ethereum ecosystem
- Single logging framework (no slog/log mixing)
- Simpler dependency tree
- Same logging API (Info, Warn, Error, Debug); see the usage sketch below
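
For reference, a minimal sketch of the target logging API (handler setup and field values are illustrative, not taken from this repository):

```go
package main

import (
	"github.com/ethereum/go-ethereum/log"
)

func main() {
	// Root logger with go-ethereum's default handler; production code
	// would normally install its own handler and level filter here.
	logger := log.Root()

	// Derive a child logger carrying component context, mirroring the
	// logger.New("component", "sequencer_reader") call in NewReader.
	readerLog := logger.New("component", "sequencer_reader")

	// Same leveled, key-value API as before: Info, Warn, Error, Debug.
	readerLog.Info("starting sequencer reader", "workers", 4)
	readerLog.Debug("message received", "size", 512)
	readerLog.Warn("websocket reconnect", "attempt", 2)
}
```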

## Testing
- Compilation verified: go build ./pkg/sequencer/...
- All logging calls work with the go-ethereum/log interface

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude <noreply@anthropic.com>
2025-11-11 13:51:12 +01:00
Administrator
f600ec26ff Merge feature/integrate-prometheus-metrics into feature/v2-prep
Some checks failed
V2 CI/CD Pipeline / Unit Tests (100% Coverage Required) (push) Has been cancelled
V2 CI/CD Pipeline / Code Quality & Linting (push) Has been cancelled
V2 CI/CD Pipeline / Pre-Flight Checks (push) Has been cancelled
V2 CI/CD Pipeline / Build & Dependencies (push) Has been cancelled
V2 CI/CD Pipeline / Integration Tests (push) Has been cancelled
V2 CI/CD Pipeline / Performance Benchmarks (push) Has been cancelled
V2 CI/CD Pipeline / Decimal Precision Validation (push) Has been cancelled
V2 CI/CD Pipeline / Modularity Validation (push) Has been cancelled
V2 CI/CD Pipeline / Final Validation Summary (push) Has been cancelled
Complete Prometheus metrics integration with comprehensive documentation.

## Summary

Replaced local atomic counters with centralized Prometheus metrics package,
providing production-grade observability with proper histograms, labels, and
comprehensive monitoring documentation.
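
As a rough sketch of the pattern (the metric and function names below are hypothetical stand-ins, not the repository's actual metrics package), counter/histogram registration and latency observation with prometheus/client_golang look like this:

```go
package metrics

import (
	"time"

	"github.com/prometheus/client_golang/prometheus"
	"github.com/prometheus/client_golang/prometheus/promauto"
)

var (
	// txProcessed replaces per-Reader atomic counters with a shared,
	// labeled counter. Name and labels are illustrative.
	txProcessed = promauto.NewCounterVec(prometheus.CounterOpts{
		Name: "sequencer_transactions_processed_total",
		Help: "Sequencer messages processed, labeled by outcome.",
	}, []string{"outcome"})

	// detectionLatency is a histogram; P50/P95/P99 are derived at query
	// time with histogram_quantile() over the exported buckets.
	detectionLatency = promauto.NewHistogram(prometheus.HistogramOpts{
		Name:    "arbitrage_detection_latency_seconds",
		Help:    "Time from message receipt to detection result.",
		Buckets: prometheus.DefBuckets,
	})
)

// ObserveDetection records one processed message: an outcome count plus a
// latency sample for successful runs.
func ObserveDetection(start time.Time, err error) {
	if err != nil {
		txProcessed.WithLabelValues("error").Inc()
		return
	}
	txProcessed.WithLabelValues("ok").Inc()
	detectionLatency.Observe(time.Since(start).Seconds())
}
```

Call sites wrap a unit of work, e.g. `start := time.Now(); ...; metrics.ObserveDetection(start, err)`.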

## Key Changes
- 40+ Prometheus metrics exposed on the /metrics endpoint (see the endpoint sketch after this list)
- Removed 9 atomic counter fields from Reader struct
- Added histogram observations for latency tracking (P50/P95/P99)
- Created 500+ line production monitoring guide
- Included Grafana dashboard JSON
- Configured 6 critical alert rules
- Docker Compose integration for full monitoring stack
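
A minimal sketch of how a /metrics endpoint is typically exposed with promhttp (the listen address is an assumption, not the project's configuration):

```go
package main

import (
	"net/http"

	"github.com/prometheus/client_golang/prometheus/promhttp"
)

func main() {
	// promhttp.Handler() serves every metric registered with the default
	// registry (e.g. via promauto, as in the sketch above).
	http.Handle("/metrics", promhttp.Handler())

	// Listen address is illustrative; Prometheus scrapes host:2112/metrics.
	if err := http.ListenAndServe(":2112", nil); err != nil {
		panic(err)
	}
}
```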

## Production Ready
- Metrics: 100% complete
- Documentation: Comprehensive setup guide
- Dashboards: Grafana JSON template
- Alerts: 6 critical rules configured
- Deployment: Docker Compose ready

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude <noreply@anthropic.com>
2025-11-11 08:29:01 +01:00

pkg/sequencer/reader.go

@@ -3,7 +3,6 @@ package sequencer
 import (
 	"context"
 	"fmt"
-	"log/slog"
 	"math/big"
 	"sync"
 	"time"
@@ -63,7 +62,7 @@ func DefaultReaderConfig() *ReaderConfig {
 // Reader reads pending transactions from the Arbitrum sequencer
 type Reader struct {
 	config *ReaderConfig
-	logger *slog.Logger
+	logger log.Logger
 
 	// Components
 	parsers parsers.Factory
@@ -102,7 +101,7 @@ func NewReader(
 	poolCache cache.PoolCache,
 	detector *arbitrage.Detector,
 	executor *execution.Executor,
-	logger *slog.Logger,
+	logger log.Logger,
 ) (*Reader, error) {
 	if config == nil {
 		config = DefaultReaderConfig()
@@ -117,13 +116,13 @@ func NewReader(
 	// Create swap filter with pool cache
 	swapFilter := NewSwapFilter(&SwapFilterConfig{
 		SwapChannelSize: config.BufferSize,
-		Logger: loggerAdapter(logger),
+		Logger: logger,
 		PoolCacheFile: "data/discovered_pools.json",
 	})
 
 	return &Reader{
 		config: config,
-		logger: logger.With("component", "sequencer_reader"),
+		logger: logger.New("component", "sequencer_reader"),
 		parsers: parsers,
 		validator: validator,
 		poolCache: poolCache,
@@ -136,13 +135,6 @@ func NewReader(
 	}, nil
 }
 
-// loggerAdapter converts slog.Logger to log.Logger interface
-func loggerAdapter(l *slog.Logger) log.Logger {
-	// For now, create a simple wrapper
-	// TODO: Implement proper adapter if needed
-	return log.Root()
-}
-
 // Start starts the sequencer reader
 func (r *Reader) Start(ctx context.Context) error {
 	r.logger.Info("starting sequencer reader",
@@ -320,7 +312,7 @@ func (r *Reader) readMessages(ctx context.Context, conn *websocket.Conn) error {
 func (r *Reader) worker(ctx context.Context, id int) {
 	defer r.wg.Done()
 
-	logger := r.logger.With("worker", id)
+	logger := r.logger.New("worker", id)
 
 	for {
 		select {