From ca4d3299eff122365e9f736bf331e6e48f21a1a8 Mon Sep 17 00:00:00 2001 From: thedevbirb Date: Tue, 26 Nov 2024 17:08:32 +0100 Subject: [PATCH 01/39] fix(ssz): order of enum variants is important --- crates/common/src/bid_submission/submission.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/common/src/bid_submission/submission.rs b/crates/common/src/bid_submission/submission.rs index 281fe50..b169c46 100644 --- a/crates/common/src/bid_submission/submission.rs +++ b/crates/common/src/bid_submission/submission.rs @@ -24,8 +24,8 @@ use helix_utils::signing::compute_builder_signing_root; #[ssz(transparent)] #[serde(untagged)] pub enum SignedBidSubmission { - Deneb(SignedBidSubmissionDeneb), DenebWithProofs(SignedBidSubmissionDenebWithProofs), + Deneb(SignedBidSubmissionDeneb), Capella(SignedBidSubmissionCapella), } From dd97cb20bb2b9e5baee12b1ebddeb9876fad4fb4 Mon Sep 17 00:00:00 2001 From: thedevbirb Date: Fri, 15 Nov 2024 15:34:49 +0100 Subject: [PATCH 02/39] refactor(api/builder): submit_block(_with_proofs) now have the same impl --- crates/api/src/builder/api.rs | 345 +++++++--------------------------- crates/api/src/router.rs | 3 +- 2 files changed, 65 insertions(+), 283 deletions(-) diff --git a/crates/api/src/builder/api.rs b/crates/api/src/builder/api.rs index b3cf264..27a13cb 100644 --- a/crates/api/src/builder/api.rs +++ b/crates/api/src/builder/api.rs @@ -50,7 +50,9 @@ use helix_common::{ BidSubmission, BidTrace, SignedBidSubmission, }, chain_info::ChainInfo, - proofs::{verify_multiproofs, InclusionProofs, SignedConstraints}, + proofs::{ + verify_multiproofs, InclusionProofs, SignedConstraints, SignedConstraintsWithProofData, + }, signing::RelaySigningContext, simulator::BlockSimError, versioned_payload::PayloadAndBlobs, @@ -299,6 +301,7 @@ where /// 6. Saves the bid to auctioneer and db. 
/// /// Implements this API: + /// Implements this API: pub async fn submit_block( Extension(api): Extension>>, req: Request, @@ -448,253 +451,36 @@ where ) .await?; - // If cancellations are enabled, then abort now if there is a later submission - if is_cancellations_enabled { - if let Err(err) = - api.check_for_later_submissions(&payload, trace.receive, &request_id).await - { - warn!(request_id = %request_id, error = %err, "already processing later submission"); - return Err(err) - } - } - - // Save bid to auctioneer - match api - .save_bid_to_auctioneer( - &payload, - &mut trace, - is_cancellations_enabled, - floor_bid_value, - &request_id, - ) - .await? - { - // If the bid was succesfully saved then we gossip the header and payload to all other - // relays. - Some((builder_bid, execution_payload)) => { - api.gossip_new_submission( - &payload, - execution_payload, - builder_bid, - is_cancellations_enabled, - trace.receive, - &request_id, - ) - .await; - } - None => { /* Bid wasn't saved so no need to gossip as it will never be served */ } - } - - // Log some final info - trace.request_finish = get_nanos_timestamp()?; - info!( - request_id = %request_id, - trace = ?trace, - request_duration_ns = trace.request_finish.saturating_sub(trace.receive), - "submit_block request finished" - ); - - let optimistic_version = if was_simulated_optimistically { - OptimisticVersion::V1 - } else { - OptimisticVersion::NotOptimistic - }; - - // Save submission to db. - tokio::spawn(async move { - if let Err(err) = api - .db - .store_block_submission(payload, Arc::new(trace), optimistic_version as i16) - .await - { - error!( - error = %err, - "failed to store block submission", - ) - } - }); - - Ok(StatusCode::OK) - } - - /// Handles the submission of a new block with inclusion proofs. - /// - /// This function extends the `submit_block` functionality to also handle inclusion proofs: - /// 1. Receives the request and decodes the payload into a `SignedBidSubmission` object. - /// 2. 
Validates the builder and checks against the next proposer duty. - /// 3. Verifies the signature of the payload. - /// 4. Fetches the constraints for the slot and verifies the inclusion proofs. - /// 5. Runs further validations against the auctioneer. - /// 6. Simulates the block to validate the payment. - /// 7. Saves the bid and inclusion proof to the auctioneer. - /// - /// Implements this API: - pub async fn submit_block_with_proofs( - Extension(api): Extension>>, - req: Request, - ) -> Result { - let request_id = Uuid::new_v4(); - let mut trace = SubmissionTrace { receive: get_nanos_timestamp()?, ..Default::default() }; - let (head_slot, next_duty) = api.curr_slot_info.read().await.clone(); - - info!( - request_id = %request_id, - event = "submit_block_with_proofs", - head_slot = head_slot, - timestamp_request_start = trace.receive, - ); - - // Decode the incoming request body into a payload with proofs - let (payload, is_cancellations_enabled) = - decode_payload(req, &mut trace, &request_id).await?; - let block_hash = payload.message().block_hash.clone(); - - // Verify that we have a validator connected for this slot - if next_duty.is_none() { - warn!(request_id = %request_id, "could not find slot duty"); - return Err(BuilderApiError::ProposerDutyNotFound) - } - let next_duty = next_duty.unwrap(); - - debug!( - request_id = %request_id, - builder_pub_key = ?payload.builder_public_key(), - block_value = %payload.value(), - block_hash = ?block_hash, - "submit_block_with_proofs -- payload decoded", - ); - - // Verify the payload is for the current slot - if payload.slot() <= head_slot { - warn!( - request_id = %request_id, - "submission is for a past slot", - ); - return Err(BuilderApiError::SubmissionForPastSlot { - current_slot: head_slot, - submission_slot: payload.slot(), - }) - } - - // Fetch the next payload attributes and validate basic information - let payload_attributes = api - .fetch_payload_attributes(payload.slot(), payload.parent_hash(), 
&request_id) - .await?; - - // Handle duplicates. - if let Err(err) = api - .check_for_duplicate_block_hash( - &block_hash, - payload.slot(), - payload.parent_hash(), - payload.proposer_public_key(), - &request_id, - ) - .await - { - match err { - BuilderApiError::DuplicateBlockHash { block_hash } => { - // We dont return the error here as we want to continue processing the request. - // This mitigates the risk of someone sending an invalid payload - // with a valid header, which would block subsequent submissions with the same - // header and valid payload. - debug!( - request_id = %request_id, - block_hash = ?block_hash, - builder_pub_key = ?payload.builder_public_key(), - "block hash already seen" - ); - } - _ => return Err(err), - } - } - - // Verify the payload value is above the floor bid - let floor_bid_value = api - .check_if_bid_is_below_floor( - payload.slot(), - payload.parent_hash(), - payload.proposer_public_key(), - payload.builder_public_key(), - payload.value(), - is_cancellations_enabled, - &request_id, - ) - .await?; - trace.floor_bid_checks = get_nanos_timestamp()?; - - // Fetch builder info - let builder_info = api.fetch_builder_info(payload.builder_public_key()).await; - - // Handle trusted builders check - if !api.check_if_trusted_builder(&next_duty, &builder_info).await { - let proposer_trusted_builders = next_duty.entry.preferences.trusted_builders.unwrap(); - warn!( - request_id = %request_id, - builder_pub_key = ?payload.builder_public_key(), - proposer_trusted_builders = ?proposer_trusted_builders, - "builder not in proposer trusted builders list", - ); - return Err(BuilderApiError::BuilderNotInProposersTrustedList { - proposer_trusted_builders, - }) - } - - // Verify payload has not already been delivered - match api.auctioneer.get_last_slot_delivered().await { - Ok(Some(slot)) => { - if payload.slot() <= slot { - warn!(request_id = %request_id, "payload already delivered"); - return Err(BuilderApiError::PayloadAlreadyDelivered) + // If 
constraints from the [Constraints API](https://docs.boltprotocol.xyz/technical-docs/api/relay#blocks_with_proofs) + // are available, verify inclusion proofs and save them to cache + // + // NOTE: this check must always be performed because otherwise a builder might trick + // the relay into accepting as best bid a block without invalid inclusion proofs when they + // are needed. + if let Some(constraints) = api.auctioneer.get_constraints(payload.slot()).await? { + let should_verify_and_save_proofs = api + .relay_config + .constraints_api_config + .max_block_value_to_verify_wei + .map_or(true, |max_block_value_to_verify| { + payload.value() <= max_block_value_to_verify + }); + if should_verify_and_save_proofs { + if let Err(err) = + api.verify_and_save_inclusion_proofs(&payload, constraints, &request_id).await + { + warn!(request_id = %request_id, error = %err, "failed to verify and save inclusion proofs"); + return Err(err) } - } - Ok(None) => {} - Err(err) => { - error!(request_id = %request_id, error = %err, "failed to get last slot delivered"); - } - } - - // Sanity check the payload - if let Err(err) = sanity_check_block_submission( - &payload, - payload.bid_trace(), - &next_duty, - &payload_attributes, - &api.chain_info, - ) { - warn!(request_id = %request_id, error = %err, "failed sanity check"); - return Err(err) - } - trace.pre_checks = get_nanos_timestamp()?; - - let (payload, was_simulated_optimistically) = api - .verify_submitted_block( - payload, - next_duty, - &builder_info, - &mut trace, - &request_id, - &payload_attributes, - ) - .await?; - - // Fetch constraints, and if available verify inclusion proofs and save them to cache - let should_verify_and_save_proofs = api - .relay_config - .constraints_api_config - .max_block_value_to_verify_wei - .map_or(true, |max_block_value_to_verify| payload.value() <= max_block_value_to_verify); - if should_verify_and_save_proofs { - if let Err(err) = api.verify_and_save_inclusion_proofs(&payload, 
&request_id).await { - warn!(request_id = %request_id, error = %err, "failed to verify and save inclusion proofs"); - return Err(err) + } else { + info!( + request_id = %request_id, + block_value = %payload.value(), + "block value is greater than max value to verify, inclusion proof verification and saving is skipped", + ); } } else { - info!( - request_id = %request_id, - block_value = %payload.value(), - "block value is greater than max value to verify, inclusion proof verification and saving is skipped", - ); + info!(%request_id, "no constraints found for slot, proof verification is not needed"); } // If cancellations are enabled, then abort now if there is a later submission @@ -740,7 +526,7 @@ where request_id = %request_id, trace = ?trace, request_duration_ns = trace.request_finish.saturating_sub(trace.receive), - "submit_block_with_proofs request finished" + "submit_block request finished" ); let optimistic_version = if was_simulated_optimistically { @@ -758,7 +544,7 @@ where { error!( error = %err, - "failed to store block submission with proofs", + "failed to store block submission", ) } }); @@ -2183,43 +1969,40 @@ where async fn verify_and_save_inclusion_proofs( &self, payload: &SignedBidSubmission, + constraints: Vec, request_id: &Uuid, ) -> Result<(), BuilderApiError> { - if let Some(constraints) = self.auctioneer.get_constraints(payload.slot()).await? { - let transactions_root: B256 = payload - .transactions() - .clone() - .hash_tree_root()? 
- .to_vec() - .as_slice() - .try_into() - .map_err(|error| { - error!(?error, "failed to convert root to hash32"); - BuilderApiError::InternalError - })?; - let proofs = payload.proofs().ok_or(BuilderApiError::InclusionProofsNotFound)?; - let constraints_proofs: Vec<_> = constraints.iter().map(|c| &c.proof_data).collect(); - - verify_multiproofs(constraints_proofs.as_slice(), proofs, transactions_root).map_err( - |e| { - error!(error = %e, "failed to verify inclusion proofs"); - BuilderApiError::InclusionProofVerificationFailed(e) - }, - )?; + let transactions_root: B256 = payload + .transactions() + .clone() + .hash_tree_root()? + .to_vec() + .as_slice() + .try_into() + .map_err(|error| { + error!(?error, "failed to convert root to hash32"); + BuilderApiError::InternalError + })?; + let proofs = payload.proofs().ok_or(BuilderApiError::InclusionProofsNotFound)?; + let constraints_proofs: Vec<_> = constraints.iter().map(|c| &c.proof_data).collect(); - // Save inclusion proof to auctioneer. - self.save_inclusion_proof( - payload.slot(), - payload.proposer_public_key(), - payload.block_hash(), - proofs, - request_id, - ) - .await?; - info!(%request_id, "inclusion proofs verified and saved to auctioneer"); - } else { - info!(%request_id, "no constraints found for slot, proof verification is not needed"); - }; + verify_multiproofs(constraints_proofs.as_slice(), proofs, transactions_root).map_err( + |e| { + error!(error = %e, "failed to verify inclusion proofs"); + BuilderApiError::InclusionProofVerificationFailed(e) + }, + )?; + + // Save inclusion proof to auctioneer. 
+ self.save_inclusion_proof( + payload.slot(), + payload.proposer_public_key(), + payload.block_hash(), + proofs, + request_id, + ) + .await?; + info!(%request_id, "inclusion proofs verified and saved to auctioneer"); Ok(()) } } diff --git a/crates/api/src/router.rs b/crates/api/src/router.rs index a245009..04e9ba5 100644 --- a/crates/api/src/router.rs +++ b/crates/api/src/router.rs @@ -86,8 +86,7 @@ pub fn build_router( router = router.route(&route.path(), post(BuilderApiProd::submit_block_v2)); } Route::SubmitBlockWithProofs => { - router = - router.route(&route.path(), post(BuilderApiProd::submit_block_with_proofs)); + router = router.route(&route.path(), post(BuilderApiProd::submit_block)); } Route::SubmitHeader => { router = router.route(&route.path(), post(BuilderApiProd::submit_header)); From f81a92ee390b524eb4893761c8bafd433e6d1b7a Mon Sep 17 00:00:00 2001 From: thedevbirb Date: Wed, 27 Nov 2024 15:35:05 +0100 Subject: [PATCH 03/39] fix(constraints-api): don't read from db for checking proposer duties, use auctioneer instead --- crates/api/src/constraints/api.rs | 110 ++++++++++++++---- crates/api/src/constraints/tests.rs | 2 +- crates/api/src/router.rs | 2 +- crates/api/src/service.rs | 2 +- crates/api/src/test_utils.rs | 23 ++-- crates/common/src/api/constraints_api.rs | 8 +- crates/common/src/proofs.rs | 10 +- crates/housekeeper/src/chain_event_updater.rs | 2 +- 8 files changed, 114 insertions(+), 45 deletions(-) diff --git a/crates/api/src/constraints/api.rs b/crates/api/src/constraints/api.rs index ba42bef..32522ab 100644 --- a/crates/api/src/constraints/api.rs +++ b/crates/api/src/constraints/api.rs @@ -3,7 +3,7 @@ use axum::{ http::{Request, StatusCode}, Extension, }; -use ethereum_consensus::{deneb::Slot, ssz}; +use ethereum_consensus::{deneb::Slot, phase0::mainnet::SLOTS_PER_EPOCH, ssz}; use helix_common::{ api::constraints_api::{ SignableBLS, SignedDelegation, SignedRevocation, DELEGATION_ACTION, @@ -14,15 +14,22 @@ use helix_common::{ 
proofs::{ConstraintsMessage, SignedConstraints, SignedConstraintsWithProofData}, ConstraintSubmissionTrace, ConstraintsApiConfig, }; -use helix_database::DatabaseService; use helix_datastore::Auctioneer; +use helix_housekeeper::{ChainUpdate, SlotUpdate}; use helix_utils::signing::{verify_signed_message, COMMIT_BOOST_DOMAIN}; use std::{ collections::HashSet, sync::Arc, time::{SystemTime, UNIX_EPOCH}, }; -use tokio::{sync::broadcast, time::Instant}; +use tokio::{ + sync::{ + broadcast, + mpsc::{self, error::SendError, Sender}, + RwLock, + }, + time::Instant, +}; use tracing::{error, info, trace, warn}; use uuid::Uuid; @@ -34,14 +41,15 @@ use super::error::Conflict; pub(crate) const MAX_REQUEST_LENGTH: usize = 1024 * 1024 * 5; #[derive(Clone)] -pub struct ConstraintsApi +pub struct ConstraintsApi where A: Auctioneer + 'static, - DB: DatabaseService + 'static, { auctioneer: Arc, - db: Arc, chain_info: Arc, + /// Information about the current head slot and next proposer duty + curr_slot_info: Arc>, + constraints_api_config: Arc, constraints_handle: ConstraintsHandle, @@ -60,26 +68,44 @@ impl ConstraintsHandle { } } -impl ConstraintsApi +impl ConstraintsApi where A: Auctioneer + 'static, - DB: DatabaseService + 'static, { pub fn new( auctioneer: Arc, - db: Arc, chain_info: Arc, + slot_update_subscription: Sender>, constraints_handle: ConstraintsHandle, constraints_api_config: Arc, ) -> Self { - Self { auctioneer, db, chain_info, constraints_handle, constraints_api_config } + let api = Self { + auctioneer, + chain_info, + curr_slot_info: Arc::new(RwLock::new(Default::default())), + constraints_handle, + constraints_api_config, + }; + + // Spin up the housekeep task + let api_clone = api.clone(); + tokio::spawn(async move { + if let Err(err) = api_clone.housekeep(slot_update_subscription).await { + error!( + error = %err, + "ConstraintsApi. housekeep task encountered an error", + ); + } + }); + + api } /// Handles the submission of batch of signed constraints. 
/// /// Implements this API: pub async fn submit_constraints( - Extension(api): Extension>>, + Extension(api): Extension>>, req: Request, ) -> Result { let request_id = Uuid::new_v4(); @@ -108,14 +134,18 @@ where return Err(ConstraintsApiError::InvalidConstraints) } - // PERF: can we avoid calling the db? - let maybe_validator_pubkey = api.db.get_proposer_duties().await?.iter().find_map(|d| { - if d.slot == first_constraints.slot { - Some(d.entry.registration.message.public_key.clone()) + let maybe_validator_pubkey = + if let Some(duties) = api.curr_slot_info.read().await.new_duties.as_ref() { + duties.iter().find_map(|d| { + if d.slot == first_constraints.slot { + Some(d.entry.registration.message.public_key.clone()) + } else { + None + } + }) } else { None - } - }); + }; let Some(validator_pubkey) = maybe_validator_pubkey else { error!(request_id = %request_id, slot = first_constraints.slot, "Missing proposer info"); @@ -200,7 +230,7 @@ where /// /// Implements this API: pub async fn delegate( - Extension(api): Extension>>, + Extension(api): Extension>>, req: Request, ) -> Result { let request_id = Uuid::new_v4(); @@ -274,7 +304,7 @@ where /// /// Implements this API: pub async fn revoke( - Extension(api): Extension>>, + Extension(api): Extension>>, req: Request, ) -> Result { let request_id = Uuid::new_v4(); @@ -347,10 +377,9 @@ where } // Helpers -impl ConstraintsApi +impl ConstraintsApi where A: Auctioneer + 'static, - DB: DatabaseService + 'static, { async fn save_constraints_to_auctioneer( &self, @@ -379,6 +408,45 @@ where } } +// STATE SYNC +impl ConstraintsApi +where + A: Auctioneer + 'static, +{ + /// Subscribes to slot head updater. + /// Updates the current slot and next proposer duty. 
+ pub async fn housekeep( + &self, + slot_update_subscription: Sender>, + ) -> Result<(), SendError>> { + let (tx, mut rx) = mpsc::channel(20); + slot_update_subscription.send(tx).await?; + + while let Some(slot_update) = rx.recv().await { + if let ChainUpdate::SlotUpdate(slot_update) = slot_update { + self.handle_new_slot(slot_update).await; + } + } + + Ok(()) + } + + /// Handle a new slot update. + /// Updates the next proposer duty for the new slot. + async fn handle_new_slot(&self, slot_update: SlotUpdate) { + let epoch = slot_update.slot / self.chain_info.seconds_per_slot; + info!( + epoch = epoch, + slot = slot_update.slot, + slot_start_next_epoch = (epoch + 1) * SLOTS_PER_EPOCH, + next_proposer_duty = ?slot_update.next_duty, + "ConstraintsApi - housekeep: Updated head slot", + ); + + *self.curr_slot_info.write().await = slot_update + } +} + /// Checks if the constraints for the given slot conflict with the existing constraints. /// Returns a [Conflict] in case of a conflict, None otherwise. 
/// diff --git a/crates/api/src/constraints/tests.rs b/crates/api/src/constraints/tests.rs index 1516c0e..3f7f2bb 100644 --- a/crates/api/src/constraints/tests.rs +++ b/crates/api/src/constraints/tests.rs @@ -152,7 +152,7 @@ async fn send_request(req_url: &str, encoding: Encoding, req_payload: Vec) - async fn start_api_server() -> ( oneshot::Sender<()>, HttpServiceConfig, - Arc>, + Arc>, Arc>, Receiver>, ) { diff --git a/crates/api/src/router.rs b/crates/api/src/router.rs index a245009..6cf8f9a 100644 --- a/crates/api/src/router.rs +++ b/crates/api/src/router.rs @@ -46,7 +46,7 @@ pub type ProposerApiProd = ProposerApi< pub type DataApiProd = DataApi; -pub type ConstraintsApiProd = ConstraintsApi; +pub type ConstraintsApiProd = ConstraintsApi; pub fn build_router( router_config: &mut RouterConfig, diff --git a/crates/api/src/service.rs b/crates/api/src/service.rs index 7dffc27..fc5fbcb 100644 --- a/crates/api/src/service.rs +++ b/crates/api/src/service.rs @@ -190,8 +190,8 @@ impl ApiService { let constraints_api = Arc::new(ConstraintsApiProd::new( auctioneer.clone(), - db.clone(), chain_info.clone(), + slot_update_sender.clone(), constraints_handle, constraints_api_config, )); diff --git a/crates/api/src/test_utils.rs b/crates/api/src/test_utils.rs index fac23de..923e3b0 100644 --- a/crates/api/src/test_utils.rs +++ b/crates/api/src/test_utils.rs @@ -298,7 +298,7 @@ pub fn data_api_app() -> (Router, Arc>, Arc ( Router, - Arc>, + Arc>, Arc>, Receiver>, ) { @@ -322,14 +322,13 @@ pub fn constraints_api_app() -> ( ); let builder_api_service = Arc::new(builder_api_service); - let constraints_api_service = - Arc::new(ConstraintsApi::::new( - auctioneer.clone(), - database.clone(), - Arc::new(ChainInfo::for_mainnet()), - handler, - Arc::new(ConstraintsApiConfig::default()), - )); + let constraints_api_service = Arc::new(ConstraintsApi::::new( + auctioneer.clone(), + Arc::new(ChainInfo::for_mainnet()), + slot_update_sender, + handler, + 
Arc::new(ConstraintsApiConfig::default()), + )); let router = Router::new() .route( @@ -358,15 +357,15 @@ pub fn constraints_api_app() -> ( ) .route( &Route::SubmitBuilderConstraints.path(), - post(ConstraintsApi::::submit_constraints), + post(ConstraintsApi::::submit_constraints), ) .route( &Route::DelegateSubmissionRights.path(), - post(ConstraintsApi::::delegate), + post(ConstraintsApi::::delegate), ) .route( &Route::RevokeSubmissionRights.path(), - post(ConstraintsApi::::revoke), + post(ConstraintsApi::::revoke), ) .layer(RequestBodyLimitLayer::new(MAX_PAYLOAD_LENGTH)) .layer(Extension(builder_api_service.clone())) diff --git a/crates/common/src/api/constraints_api.rs b/crates/common/src/api/constraints_api.rs index c244847..280c51f 100644 --- a/crates/common/src/api/constraints_api.rs +++ b/crates/common/src/api/constraints_api.rs @@ -31,8 +31,8 @@ impl SignableBLS for DelegationMessage { fn digest(&self) -> [u8; 32] { let mut hasher = Sha256::new(); hasher.update([self.action]); - hasher.update(&self.validator_pubkey.to_vec()); - hasher.update(&self.delegatee_pubkey.to_vec()); + hasher.update(self.validator_pubkey.as_slice()); + hasher.update(self.delegatee_pubkey.as_slice()); hasher.finalize().into() } @@ -55,8 +55,8 @@ impl SignableBLS for RevocationMessage { fn digest(&self) -> [u8; 32] { let mut hasher = Sha256::new(); hasher.update([self.action]); - hasher.update(&self.validator_pubkey.to_vec()); - hasher.update(&self.delegatee_pubkey.to_vec()); + hasher.update(self.validator_pubkey.as_slice()); + hasher.update(self.delegatee_pubkey.as_slice()); hasher.finalize().into() } diff --git a/crates/common/src/proofs.rs b/crates/common/src/proofs.rs index 33a52ac..7388cc4 100644 --- a/crates/common/src/proofs.rs +++ b/crates/common/src/proofs.rs @@ -5,7 +5,7 @@ use ethereum_consensus::{ primitives::{BlsPublicKey, BlsSignature}, ssz::prelude::*, }; -use reth_primitives::{PooledTransactionsElement, TxHash, B256}; +use reth_primitives::{Bytes, 
PooledTransactionsElement, TxHash, B256}; use sha2::{Digest, Sha256}; use tree_hash::Hash256; @@ -90,9 +90,11 @@ impl TryFrom for SignedConstraintsWithProofData { fn try_from(value: SignedConstraints) -> Result { let mut transactions = Vec::with_capacity(value.message.transactions.len()); - for transaction in value.message.transactions.to_vec().iter() { - let tx = PooledTransactionsElement::decode_enveloped(transaction.to_vec().into()) - .map_err(|e| ProofError::DecodingFailed(e.to_string()))?; + for transaction in value.message.transactions.iter() { + let tx = PooledTransactionsElement::decode_enveloped(Bytes::copy_from_slice( + transaction.as_slice(), + )) + .map_err(|e| ProofError::DecodingFailed(e.to_string()))?; let tx_hash = *tx.hash(); diff --git a/crates/housekeeper/src/chain_event_updater.rs b/crates/housekeeper/src/chain_event_updater.rs index 1df5fd0..8e1b29c 100644 --- a/crates/housekeeper/src/chain_event_updater.rs +++ b/crates/housekeeper/src/chain_event_updater.rs @@ -36,7 +36,7 @@ pub struct PayloadAttributesUpdate { } /// Payload for head event updates sent to subscribers. 
-#[derive(Clone, Debug)] +#[derive(Clone, Debug, Default)] pub struct SlotUpdate { pub slot: u64, pub next_duty: Option, From 5848eecc9e4aefb897fdf49a920b476a5d6cc876 Mon Sep 17 00:00:00 2001 From: thedevbirb Date: Wed, 27 Nov 2024 14:40:29 +0100 Subject: [PATCH 04/39] fix(constraints-api): review constraints saving logic --- crates/api/src/constraints/api.rs | 37 ++++++++++++++++++------------- 1 file changed, 21 insertions(+), 16 deletions(-) diff --git a/crates/api/src/constraints/api.rs b/crates/api/src/constraints/api.rs index 32522ab..5519290 100644 --- a/crates/api/src/constraints/api.rs +++ b/crates/api/src/constraints/api.rs @@ -3,7 +3,7 @@ use axum::{ http::{Request, StatusCode}, Extension, }; -use ethereum_consensus::{deneb::Slot, phase0::mainnet::SLOTS_PER_EPOCH, ssz}; +use ethereum_consensus::{phase0::mainnet::SLOTS_PER_EPOCH, ssz}; use helix_common::{ api::constraints_api::{ SignableBLS, SignedDelegation, SignedRevocation, DELEGATION_ACTION, @@ -200,18 +200,18 @@ where // Send to the constraints channel api.constraints_handle.send_constraints(constraint.clone()); + // Decode the constraints and generate proof data. 
+ let constraints_with_proofs = SignedConstraintsWithProofData::try_from(constraint).map_err(|err| { + error!(request_id = %request_id, "Failed to decode constraints transactions and generate proof data"); + err + })?; + // Finally add the constraints to the redis cache - if let Err(err) = api - .save_constraints_to_auctioneer( - &mut trace, - constraint.message.slot, - constraint, - &request_id, - ) - .await - { - error!(request_id = %request_id, error = %err, "Failed to save constraints to auctioneer"); - }; + api.save_constraints_to_auctioneer(&mut trace, constraints_with_proofs, &request_id) + .await.map_err(|err| { + error!(request_id = %request_id, error = %err, "Failed to save constraints to auctioneer"); + err + })?; } // Log some final info @@ -384,12 +384,17 @@ where async fn save_constraints_to_auctioneer( &self, trace: &mut ConstraintSubmissionTrace, - slot: Slot, - constraint: SignedConstraints, + constraints_with_proofs: SignedConstraintsWithProofData, request_id: &Uuid, ) -> Result<(), ConstraintsApiError> { - let message_with_data = SignedConstraintsWithProofData::try_from(constraint)?; - match self.auctioneer.save_constraints(slot, message_with_data).await { + match self + .auctioneer + .save_constraints( + constraints_with_proofs.signed_constraints.message.slot, + constraints_with_proofs, + ) + .await + { Ok(()) => { trace.auctioneer_update = get_nanos_timestamp()?; info!( From 8a807b9a7909f46125e50fceea3d67b7519780df Mon Sep 17 00:00:00 2001 From: owen Date: Wed, 16 Oct 2024 12:41:50 +0100 Subject: [PATCH 05/39] move trace to compture full request --- crates/api/src/proposer/api.rs | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/crates/api/src/proposer/api.rs b/crates/api/src/proposer/api.rs index 2dc1fcf..8346875 100644 --- a/crates/api/src/proposer/api.rs +++ b/crates/api/src/proposer/api.rs @@ -170,6 +170,10 @@ where return Err(ProposerApiError::EmptyRequest) } + let request_id = Uuid::new_v4(); + let mut trace = 
+ RegisterValidatorsTrace { receive: get_nanos_timestamp()?, ..Default::default() }; + // Get optional api key from headers let api_key = headers.get("x-api-key").and_then(|key| key.to_str().ok()); @@ -227,10 +231,6 @@ where } } - let request_id = Uuid::new_v4(); - let mut trace = - RegisterValidatorsTrace { receive: get_nanos_timestamp()?, ..Default::default() }; - let (head_slot, _) = *proposer_api.curr_slot_info.read().await; let num_registrations = registrations.len(); trace!( @@ -314,7 +314,6 @@ where valid_registrations.push(reg); } } - trace.registrations_complete = get_nanos_timestamp()?; let successful_registrations = valid_registrations.len(); @@ -347,6 +346,8 @@ where } }); + trace.registrations_complete = get_nanos_timestamp()?; + info!( request_id = %request_id, trace = ?trace, From e12423c268ee50a1683038831220d75430eaee6b Mon Sep 17 00:00:00 2001 From: owen Date: Thu, 17 Oct 2024 12:10:59 +0100 Subject: [PATCH 06/39] adds user agent to payloads and reg --- crates/api/src/proposer/api.rs | 24 ++++++++++-- crates/common/src/validator.rs | 8 +++- crates/database/src/mock_database_service.rs | 3 ++ .../V24__adds_ua_for_reg_and_payload.sql | 5 +++ .../src/postgres/postgres_db_row_parsing.rs | 1 + .../src/postgres/postgres_db_service.rs | 29 +++++++++++---- .../src/postgres/postgres_db_service_tests.rs | 37 ++++++++++++++++--- crates/database/src/traits.rs | 3 ++ 8 files changed, 91 insertions(+), 19 deletions(-) create mode 100644 crates/database/src/postgres/migrations/V24__adds_ua_for_reg_and_payload.sql diff --git a/crates/api/src/proposer/api.rs b/crates/api/src/proposer/api.rs index 8346875..b550b4e 100644 --- a/crates/api/src/proposer/api.rs +++ b/crates/api/src/proposer/api.rs @@ -231,6 +231,9 @@ where } } + let user_agent = + headers.get("user-agent").and_then(|v| v.to_str().ok()).map(|v| v.to_string()); + let (head_slot, _) = *proposer_api.curr_slot_info.read().await; let num_registrations = registrations.len(); trace!( @@ -335,7 +338,7 @@ where 
tokio::spawn(async move { if let Err(err) = proposer_api .db - .save_validator_registrations(valid_registrations_infos, pool_name) + .save_validator_registrations(valid_registrations_infos, pool_name, user_agent) .await { error!( @@ -635,12 +638,15 @@ where /// Implements this API: pub async fn get_payload( Extension(proposer_api): Extension>>, - _headers: HeaderMap, + headers: HeaderMap, req: Request, ) -> Result { let mut trace = GetPayloadTrace { receive: get_nanos_timestamp()?, ..Default::default() }; let request_id = Uuid::new_v4(); + let user_agent = + headers.get("user-agent").and_then(|v| v.to_str().ok()).map(|v| v.to_string()); + let signed_blinded_block: SignedBlindedBeaconBlock = match deserialize_get_payload_bytes(req).await { Ok(signed_block) => signed_block, @@ -671,7 +677,10 @@ where error!(request_id = %request_id, error = %e, "failed to broadcast get payload"); }; - match proposer_api._get_payload(signed_blinded_block, &mut trace, &request_id).await { + match proposer_api + ._get_payload(signed_blinded_block, &mut trace, &request_id, user_agent) + .await + { Ok(get_payload_response) => Ok(axum::Json(get_payload_response)), Err(err) => { // Save error to DB @@ -693,6 +702,7 @@ where mut signed_blinded_block: SignedBlindedBeaconBlock, trace: &mut GetPayloadTrace, request_id: &Uuid, + user_agent: Option, ) -> Result { let block_hash = signed_blinded_block.message().body().execution_payload_header().block_hash().clone(); @@ -910,6 +920,7 @@ where &proposer_public_key, &trace_clone, &request_id_clone, + user_agent, ) .await; }); @@ -945,6 +956,7 @@ where &proposer_public_key, trace, request_id, + user_agent, ) .await; @@ -1265,6 +1277,7 @@ where payload.signed_blinded_beacon_block, &mut trace, &payload.request_id, + None, ) .await { @@ -1349,6 +1362,7 @@ where proposer_public_key: &BlsPublicKey, trace: &GetPayloadTrace, request_id: &Uuid, + user_agent: Option, ) { let bid_trace = match self .auctioneer @@ -1374,7 +1388,9 @@ where let trace = 
trace.clone(); let request_id = *request_id; tokio::spawn(async move { - if let Err(err) = db.save_delivered_payload(&bid_trace, payload, &trace).await { + if let Err(err) = + db.save_delivered_payload(&bid_trace, payload, &trace, user_agent).await + { error!(request_id = %request_id, error = %err, "error saving payload to database"); } }); diff --git a/crates/common/src/validator.rs b/crates/common/src/validator.rs index a4a9216..bb37d74 100644 --- a/crates/common/src/validator.rs +++ b/crates/common/src/validator.rs @@ -42,14 +42,20 @@ pub struct SignedValidatorRegistrationEntry { pub registration_info: ValidatorRegistrationInfo, pub inserted_at: u64, pub pool_name: Option, + pub user_agent: Option, } impl SignedValidatorRegistrationEntry { - pub fn new(registration_info: ValidatorRegistrationInfo, pool_name: Option) -> Self { + pub fn new( + registration_info: ValidatorRegistrationInfo, + pool_name: Option, + user_agent: Option, + ) -> Self { Self { registration_info, inserted_at: SystemTime::now().duration_since(UNIX_EPOCH).unwrap().as_millis() as u64, pool_name, + user_agent, } } diff --git a/crates/database/src/mock_database_service.rs b/crates/database/src/mock_database_service.rs index a963913..b5a8120 100644 --- a/crates/database/src/mock_database_service.rs +++ b/crates/database/src/mock_database_service.rs @@ -50,6 +50,7 @@ impl DatabaseService for MockDatabaseService { &self, _entry: ValidatorRegistrationInfo, _pool_name: Option, + _user_agent: Option, ) -> Result<(), DatabaseError> { Ok(()) } @@ -57,6 +58,7 @@ impl DatabaseService for MockDatabaseService { &self, _entries: Vec, _pool_name: Option, + _user_agent: Option, ) -> Result<(), DatabaseError> { Ok(()) } @@ -140,6 +142,7 @@ impl DatabaseService for MockDatabaseService { _bid_trace: &BidTrace, _payload: Arc, _latency_trace: &GetPayloadTrace, + _user_agent: Option, ) -> Result<(), DatabaseError> { Ok(()) } diff --git a/crates/database/src/postgres/migrations/V24__adds_ua_for_reg_and_payload.sql 
b/crates/database/src/postgres/migrations/V24__adds_ua_for_reg_and_payload.sql new file mode 100644 index 0000000..fd32954 --- /dev/null +++ b/crates/database/src/postgres/migrations/V24__adds_ua_for_reg_and_payload.sql @@ -0,0 +1,5 @@ +ALTER TABLE validator_registrations +ADD COLUMN "user_agent" varchar; + +ALTER TABLE delivered_payload +ADD COLUMN "user_agent" varchar; \ No newline at end of file diff --git a/crates/database/src/postgres/postgres_db_row_parsing.rs b/crates/database/src/postgres/postgres_db_row_parsing.rs index 73941e3..0a4d2bd 100644 --- a/crates/database/src/postgres/postgres_db_row_parsing.rs +++ b/crates/database/src/postgres/postgres_db_row_parsing.rs @@ -261,6 +261,7 @@ impl FromRow for SignedValidatorRegistrationEntry { row.get::<&str, std::time::SystemTime>("inserted_at"), )?, pool_name: None, + user_agent: None, }) } } diff --git a/crates/database/src/postgres/postgres_db_service.rs b/crates/database/src/postgres/postgres_db_service.rs index d3efccc..c5db6d9 100644 --- a/crates/database/src/postgres/postgres_db_service.rs +++ b/crates/database/src/postgres/postgres_db_service.rs @@ -47,6 +47,7 @@ struct RegistrationParams<'a> { public_key: &'a [u8], signature: &'a [u8], inserted_at: SystemTime, + user_agent: Option, } struct PreferenceParams<'a> { @@ -259,6 +260,7 @@ impl PostgresDatabaseService { public_key: public_key.as_ref(), signature: signature.as_ref(), inserted_at, + user_agent: entry.user_agent.clone(), }); structured_params_for_pref.push(PreferenceParams { @@ -288,13 +290,14 @@ impl PostgresDatabaseService { &tuple.public_key, &tuple.signature, &tuple.inserted_at, + &tuple.user_agent, ] }) .collect(); // Construct the SQL statement with multiple VALUES clauses - let mut sql = String::from("INSERT INTO validator_registrations (fee_recipient, gas_limit, timestamp, public_key, signature, inserted_at) VALUES "); - let num_params_per_row = 6; + let mut sql = String::from("INSERT INTO validator_registrations (fee_recipient, gas_limit, 
timestamp, public_key, signature, inserted_at, user_agent) VALUES "); + let num_params_per_row = 7; let values_clauses: Vec = (0..params.len() / num_params_per_row) .map(|row| { let placeholders: Vec = (1..=num_params_per_row) @@ -306,7 +309,7 @@ impl PostgresDatabaseService { // Join the values clauses and append them to the SQL statement sql.push_str(&values_clauses.join(", ")); - sql.push_str(" ON CONFLICT (public_key) DO UPDATE SET fee_recipient = excluded.fee_recipient, gas_limit = excluded.gas_limit, timestamp = excluded.timestamp, signature = excluded.signature, inserted_at = excluded.inserted_at"); + sql.push_str(" ON CONFLICT (public_key) DO UPDATE SET fee_recipient = excluded.fee_recipient, gas_limit = excluded.gas_limit, timestamp = excluded.timestamp, signature = excluded.signature, inserted_at = excluded.inserted_at, user_agent = excluded.user_agent"); // Execute the query transaction.execute(&sql, ¶ms[..]).await?; @@ -409,6 +412,7 @@ impl DatabaseService for PostgresDatabaseService { &self, registration_info: ValidatorRegistrationInfo, pool_name: Option, + user_agent: Option, ) -> Result<(), DatabaseError> { let registration = registration_info.registration.message.clone(); @@ -447,8 +451,8 @@ impl DatabaseService for PostgresDatabaseService { match transaction.execute( " - INSERT INTO validator_registrations (fee_recipient, gas_limit, timestamp, public_key, signature, inserted_at) - VALUES ($1, $2, $3, $4, $5,$6) + INSERT INTO validator_registrations (fee_recipient, gas_limit, timestamp, public_key, signature, inserted_at, user_agent) + VALUES ($1, $2, $3, $4, $5,$6,$7) ON CONFLICT (public_key) DO UPDATE SET fee_recipient = excluded.fee_recipient, @@ -464,6 +468,7 @@ impl DatabaseService for PostgresDatabaseService { &(public_key.as_ref()), &(signature.as_ref()), &(inserted_at), + &(user_agent) ], ).await { Ok(_) => { @@ -471,6 +476,7 @@ impl DatabaseService for PostgresDatabaseService { registration_info, inserted_at: 
inserted_at.duration_since(UNIX_EPOCH).unwrap().as_millis() as u64, pool_name, + user_agent, }); } Err(e) => { @@ -487,6 +493,7 @@ impl DatabaseService for PostgresDatabaseService { &self, mut entries: Vec, pool_name: Option, + user_agent: Option, ) -> Result<(), DatabaseError> { entries.retain(|entry| { if let Some(existing_entry) = @@ -506,7 +513,11 @@ impl DatabaseService for PostgresDatabaseService { .insert(entry.registration.message.public_key.clone()); self.validator_registration_cache.insert( entry.registration.message.public_key.clone(), - SignedValidatorRegistrationEntry::new(entry.clone(), pool_name.clone()), + SignedValidatorRegistrationEntry::new( + entry.clone(), + pool_name.clone(), + user_agent.clone(), + ), ); } @@ -904,6 +915,7 @@ impl DatabaseService for PostgresDatabaseService { bid_trace: &BidTrace, payload: Arc, latency_trace: &GetPayloadTrace, + user_agent: Option, ) -> Result<(), DatabaseError> { let region_id = self.region; let mut client = self.pool.get().await?; @@ -911,9 +923,9 @@ impl DatabaseService for PostgresDatabaseService { transaction.execute( " INSERT INTO delivered_payload - (block_hash, payload_parent_hash, fee_recipient, state_root, receipts_root, logs_bloom, prev_randao, timestamp, block_number, gas_limit, gas_used, extra_data, base_fee_per_gas) + (block_hash, payload_parent_hash, fee_recipient, state_root, receipts_root, logs_bloom, prev_randao, timestamp, block_number, gas_limit, gas_used, extra_data, base_fee_per_gas, user_agent) VALUES - ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13) + ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14) ON CONFLICT (block_hash) DO NOTHING ", @@ -931,6 +943,7 @@ impl DatabaseService for PostgresDatabaseService { &(payload.execution_payload.gas_used() as i32), &(payload.execution_payload.extra_data().as_ref()), &(PostgresNumeric::from(*payload.execution_payload.base_fee_per_gas())), + &(user_agent), ], ).await?; diff --git 
a/crates/database/src/postgres/postgres_db_service_tests.rs b/crates/database/src/postgres/postgres_db_service_tests.rs index 32f02e5..07c89c3 100644 --- a/crates/database/src/postgres/postgres_db_service_tests.rs +++ b/crates/database/src/postgres/postgres_db_service_tests.rs @@ -127,7 +127,11 @@ mod tests { let registration = get_randomized_signed_validator_registration(); db_service - .save_validator_registration(registration.clone(), Some("test".to_string())) + .save_validator_registration( + registration.clone(), + Some("test".to_string()), + None, + ) .await .unwrap(); sleep(Duration::from_secs(5)).await; @@ -156,7 +160,11 @@ mod tests { .collect::>(); db_service - .save_validator_registrations(registrations.clone(), Some("test".to_string())) + .save_validator_registrations( + registrations.clone(), + Some("test".to_string()), + None, + ) .await .unwrap(); sleep(Duration::from_secs(5)).await; @@ -187,7 +195,11 @@ mod tests { .collect::>(); db_service - .save_validator_registrations(registrations.clone(), Some("test".to_string())) + .save_validator_registrations( + registrations.clone(), + Some("test".to_string()), + None, + ) .await .unwrap(); @@ -227,7 +239,11 @@ mod tests { let registration = get_randomized_signed_validator_registration(); db_service - .save_validator_registration(registration.clone(), Some("test".to_string())) + .save_validator_registration( + registration.clone(), + Some("test".to_string()), + None, + ) .await .unwrap(); @@ -248,7 +264,11 @@ mod tests { for i in 0..10 { let registration = get_randomized_signed_validator_registration(); db_service - .save_validator_registration(registration.clone(), Some("test".to_string())) + .save_validator_registration( + registration.clone(), + Some("test".to_string()), + None, + ) .await .unwrap(); @@ -581,7 +601,12 @@ mod tests { PayloadAndBlobs { execution_payload: execution_payload.clone(), blobs_bundle: None }; db_service - .save_delivered_payload(&bid_trace, Arc::new(payload_and_blobs), 
&latency_trace) + .save_delivered_payload( + &bid_trace, + Arc::new(payload_and_blobs), + &latency_trace, + None, + ) .await?; Ok(()) } diff --git a/crates/database/src/traits.rs b/crates/database/src/traits.rs index a8220b5..3411678 100644 --- a/crates/database/src/traits.rs +++ b/crates/database/src/traits.rs @@ -34,12 +34,14 @@ pub trait DatabaseService: Send + Sync + Clone { &self, entry: ValidatorRegistrationInfo, pool_name: Option, + user_agent: Option, ) -> Result<(), DatabaseError>; async fn save_validator_registrations( &self, entries: Vec, pool_name: Option, + user_agent: Option, ) -> Result<(), DatabaseError>; async fn is_registration_update_required( @@ -97,6 +99,7 @@ pub trait DatabaseService: Send + Sync + Clone { bid_trace: &BidTrace, payload: Arc, latency_trace: &GetPayloadTrace, + user_agent: Option, ) -> Result<(), DatabaseError>; async fn store_block_submission( From 033d34023a5a5785fa7045daf7b5d6a407fead0d Mon Sep 17 00:00:00 2001 From: owen Date: Thu, 17 Oct 2024 19:54:42 +0100 Subject: [PATCH 07/39] speed up query --- crates/database/src/postgres/postgres_db_service.rs | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/crates/database/src/postgres/postgres_db_service.rs b/crates/database/src/postgres/postgres_db_service.rs index c5db6d9..956b26d 100644 --- a/crates/database/src/postgres/postgres_db_service.rs +++ b/crates/database/src/postgres/postgres_db_service.rs @@ -1361,12 +1361,12 @@ impl DatabaseService for PostgresDatabaseService { block_submission.gas_used gas_used, block_submission.block_number block_number, block_submission.num_txs num_txs - FROM - delivered_payload + FROM + block_submission INNER JOIN - block_submission - ON - block_submission.block_hash = delivered_payload.block_hash + delivered_payload + ON + block_submission.block_number = delivered_payload.block_number and block_submission.block_hash = delivered_payload.block_hash ", ); From 236caac72599e20a19400ffaefbf2437f9eae7c6 Mon Sep 17 00:00:00 
2001 From: owen Date: Mon, 21 Oct 2024 14:12:03 +0100 Subject: [PATCH 08/39] dead lock fix --- crates/database/src/postgres/postgres_db_service.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/database/src/postgres/postgres_db_service.rs b/crates/database/src/postgres/postgres_db_service.rs index 956b26d..86ba613 100644 --- a/crates/database/src/postgres/postgres_db_service.rs +++ b/crates/database/src/postgres/postgres_db_service.rs @@ -659,7 +659,7 @@ impl DatabaseService for PostgresDatabaseService { transaction .execute( " - INSERT INTO proposer_duties_archive SELECT * FROM proposer_duties ON CONFLICT (slot_number) DO UPDATE SET public_key = excluded.public_key, validator_index = excluded.validator_index; + INSERT INTO proposer_duties_archive SELECT * FROM proposer_duties order by slot_number ON CONFLICT (slot_number) DO UPDATE SET public_key = excluded.public_key, validator_index = excluded.validator_index; ", &[], ) From 294bf9bfcae2500b949c46746948a864e96acd5a Mon Sep 17 00:00:00 2001 From: owen Date: Tue, 22 Oct 2024 20:59:57 +0100 Subject: [PATCH 09/39] enforce limits on data api --- crates/api/src/relay_data/api.rs | 26 +++++++++++++++++++++++--- crates/api/src/relay_data/error.rs | 8 ++++---- 2 files changed, 27 insertions(+), 7 deletions(-) diff --git a/crates/api/src/relay_data/api.rs b/crates/api/src/relay_data/api.rs index 0f5291d..47f187f 100644 --- a/crates/api/src/relay_data/api.rs +++ b/crates/api/src/relay_data/api.rs @@ -43,12 +43,28 @@ impl DataApi { pub async fn proposer_payload_delivered( Extension(data_api): Extension>>, Extension(cache): Extension>, - Query(params): Query, + Query(mut params): Query, ) -> Result { if params.slot.is_some() && params.cursor.is_some() { return Err(DataApiError::SlotAndCursor) } + if params.limit.is_some() && params.limit.unwrap() > 200 { + return Err(DataApiError::LimitReached{limit: 200}); + } + + if params.limit.is_none() { + params.limit = Some(200); + } + + if 
params.limit.is_some() && params.limit.unwrap() > 200 { + return Err(DataApiError::LimitReached{limit: 200}); + } + + if params.limit.is_none() { + params.limit = Some(200); + } + let cache_key = format!("{:?}", params); if let Some(cached_result) = cache.get(&cache_key) { @@ -81,7 +97,7 @@ impl DataApi { pub async fn builder_bids_received( Extension(data_api): Extension>>, Extension(cache): Extension>, - Query(params): Query, + Query(mut params): Query, ) -> Result { if params.slot.is_none() && params.block_hash.is_none() && @@ -92,7 +108,11 @@ impl DataApi { } if params.limit.is_some() && params.limit.unwrap() > 500 { - return Err(DataApiError::LimitReached) + return Err(DataApiError::LimitReached{limit: 500}); + } + + if params.limit.is_none() { + params.limit = Some(500); } let cache_key = format!("{:?}", params); diff --git a/crates/api/src/relay_data/error.rs b/crates/api/src/relay_data/error.rs index 64668d3..f3a136c 100644 --- a/crates/api/src/relay_data/error.rs +++ b/crates/api/src/relay_data/error.rs @@ -9,8 +9,8 @@ pub enum DataApiError { SlotAndCursor, #[error("need to query for specific slot or block_hash or block_number or builder_pubkey")] MissingFilter, - #[error("maximum limit is 500")] - LimitReached, + #[error("maximum limit is {limit}")] + LimitReached { limit: u64 }, #[error("internal server error")] InternalServerError, } @@ -26,8 +26,8 @@ impl IntoResponse for DataApiError { "need to query for specific slot or block_hash or block_number or builder_pubkey", ) .into_response(), - DataApiError::LimitReached => { - (StatusCode::BAD_REQUEST, "maximum limit is 500").into_response() + DataApiError::LimitReached{limit} => { + (StatusCode::BAD_REQUEST, format!("maximum limit is {limit}")).into_response() } DataApiError::InternalServerError => { (StatusCode::INTERNAL_SERVER_ERROR, "internal server error").into_response() From 4f85228c3192a5f08c9a954186cf00ae6c4e0286 Mon Sep 17 00:00:00 2001 From: owen Date: Wed, 30 Oct 2024 11:26:42 +0000 Subject: 
[PATCH 10/39] move header check above proposer check --- crates/api/src/builder/api.rs | 48 +++++++++++++++++------------------ 1 file changed, 24 insertions(+), 24 deletions(-) diff --git a/crates/api/src/builder/api.rs b/crates/api/src/builder/api.rs index 27a13cb..bd9dc8f 100644 --- a/crates/api/src/builder/api.rs +++ b/crates/api/src/builder/api.rs @@ -322,6 +322,18 @@ where decode_payload(req, &mut trace, &request_id).await?; let block_hash = payload.message().block_hash.clone(); + // Verify the payload is for the current slot + if payload.slot() <= head_slot { + warn!( + request_id = %request_id, + "submission is for a past slot", + ); + return Err(BuilderApiError::SubmissionForPastSlot { + current_slot: head_slot, + submission_slot: payload.slot(), + }); + } + // Verify that we have a validator connected for this slot if next_duty.is_none() { warn!(request_id = %request_id, "could not find slot duty"); @@ -337,18 +349,6 @@ where "payload decoded", ); - // Verify the payload is for the current slot - if payload.slot() <= head_slot { - warn!( - request_id = %request_id, - "submission is for a past slot", - ); - return Err(BuilderApiError::SubmissionForPastSlot { - current_slot: head_slot, - submission_slot: payload.slot(), - }) - } - // Fetch the next payload attributes and validate basic information let payload_attributes = api .fetch_payload_attributes(payload.slot(), payload.parent_hash(), &request_id) @@ -575,6 +575,18 @@ where decode_header_submission(req, &mut trace, &request_id).await?; let block_hash = payload.block_hash().clone(); + // Verify the payload is for the current slot + if payload.slot() <= head_slot { + warn!( + request_id = %request_id, + "submission is for a past slot", + ); + return Err(BuilderApiError::SubmissionForPastSlot { + current_slot: head_slot, + submission_slot: payload.slot(), + }); + } + // Verify that we have a validator connected for this slot if next_duty.is_none() { warn!(request_id = %request_id, "could not find slot 
duty"); @@ -590,18 +602,6 @@ where "header submission decoded", ); - // Verify the payload is for the current slot - if payload.slot() <= head_slot { - warn!( - request_id = %request_id, - "submission is for a past slot", - ); - return Err(BuilderApiError::SubmissionForPastSlot { - current_slot: head_slot, - submission_slot: payload.slot(), - }) - } - // Fetch the next payload attributes and validate basic information let payload_attributes = api .fetch_payload_attributes(payload.slot(), payload.parent_hash(), &request_id) From 63d6beba6108c4fd02b90bc35708dbca7bd189d5 Mon Sep 17 00:00:00 2001 From: owen Date: Wed, 30 Oct 2024 20:46:43 +0000 Subject: [PATCH 11/39] store val prefs for slot and use to filter regional data api --- crates/api/src/relay_data/api.rs | 2 +- crates/database/src/mock_database_service.rs | 1 + .../migrations/V25__adds_slot_val_prefs.sql | 8 ++++ .../src/postgres/postgres_db_service.rs | 43 ++++++++++++++++++- .../src/postgres/postgres_db_service_tests.rs | 3 +- crates/database/src/traits.rs | 1 + 6 files changed, 55 insertions(+), 3 deletions(-) create mode 100644 crates/database/src/postgres/migrations/V25__adds_slot_val_prefs.sql diff --git a/crates/api/src/relay_data/api.rs b/crates/api/src/relay_data/api.rs index 47f187f..59a0589 100644 --- a/crates/api/src/relay_data/api.rs +++ b/crates/api/src/relay_data/api.rs @@ -121,7 +121,7 @@ impl DataApi { return Ok(Json(cached_result)) } - match data_api.db.get_bids(¶ms.into()).await { + match data_api.db.get_bids(¶ms.into(), data_api.validator_preferences.clone()).await { Ok(result) => { let response = result.into_iter().map(|b| b.into()).collect::>(); diff --git a/crates/database/src/mock_database_service.rs b/crates/database/src/mock_database_service.rs index b5a8120..6630622 100644 --- a/crates/database/src/mock_database_service.rs +++ b/crates/database/src/mock_database_service.rs @@ -210,6 +210,7 @@ impl DatabaseService for MockDatabaseService { async fn get_bids( &self, _filters: 
&BidFilters, + _validator_preferences: Arc, ) -> Result, DatabaseError> { let mut bid = BidSubmissionDocument::default(); bid.bid_trace.value = U256::from(1000); diff --git a/crates/database/src/postgres/migrations/V25__adds_slot_val_prefs.sql b/crates/database/src/postgres/migrations/V25__adds_slot_val_prefs.sql new file mode 100644 index 0000000..fa656df --- /dev/null +++ b/crates/database/src/postgres/migrations/V25__adds_slot_val_prefs.sql @@ -0,0 +1,8 @@ +CREATE TABLE slot_preferences ( + "slot_number" integer PRIMARY KEY, + "proposer_pubkey" bytea, + "filtering" smallint, + "trusted_builders" varchar[], + "header_delay" boolean, + "gossip_blobs" boolean, +); \ No newline at end of file diff --git a/crates/database/src/postgres/postgres_db_service.rs b/crates/database/src/postgres/postgres_db_service.rs index 86ba613..c571847 100644 --- a/crates/database/src/postgres/postgres_db_service.rs +++ b/crates/database/src/postgres/postgres_db_service.rs @@ -1130,6 +1130,20 @@ impl DatabaseService for PostgresDatabaseService { ], ).await?; + transaction.execute( + " + INSERT INTO slot_preferences (slot_number, proposer_pubkey, filtering, trusted_builders, header_delay, gossip_blobs) + SELECT $1::bytea, $2, filtering, trusted_builders, header_delay, gossip_blobs + FROM validator_preferences + WHERE public_key = $1::bytea + ON CONFLICT (slot_number) DO NOTHING; + ", + &[ + &(submission.slot() as i32), + &(submission.proposer_public_key().as_ref()), + ], + ).await?; + transaction.commit().await?; Ok(()) @@ -1279,6 +1293,7 @@ impl DatabaseService for PostgresDatabaseService { async fn get_bids( &self, filters: &BidFilters, + validator_preferences: Arc, ) -> Result, DatabaseError> { let filters = PgBidFilters::from(filters); @@ -1300,9 +1315,26 @@ impl DatabaseService for PostgresDatabaseService { block_submission LEFT JOIN header_submission ON block_submission.block_hash = header_submission.block_hash - WHERE 1 = 1 "); + let filtering = match 
validator_preferences.filtering { + Filtering::Regional => Some(1_i16), + Filtering::Global => None, + }; + + if filtering.is_some() { + query.push_str( + " + LEFT JOIN + slot_preferences + ON + block_submission.slot_number = slot_preferences.slot_number + ", + ); + } + + query.push_str(" WHERE 1 = 1"); + let mut param_index = 1; let mut params: Vec> = Vec::new(); @@ -1335,6 +1367,15 @@ impl DatabaseService for PostgresDatabaseService { params.push(Box::new(block_hash)); } + if let Some(filtering) = filtering { + query.push_str(&format!( + " AND (slot_preferences.filtering = ${} OR slot_preferences.filtering IS NULL)", + param_index + )); + params.push(Box::new(filtering)); + param_index += 1; + } + let params_refs: Vec<&(dyn ToSql + Sync)> = params.iter().map(|p| &**p as &(dyn ToSql + Sync)).collect(); diff --git a/crates/database/src/postgres/postgres_db_service_tests.rs b/crates/database/src/postgres/postgres_db_service_tests.rs index 07c89c3..d04682c 100644 --- a/crates/database/src/postgres/postgres_db_service_tests.rs +++ b/crates/database/src/postgres/postgres_db_service_tests.rs @@ -532,7 +532,8 @@ mod tests { builder_pubkey: None, order_by: None, }; - let bids = db_service.get_bids(&filter).await?; + let validator_preferences = ValidatorPreferences::default(); + let bids = db_service.get_bids(&filter, Arc::new(validator_preferences)).await?; println!("Bids: {:?}", bids); Ok(()) } diff --git a/crates/database/src/traits.rs b/crates/database/src/traits.rs index 3411678..c9db776 100644 --- a/crates/database/src/traits.rs +++ b/crates/database/src/traits.rs @@ -145,6 +145,7 @@ pub trait DatabaseService: Send + Sync + Clone { async fn get_bids( &self, filters: &BidFilters, + validator_preferences: Arc, ) -> Result, DatabaseError>; async fn get_delivered_payloads( From 574da668c0856e80885342ebe4813b49e3a9d952 Mon Sep 17 00:00:00 2001 From: owen Date: Tue, 3 Dec 2024 20:57:27 +0000 Subject: [PATCH 12/39] adds multi simulator plus minor fixes --- Cargo.lock | 101 
+++++++ Cargo.toml | 4 +- crates/api/src/builder/api.rs | 123 +++++--- crates/api/src/builder/simulator/mod.rs | 1 + .../src/builder/simulator/multi_simulator.rs | 64 ++++ .../builder/simulator/optimistic_simulator.rs | 46 ++- crates/api/src/builder/tests.rs | 2 +- crates/api/src/gossiper/error.rs | 3 + crates/api/src/gossiper/grpc_gossiper.rs | 56 +++- crates/api/src/proposer/api.rs | 280 +++++++++--------- crates/api/src/proposer/error.rs | 6 + crates/api/src/router.rs | 25 +- crates/api/src/service.rs | 22 +- crates/beacon-client/src/beacon_client.rs | 2 +- crates/common/src/config.rs | 10 +- .../migrations/V25__adds_slot_val_prefs.sql | 4 +- .../src/postgres/postgres_db_service.rs | 19 +- crates/housekeeper/src/housekeeper.rs | 4 +- 18 files changed, 514 insertions(+), 258 deletions(-) create mode 100644 crates/api/src/builder/simulator/multi_simulator.rs diff --git a/Cargo.lock b/Cargo.lock index d4ce4ea..1000b36 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -58,6 +58,21 @@ dependencies = [ "memchr", ] +[[package]] +name = "alloc-no-stdlib" +version = "2.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cc7bb162ec39d46ab1ca8c77bf72e890535becd1751bb45f64c597edb4c8c6b3" + +[[package]] +name = "alloc-stdlib" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "94fb8275041c72129eb51b7d0322c29b8387a0386127718b096429201a5d6ece" +dependencies = [ + "alloc-no-stdlib", +] + [[package]] name = "allocator-api2" version = "0.2.16" @@ -454,6 +469,22 @@ dependencies = [ "serde_json", ] +[[package]] +name = "async-compression" +version = "0.4.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cd066d0b4ef8ecb03a55319dc13aa6910616d0f44008a045bb1835af830abff5" +dependencies = [ + "brotli", + "flate2", + "futures-core", + "memchr", + "pin-project-lite", + "tokio", + "zstd 0.13.0", + "zstd-safe 7.0.0", +] + [[package]] name = "async-stream" version = "0.3.5" @@ -805,6 
+836,27 @@ dependencies = [ "zeroize", ] +[[package]] +name = "brotli" +version = "6.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "74f7971dbd9326d58187408ab83117d8ac1bb9c17b085fdacd1cf2f598719b6b" +dependencies = [ + "alloc-no-stdlib", + "alloc-stdlib", + "brotli-decompressor", +] + +[[package]] +name = "brotli-decompressor" +version = "4.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a45bd2e4095a8b518033b128020dd4a55aab1c0a381ba4404a472630f4bc362" +dependencies = [ + "alloc-no-stdlib", + "alloc-stdlib", +] + [[package]] name = "bs58" version = "0.4.0" @@ -3074,6 +3126,12 @@ dependencies = [ "pin-project-lite", ] +[[package]] +name = "http-range-header" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "08a397c49fec283e3d6211adbe480be95aae5f304cfb923e9970e08956d5168a" + [[package]] name = "httparse" version = "1.8.0" @@ -3346,6 +3404,16 @@ version = "2.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "28b29a3cd74f0f4598934efe3aeba42bae0eb4680554128851ebbecb02af14e6" +[[package]] +name = "iri-string" +version = "0.7.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc0f0a572e8ffe56e2ff4f769f32ffe919282c3916799f8b68688b6030063bea" +dependencies = [ + "memchr", + "serde", +] + [[package]] name = "is-terminal" version = "0.4.9" @@ -6897,14 +6965,29 @@ version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0da193277a4e2c33e59e09b5861580c33dd0a637c3883d0fa74ba40c0374af2e" dependencies = [ + "async-compression", + "base64 0.21.2", "bitflags 2.4.1", "bytes 1.5.0", + "futures-core", + "futures-util", "http 1.0.0", "http-body 1.0.0", "http-body-util", + "http-range-header", + "httpdate", + "iri-string", + "mime", + "mime_guess", + "percent-encoding", "pin-project-lite", + "tokio", + "tokio-util", + "tower", "tower-layer", "tower-service", + "tracing", 
+ "uuid 1.7.0", ] [[package]] @@ -7757,6 +7840,15 @@ dependencies = [ "zstd-safe 6.0.6", ] +[[package]] +name = "zstd" +version = "0.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bffb3309596d527cfcba7dfc6ed6052f1d39dfbd7c867aa2e865e4a449c10110" +dependencies = [ + "zstd-safe 7.0.0", +] + [[package]] name = "zstd-safe" version = "5.0.2+zstd.1.5.2" @@ -7777,6 +7869,15 @@ dependencies = [ "zstd-sys", ] +[[package]] +name = "zstd-safe" +version = "7.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "43747c7422e2924c11144d5229878b98180ef8b06cca4ab5af37afc8a8d8ea3e" +dependencies = [ + "zstd-sys", +] + [[package]] name = "zstd-sys" version = "2.0.8+zstd.1.5.5" diff --git a/Cargo.toml b/Cargo.toml index d352c80..7bd8200 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -44,8 +44,8 @@ reqwest = { version = "0.11.23", features = [ "blocking", ] } tokio = { version = "1.33.0", features = ["full"] } -tokio-stream = { version = "0.1.15", features = ["sync"] } -tower-http = { version = "0.5.1", features = ["limit"] } +tokio-stream = {version = "0.1.15", features = ["sync"]} +tower-http = { version = "0.5.1", features = ["full"] } url = "2.4" # Serialization and Data Format diff --git a/crates/api/src/builder/api.rs b/crates/api/src/builder/api.rs index bd9dc8f..ab8166d 100644 --- a/crates/api/src/builder/api.rs +++ b/crates/api/src/builder/api.rs @@ -324,8 +324,9 @@ where // Verify the payload is for the current slot if payload.slot() <= head_slot { - warn!( + debug!( request_id = %request_id, + block_hash = ?block_hash, "submission is for a past slot", ); return Err(BuilderApiError::SubmissionForPastSlot { @@ -336,13 +337,18 @@ where // Verify that we have a validator connected for this slot if next_duty.is_none() { - warn!(request_id = %request_id, "could not find slot duty"); - return Err(BuilderApiError::ProposerDutyNotFound) + warn!( + request_id = %request_id, + block_hash = ?block_hash, + "could not find 
slot duty" + ); + return Err(BuilderApiError::ProposerDutyNotFound); } let next_duty = next_duty.unwrap(); - debug!( + info!( request_id = %request_id, + event = "submit_block", builder_pub_key = ?payload.builder_public_key(), block_value = %payload.value(), block_hash = ?block_hash, @@ -351,7 +357,7 @@ where // Fetch the next payload attributes and validate basic information let payload_attributes = api - .fetch_payload_attributes(payload.slot(), payload.parent_hash(), &request_id) + .fetch_payload_attributes(payload.slot(), payload.parent_hash(), &block_hash, &request_id) .await?; // Handle duplicates. @@ -402,7 +408,7 @@ where // Handle trusted builders check if !api.check_if_trusted_builder(&next_duty, &builder_info).await { let proposer_trusted_builders = next_duty.entry.preferences.trusted_builders.unwrap(); - warn!( + debug!( request_id = %request_id, builder_pub_key = ?payload.builder_public_key(), proposer_trusted_builders = ?proposer_trusted_builders, @@ -417,8 +423,8 @@ where match api.auctioneer.get_last_slot_delivered().await { Ok(Some(slot)) => { if payload.slot() <= slot { - warn!(request_id = %request_id, "payload already delivered"); - return Err(BuilderApiError::PayloadAlreadyDelivered) + debug!(request_id = %request_id, "payload already delivered"); + return Err(BuilderApiError::PayloadAlreadyDelivered); } } Ok(None) => {} @@ -522,7 +528,7 @@ where // Log some final info trace.request_finish = get_nanos_timestamp()?; - info!( + debug!( request_id = %request_id, trace = ?trace, request_duration_ns = trace.request_finish.saturating_sub(trace.receive), @@ -577,8 +583,9 @@ where // Verify the payload is for the current slot if payload.slot() <= head_slot { - warn!( + debug!( request_id = %request_id, + block_hash = ?block_hash, "submission is for a past slot", ); return Err(BuilderApiError::SubmissionForPastSlot { @@ -589,13 +596,18 @@ where // Verify that we have a validator connected for this slot if next_duty.is_none() { - warn!(request_id = 
%request_id, "could not find slot duty"); - return Err(BuilderApiError::ProposerDutyNotFound) + warn!( + request_id = %request_id, + block_hash = ?block_hash, + "could not find slot duty" + ); + return Err(BuilderApiError::ProposerDutyNotFound); } let next_duty = next_duty.unwrap(); - debug!( + info!( request_id = %request_id, + event = "submit_header", builder_pub_key = ?payload.builder_public_key(), block_value = %payload.value(), block_hash = ?block_hash, @@ -604,7 +616,7 @@ where // Fetch the next payload attributes and validate basic information let payload_attributes = api - .fetch_payload_attributes(payload.slot(), payload.parent_hash(), &request_id) + .fetch_payload_attributes(payload.slot(), payload.parent_hash(), &block_hash, &request_id) .await?; // Fetch builder info @@ -666,7 +678,7 @@ where // Handle trusted builders check if !api.check_if_trusted_builder(&next_duty, &builder_info).await { let proposer_trusted_builders = next_duty.entry.preferences.trusted_builders.unwrap(); - warn!( + debug!( request_id = %request_id, builder_pub_key = ?payload.builder_public_key(), proposer_trusted_builders = ?proposer_trusted_builders, @@ -690,8 +702,8 @@ where match api.auctioneer.get_last_slot_delivered().await { Ok(Some(slot)) => { if payload.slot() <= slot { - warn!(request_id = %request_id, "payload already delivered"); - return Err(BuilderApiError::PayloadAlreadyDelivered) + debug!(request_id = %request_id, "payload already delivered"); + return Err(BuilderApiError::PayloadAlreadyDelivered); } } Ok(None) => {} @@ -808,8 +820,9 @@ where let builder_pub_key = payload.builder_public_key().clone(); let block_hash = payload.message().block_hash.clone(); - debug!( + info!( request_id = %request_id, + event = "submit_block_v2", builder_pub_key = ?builder_pub_key, block_value = %payload.value(), block_hash = ?payload.block_hash(), @@ -831,8 +844,9 @@ where // Verify the payload is for the current slot if payload.slot() <= head_slot { - warn!( + debug!( request_id = 
%request_id, + block_hash = ?block_hash, "submission is for a past slot", ); return Err(BuilderApiError::SubmissionForPastSlot { @@ -845,14 +859,18 @@ where // Note: in `submit_block_v2` we have to do this check after decoding // so we can send a `PayloadReceived` message. if next_duty.is_none() { - warn!(request_id = %request_id, "could not find slot duty"); - return Err(BuilderApiError::ProposerDutyNotFound) + warn!( + request_id = %request_id, + block_hash = ?block_hash, + "could not find slot duty" + ); + return Err(BuilderApiError::ProposerDutyNotFound); } let next_duty = next_duty.unwrap(); // Fetch the next payload attributes and validate basic information let payload_attributes = api - .fetch_payload_attributes(payload.slot(), payload.parent_hash(), &request_id) + .fetch_payload_attributes(payload.slot(), payload.parent_hash(), &block_hash, &request_id) .await?; // Fetch builder info @@ -889,8 +907,8 @@ where match api.auctioneer.get_last_slot_delivered().await { Ok(Some(slot)) => { if payload.slot() <= slot { - warn!(request_id = %request_id, "payload already delivered"); - return Err(BuilderApiError::PayloadAlreadyDelivered) + debug!(request_id = %request_id, "payload already delivered"); + return Err(BuilderApiError::PayloadAlreadyDelivered); } } Ok(None) => {} @@ -968,7 +986,7 @@ where // Log some final info trace.request_finish = get_nanos_timestamp()?; - info!( + debug!( request_id = %request_id, trace = ?trace, request_duration_ns = trace.request_finish.saturating_sub(trace.receive), @@ -1012,7 +1030,7 @@ where // Verify the cancellation is for the current slot if slot <= head_slot { - warn!( + debug!( request_id = %request_id, "cancellation is for a past slot", ); @@ -1028,8 +1046,8 @@ where match api.auctioneer.get_last_slot_delivered().await { Ok(Some(del_slot)) => { if slot <= del_slot { - warn!(request_id = %request_id, "payload already delivered"); - return Err(BuilderApiError::PayloadAlreadyDelivered) + debug!(request_id = %request_id, 
"payload already delivered"); + return Err(BuilderApiError::PayloadAlreadyDelivered); } } Ok(None) => {} @@ -1093,7 +1111,7 @@ where { pub async fn process_gossiped_header(&self, req: BroadcastHeaderParams) { let request_id = Uuid::new_v4(); - info!( + debug!( request_id = %request_id, block_hash = ?req.signed_builder_bid.block_hash(), "received gossiped header", @@ -1108,7 +1126,7 @@ where // Verify that the gossiped header is not for a past slot let (head_slot, _) = self.curr_slot_info.read().await.clone(); if req.slot <= head_slot { - warn!( + debug!( request_id = %request_id, "received gossiped header for a past slot", ); @@ -1134,8 +1152,8 @@ where match self.auctioneer.get_last_slot_delivered().await { Ok(Some(slot)) => { if req.slot <= slot { - warn!(request_id = %request_id, "payload already delivered"); - return + debug!(request_id = %request_id, "payload already delivered"); + return; } } Ok(None) => {} @@ -1186,7 +1204,7 @@ where trace.auctioneer_update = get_nanos_timestamp().unwrap_or_default(); - info!(request_id = %request_id, "succesfully saved gossiped header"); + debug!(request_id = %request_id, "succesfully saved gossiped header"); // Save latency trace to db let db = self.db.clone(); @@ -1205,7 +1223,7 @@ where pub async fn process_gossiped_payload(&self, req: BroadcastPayloadParams) { let request_id = Uuid::new_v4(); - info!( + debug!( request_id = %request_id, block_hash = ?req.execution_payload.execution_payload.block_hash(), "received gossiped payload", @@ -1233,7 +1251,7 @@ where // Verify that the gossiped payload is not for a past slot let (head_slot, _) = self.curr_slot_info.read().await.clone(); if req.slot <= head_slot { - warn!( + debug!( request_id = %request_id, "received gossiped payload for a past slot", ); @@ -1244,8 +1262,8 @@ where match self.auctioneer.get_last_slot_delivered().await { Ok(Some(slot)) => { if req.slot <= slot { - warn!(request_id = %request_id, "payload already delivered"); - return + debug!(request_id = 
%request_id, "payload already delivered"); + return; } } Ok(None) => {} @@ -1273,7 +1291,7 @@ where trace.auctioneer_update = get_nanos_timestamp().unwrap_or_default(); - info!(request_id = %request_id, "succesfully saved gossiped payload"); + debug!(request_id = %request_id, "succesfully saved gossiped payload"); // Save gossiped payload trace to db let db = self.db.clone(); @@ -1296,7 +1314,7 @@ where /// Processes a gossiped cancellation message. No need to verify the signature as the message /// is gossiped internally and verification has been performed upstream. pub async fn process_gossiped_cancellation(&self, req: BroadcastCancellationParams) { - info!( + debug!( request_id = %req.request_id, "received gossiped cancellation", ); @@ -1317,7 +1335,7 @@ where match self.auctioneer.get_last_slot_delivered().await { Ok(Some(del_slot)) => { if slot <= del_slot { - warn!(request_id = %req.request_id, "payload already delivered"); + debug!(request_id = %req.request_id, "payload already delivered"); } } Ok(None) => {} @@ -1508,7 +1526,7 @@ where { Ok(false) => Ok(()), Ok(true) => { - warn!(request_id = %request_id, block_hash = ?block_hash, "duplicate block hash"); + debug!(request_id = %request_id, block_hash = ?block_hash, "duplicate block hash"); Err(BuilderApiError::DuplicateBlockHash { block_hash: block_hash.clone() }) } Err(err) => { @@ -1664,7 +1682,7 @@ where Ok(top_bid_value) => { let top_bid_value = top_bid_value.unwrap_or(U256::ZERO); is_top_bid = payload.value() > top_bid_value; - info!(request_id = %request_id, top_bid_value = ?top_bid_value, new_bid_is_top_bid = is_top_bid); + debug!(request_id = %request_id, top_bid_value = ?top_bid_value, new_bid_is_top_bid = is_top_bid); } Err(err) => { error!(request_id = %request_id, error = %err, "failed to get top bid value from auctioneer"); @@ -1692,7 +1710,7 @@ where match result { Ok(sim_optimistic) => { - info!(request_id = %request_id, "block simulation successful"); + debug!(request_id = %request_id, 
"block simulation successful"); trace.simulation = get_nanos_timestamp()?; debug!(request_id = %request_id, sim_latency = trace.simulation.saturating_sub(trace.signature)); @@ -1819,13 +1837,18 @@ where &self, slot: u64, parent_hash: &Hash32, + block_hash: &Hash32, request_id: &Uuid, ) -> Result { let payload_attributes_key = get_payload_attributes_key(parent_hash, slot); let payload_attributes = self.payload_attributes.read().await.get(&payload_attributes_key).cloned().ok_or_else( || { - warn!(request_id = %request_id, "payload attributes not yet known"); + warn!( + request_id = %request_id, + block_hash = ?block_hash, + "payload attributes not yet known" + ); BuilderApiError::PayloadAttributesNotYetKnown }, )?; @@ -2042,7 +2065,7 @@ where /// Updates the next proposer duty and prepares the get_validators() response. async fn handle_new_slot(&self, slot_update: SlotUpdate) { let epoch = slot_update.slot / SLOTS_PER_EPOCH; - debug!( + info!( epoch = epoch, slot_head = slot_update.slot, slot_start_next_epoch = (epoch + 1) * SLOTS_PER_EPOCH, @@ -2072,9 +2095,11 @@ where return } - debug!( + info!( + slot = payload_attributes.slot, randao = ?payload_attributes.payload_attributes.prev_randao, timestamp = payload_attributes.payload_attributes.timestamp, + "updated payload attributes", ); // Discard payload attributes if already known @@ -2171,7 +2196,7 @@ pub async fn decode_payload( }; trace.decode = get_nanos_timestamp()?; - info!( + debug!( request_id = %request_id, timestamp_after_decoding = trace.decode, decode_latency_ns = trace.decode.saturating_sub(trace.receive), @@ -2323,7 +2348,7 @@ pub async fn decode_header_submission( }; trace.decode = get_nanos_timestamp()?; - info!( + debug!( request_id = %request_id, timestamp_after_decoding = Instant::now().elapsed().as_nanos(), decode_latency_ns = trace.decode.saturating_sub(trace.receive), @@ -2451,7 +2476,7 @@ fn log_save_bid_info( bid_update_finish: u64, request_id: &Uuid, ) { - info!( + debug!( request_id = 
%request_id, bid_update_latency = bid_update_finish.saturating_sub(bid_update_start), was_bid_saved_in = update_bid_result.was_bid_saved, diff --git a/crates/api/src/builder/simulator/mod.rs b/crates/api/src/builder/simulator/mod.rs index d6dac97..92c1ba0 100644 --- a/crates/api/src/builder/simulator/mod.rs +++ b/crates/api/src/builder/simulator/mod.rs @@ -1,5 +1,6 @@ pub mod mock_simulator; pub mod optimistic_simulator; +pub mod multi_simulator; mod optimistic_simulator_tests; pub mod rpc_simulator; pub mod traits; diff --git a/crates/api/src/builder/simulator/multi_simulator.rs b/crates/api/src/builder/simulator/multi_simulator.rs new file mode 100644 index 0000000..7e9f3b9 --- /dev/null +++ b/crates/api/src/builder/simulator/multi_simulator.rs @@ -0,0 +1,64 @@ +use axum::async_trait; +use helix_common::{simulator::BlockSimError, BuilderInfo}; +use tokio::sync::mpsc::Sender; +use uuid::Uuid; +use std::sync::{ + atomic::{AtomicUsize, Ordering}, + Arc, +}; + +use crate::builder::DbInfo; + +use super::{traits::BlockSimulator, BlockSimRequest}; + +#[derive(Clone)] +pub struct MultiSimulator { + pub simulators: Vec, + next_index: Arc, +} + +impl MultiSimulator { + pub fn new(simulators: Vec) -> Self { + Self { + simulators, + next_index: Arc::new(AtomicUsize::new(0)), + } + } + + pub fn clone_for_async(&self) -> Self { + self.clone() + } +} + +#[async_trait] +impl BlockSimulator for MultiSimulator { + async fn process_request( + &self, + request: BlockSimRequest, + builder_info: &BuilderInfo, + is_top_bid: bool, + sim_result_saver_sender: Sender, + request_id: Uuid, + ) -> Result { + // Load balancing: round-robin selection + let index = self + .next_index + .fetch_update(Ordering::SeqCst, Ordering::SeqCst, |x| { + Some((x + 1) % self.simulators.len()) + }) + .unwrap_or(0); + + let simulator = &self.simulators[index]; + + // Process the request with the selected simulator + simulator + .process_request( + request, + builder_info, + is_top_bid, + 
sim_result_saver_sender, + request_id, + ) + .await + } +} diff --git a/crates/api/src/builder/simulator/optimistic_simulator.rs b/crates/api/src/builder/simulator/optimistic_simulator.rs index fe4f158..aa99811 100644 --- a/crates/api/src/builder/simulator/optimistic_simulator.rs +++ b/crates/api/src/builder/simulator/optimistic_simulator.rs @@ -4,7 +4,7 @@ use async_trait::async_trait; use ethereum_consensus::primitives::{BlsPublicKey, Hash32}; use reqwest::Client; use tokio::sync::{mpsc::Sender, RwLock}; -use tracing::{error, info, warn}; +use tracing::{debug, error, info, warn}; use uuid::Uuid; use helix_common::{simulator::BlockSimError, BuilderInfo}; @@ -71,32 +71,20 @@ impl OptimisticSimulator ) .await { - if let BlockSimError::BlockValidationFailed(_) = err { - if builder_info.is_optimistic { - if err.is_severe() { - warn!( - request_id=%request_id, - builder=%request.message.builder_public_key, - block_hash=%request.execution_payload.block_hash(), - err=%err, - "Block simulation resulted in an error. Demoting builder...", - ); - self.demote_builder_due_to_error( - &request.message.builder_public_key, - request.execution_payload.block_hash(), - err.to_string(), - ) - .await; - } else { - warn!( - request_id=%request_id, - builder=%request.message.builder_public_key, - block_hash=%request.execution_payload.block_hash(), - err=%err, - "Block simulation resulted in a non-severe error. NOT demoting builder...", - ); - } - } + if builder_info.is_optimistic { + warn!( + request_id=%request_id, + builder=%request.message.builder_public_key, + block_hash=%request.execution_payload.block_hash(), + err=%err, + "Block simulation resulted in an error. 
Demoting builder...", + ); + self.demote_builder_due_to_error( + &request.message.builder_public_key, + request.execution_payload.block_hash(), + err.to_string(), + ) + .await; } return Err(err) } @@ -169,7 +157,7 @@ impl BlockSimulator for OptimisticSimulator< request_id: Uuid, ) -> Result { if self.should_process_optimistically(&request, builder_info).await { - info!( + debug!( request_id=%request_id, block_hash=%request.execution_payload.block_hash(), "optimistically processing request" @@ -191,7 +179,7 @@ impl BlockSimulator for OptimisticSimulator< Ok(true) } else { - info!( + debug!( request_id=%request_id, block_hash=?request.execution_payload.block_hash(), block_parent_hash=?request.execution_payload.parent_hash(), diff --git a/crates/api/src/builder/tests.rs b/crates/api/src/builder/tests.rs index d9f1d4e..073fe74 100644 --- a/crates/api/src/builder/tests.rs +++ b/crates/api/src/builder/tests.rs @@ -14,7 +14,7 @@ use core::panic; use ethereum_consensus::{ builder::{SignedValidatorRegistration, ValidatorRegistration}, configs::mainnet::CAPELLA_FORK_EPOCH, - deneb::Withdrawal, + deneb::Transaction, Withdrawal, phase0::mainnet::SLOTS_PER_EPOCH, primitives::{BlsPublicKey, BlsSignature}, ssz::{self, prelude::*}, diff --git a/crates/api/src/gossiper/error.rs b/crates/api/src/gossiper/error.rs index 9cf54d0..26b0356 100644 --- a/crates/api/src/gossiper/error.rs +++ b/crates/api/src/gossiper/error.rs @@ -9,4 +9,7 @@ pub enum GossipError { #[error("Failed to reconnect")] ReconnectFailed, // Add other error common as needed + + #[error("Broadcast timed out")] + TimeoutError, } diff --git a/crates/api/src/gossiper/grpc_gossiper.rs b/crates/api/src/gossiper/grpc_gossiper.rs index 4ff7e6e..20d2914 100644 --- a/crates/api/src/gossiper/grpc_gossiper.rs +++ b/crates/api/src/gossiper/grpc_gossiper.rs @@ -72,13 +72,21 @@ impl GrpcGossiperClient { }; if let Some(mut client) = client { - if let Err(err) = client.broadcast_header(request).await { - return 
Err(GossipError::BroadcastError(err)) + let result = tokio::time::timeout(Duration::from_secs(5), client.broadcast_header(request)).await; + match result { + Ok(Ok(_)) => Ok(()), + Ok(Err(err)) => { + error!(err = %err, "Client call failed."); + return Err(GossipError::BroadcastError(err)); + }, + Err(_) => { + error!("Client call timed out."); + return Err(GossipError::TimeoutError); + }, } } else { return Err(GossipError::ClientNotConnected) } - Ok(()) } pub async fn broadcast_payload( @@ -92,13 +100,21 @@ impl GrpcGossiperClient { }; if let Some(mut client) = client { - if let Err(err) = client.broadcast_payload(request).await { - return Err(GossipError::BroadcastError(err)) + let result = tokio::time::timeout(Duration::from_secs(5), client.broadcast_payload(request)).await; + match result { + Ok(Ok(_)) => Ok(()), + Ok(Err(err)) => { + error!(err = %err, "Client call failed."); + return Err(GossipError::BroadcastError(err)); + }, + Err(_) => { + error!("Client call timed out."); + return Err(GossipError::TimeoutError); + }, } } else { return Err(GossipError::ClientNotConnected) } - Ok(()) } pub async fn broadcast_get_payload( @@ -112,13 +128,21 @@ impl GrpcGossiperClient { }; if let Some(mut client) = client { - if let Err(err) = client.broadcast_get_payload(request).await { - return Err(GossipError::BroadcastError(err)) + let result = tokio::time::timeout(Duration::from_secs(5), client.broadcast_get_payload(request)).await; + match result { + Ok(Ok(_)) => Ok(()), + Ok(Err(err)) => { + error!(err = %err, "Client call failed."); + return Err(GossipError::BroadcastError(err)); + }, + Err(_) => { + error!("Client call timed out."); + return Err(GossipError::TimeoutError); + }, } } else { return Err(GossipError::ClientNotConnected) } - Ok(()) } pub async fn broadcast_cancellation( @@ -132,13 +156,21 @@ impl GrpcGossiperClient { }; if let Some(mut client) = client { - if let Err(err) = client.broadcast_cancellation(request).await { - return 
Err(GossipError::BroadcastError(err)) + let result = tokio::time::timeout(Duration::from_secs(5), client.broadcast_cancellation(request)).await; + match result { + Ok(Ok(_)) => Ok(()), + Ok(Err(err)) => { + error!(err = %err, "Client call failed."); + return Err(GossipError::BroadcastError(err)); + }, + Err(_) => { + error!("Client call timed out."); + return Err(GossipError::TimeoutError); + }, } } else { return Err(GossipError::ClientNotConnected) } - Ok(()) } } diff --git a/crates/api/src/proposer/api.rs b/crates/api/src/proposer/api.rs index b550b4e..1b61efc 100644 --- a/crates/api/src/proposer/api.rs +++ b/crates/api/src/proposer/api.rs @@ -23,10 +23,10 @@ use ethereum_consensus::{ }, }; +use reth_primitives::kzg; use tokio::{ sync::{ - mpsc::{self, error::SendError, Receiver, Sender}, - RwLock, + mpsc::{self, error::SendError, Receiver, Sender}, oneshot, RwLock }, time::{sleep, Instant}, }; @@ -170,7 +170,11 @@ where return Err(ProposerApiError::EmptyRequest) } - let request_id = Uuid::new_v4(); + let request_id = Uuid::parse_str(headers + .get("x-request-id") + .map(|v| v.to_str().unwrap_or_default()) + .unwrap_or_default()).unwrap(); + let mut trace = RegisterValidatorsTrace { receive: get_nanos_timestamp()?, ..Default::default() }; @@ -320,22 +324,23 @@ where let successful_registrations = valid_registrations.len(); - // Add validator preferences to each registration - let mut valid_registrations_infos = Vec::new(); + // Bulk write registrations to db + tokio::spawn(async move { - for reg in valid_registrations { - let mut preferences = validator_preferences.clone(); + // Add validator preferences to each registration + let mut valid_registrations_infos = Vec::new(); - if proposer_api.auctioneer.is_primev_proposer(®.message.public_key).await? 
{ - preferences.trusted_builders = Some(vec!["PrimevBuilder".to_string()]); - } + for reg in valid_registrations { + let mut preferences = validator_preferences.clone(); - valid_registrations_infos - .push(ValidatorRegistrationInfo { registration: reg, preferences }); - } + if proposer_api.auctioneer.is_primev_proposer(®.message.public_key).await.unwrap_or_default() { + preferences.trusted_builders = Some(vec!["PrimevBuilder".to_string()]); + } + + valid_registrations_infos + .push(ValidatorRegistrationInfo { registration: reg, preferences }); + } - // Bulk write registrations to db - tokio::spawn(async move { if let Err(err) = proposer_api .db .save_validator_registrations(valid_registrations_infos, pool_name, user_agent) @@ -380,7 +385,11 @@ where return Err(ProposerApiError::ServiceUnavailableError) } - let request_id = Uuid::new_v4(); + let request_id = Uuid::parse_str(headers + .get("x-request-id") + .map(|v| v.to_str().unwrap_or_default()) + .unwrap_or_default()).unwrap(); + let mut trace = GetHeaderTrace { receive: get_nanos_timestamp()?, ..Default::default() }; let (head_slot, duty) = proposer_api.curr_slot_info.read().await.clone(); @@ -398,7 +407,7 @@ where // Dont allow requests for past slots if bid_request.slot < head_slot { - warn!(request_id = %request_id, "request for past slot"); + debug!(request_id = %request_id, "request for past slot"); return Err(ProposerApiError::RequestForPastSlot { request_slot: bid_request.slot, head_slot, @@ -439,7 +448,7 @@ where return Err(ProposerApiError::BidValueZero) } - info!( + debug!( request_id = %request_id, value = ?bid.value(), block_hash = ?bid.block_hash(), @@ -642,7 +651,10 @@ where req: Request, ) -> Result { let mut trace = GetPayloadTrace { receive: get_nanos_timestamp()?, ..Default::default() }; - let request_id = Uuid::new_v4(); + let request_id = Uuid::parse_str(headers + .get("x-request-id") + .map(|v| v.to_str().unwrap_or_default()) + .unwrap_or_default()).unwrap(); let user_agent = 
headers.get("user-agent").and_then(|v| v.to_str().ok()).map(|v| v.to_string()); @@ -830,29 +842,15 @@ where return Err(err) } - let message = signed_blinded_block.message(); - let body = message.body(); - let provided_header = body.execution_payload_header(); - let local_header = - match try_execution_header_from_payload(&mut versioned_payload.execution_payload) { - Ok(header) => header, - Err(err) => { - error!( - request_id = %request_id, - error = %err, - "error converting execution payload to header", - ); - return Err(err.into()) - } - }; - if let Err(err) = self.validate_header_equality(&local_header, provided_header) { + if let Err(err) = self.validate_block_equality(&mut versioned_payload, &signed_blinded_block, request_id) { error!( - request_id = %request_id, + %request_id, error = %err, - "execution payload header invalid, does not match known ExecutionPayload", + "execution payload invalid, does not match known ExecutionPayload", ); return Err(err) } + trace.validation_complete = get_nanos_timestamp()?; let unblinded_payload = @@ -881,84 +879,66 @@ where // Publish and validate payload with multi-beacon-client let fork = unblinded_payload.version(); - if is_trusted_proposer { - let self_clone = self.clone(); - let unblinded_payload_clone = unblinded_payload.clone(); - let request_id_clone = *request_id; - let mut trace_clone = trace.clone(); - let payload_clone = payload.clone(); - tokio::spawn(async move { - if let Err(err) = self_clone - .multi_beacon_client - .publish_block( - unblinded_payload_clone.clone(), - Some(BroadcastValidation::ConsensusAndEquivocation), - fork, - ) - .await - { - error!(request_id = %request_id_clone, error = %err, "error publishing block"); - }; + let (tx, rx) = oneshot::channel(); - trace_clone.beacon_client_broadcast = get_nanos_timestamp().unwrap_or_default(); + let self_clone = self.clone(); + let unblinded_payload_clone = unblinded_payload.clone(); + let request_id_clone = *request_id; + let mut trace_clone = 
trace.clone(); + let payload_clone = payload.clone(); - // Broadcast payload to all broadcasters - self_clone.broadcast_signed_block( - unblinded_payload_clone.clone(), - Some(BroadcastValidation::Gossip), - &request_id_clone, - ); - trace_clone.broadcaster_block_broadcast = get_nanos_timestamp().unwrap_or_default(); - - // While we wait for the block to propagate, we also store the payload information - trace_clone.on_deliver_payload = get_nanos_timestamp().unwrap_or_default(); - self_clone - .save_delivered_payload_info( - payload_clone, - &signed_blinded_block, - &proposer_public_key, - &trace_clone, - &request_id_clone, - user_agent, - ) - .await; - }); - } else { - if let Err(err) = self + tokio::spawn(async move { + if let Err(err) = self_clone .multi_beacon_client .publish_block( - unblinded_payload.clone(), + unblinded_payload_clone.clone(), Some(BroadcastValidation::ConsensusAndEquivocation), fork, ) .await { - error!(request_id = %request_id, error = %err, "error publishing block"); - return Err(err.into()) - } + error!(request_id = %request_id_clone, error = %err, "error publishing block"); + }; - trace.beacon_client_broadcast = get_nanos_timestamp()?; + trace_clone.beacon_client_broadcast = get_nanos_timestamp().unwrap_or_default(); // Broadcast payload to all broadcasters - self.broadcast_signed_block( - unblinded_payload.clone(), + self_clone.broadcast_signed_block( + unblinded_payload_clone.clone(), Some(BroadcastValidation::Gossip), - request_id, + &request_id_clone, ); - trace.broadcaster_block_broadcast = get_nanos_timestamp()?; + trace_clone.broadcaster_block_broadcast = get_nanos_timestamp().unwrap_or_default(); // While we wait for the block to propagate, we also store the payload information - trace.on_deliver_payload = get_nanos_timestamp()?; - self.save_delivered_payload_info( - payload.clone(), - &signed_blinded_block, - &proposer_public_key, - trace, - request_id, - user_agent, - ) - .await; + trace_clone.on_deliver_payload = 
get_nanos_timestamp().unwrap_or_default(); + self_clone + .save_delivered_payload_info( + payload_clone, + &signed_blinded_block, + &proposer_public_key, + &trace_clone, + &request_id_clone, + user_agent, + ) + .await; + + if !is_trusted_proposer { + if let Err(_) = tx.send(()) { + error!(request_id = %request_id_clone, "Error sending beacon client response, receiver dropped"); + } + } + }); + + if !is_trusted_proposer { + + if let Ok(_) = rx.await { + info!(request_id = %request_id, trace = ?trace, "Payload published and saved!") + } else { + error!(request_id = %request_id, "Error in beacon client publishing"); + return Err(ProposerApiError::InternalServerError); + } // Calculate the remaining time needed to reach the target propagation duration. // Conditionally pause the execution until we hit @@ -1112,41 +1092,74 @@ where Ok(()) } - /// Validates that the `ExecutionPayloadHeader` of a given `SignedBlindedBeaconBlock` matches - /// the known `ExecutionPayload`. + /// Validates that the `SignedBlindedBeaconBlock` matches the known `ExecutionPayload`. /// /// - Checks the fork versions match. /// - Checks the equality of the local and provided header. + /// - Checks the equality of the kzg commitments. /// - Returns `Ok(())` if the `ExecutionPayloadHeader` matches. /// - Returns `Err(ProposerApiError)` for mismatching or invalid headers. 
- fn validate_header_equality( + fn validate_block_equality( &self, - local_header: &ExecutionPayloadHeader, - provided_header: ExecutionPayloadHeaderRef<'_>, + local_versioned_payload: &mut PayloadAndBlobs, + provided_signed_blinded_block: &SignedBlindedBeaconBlock, + request_id: &Uuid, ) -> Result<(), ProposerApiError> { + let message = provided_signed_blinded_block.message(); + let body = message.body(); + let provided_header = body.execution_payload_header(); + + let local_header = + match try_execution_header_from_payload(&mut local_versioned_payload.execution_payload) { + Ok(header) => header, + Err(err) => { + error!( + %request_id, + error = %err, + "error converting execution payload to header", + ); + return Err(err.into()); + } + }; + match local_header { ExecutionPayloadHeader::Bellatrix(local_header) => { let provided_header = provided_header.bellatrix().ok_or(ProposerApiError::PayloadTypeMismatch)?; - if local_header != provided_header { - return Err(ProposerApiError::BlindedBlockAndPayloadHeaderMismatch) + if local_header != *provided_header { + return Err(ProposerApiError::BlindedBlockAndPayloadHeaderMismatch); } } ExecutionPayloadHeader::Capella(local_header) => { let provided_header = provided_header.capella().ok_or(ProposerApiError::PayloadTypeMismatch)?; - if local_header != provided_header { - return Err(ProposerApiError::BlindedBlockAndPayloadHeaderMismatch) + if local_header != *provided_header { + return Err(ProposerApiError::BlindedBlockAndPayloadHeaderMismatch); } } ExecutionPayloadHeader::Deneb(local_header) => { let provided_header = provided_header.deneb().ok_or(ProposerApiError::PayloadTypeMismatch)?; - if local_header != provided_header { - return Err(ProposerApiError::BlindedBlockAndPayloadHeaderMismatch) + if local_header != *provided_header { + return Err(ProposerApiError::BlindedBlockAndPayloadHeaderMismatch); + } + + let local_kzg_commitments = local_versioned_payload + .blobs_bundle + .as_ref() + .map(|bundle| 
&bundle.commitments) + .ok_or(ProposerApiError::BlobKzgCommitmentsMismatch)?; + + let provided_kzg_commitments = body + .blob_kzg_commitments() + .ok_or(ProposerApiError::BlobKzgCommitmentsMismatch)?; + + if local_kzg_commitments != provided_kzg_commitments { + return Err(ProposerApiError::BlobKzgCommitmentsMismatch); } } } + Ok(()) } @@ -1264,31 +1277,34 @@ where /// Will process new gossiped messages from async fn process_gossiped_info(&self, mut recveiver: Receiver) { while let Some(msg) = recveiver.recv().await { - if let GossipedMessage::GetPayload(payload) = msg { - let api_clone = self.clone(); - tokio::spawn(async move { - let mut trace = GetPayloadTrace { - receive: get_nanos_timestamp().unwrap_or_default(), - ..Default::default() - }; - info!(request_id = %payload.request_id, "processing gossiped payload"); - match api_clone - ._get_payload( - payload.signed_blinded_beacon_block, - &mut trace, - &payload.request_id, + match msg { + GossipedMessage::GetPayload(payload) => { + let api_clone = self.clone(); + tokio::spawn(async move { + let mut trace = GetPayloadTrace { + receive: get_nanos_timestamp().unwrap_or_default(), + ..Default::default() + }; + debug!(request_id = %payload.request_id, "processing gossiped payload"); + match api_clone + ._get_payload( + payload.signed_blinded_beacon_block, + &mut trace, + &payload.request_id, None, - ) - .await - { - Ok(_get_payload_response) => { - info!(request_id = %payload.request_id, "gossiped payload processed"); - } - Err(err) => { - error!(request_id = %payload.request_id, error = %err, "error processing gossiped payload"); + ) + .await + { + Ok(_get_payload_response) => { + debug!(request_id = %payload.request_id, "gossiped payload processed"); + } + Err(err) => { + error!(request_id = %payload.request_id, error = %err, "error processing gossiped payload"); + } } - } - }); + }); + } + _ => {} } } } diff --git a/crates/api/src/proposer/error.rs b/crates/api/src/proposer/error.rs index 5b9c07a..cdb2b1b 100644 
--- a/crates/api/src/proposer/error.rs +++ b/crates/api/src/proposer/error.rs @@ -180,6 +180,9 @@ pub enum ProposerApiError { #[error("not serving headers")] NotServingHeaders, + + #[error("blob kzg commitments mismatch in blinded block and payload")] + BlobKzgCommitmentsMismatch, } impl IntoResponse for ProposerApiError { @@ -352,6 +355,9 @@ impl IntoResponse for ProposerApiError { }, ProposerApiError::NotServingHeaders => { (StatusCode::NO_CONTENT, ProposerApiError::NotServingHeaders.to_string()).into_response() + }, + ProposerApiError::BlobKzgCommitmentsMismatch => { + (StatusCode::BAD_REQUEST, "blob kzg commitments mismatch in blinded block and payload").into_response() } } } diff --git a/crates/api/src/router.rs b/crates/api/src/router.rs index 595a57d..4499c75 100644 --- a/crates/api/src/router.rs +++ b/crates/api/src/router.rs @@ -1,6 +1,6 @@ use axum::{ error_handling::HandleErrorLayer, - http::StatusCode, + http::{request, StatusCode}, middleware, routing::{get, post}, Extension, Router, @@ -9,14 +9,16 @@ use helix_beacon_client::{beacon_client::BeaconClient, multi_beacon_client::Mult use helix_common::{Route, RouterConfig}; use helix_database::postgres::postgres_db_service::PostgresDatabaseService; use helix_datastore::redis::redis_cache::RedisCache; +use hyper::HeaderMap; +use tracing::warn; use std::{collections::HashMap, sync::Arc, time::Duration}; use tower::{timeout::TimeoutLayer, BoxError, ServiceBuilder}; use tower_http::limit::RequestBodyLimitLayer; +use tower_http::request_id::{MakeRequestUuid, PropagateRequestIdLayer, SetRequestIdLayer}; use crate::{ builder::{ - api::{BuilderApi, MAX_PAYLOAD_LENGTH}, - optimistic_simulator::OptimisticSimulator, + api::{BuilderApi, MAX_PAYLOAD_LENGTH}, multi_simulator::MultiSimulator, optimistic_simulator::OptimisticSimulator }, constraints::api::ConstraintsApi, gossiper::grpc_gossiper::GrpcGossiperClientManager, @@ -33,7 +35,7 @@ use crate::{ pub type BuilderApiProd = BuilderApi< RedisCache, 
PostgresDatabaseService, - OptimisticSimulator, + MultiSimulator>, GrpcGossiperClientManager, >; @@ -159,10 +161,23 @@ pub fn build_router( // Add Error-handling layer router = router.layer( ServiceBuilder::new() - .layer(HandleErrorLayer::new(|_: BoxError| async { StatusCode::REQUEST_TIMEOUT })) + .layer(HandleErrorLayer::new(|headers: HeaderMap, e: BoxError| async move { + let request_id = headers + .get("x-request-id") + .map(|v| v.to_str().unwrap_or_default()) + .unwrap_or_default(); + warn!( + request_id = request_id, + "Request timed out {:?}", e + ); + StatusCode::REQUEST_TIMEOUT + })) .layer(TimeoutLayer::new(API_REQUEST_TIMEOUT)), ); + router = router.layer(PropagateRequestIdLayer::x_request_id()); + router = router.layer(SetRequestIdLayer::x_request_id(MakeRequestUuid)); + // Add Extension layers router = router .layer(Extension(builder_api)) diff --git a/crates/api/src/service.rs b/crates/api/src/service.rs index fc5fbcb..684893e 100644 --- a/crates/api/src/service.rs +++ b/crates/api/src/service.rs @@ -10,7 +10,7 @@ use tokio::{ use tracing::{error, info}; use crate::{ - builder::optimistic_simulator::OptimisticSimulator, + builder::{multi_simulator::MultiSimulator, optimistic_simulator::OptimisticSimulator}, gossiper::grpc_gossiper::GrpcGossiperClientManager, relay_data::{BidsCache, DeliveredPayloadsCache}, router::{build_router, BuilderApiProd, ConstraintsApiProd, DataApiProd, ProposerApiProd}, @@ -43,6 +43,7 @@ impl ApiService { .await .expect("failed to store builders info from config"); postgres_db.load_known_validators().await; + //postgres_db.load_validator_registrations().await; postgres_db.start_registration_processor().await; let db = Arc::new(postgres_db); @@ -125,12 +126,19 @@ impl ApiService { let client = reqwest::ClientBuilder::new().timeout(SIMULATOR_REQUEST_TIMEOUT).build().unwrap(); - let simulator = OptimisticSimulator::::new( - auctioneer.clone(), - db.clone(), - client, - config.simulator.url.clone(), - ); + let mut simulators = 
vec![]; + + for cfg in &config.simulators { + let simulator = OptimisticSimulator::::new( + auctioneer.clone(), + db.clone(), + client.clone(), + cfg.url.clone(), + ); + simulators.push(simulator); + } + + let simulator = MultiSimulator::new(simulators); let (mut chain_event_updater, slot_update_sender) = ChainEventUpdater::new(db.clone(), chain_info.clone()); diff --git a/crates/beacon-client/src/beacon_client.rs b/crates/beacon-client/src/beacon_client.rs index b5ec91b..438807a 100644 --- a/crates/beacon-client/src/beacon_client.rs +++ b/crates/beacon-client/src/beacon_client.rs @@ -1,4 +1,4 @@ -use std::{sync::Arc, time::Duration}; +use std::{sync::Arc, time::Duration, vec}; use async_trait::async_trait; use ethereum_consensus::{primitives::Root, ssz}; diff --git a/crates/common/src/config.rs b/crates/common/src/config.rs index 20605a7..2925637 100644 --- a/crates/common/src/config.rs +++ b/crates/common/src/config.rs @@ -20,7 +20,7 @@ pub struct RelayConfig { pub redis: RedisConfig, #[serde(default)] pub broadcasters: Vec, - pub simulator: SimulatorConfig, + pub simulators: Vec, #[serde(default)] pub beacon_clients: Vec, #[serde(default)] @@ -436,11 +436,13 @@ fn test_config() { let mut config = RelayConfig::default(); config.redis.url = "redis://localhost:6379".to_string(); - config.simulator.url = "http://localhost:8080".to_string(); + config.simulators = vec![ + SimulatorConfig { + url: "http://localhost:8080".to_string() + }]; config.beacon_clients.push(BeaconClientConfig { url: Url::parse("http://localhost:8080").unwrap(), - gossip_blobs_enabled: false, - }); + gossip_blobs_enabled: false, }); config.broadcasters.push(BroadcasterConfig::BeaconClient(BeaconClientConfig { url: Url::parse("http://localhost:8080").unwrap(), gossip_blobs_enabled: false, diff --git a/crates/database/src/postgres/migrations/V25__adds_slot_val_prefs.sql b/crates/database/src/postgres/migrations/V25__adds_slot_val_prefs.sql index fa656df..0313526 100644 --- 
a/crates/database/src/postgres/migrations/V25__adds_slot_val_prefs.sql +++ b/crates/database/src/postgres/migrations/V25__adds_slot_val_prefs.sql @@ -1,8 +1,8 @@ -CREATE TABLE slot_preferences ( +CREATE TABLE IF NOT EXISTS slot_preferences ( "slot_number" integer PRIMARY KEY, "proposer_pubkey" bytea, "filtering" smallint, "trusted_builders" varchar[], "header_delay" boolean, - "gossip_blobs" boolean, + "gossip_blobs" boolean ); \ No newline at end of file diff --git a/crates/database/src/postgres/postgres_db_service.rs b/crates/database/src/postgres/postgres_db_service.rs index c571847..a3d5351 100644 --- a/crates/database/src/postgres/postgres_db_service.rs +++ b/crates/database/src/postgres/postgres_db_service.rs @@ -1,3 +1,4 @@ +use core::num; use std::{ collections::HashSet, ops::DerefMut, @@ -14,16 +15,9 @@ use helix_common::{ api::{ builder_api::BuilderGetValidatorsResponseEntry, data_api::BidFilters, proposer_api::ValidatorRegistrationInfo, - }, - bid_submission::{ + }, bid_submission::{ v2::header_submission::SignedHeaderSubmission, BidSubmission, BidTrace, SignedBidSubmission, - }, - deneb::SignedValidatorRegistration, - simulator::BlockSimError, - versioned_payload::PayloadAndBlobs, - BuilderInfo, Filtering, GetHeaderTrace, GetPayloadTrace, GossipedHeaderTrace, - GossipedPayloadTrace, HeaderSubmissionTrace, ProposerInfo, RelayConfig, - SignedValidatorRegistrationEntry, SubmissionTrace, ValidatorPreferences, ValidatorSummary, + }, deneb::SignedValidatorRegistration, simulator::BlockSimError, validator_preferences, versioned_payload::PayloadAndBlobs, BuilderInfo, Filtering, GetHeaderTrace, GetPayloadTrace, GossipedHeaderTrace, GossipedPayloadTrace, HeaderSubmissionTrace, ProposerInfo, RelayConfig, SignedValidatorRegistrationEntry, SubmissionTrace, ValidatorPreferences, ValidatorSummary }; use tokio_postgres::{types::ToSql, NoTls}; use tracing::{error, info}; @@ -579,7 +573,8 @@ impl DatabaseService for PostgresDatabaseService { 
validator_preferences.trusted_builders, validator_preferences.header_delay, validator_preferences.gossip_blobs, - validator_registrations.inserted_at + validator_registrations.inserted_at, + validator_registrations.user_agent FROM validator_registrations INNER JOIN validator_preferences ON validator_registrations.public_key = validator_preferences.public_key WHERE validator_registrations.public_key = $1 @@ -1133,9 +1128,9 @@ impl DatabaseService for PostgresDatabaseService { transaction.execute( " INSERT INTO slot_preferences (slot_number, proposer_pubkey, filtering, trusted_builders, header_delay, gossip_blobs) - SELECT $1::bytea, $2, filtering, trusted_builders, header_delay, gossip_blobs + SELECT $1, $2, filtering, trusted_builders, header_delay, gossip_blobs FROM validator_preferences - WHERE public_key = $1::bytea + WHERE public_key = $2 ON CONFLICT (slot_number) DO NOTHING; ", &[ diff --git a/crates/housekeeper/src/housekeeper.rs b/crates/housekeeper/src/housekeeper.rs index 7b48ad6..a93960c 100644 --- a/crates/housekeeper/src/housekeeper.rs +++ b/crates/housekeeper/src/housekeeper.rs @@ -229,7 +229,7 @@ impl }); } - debug!( + info!( head_slot = head_slot, head_slot_pos = (head_slot % EPOCH_SLOTS) + 1, prev_head_slot = prev_head_slot, @@ -531,7 +531,7 @@ impl async fn primev_update(&self) -> Result<(), HousekeeperError> { let primev_config = self.config.primev_config.as_ref().unwrap(); - let primev_builders = get_registered_primev_builders(primev_config).await; + let primev_builders = get_registered_primev_builders(&primev_config).await; for builder_pubkey in primev_builders { self.db .store_builder_info( From bdcf7af23d2918195fc4520538c9db88327eccd6 Mon Sep 17 00:00:00 2001 From: owen Date: Wed, 4 Dec 2024 10:19:22 +0000 Subject: [PATCH 13/39] use req id from middleware --- crates/api/src/builder/api.rs | 47 +++++++++++++++++++++++------------ 1 file changed, 31 insertions(+), 16 deletions(-) diff --git a/crates/api/src/builder/api.rs 
b/crates/api/src/builder/api.rs index ab8166d..be5e4c9 100644 --- a/crates/api/src/builder/api.rs +++ b/crates/api/src/builder/api.rs @@ -306,11 +306,14 @@ where Extension(api): Extension>>, req: Request, ) -> Result { - let request_id = Uuid::new_v4(); + let request_id = Uuid::parse_str(headers + .get("x-request-id") + .map(|v| v.to_str().unwrap_or_default()) + .unwrap_or_default()).unwrap(); let mut trace = SubmissionTrace { receive: get_nanos_timestamp()?, ..Default::default() }; let (head_slot, next_duty) = api.curr_slot_info.read().await.clone(); - info!( + debug!( request_id = %request_id, event = "submit_block", head_slot = head_slot, @@ -349,6 +352,7 @@ where info!( request_id = %request_id, event = "submit_block", + slot = payload.slot(), builder_pub_key = ?payload.builder_public_key(), block_value = %payload.value(), block_hash = ?block_hash, @@ -564,12 +568,15 @@ where Extension(api): Extension>>, req: Request, ) -> Result { - let request_id = Uuid::new_v4(); + let request_id = Uuid::parse_str(headers + .get("x-request-id") + .map(|v| v.to_str().unwrap_or_default()) + .unwrap_or_default()).unwrap(); let mut trace = HeaderSubmissionTrace { receive: get_nanos_timestamp()?, ..Default::default() }; let (head_slot, next_duty) = api.curr_slot_info.read().await.clone(); - info!( + debug!( request_id = %request_id, event = "submit_header", head_slot = head_slot, @@ -581,6 +588,16 @@ where decode_header_submission(req, &mut trace, &request_id).await?; let block_hash = payload.block_hash().clone(); + info!( + request_id = %request_id, + event = "submit_header", + slot = payload.slot(), + builder_pub_key = ?payload.builder_public_key(), + block_value = %payload.value(), + block_hash = ?block_hash, + "header submission decoded", + ); + // Verify the payload is for the current slot if payload.slot() <= head_slot { debug!( @@ -605,15 +622,6 @@ where } let next_duty = next_duty.unwrap(); - info!( - request_id = %request_id, - event = "submit_header", - builder_pub_key 
= ?payload.builder_public_key(), - block_value = %payload.value(), - block_hash = ?block_hash, - "header submission decoded", - ); - // Fetch the next payload attributes and validate basic information let payload_attributes = api .fetch_payload_attributes(payload.slot(), payload.parent_hash(), &block_hash, &request_id) @@ -803,12 +811,15 @@ where Extension(api): Extension>>, req: Request, ) -> Result { - let request_id = Uuid::new_v4(); + let request_id = Uuid::parse_str(headers + .get("x-request-id") + .map(|v| v.to_str().unwrap_or_default()) + .unwrap_or_default()).unwrap(); let now = SystemTime::now(); let mut trace = SubmissionTrace { receive: get_nanos_from(now)?, ..Default::default() }; let (head_slot, next_duty) = api.curr_slot_info.read().await.clone(); - info!( + debug!( request_id = %request_id, event = "submit_block_v2", head_slot = head_slot, @@ -823,6 +834,7 @@ where info!( request_id = %request_id, event = "submit_block_v2", + slot = payload.slot(), builder_pub_key = ?builder_pub_key, block_value = %payload.value(), block_hash = ?payload.block_hash(), @@ -1017,7 +1029,10 @@ where Extension(api): Extension>>, Json(mut signed_cancellation): Json, ) -> Result { - let request_id = Uuid::new_v4(); + let request_id = Uuid::parse_str(headers + .get("x-request-id") + .map(|v| v.to_str().unwrap_or_default()) + .unwrap_or_default()).unwrap(); let (head_slot, _next_duty) = api.curr_slot_info.read().await.clone(); let slot = signed_cancellation.message.slot; From a490a91be237106d662f1e1479f39080ede248e1 Mon Sep 17 00:00:00 2001 From: owen Date: Wed, 4 Dec 2024 10:21:27 +0000 Subject: [PATCH 14/39] fix build --- crates/api/src/builder/api.rs | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/crates/api/src/builder/api.rs b/crates/api/src/builder/api.rs index be5e4c9..72d573b 100644 --- a/crates/api/src/builder/api.rs +++ b/crates/api/src/builder/api.rs @@ -304,6 +304,7 @@ where /// Implements this API: pub async fn submit_block( Extension(api): Extension>>, + 
headers: HeaderMap, req: Request, ) -> Result { let request_id = Uuid::parse_str(headers @@ -566,6 +567,7 @@ where /// verifications before saving the headre to the auctioneer. pub async fn submit_header( Extension(api): Extension>>, + headers: HeaderMap, req: Request, ) -> Result { let request_id = Uuid::parse_str(headers @@ -809,6 +811,7 @@ where /// Implements this API: TODO: point to gattaca spec. rename? pub async fn submit_block_v2( Extension(api): Extension>>, + headers: HeaderMap, req: Request, ) -> Result { let request_id = Uuid::parse_str(headers @@ -1027,6 +1030,7 @@ where /// all other relays. pub async fn cancel_bid( Extension(api): Extension>>, + headers: HeaderMap, Json(mut signed_cancellation): Json, ) -> Result { let request_id = Uuid::parse_str(headers From cc19c8d3f55b4e674adcb88d2b931e3d578012a7 Mon Sep 17 00:00:00 2001 From: owen Date: Wed, 4 Dec 2024 10:30:25 +0000 Subject: [PATCH 15/39] bug fix --- crates/database/src/postgres/postgres_db_service.rs | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/crates/database/src/postgres/postgres_db_service.rs b/crates/database/src/postgres/postgres_db_service.rs index a3d5351..1b40b11 100644 --- a/crates/database/src/postgres/postgres_db_service.rs +++ b/crates/database/src/postgres/postgres_db_service.rs @@ -453,7 +453,8 @@ impl DatabaseService for PostgresDatabaseService { gas_limit = excluded.gas_limit, timestamp = excluded.timestamp, signature = excluded.signature, - inserted_at = excluded.inserted_at + inserted_at = excluded.inserted_at, + user_agent = excluded.user_agent ", &[ &(fee_recipient.as_ref()), From f57ba92f6b4716f31c44b76af0b2b8c4d62b87ba Mon Sep 17 00:00:00 2001 From: ltitanb Date: Wed, 4 Dec 2024 13:39:05 +0000 Subject: [PATCH 16/39] bump rust version --- Cargo.lock | 811 +++++++++------------------------- Cargo.toml | 22 +- Dockerfile | 2 +- crates/housekeeper/Cargo.toml | 2 - local.Dockerfile | 2 +- rust-toolchain.toml | 2 +- 6 files changed, 214 insertions(+), 627 
deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 1000b36..212412f 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -33,7 +33,7 @@ version = "0.8.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ac1f845298e95f983ff1944b728ae08b8cebab80d684f0a832ed0fc74dfa27e2" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "cipher", "cpufeatures", ] @@ -44,7 +44,7 @@ version = "0.8.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2c99f64d1e06488f620f932677e24bc6e2897582980441ae90a671415bd7ec2f" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "once_cell", "version_check", ] @@ -86,15 +86,15 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a0628ec0ba5b98b3370bb6be17b12f23bfce8ee4ad83823325a20546d9b03b78" dependencies = [ "alloy-rlp", - "bytes 1.5.0", - "cfg-if 1.0.0", + "bytes", + "cfg-if", "const-hex", "derive_more", "getrandom", "hex-literal", "itoa", "proptest", - "rand 0.8.5", + "rand", "ruint", "serde", "tiny-keccak", @@ -107,15 +107,15 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6c0e5e60ff0e0c34c553822dabcfe0f5fece5a8c52f08a915be8c737de4b03fa" dependencies = [ "alloy-rlp", - "bytes 1.5.0", - "cfg-if 1.0.0", + "bytes", + "cfg-if", "const-hex", "derive_more", "getrandom", "hex-literal", "itoa", "proptest", - "rand 0.8.5", + "rand", "ruint", "serde", "tiny-keccak", @@ -128,8 +128,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ccb3ead547f4532bc8af961649942f0b9c16ee9226e26caa3f38420651cc0bf4" dependencies = [ "alloy-rlp", - "bytes 1.5.0", - "cfg-if 1.0.0", + "bytes", + "cfg-if", "const-hex", "derive_more", "hex-literal", @@ -137,7 +137,7 @@ dependencies = [ "k256 0.13.2", "keccak-asm", "proptest", - "rand 0.8.5", + "rand", "ruint", "serde", "tiny-keccak", @@ -151,7 +151,7 @@ checksum = "f938f00332d63a5b0ac687bd6f46d03884638948921d9f8b50c59563d421ae25" dependencies = [ "alloy-rlp-derive", "arrayvec", - "bytes 1.5.0", + 
"bytes", "smol_str", ] @@ -381,7 +381,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1df2c09229cbc5a028b1d70e00fdb2acee28b1055dfb5ca73eea49c5a25c4e7c" dependencies = [ "num-traits", - "rand 0.8.5", + "rand", ] [[package]] @@ -391,7 +391,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "94893f1e0c6eeab764ade8dc4c0db24caf4fe7cbbaafc0eba0a9030f447b5185" dependencies = [ "num-traits", - "rand 0.8.5", + "rand", ] [[package]] @@ -537,7 +537,7 @@ checksum = "d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8" dependencies = [ "hermit-abi 0.1.19", "libc", - "winapi 0.3.9", + "winapi", ] [[package]] @@ -577,7 +577,7 @@ dependencies = [ "async-trait", "axum-core 0.3.4", "bitflags 1.3.2", - "bytes 1.5.0", + "bytes", "futures-util", "http 0.2.9", "http-body 0.4.5", @@ -605,7 +605,7 @@ dependencies = [ "async-trait", "axum-core 0.4.3", "base64 0.21.2", - "bytes 1.5.0", + "bytes", "futures-util", "http 1.0.0", "http-body 1.0.0", @@ -640,7 +640,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "759fa577a247914fd3f7f76d62972792636412fbfd634cd452f6a385a74d2d2c" dependencies = [ "async-trait", - "bytes 1.5.0", + "bytes", "futures-util", "http 0.2.9", "http-body 0.4.5", @@ -657,7 +657,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a15c63fd72d41492dc4f497196f5da1fb04fb7529e631d73630d1b491e47a2e3" dependencies = [ "async-trait", - "bytes 1.5.0", + "bytes", "futures-util", "http 1.0.0", "http-body 1.0.0", @@ -679,7 +679,7 @@ checksum = "2089b7e3f35b9dd2d0ed921ead4f6d318c27680d4a5bd167b3ee120edb105837" dependencies = [ "addr2line", "cc", - "cfg-if 1.0.0", + "cfg-if", "libc", "miniz_oxide", "object", @@ -897,16 +897,6 @@ version = "1.4.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "14c189c53d098945499cdfa7ecc63567cf3886b3332b312a5b4585d8d3a6a610" -[[package]] -name = "bytes" -version = "0.4.12" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "206fdffcfa2df7cbe15601ef46c813fce0965eb3286db6b56c583b814b51c81c" -dependencies = [ - "byteorder", - "iovec", -] - [[package]] name = "bytes" version = "1.5.0" @@ -1031,12 +1021,6 @@ dependencies = [ "nom", ] -[[package]] -name = "cfg-if" -version = "0.1.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4785bdd1c96b2a846b2bd7cc02e86b6b3dbf14e7e53446c4f54c92a361040822" - [[package]] name = "cfg-if" version = "1.0.0" @@ -1119,15 +1103,6 @@ version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cd7cc57abe963c6d3b9d8be5b06ba7c8957a930305ca90304f24ef040aa6f961" -[[package]] -name = "cloudabi" -version = "0.0.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ddfc5b9aa5d4507acaf872de71051dfd0e309860e88966e1051e462a077aac4f" -dependencies = [ - "bitflags 1.3.2", -] - [[package]] name = "codecs-derive" version = "0.1.0-alpha.10" @@ -1181,7 +1156,7 @@ dependencies = [ "hmac", "once_cell", "pbkdf2 0.12.2", - "rand 0.8.5", + "rand", "sha2 0.10.8", "thiserror", ] @@ -1229,7 +1204,7 @@ version = "4.6.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "35ed6e9d84f0b51a7f52daf1c7d71dd136fd7a3f41a8462b8cdb8c78d920fad4" dependencies = [ - "bytes 1.5.0", + "bytes", "futures-core", "memchr", "pin-project-lite", @@ -1243,7 +1218,7 @@ version = "1.13.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0121754e84117e65f9d90648ee6aa4882a6e63110307ab73967a4c5e7e69e586" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "cpufeatures", "hex", "proptest", @@ -1332,7 +1307,7 @@ version = "1.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b540bd8bc810d3885c6ea91e2018302f68baba2129ab3e88f32389ee9370880d" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", ] [[package]] @@ -1341,8 +1316,8 @@ version = "0.5.8" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "a33c2bf77f2df06183c3aa30d1e96c0695a313d4f9c453cc3762a6db39f99200" dependencies = [ - "cfg-if 1.0.0", - "crossbeam-utils 0.8.16", + "cfg-if", + "crossbeam-utils", ] [[package]] @@ -1351,9 +1326,9 @@ version = "0.8.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ce6fd6f855243022dcecf8702fef0c297d4338e226845fe067f6341ad9fa0cef" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "crossbeam-epoch", - "crossbeam-utils 0.8.16", + "crossbeam-utils", ] [[package]] @@ -1363,30 +1338,19 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ae211234986c545741a7dc064309f67ee1e5ad243d0e48335adc0484d960bcc7" dependencies = [ "autocfg", - "cfg-if 1.0.0", - "crossbeam-utils 0.8.16", + "cfg-if", + "crossbeam-utils", "memoffset", "scopeguard", ] -[[package]] -name = "crossbeam-utils" -version = "0.7.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c3c7c73a2d1e9fc0886a08b93e98eb643461230d5f1925e4036204d5f2e261a8" -dependencies = [ - "autocfg", - "cfg-if 0.1.10", - "lazy_static", -] - [[package]] name = "crossbeam-utils" version = "0.8.16" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5a22b2d63d4d1dc0b7f1b6b2747dd0088008a9be28b6ddf0b1e7d335e3037294" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", ] [[package]] @@ -1402,7 +1366,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ef2b4b23cddf68b89b8f8069890e8c270d54e2d5fe1b143820234805e4cb17ef" dependencies = [ "generic-array", - "rand_core 0.6.4", + "rand_core", "subtle", "zeroize", ] @@ -1414,7 +1378,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0dc92fb57ca44df6db8059111ab3af99a63d5d0f8375d9972e319a379c6bab76" dependencies = [ "generic-array", - "rand_core 0.6.4", + "rand_core", "subtle", "zeroize", ] @@ -1514,11 +1478,11 @@ version = "5.5.3" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "978747c1d849a7d2ee5e8adc0159961c48fb7e5db2f06af6723b80123bb53856" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "hashbrown 0.14.1", - "lock_api 0.4.10", + "lock_api", "once_cell", - "parking_lot_core 0.9.8", + "parking_lot_core", ] [[package]] @@ -1605,10 +1569,11 @@ dependencies = [ [[package]] name = "deranged" -version = "0.3.8" +version = "0.3.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f2696e8a945f658fd14dc3b87242e6b80cd0f36ff04ea560fa39082368847946" +checksum = "b42b6fa04a440b495c8b04d0e71b707c585f83cb9cb28cf8cd0d976c315e31b4" dependencies = [ + "powerfmt", "serde", ] @@ -1678,7 +1643,7 @@ version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b98cf8ebf19c3d1b223e151f99a4f9f0690dca41414773390fc824184ac833e1" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "dirs-sys-next", ] @@ -1702,7 +1667,7 @@ checksum = "4ebda144c4fe02d1f7ea1a7d9641b6fc6b580adcfa024ae48797ecdeb6825b4d" dependencies = [ "libc", "redox_users", - "winapi 0.3.9", + "winapi", ] [[package]] @@ -1757,7 +1722,7 @@ dependencies = [ "generic-array", "group 0.12.1", "pkcs8 0.9.0", - "rand_core 0.6.4", + "rand_core", "sec1 0.3.0", "subtle", "zeroize", @@ -1776,7 +1741,7 @@ dependencies = [ "generic-array", "group 0.13.0", "pkcs8 0.10.2", - "rand_core 0.6.4", + "rand_core", "sec1 0.7.3", "subtle", "zeroize", @@ -1797,7 +1762,7 @@ version = "0.8.33" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7268b386296a025e474d5140678f75d6de9493ae55a5d709eeb9dd08149945e1" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", ] [[package]] @@ -1808,11 +1773,11 @@ checksum = "26fa0a0be8915790626d5759eb51fe47435a8eac92c2f212bd2da9aa7f30ea56" dependencies = [ "base64 0.13.1", "bs58 0.4.0", - "bytes 1.5.0", + "bytes", "hex", "k256 0.11.6", "log", - "rand 0.8.5", + "rand", "rlp", "serde", "sha3", @@ -1826,11 +1791,11 @@ source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "2a3d8dc56e02f954cac8eb489772c552c473346fc34f67412bb6244fd647f7e4" dependencies = [ "base64 0.21.2", - "bytes 1.5.0", + "bytes", "hex", "k256 0.13.2", "log", - "rand 0.8.5", + "rand", "rlp", "serde", "sha3", @@ -1909,7 +1874,7 @@ dependencies = [ "hex", "hmac", "pbkdf2 0.11.0", - "rand 0.8.5", + "rand", "scrypt", "serde", "serde_json", @@ -1965,7 +1930,7 @@ dependencies = [ "integer-sqrt", "multiaddr", "multihash", - "rand 0.8.5", + "rand", "serde", "serde_json", "serde_yaml 0.8.26", @@ -2037,7 +2002,7 @@ checksum = "7d3627f83d8b87b432a5fad9934b4565260722a141a2c40f371f8080adec9425" dependencies = [ "ethereum-types", "itertools 0.10.5", - "smallvec 1.11.0", + "smallvec", ] [[package]] @@ -2134,7 +2099,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "82d80cc6ad30b14a48ab786523af33b37f28a8623fc06afd55324816ef18fb1f" dependencies = [ "arrayvec", - "bytes 1.5.0", + "bytes", "cargo_metadata 0.18.1", "chrono", "const-hex", @@ -2145,7 +2110,7 @@ dependencies = [ "num_enum", "once_cell", "open-fastrlp", - "rand 0.8.5", + "rand", "rlp", "serde", "serde_json", @@ -2209,7 +2174,7 @@ dependencies = [ "async-trait", "auto_impl", "base64 0.21.2", - "bytes 1.5.0", + "bytes", "const-hex", "enr 0.10.0", "ethers-core", @@ -2250,7 +2215,7 @@ dependencies = [ "elliptic-curve 0.13.7", "eth-keystore", "ethers-core", - "rand 0.8.5", + "rand", "sha2 0.10.8", "thiserror", "tracing", @@ -2262,7 +2227,7 @@ version = "2.0.14" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "66244a771d9163282646dbeffe0e6eca4dda4146b6498644e678ac6089b11edd" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "const-hex", "dirs", "dunce", @@ -2309,28 +2274,6 @@ dependencies = [ "once_cell", ] -[[package]] -name = "failure" -version = "0.1.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d32e9bd16cc02eae7db7ef620b392808b89f6a5e16bb3497d159c6b92a0f4f86" 
-dependencies = [ - "backtrace", - "failure_derive", -] - -[[package]] -name = "failure_derive" -version = "0.1.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aa4da3c766cd7a0db8242e326e9e4e081edd567072893ed320008189715366a4" -dependencies = [ - "proc-macro2", - "quote", - "syn 1.0.109", - "synstructure", -] - [[package]] name = "fallible-iterator" version = "0.2.0" @@ -2351,7 +2294,7 @@ checksum = "139834ddba373bbdd213dffe02c8d110508dcf1726c2be27e8d1f7d7e1856418" dependencies = [ "arrayvec", "auto_impl", - "bytes 1.5.0", + "bytes", ] [[package]] @@ -2360,7 +2303,7 @@ version = "0.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d013fc25338cc558c5c2cfbad646908fb23591e2404481826742b651c9af7160" dependencies = [ - "rand_core 0.6.4", + "rand_core", "subtle", ] @@ -2370,7 +2313,7 @@ version = "0.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ded41244b729663b1e574f1b4fb731469f69f79c17667b5d776b16cda0479449" dependencies = [ - "rand_core 0.6.4", + "rand_core", "subtle", ] @@ -2413,7 +2356,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "835c052cb0c08c1acf6ffd71c022172e18723949c8282f2b9f27efbc51e64534" dependencies = [ "byteorder", - "rand 0.8.5", + "rand", "rustc-hex", "static_assertions", ] @@ -2471,31 +2414,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9564fc758e15025b46aa6643b1b77d047d1a56a1aea6e01002ac0c7026876213" dependencies = [ "libc", - "winapi 0.3.9", -] - -[[package]] -name = "fuchsia-cprng" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a06f77d526c1a601b7c4cdd98f54b5eaabffc14d5f2f0296febdc7f357c6d3ba" - -[[package]] -name = "fuchsia-zircon" -version = "0.3.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2e9763c69ebaae630ba35f74888db465e49e259ba1bc0eda7d06f4a067615d82" -dependencies = [ - "bitflags 1.3.2", 
- "fuchsia-zircon-sys", + "winapi", ] -[[package]] -name = "fuchsia-zircon-sys" -version = "0.3.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3dcaa9ae7725d12cdb85b3ad99a434db70b468c09ded17e012d86b5c1010f7a7" - [[package]] name = "funty" version = "2.0.0" @@ -2644,9 +2565,9 @@ version = "0.2.10" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "be4136b2a15dd319360be1c07d9933517ccf0be8f16bf62a3bee4f0d618df427" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "libc", - "wasi 0.11.0+wasi-snapshot-preview1", + "wasi", ] [[package]] @@ -2680,7 +2601,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5dfbfb3a6cfbd390d5c9564ab283a0349b9b9fcd46a706c1eb10e0db70bfbac7" dependencies = [ "ff 0.12.1", - "rand_core 0.6.4", + "rand_core", "subtle", ] @@ -2691,7 +2612,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f0f9ef7462f7c099f518d754361858f86d8a07af53ba9af0fe635bbccb151a63" dependencies = [ "ff 0.13.0", - "rand_core 0.6.4", + "rand_core", "subtle", ] @@ -2701,7 +2622,7 @@ version = "0.3.21" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "91fc23aa11be92976ef4729127f1a74adf36d8436f7816b185d18df956790833" dependencies = [ - "bytes 1.5.0", + "bytes", "fnv", "futures-core", "futures-sink", @@ -2720,7 +2641,7 @@ version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "31d030e59af851932b72ceebadf4a2b5986dba4c3b99dd2493f8273a0f151943" dependencies = [ - "bytes 1.5.0", + "bytes", "fnv", "futures-core", "futures-sink", @@ -2788,7 +2709,7 @@ dependencies = [ "async-trait", "auto_impl", "axum 0.7.4", - "bytes 1.5.0", + "bytes", "ethereum-consensus", "flate2", "futures 0.3.28", @@ -2802,7 +2723,7 @@ dependencies = [ "mockito", "moka", "prost", - "rand 0.8.5", + "rand", "redis", "reqwest", "reqwest-eventsource", @@ -2903,7 +2824,7 @@ name = "helix-database" version = "0.0.1" dependencies = [ 
"async-trait", - "bytes 1.5.0", + "bytes", "chrono", "dashmap", "deadpool-postgres", @@ -2911,7 +2832,7 @@ dependencies = [ "ethereum-consensus", "helix-common", "hex", - "rand 0.8.5", + "rand", "refinery", "reth-primitives 0.1.0-alpha.10", "serde", @@ -2974,7 +2895,6 @@ dependencies = [ "socket2 0.4.9", "thiserror", "tokio", - "tokio-ping", "tracing", "tracing-subscriber", "uuid 1.7.0", @@ -3076,7 +2996,7 @@ version = "0.2.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bd6effc99afb63425aff9b05836f029929e345a6148a14b7ecd5ab67af944482" dependencies = [ - "bytes 1.5.0", + "bytes", "fnv", "itoa", ] @@ -3087,7 +3007,7 @@ version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b32afd38673a8016f7c9ae69e5af41a58f81b1d31689040f2f1959594ce194ea" dependencies = [ - "bytes 1.5.0", + "bytes", "fnv", "itoa", ] @@ -3098,7 +3018,7 @@ version = "0.4.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d5f38f16d184e36f2408a55281cd658ecbd3ca05cce6d6510a176eca393e26d1" dependencies = [ - "bytes 1.5.0", + "bytes", "http 0.2.9", "pin-project-lite", ] @@ -3109,7 +3029,7 @@ version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1cac85db508abc24a2e48553ba12a996e87244a0395ce011e62b37158745d643" dependencies = [ - "bytes 1.5.0", + "bytes", "http 1.0.0", ] @@ -3119,7 +3039,7 @@ version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "41cb79eb393015dadd30fc252023adb0b2400a0caee0fa2a077e6e21a551e840" dependencies = [ - "bytes 1.5.0", + "bytes", "futures-util", "http 1.0.0", "http-body 1.0.0", @@ -3165,7 +3085,7 @@ version = "0.14.27" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ffb1cfd654a8219eaef89881fdb3bb3b1cdc5fa75ded05d6933b2b382e395468" dependencies = [ - "bytes 1.5.0", + "bytes", "futures-channel", "futures-core", "futures-util", @@ -3189,7 +3109,7 @@ version = "1.1.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "fb5aa53871fc917b1a9ed87b683a5d86db645e23acb32c2e0785a353e522fb75" dependencies = [ - "bytes 1.5.0", + "bytes", "futures-channel", "futures-util", "h2 0.4.2", @@ -3234,7 +3154,7 @@ version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d6183ddfa99b85da61a140bea0efc93fdf56ceaa041b37d553518030827f9905" dependencies = [ - "bytes 1.5.0", + "bytes", "hyper 0.14.27", "native-tls", "tokio", @@ -3247,7 +3167,7 @@ version = "0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ca38ef113da30126bbff9cd1705f9273e15d45498615d138b0c20279ac7a76aa" dependencies = [ - "bytes 1.5.0", + "bytes", "futures-util", "http 1.0.0", "http-body 1.0.0", @@ -3377,7 +3297,7 @@ version = "0.1.12" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7a5bbe824c507c5da5956355e86a746d82e0e1464f65d862cc5e71da70e94b2c" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", ] [[package]] @@ -3389,15 +3309,6 @@ dependencies = [ "num-traits", ] -[[package]] -name = "iovec" -version = "0.1.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b2b3ea6ff95e175473f8ffe6a7eb7c00d054240321b84c57051175fe3c1e075e" -dependencies = [ - "libc", -] - [[package]] name = "ipnet" version = "2.8.0" @@ -3501,7 +3412,7 @@ version = "0.11.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "72c1e0b51e7ec0a97369623508396067a486bd0cbed95a2659a4b863d28cfc8b" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "ecdsa 0.14.8", "elliptic-curve 0.12.3", "sha2 0.10.8", @@ -3513,7 +3424,7 @@ version = "0.13.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3f01b677d82ef7a676aa37e099defd83a28e15687112cafdd112d60236b6115b" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "ecdsa 0.16.9", "elliptic-curve 0.13.7", "once_cell", @@ -3540,16 +3451,6 @@ dependencies = [ "sha3-asm", ] -[[package]] -name = 
"kernel32-sys" -version = "0.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7507624b29483431c0ba2d82aece8ca6cdba9382bff4ddd0f7490560c056098d" -dependencies = [ - "winapi 0.2.8", - "winapi-build", -] - [[package]] name = "lalrpop" version = "0.20.0" @@ -3605,8 +3506,8 @@ version = "0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b67380fd3b2fbe7527a606e18729d21c6f3951633d0500574c4dc22d2d638b9f" dependencies = [ - "cfg-if 1.0.0", - "winapi 0.3.9", + "cfg-if", + "winapi", ] [[package]] @@ -3638,15 +3539,6 @@ version = "0.4.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "57bcfdad1b858c2db7c38303a6d2ad4dfaf5eb53dfeb0910128b2c26d6158503" -[[package]] -name = "lock_api" -version = "0.3.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c4da24a77a3d8a6d4862d95f72e6fdb9c09a643ecdb402d754004a557f2bec75" -dependencies = [ - "scopeguard", -] - [[package]] name = "lock_api" version = "0.4.10" @@ -3687,19 +3579,13 @@ version = "0.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ed1202b2a6f884ae56f04cff409ab315c5ce26b5e58d7412e484f01fd52f52ef" -[[package]] -name = "maybe-uninit" -version = "2.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "60302e4db3a61da70c0cb7991976248362f30319e88850c487b9b95bbf059e00" - [[package]] name = "md-5" version = "0.10.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d89e7ee0cfbedfc4da3340218492196241d89eefb6dab27de5df917a6d2e78cf" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "digest 0.10.7", ] @@ -3760,25 +3646,6 @@ dependencies = [ "adler", ] -[[package]] -name = "mio" -version = "0.6.23" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4afd66f5b91bf2a3bc13fad0e21caedac168ca4c707504e75585648ae80e4cc4" -dependencies = [ - "cfg-if 0.1.10", - "fuchsia-zircon", - "fuchsia-zircon-sys", - 
"iovec", - "kernel32-sys", - "libc", - "log", - "miow", - "net2", - "slab", - "winapi 0.2.8", -] - [[package]] name = "mio" version = "0.8.8" @@ -3786,22 +3653,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "927a765cd3fc26206e66b296465fa9d3e5ab003e651c1b3c060e7956d96b19d2" dependencies = [ "libc", - "wasi 0.11.0+wasi-snapshot-preview1", + "wasi", "windows-sys 0.48.0", ] -[[package]] -name = "miow" -version = "0.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ebd808424166322d4a38da87083bfddd3ac4c131334ed55856112eb06d46944d" -dependencies = [ - "kernel32-sys", - "net2", - "winapi 0.2.8", - "ws2_32-sys", -] - [[package]] name = "mockito" version = "1.2.0" @@ -3813,7 +3668,7 @@ dependencies = [ "futures 0.3.28", "hyper 0.14.27", "log", - "rand 0.8.5", + "rand", "regex", "serde_json", "serde_urlencoded", @@ -3850,15 +3705,15 @@ checksum = "b28455ac4363046076054a7e9cfbd7f168019c29dba32a625f59fc0aeffaaea4" dependencies = [ "crossbeam-channel", "crossbeam-epoch", - "crossbeam-utils 0.8.16", + "crossbeam-utils", "num_cpus", "once_cell", - "parking_lot 0.12.1", + "parking_lot", "quanta", "rustc_version 0.4.0", "scheduled-thread-pool", "skeptic", - "smallvec 1.11.0", + "smallvec", "tagptr", "thiserror", "triomphe", @@ -3934,17 +3789,6 @@ dependencies = [ "tempfile", ] -[[package]] -name = "net2" -version = "0.2.39" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b13b648036a2339d06de780866fbdfda0dde886de7b3af2ddeba8b14f4ee34ac" -dependencies = [ - "cfg-if 0.1.10", - "libc", - "winapi 0.3.9", -] - [[package]] name = "new_debug_unreachable" version = "1.0.4" @@ -3968,7 +3812,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "77a8165726e8236064dbb45459242600304b42a5ea24ee2948e18e023bf7ba84" dependencies = [ "overload", - "winapi 0.3.9", + "winapi", ] [[package]] @@ -4005,6 +3849,12 @@ dependencies = [ "num-traits", ] +[[package]] +name = "num-conv" 
+version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "51d515d32fb182ee37cda2ccdcb92950d6a3c2893aa280e540671c2cd0f3b1d9" + [[package]] name = "num-format" version = "0.4.4" @@ -4118,7 +3968,7 @@ checksum = "786393f80485445794f6043fd3138854dd109cc6c4bd1a6383db304c9ce9b9ce" dependencies = [ "arrayvec", "auto_impl", - "bytes 1.5.0", + "bytes", "ethereum-types", "open-fastrlp-derive", ] @@ -4129,7 +3979,7 @@ version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "003b2be5c6c53c1cfeb0a238b8a1c3915cd410feb684457a36c10038f764bb1c" dependencies = [ - "bytes 1.5.0", + "bytes", "proc-macro2", "quote", "syn 1.0.109", @@ -4142,7 +3992,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "729b745ad4a5575dd06a3e1af1414bd330ee561c01b3899eb584baeaa8def17e" dependencies = [ "bitflags 1.3.2", - "cfg-if 1.0.0", + "cfg-if", "foreign-types", "libc", "once_cell", @@ -4201,15 +4051,6 @@ version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39" -[[package]] -name = "owning_ref" -version = "0.3.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cdf84f41639e037b484f93433aa3897863b561ed65c6e59c7073d7c561710f37" -dependencies = [ - "stable_deref_trait", -] - [[package]] name = "parity-scale-codec" version = "3.6.4" @@ -4219,7 +4060,7 @@ dependencies = [ "arrayvec", "bitvec", "byte-slice-cast", - "bytes 1.5.0", + "bytes", "impl-trait-for-tuples", "parity-scale-codec-derive", "serde", @@ -4237,62 +4078,14 @@ dependencies = [ "syn 1.0.109", ] -[[package]] -name = "parking_lot" -version = "0.5.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d4d05f1349491390b1730afba60bb20d55761bef489a954546b58b4b34e1e2ac" -dependencies = [ - "owning_ref", - "parking_lot_core 0.2.14", -] - -[[package]] -name = "parking_lot" -version = 
"0.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f842b1982eb6c2fe34036a4fbfb06dd185a3f5c8edfaacdf7d1ea10b07de6252" -dependencies = [ - "lock_api 0.3.4", - "parking_lot_core 0.6.3", - "rustc_version 0.2.3", -] - [[package]] name = "parking_lot" version = "0.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3742b2c103b9f06bc9fff0a37ff4912935851bee6d36f3c02bcc755bcfec228f" dependencies = [ - "lock_api 0.4.10", - "parking_lot_core 0.9.8", -] - -[[package]] -name = "parking_lot_core" -version = "0.2.14" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4db1a8ccf734a7bce794cc19b3df06ed87ab2f3907036b693c68f56b4d4537fa" -dependencies = [ - "libc", - "rand 0.4.6", - "smallvec 0.6.14", - "winapi 0.3.9", -] - -[[package]] -name = "parking_lot_core" -version = "0.6.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bda66b810a62be75176a80873726630147a5ca780cd33921e0b5709033e66b0a" -dependencies = [ - "cfg-if 0.1.10", - "cloudabi", - "libc", - "redox_syscall 0.1.57", - "rustc_version 0.2.3", - "smallvec 0.6.14", - "winapi 0.3.9", + "lock_api", + "parking_lot_core", ] [[package]] @@ -4301,10 +4094,10 @@ version = "0.9.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "93f00c865fe7cabf650081affecd3871070f26767e7b2070a3ffae14c654b447" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "libc", "redox_syscall 0.3.5", - "smallvec 1.11.0", + "smallvec", "windows-targets 0.48.5", ] @@ -4315,7 +4108,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7676374caaee8a325c9e7a2ae557f216c5563a171d6997b0ef8a65af35147700" dependencies = [ "base64ct", - "rand_core 0.6.4", + "rand_core", "subtle", ] @@ -4422,7 +4215,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "48e4cc64c2ad9ebe670cb8fd69dd50ae301650392e81c05f9bfcb2d5bdbc24b0" dependencies = [ "phf_shared 0.11.2", - "rand 0.8.5", 
+ "rand", ] [[package]] @@ -4522,12 +4315,12 @@ checksum = "49b6c5ef183cd3ab4ba005f1ca64c21e8bd97ce4699cfea9e8d9a2c4958ca520" dependencies = [ "base64 0.21.2", "byteorder", - "bytes 1.5.0", + "bytes", "fallible-iterator", "hmac", "md-5", "memchr", - "rand 0.8.5", + "rand", "sha2 0.10.8", "stringprep", ] @@ -4538,11 +4331,17 @@ version = "0.2.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8d2234cdee9408b523530a9b6d2d6b373d1db34f6a8e51dc03ded1828d7fb67c" dependencies = [ - "bytes 1.5.0", + "bytes", "fallible-iterator", "postgres-protocol", ] +[[package]] +name = "powerfmt" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "439ee305def115ba05938db6eb1644ff94165c5ab5e9420d1c1bcedbba909391" + [[package]] name = "ppv-lite86" version = "0.2.17" @@ -4633,7 +4432,7 @@ dependencies = [ "bitflags 2.4.1", "lazy_static", "num-traits", - "rand 0.8.5", + "rand", "rand_chacha", "rand_xorshift", "regex-syntax 0.8.5", @@ -4648,7 +4447,7 @@ version = "0.12.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "146c289cda302b98a28d40c8b3b90498d6e526dd24ac2ecea73e4e491685b94a" dependencies = [ - "bytes 1.5.0", + "bytes", "prost-derive", ] @@ -4658,7 +4457,7 @@ version = "0.12.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c55e02e35260070b6f716a2423c2ff1c3bb1642ddca6f99e1f26d06268a0e2d2" dependencies = [ - "bytes 1.5.0", + "bytes", "heck 0.4.1", "itertools 0.11.0", "log", @@ -4713,14 +4512,14 @@ version = "0.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a17e662a7a8291a865152364c20c7abc5e60486ab2001e8ec10b24862de0b9ab" dependencies = [ - "crossbeam-utils 0.8.16", + "crossbeam-utils", "libc", "mach2", "once_cell", "raw-cpuid", - "wasi 0.11.0+wasi-snapshot-preview1", + "wasi", "web-sys", - "winapi 0.3.9", + "winapi", ] [[package]] @@ -4744,19 +4543,6 @@ version = "0.7.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "dc33ff2d4973d518d823d61aa239014831e521c75da58e3df4840d3f47749d09" -[[package]] -name = "rand" -version = "0.4.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "552840b97013b1a26992c11eac34bdd778e464601a4c2054b5f0bff7c6761293" -dependencies = [ - "fuchsia-cprng", - "libc", - "rand_core 0.3.1", - "rdrand", - "winapi 0.3.9", -] - [[package]] name = "rand" version = "0.8.5" @@ -4765,7 +4551,7 @@ checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" dependencies = [ "libc", "rand_chacha", - "rand_core 0.6.4", + "rand_core", ] [[package]] @@ -4775,24 +4561,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" dependencies = [ "ppv-lite86", - "rand_core 0.6.4", + "rand_core", ] -[[package]] -name = "rand_core" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7a6fdeb83b075e8266dcc8762c22776f6877a63111121f5f8c7411e5be7eed4b" -dependencies = [ - "rand_core 0.4.2", -] - -[[package]] -name = "rand_core" -version = "0.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9c33a3c44ca05fa6f1807d8e6743f3824e8509beca625669633be0acbdf509dc" - [[package]] name = "rand_core" version = "0.6.4" @@ -4808,7 +4579,7 @@ version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d25bf25ec5ae4a3f1b92f929810509a2f53d7dca2f50b794ff57e3face536c8f" dependencies = [ - "rand_core 0.6.4", + "rand_core", ] [[package]] @@ -4838,19 +4609,10 @@ checksum = "4b8f95bd6966f5c87776639160a66bd8ab9895d9d4ab01ddba9fc60661aebe8d" dependencies = [ "crossbeam-channel", "crossbeam-deque", - "crossbeam-utils 0.8.16", + "crossbeam-utils", "num_cpus", ] -[[package]] -name = "rdrand" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"678054eb77286b51581ba43620cc911abf02758c91f93f479767aed0f90458b2" -dependencies = [ - "rand_core 0.3.1", -] - [[package]] name = "redis" version = "0.23.3" @@ -4858,7 +4620,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4f49cdc0bb3f412bf8e7d1bd90fe1d9eb10bc5c399ba90973c14662a27b3f8ba" dependencies = [ "async-trait", - "bytes 1.5.0", + "bytes", "combine", "futures-util", "itoa", @@ -4872,12 +4634,6 @@ dependencies = [ "url", ] -[[package]] -name = "redox_syscall" -version = "0.1.57" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "41cc0f7e4d5d4544e8861606a285bb08d3e70712ccc7d2b84d7c0ccfaf4b05ce" - [[package]] name = "redox_syscall" version = "0.3.5" @@ -4924,14 +4680,14 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e895cb870cf06e92318cbbeb701f274d022d5ca87a16fa8244e291cd035ef954" dependencies = [ "async-trait", - "cfg-if 1.0.0", + "cfg-if", "lazy_static", "log", "regex", "serde", "siphasher 1.0.0", "thiserror", - "time 0.3.28", + "time", "tokio", "tokio-postgres", "toml 0.7.8", @@ -5009,7 +4765,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "37b1ae8d9ac08420c66222fb9096fc5de435c3c48542bc5336c51892cffafb41" dependencies = [ "base64 0.21.2", - "bytes 1.5.0", + "bytes", "encoding_rs", "futures-core", "futures-util", @@ -5074,7 +4830,7 @@ name = "reth-codecs" version = "0.1.0-alpha.10" source = "git+https://github.com/paradigmxyz/reth?rev=5dd5555c5c7d8e43420e273e7005b8af63a847a5#5dd5555c5c7d8e43420e273e7005b8af63a847a5" dependencies = [ - "bytes 1.5.0", + "bytes", "codecs-derive 0.1.0-alpha.10", "revm-primitives 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -5085,7 +4841,7 @@ version = "0.1.0-alpha.13" source = "git+https://github.com/chainbound/reth?branch=patch/fiber#cead7220a835bbe282cb5ab9203c462dc5b1af8d" dependencies = [ "alloy-primitives 0.5.3", - "bytes 1.5.0", + "bytes", "codecs-derive 0.1.0-alpha.13", ] @@ 
-5111,7 +4867,7 @@ dependencies = [ "alloy-rlp", "alloy-sol-types", "byteorder", - "bytes 1.5.0", + "bytes", "c-kzg 0.1.1", "crc", "derive_more", @@ -5146,7 +4902,7 @@ dependencies = [ "alloy-primitives 0.5.3", "alloy-rlp", "byteorder", - "bytes 1.5.0", + "bytes", "c-kzg 0.4.0", "derive_more", "itertools 0.11.0", @@ -5163,7 +4919,7 @@ dependencies = [ "serde", "serde_json", "sha2 0.10.8", - "smallvec 1.11.0", + "smallvec", "strum 0.25.0", "sucds", "tempfile", @@ -5179,7 +4935,7 @@ source = "git+https://github.com/chainbound/reth?branch=patch/fiber#cead7220a835 dependencies = [ "alloy-primitives 0.5.3", "alloy-rlp", - "bytes 1.5.0", + "bytes", "itertools 0.11.0", "jsonrpsee-types", "secp256k1 0.27.0", @@ -5295,7 +5051,7 @@ dependencies = [ "spin 0.5.2", "untrusted 0.7.1", "web-sys", - "winapi 0.3.9", + "winapi", ] [[package]] @@ -5305,7 +5061,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c17fa4cb658e3583423e915b9f3acc01cceaee1860e33d59ebae66adc3a2dc0d" dependencies = [ "cc", - "cfg-if 1.0.0", + "cfg-if", "getrandom", "libc", "spin 0.9.8", @@ -5328,7 +5084,7 @@ version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bb919243f34364b6bd2fc10ef797edbfa75f33c252e7998527479c6d6b47e1ec" dependencies = [ - "bytes 1.5.0", + "bytes", "rlp-derive", "rustc-hex", ] @@ -5353,14 +5109,14 @@ dependencies = [ "alloy-rlp", "ark-ff 0.3.0", "ark-ff 0.4.2", - "bytes 1.5.0", + "bytes", "fastrlp", "num-bigint", "num-traits", "parity-scale-codec", "primitive-types", "proptest", - "rand 0.8.5", + "rand", "rlp", "ruint-macro", "serde", @@ -5392,15 +5148,6 @@ version = "2.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3e75f6a532d0fd9f7f13144f392b6ad56a32696bfcd9c78f797f16bbb6f072d6" -[[package]] -name = "rustc_version" -version = "0.2.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "138e3e0acb6c9fb258b19b67cb8abd63c00679d2851805ea151465464fe9030a" 
-dependencies = [ - "semver 0.9.0", -] - [[package]] name = "rustc_version" version = "0.3.3" @@ -5516,7 +5263,7 @@ version = "2.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7f7d66a1128282b7ef025a8ead62a4a9fcf017382ec53b8ffbf4d7bf77bd3c60" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "derive_more", "parity-scale-codec", "scale-info-derive", @@ -5549,7 +5296,7 @@ version = "0.2.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3cbc66816425a074528352f5789333ecff06ca41b36b0b0efdfbb29edc391a19" dependencies = [ - "parking_lot 0.12.1", + "parking_lot", ] [[package]] @@ -5614,7 +5361,7 @@ version = "0.27.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "25996b82292a7a57ed3508f052cfff8640d38d32018784acd714758b43da9c8f" dependencies = [ - "rand 0.8.5", + "rand", "secp256k1-sys 0.8.1", ] @@ -5668,22 +5415,13 @@ dependencies = [ "libc", ] -[[package]] -name = "semver" -version = "0.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1d7eb9ef2c18661902cc47e535f9bc51b78acd254da71d375c2f6720d9a40403" -dependencies = [ - "semver-parser 0.7.0", -] - [[package]] name = "semver" version = "0.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f301af10236f6df4160f7c3f04eec6dbc70ace82d23326abad5edee88801c6b6" dependencies = [ - "semver-parser 0.10.2", + "semver-parser", ] [[package]] @@ -5695,12 +5433,6 @@ dependencies = [ "serde", ] -[[package]] -name = "semver-parser" -version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "388a1df253eca08550bef6c72392cfe7c30914bf41df5269b68cbd6ff8f570a3" - [[package]] name = "semver-parser" version = "0.10.2" @@ -5809,7 +5541,7 @@ dependencies = [ "serde", "serde_json", "serde_with_macros", - "time 0.3.28", + "time", ] [[package]] @@ -5859,7 +5591,7 @@ dependencies = [ "futures 0.3.28", "lazy_static", "log", - "parking_lot 0.12.1", + 
"parking_lot", "serial_test_derive", ] @@ -5881,7 +5613,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "99cd6713db3cf16b6c84e06321e049a9b9f699826e16096d23bbcc44d15d51a6" dependencies = [ "block-buffer 0.9.0", - "cfg-if 1.0.0", + "cfg-if", "cpufeatures", "digest 0.9.0", "opaque-debug", @@ -5893,7 +5625,7 @@ version = "0.10.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e3bf829a2d51ab4a5ddf1352d8470c140cadc8301b2ae1789db023f01cedd6ba" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "cpufeatures", "digest 0.10.7", ] @@ -5911,7 +5643,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4d58a1e1bf39749807d89cf2d98ac2dfa0ff1cb3faa38fbb64dd88ac8013d800" dependencies = [ "block-buffer 0.9.0", - "cfg-if 1.0.0", + "cfg-if", "cpufeatures", "digest 0.9.0", "opaque-debug", @@ -5924,7 +5656,7 @@ version = "0.10.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "793db75ad2bcafc3ffa7c68b215fee268f537982cd901d132f89c6343f3a3dc8" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "cpufeatures", "digest 0.10.7", ] @@ -5955,7 +5687,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c28efc5e327c837aa837c59eae585fc250715ef939ac32881bcc11677cd02d46" dependencies = [ "cc", - "cfg-if 1.0.0", + "cfg-if", ] [[package]] @@ -5989,7 +5721,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "74233d3b3b2f6d4b006dc19dee745e73e2a6bfb6f93607cd3b02bd5b00797d7c" dependencies = [ "digest 0.10.7", - "rand_core 0.6.4", + "rand_core", ] [[package]] @@ -5999,7 +5731,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "77549399552de45a898a580c1b41d445bf730df867cc44e6c0233bbc4b8329de" dependencies = [ "digest 0.10.7", - "rand_core 0.6.4", + "rand_core", ] [[package]] @@ -6017,7 +5749,7 @@ dependencies = [ "num-bigint", "num-traits", "thiserror", - "time 0.3.28", + "time", ] [[package]] @@ -6056,15 
+5788,6 @@ dependencies = [ "autocfg", ] -[[package]] -name = "smallvec" -version = "0.6.14" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b97fcaeba89edba30f044a10c6a3cc39df9c3f17d7cd829dd1446cab35f890e0" -dependencies = [ - "maybe-uninit", -] - [[package]] name = "smallvec" version = "1.11.0" @@ -6084,17 +5807,6 @@ dependencies = [ "serde", ] -[[package]] -name = "socket2" -version = "0.3.19" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "122e570113d28d773067fab24266b66753f6ea915758651696b6e35e49f88d6e" -dependencies = [ - "cfg-if 1.0.0", - "libc", - "winapi 0.3.9", -] - [[package]] name = "socket2" version = "0.4.9" @@ -6102,7 +5814,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "64a4a911eed85daf18834cfaa86a79b7d266ff93ff5ba14005426219480ed662" dependencies = [ "libc", - "winapi 0.3.9", + "winapi", ] [[package]] @@ -6218,17 +5930,11 @@ dependencies = [ "itertools 0.10.5", "serde", "serde_derive", - "smallvec 1.11.0", + "smallvec", "tree_hash 0.5.2", "typenum", ] -[[package]] -name = "stable_deref_trait" -version = "1.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3" - [[package]] name = "static_assertions" version = "1.1.0" @@ -6243,7 +5949,7 @@ checksum = "f91138e76242f575eb1d3b38b4f1362f10d3a43f47d182a5b359af488a02293b" dependencies = [ "new_debug_unreachable", "once_cell", - "parking_lot 0.12.1", + "parking_lot", "phf_shared 0.10.0", "precomputed-hash", ] @@ -6318,7 +6024,7 @@ dependencies = [ "byteorder", "crunchy", "lazy_static", - "rand 0.8.5", + "rand", "rustc-hex", ] @@ -6448,7 +6154,7 @@ version = "3.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cb94d2f3cc536af71caac6b6fcebf65860b347e7ce0cc9ebe8f70d3e521054ef" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "fastrand", "redox_syscall 0.3.5", "rustix", @@ -6463,7 +6169,7 @@ 
checksum = "c59df8ac95d96ff9bede18eb7300b0fda5e5d8d90960e76f8e14ae765eedbf1f" dependencies = [ "dirs-next", "rustversion", - "winapi 0.3.9", + "winapi", ] [[package]] @@ -6501,7 +6207,7 @@ version = "1.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3fdd6f064ccff2d6567adcb3873ca630700f00b5ad3f060c25b5dcfd9a4ce152" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "once_cell", ] @@ -6536,23 +6242,14 @@ dependencies = [ [[package]] name = "time" -version = "0.1.45" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1b797afad3f312d1c66a56d11d0316f916356d11bd158fbc6ca6389ff6bf805a" -dependencies = [ - "libc", - "wasi 0.10.0+wasi-snapshot-preview1", - "winapi 0.3.9", -] - -[[package]] -name = "time" -version = "0.3.28" +version = "0.3.37" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "17f6bb557fd245c28e6411aa56b6403c689ad95061f50e4be16c274e70a17e48" +checksum = "35e7868883861bd0e56d9ac6efcaaca0d6d5d82a2a7ec8209ff492c07cf37b21" dependencies = [ "deranged", "itoa", + "num-conv", + "powerfmt", "serde", "time-core", "time-macros", @@ -6560,16 +6257,17 @@ dependencies = [ [[package]] name = "time-core" -version = "0.1.1" +version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7300fbefb4dadc1af235a9cef3737cea692a9d97e1b9cbcd4ebdae6f8868e6fb" +checksum = "ef927ca75afb808a4d64dd374f00a2adf8d0fcff8e7b184af886c3c87ec4a3f3" [[package]] name = "time-macros" -version = "0.2.14" +version = "0.2.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1a942f44339478ef67935ab2bbaec2fb0322496cf3cbe84b261e06ac3814c572" +checksum = "2834e6017e3e5e4b9834939793b282bc03b37a3336245fa820e35e233e2a85de" dependencies = [ + "num-conv", "time-core", ] @@ -6604,11 +6302,11 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4f38200e3ef7995e5ef13baec2f432a6da0aa9ac495b2c0e8f3b7eec2c92d653" dependencies = [ "backtrace", 
- "bytes 1.5.0", + "bytes", "libc", - "mio 0.8.8", + "mio", "num_cpus", - "parking_lot 0.12.1", + "parking_lot", "pin-project-lite", "signal-hook-registry", "socket2 0.5.3", @@ -6616,27 +6314,6 @@ dependencies = [ "windows-sys 0.48.0", ] -[[package]] -name = "tokio-executor" -version = "0.1.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fb2d1b8f4548dbf5e1f7818512e9c406860678f29c300cdf0ebac72d1a3a1671" -dependencies = [ - "crossbeam-utils 0.7.2", - "futures 0.1.31", -] - -[[package]] -name = "tokio-io" -version = "0.1.13" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "57fc868aae093479e3131e3d165c93b1c7474109d13c90ec0dda2a1bbfff0674" -dependencies = [ - "bytes 0.4.12", - "futures 0.1.31", - "log", -] - [[package]] name = "tokio-io-timeout" version = "1.2.0" @@ -6668,25 +6345,6 @@ dependencies = [ "tokio", ] -[[package]] -name = "tokio-ping" -version = "0.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9c87a14806e5786ea98babd87cbe942299764e207461e0534142f611945c7623" -dependencies = [ - "failure", - "futures 0.1.31", - "libc", - "mio 0.6.23", - "parking_lot 0.5.5", - "rand 0.4.6", - "socket2 0.3.19", - "time 0.1.45", - "tokio-executor", - "tokio-reactor", - "tokio-timer", -] - [[package]] name = "tokio-postgres" version = "0.7.10" @@ -6695,43 +6353,24 @@ checksum = "d340244b32d920260ae7448cb72b6e238bddc3d4f7603394e7dd46ed8e48f5b8" dependencies = [ "async-trait", "byteorder", - "bytes 1.5.0", + "bytes", "fallible-iterator", "futures-channel", "futures-util", "log", - "parking_lot 0.12.1", + "parking_lot", "percent-encoding", "phf", "pin-project-lite", "postgres-protocol", "postgres-types", - "rand 0.8.5", + "rand", "socket2 0.5.3", "tokio", "tokio-util", "whoami", ] -[[package]] -name = "tokio-reactor" -version = "0.1.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "09bc590ec4ba8ba87652da2068d150dcada2cfa2e07faae270a5e0409aa51351" 
-dependencies = [ - "crossbeam-utils 0.7.2", - "futures 0.1.31", - "lazy_static", - "log", - "mio 0.6.23", - "num_cpus", - "parking_lot 0.9.0", - "slab", - "tokio-executor", - "tokio-io", - "tokio-sync", -] - [[package]] name = "tokio-rustls" version = "0.24.1" @@ -6754,28 +6393,6 @@ dependencies = [ "tokio-util", ] -[[package]] -name = "tokio-sync" -version = "0.1.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "edfe50152bc8164fcc456dab7891fa9bf8beaf01c5ee7e1dd43a397c3cf87dee" -dependencies = [ - "fnv", - "futures 0.1.31", -] - -[[package]] -name = "tokio-timer" -version = "0.2.13" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "93044f2d313c95ff1cb7809ce9a7a05735b012288a888b62d4434fd58c94f296" -dependencies = [ - "crossbeam-utils 0.7.2", - "futures 0.1.31", - "slab", - "tokio-executor", -] - [[package]] name = "tokio-tungstenite" version = "0.16.1" @@ -6821,7 +6438,7 @@ version = "0.7.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "806fe8c2c87eccc8b3267cbae29ed3ab2d0bd37fca70ab622e46aaa9375ddb7d" dependencies = [ - "bytes 1.5.0", + "bytes", "futures-core", "futures-sink", "pin-project-lite", @@ -6907,7 +6524,7 @@ dependencies = [ "async-trait", "axum 0.6.20", "base64 0.21.2", - "bytes 1.5.0", + "bytes", "flate2", "h2 0.3.21", "http 0.2.9", @@ -6950,7 +6567,7 @@ dependencies = [ "indexmap 1.9.3", "pin-project", "pin-project-lite", - "rand 0.8.5", + "rand", "slab", "tokio", "tokio-util", @@ -6968,7 +6585,7 @@ dependencies = [ "async-compression", "base64 0.21.2", "bitflags 2.4.1", - "bytes 1.5.0", + "bytes", "futures-core", "futures-util", "http 1.0.0", @@ -7008,7 +6625,7 @@ version = "0.1.37" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8ce8c33a8d48bd45d624a6e523445fd21ec13d3653cd51f681abf67418f54eb8" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "log", "pin-project-lite", "tracing-attributes", @@ -7023,7 +6640,7 @@ checksum = 
"3566e8ce28cc0a3fe42519fc80e6b4c943cc4c8cef275620eb8dac2d3d4e06cf" dependencies = [ "crossbeam-channel", "thiserror", - "time 0.3.28", + "time", "tracing-subscriber", ] @@ -7080,7 +6697,7 @@ dependencies = [ "once_cell", "regex", "sharded-slab", - "smallvec 1.11.0", + "smallvec", "thread_local", "tracing", "tracing-core", @@ -7118,7 +6735,7 @@ checksum = "5c998ac5fe2b07c025444bdd522e6258110b63861c6698eedc610c071980238d" dependencies = [ "ethereum-types", "ethereum_hashing 1.0.0-beta.2", - "smallvec 1.11.0", + "smallvec", ] [[package]] @@ -7129,7 +6746,7 @@ checksum = "134d6b24a5b829f30b5ee7de05ba7384557f5f6b00e29409cdf2392f93201bfa" dependencies = [ "ethereum-types", "ethereum_hashing 0.6.0", - "smallvec 1.11.0", + "smallvec", ] [[package]] @@ -7163,11 +6780,11 @@ checksum = "6ad3713a14ae247f22a728a0456a545df14acf3867f905adff84be99e23b3ad1" dependencies = [ "base64 0.13.1", "byteorder", - "bytes 1.5.0", + "bytes", "http 0.2.9", "httparse", "log", - "rand 0.8.5", + "rand", "sha-1", "thiserror", "url", @@ -7181,12 +6798,12 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9e3dac10fd62eaf6617d3a904ae222845979aec67c615d1c842b4002c7666fb9" dependencies = [ "byteorder", - "bytes 1.5.0", + "bytes", "data-encoding", "http 0.2.9", "httparse", "log", - "rand 0.8.5", + "rand", "rustls", "sha1", "thiserror", @@ -7201,12 +6818,12 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9ef1a641ea34f399a848dea702823bbecfb4c486f911735368f1f137cb8257e1" dependencies = [ "byteorder", - "bytes 1.5.0", + "bytes", "data-encoding", "http 1.0.0", "httparse", "log", - "rand 0.8.5", + "rand", "sha1", "thiserror", "url", @@ -7349,7 +6966,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f00cc9702ca12d3c81455259621e676d0f7251cec66a21e98fe2e9a37db93b2a" dependencies = [ "getrandom", - "rand 0.8.5", + "rand", ] [[package]] @@ -7398,12 +7015,6 @@ dependencies = [ "try-lock", ] -[[package]] -name = "wasi" 
-version = "0.10.0+wasi-snapshot-preview1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1a143597ca7c7793eff794def352d41792a93c481eb1042423ff7ff72ba2c31f" - [[package]] name = "wasi" version = "0.11.0+wasi-snapshot-preview1" @@ -7416,7 +7027,7 @@ version = "0.2.87" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7706a72ab36d8cb1f80ffbf0e071533974a60d0a308d01a5d0375bf60499a342" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "wasm-bindgen-macro", ] @@ -7441,7 +7052,7 @@ version = "0.4.37" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c02dbc21516f9f1f04f187958890d7e6026df8d16540b7ad9492bc34a67cea03" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "js-sys", "wasm-bindgen", "web-sys", @@ -7526,12 +7137,6 @@ dependencies = [ "web-sys", ] -[[package]] -name = "winapi" -version = "0.2.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "167dc9d6949a9b857f3451275e911c3f44255842c1f7a76f33c55103a909087a" - [[package]] name = "winapi" version = "0.3.9" @@ -7542,12 +7147,6 @@ dependencies = [ "winapi-x86_64-pc-windows-gnu", ] -[[package]] -name = "winapi-build" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2d315eee3b34aca4797b2da6b13ed88266e6d612562a0c46390af8299fc699bc" - [[package]] name = "winapi-i686-pc-windows-gnu" version = "0.4.0" @@ -7560,7 +7159,7 @@ version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f29e6f9198ba0d26b4c9f07dbe6f9ed633e1f3d5b8b414090084349e46a52596" dependencies = [ - "winapi 0.3.9", + "winapi", ] [[package]] @@ -7725,20 +7324,10 @@ version = "0.50.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "524e57b2c537c0f9b1e69f1965311ec12182b4122e45035b1508cd24d2adadb1" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "windows-sys 0.48.0", ] -[[package]] -name = "ws2_32-sys" -version = "0.2.1" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "d59cefebd0c892fa2dd6de581e937301d8552cb44489cdff035c6187cb63fa5e" -dependencies = [ - "winapi 0.2.8", - "winapi-build", -] - [[package]] name = "ws_stream_wasm" version = "0.7.4" @@ -7813,12 +7402,12 @@ dependencies = [ "bzip2", "constant_time_eq", "crc32fast", - "crossbeam-utils 0.8.16", + "crossbeam-utils", "flate2", "hmac", "pbkdf2 0.11.0", "sha1", - "time 0.3.28", + "time", "zstd 0.11.2+zstd.1.5.2", ] diff --git a/Cargo.toml b/Cargo.toml index 7bd8200..d0c43b8 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,20 +1,20 @@ [workspace] members = [ - "crates/beacon-client", - "crates/api", - "crates/utils", - "crates/common", - "crates/database", - "crates/datastore", - "crates/cmd", - "crates/housekeeper", - "crates/website", + "crates/beacon-client", + "crates/api", + "crates/utils", + "crates/common", + "crates/database", + "crates/datastore", + "crates/cmd", + "crates/housekeeper", + "crates/website", ] resolver = "2" [workspace.package] version = "0.0.1" -rust-version = "1.72.0" +rust-version = "1.82.0" edition = "2021" repository = "https://github.com/gattaca-com/helix" license = "MIT OR Apache-2.0" @@ -44,7 +44,7 @@ reqwest = { version = "0.11.23", features = [ "blocking", ] } tokio = { version = "1.33.0", features = ["full"] } -tokio-stream = {version = "0.1.15", features = ["sync"]} +tokio-stream = { version = "0.1.15", features = ["sync"] } tower-http = { version = "0.5.1", features = ["full"] } url = "2.4" diff --git a/Dockerfile b/Dockerfile index 3d941e1..7b5da16 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,4 +1,4 @@ -FROM rust:1.72.0 as helix +FROM rust:1.82.0 as helix RUN apt update -y RUN apt install -y clang diff --git a/crates/housekeeper/Cargo.toml b/crates/housekeeper/Cargo.toml index d3c4e10..d561519 100644 --- a/crates/housekeeper/Cargo.toml +++ b/crates/housekeeper/Cargo.toml @@ -19,7 +19,6 @@ axum.workspace = true hyper.workspace = true tokio.workspace = true -tokio-ping = 
"0.2.0" socket2 = "0.4" futures-util = { version = "0.3", features = ["compat"] } @@ -43,4 +42,3 @@ tracing-subscriber.workspace = true uuid.workspace = true ethers = "2.0.14" - diff --git a/local.Dockerfile b/local.Dockerfile index 50d93d6..019f62b 100644 --- a/local.Dockerfile +++ b/local.Dockerfile @@ -1,4 +1,4 @@ -FROM rust:1.72.0 AS helix +FROM rust:1.82.0 AS helix RUN apt update -y RUN apt install -y clang diff --git a/rust-toolchain.toml b/rust-toolchain.toml index 748be27..5f3ff17 100644 --- a/rust-toolchain.toml +++ b/rust-toolchain.toml @@ -1,3 +1,3 @@ [toolchain] -channel = "1.72.0" +channel = "1.82.0" profile = "default" From f1b16adfb5acd2a80ff7f5987d1521e0e285a042 Mon Sep 17 00:00:00 2001 From: ltitanb Date: Wed, 4 Dec 2024 14:35:45 +0000 Subject: [PATCH 17/39] removed some deps --- Cargo.lock | 75 ++++----------------------------- Cargo.toml | 2 - crates/api/Cargo.toml | 1 - crates/beacon-client/Cargo.toml | 5 +-- crates/common/Cargo.toml | 4 -- crates/database/Cargo.toml | 5 --- crates/datastore/Cargo.toml | 5 +-- crates/housekeeper/Cargo.toml | 12 ------ crates/website/Cargo.toml | 13 +++--- 9 files changed, 15 insertions(+), 107 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 212412f..8c6759f 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -524,7 +524,7 @@ version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b6d7b9decdf35d8908a7e3ef02f64c5e9b1695e230154c0e8de3969142d9b94c" dependencies = [ - "futures 0.3.28", + "futures", "pharos", "rustc_version 0.4.0", ] @@ -2423,12 +2423,6 @@ version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e6d5a32815ae3f33302d95fdcb2ce17862f8c65363dcfd29360480ba1001fc9c" -[[package]] -name = "futures" -version = "0.1.31" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3a471a38ef8ed83cd6e40aa59c1ffe17db6855c18e3604d9c4ed8c08ebc28678" - [[package]] name = "futures" version = "0.3.28" @@ -2526,7 +2520,6 @@ 
version = "0.3.28" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "26b01e40b772d54cf6c6d721c1d1abd0647a0106a12ecaa1c186273392a69533" dependencies = [ - "futures 0.1.31", "futures-channel", "futures-core", "futures-io", @@ -2709,10 +2702,9 @@ dependencies = [ "async-trait", "auto_impl", "axum 0.7.4", - "bytes", "ethereum-consensus", "flate2", - "futures 0.3.28", + "futures", "helix-beacon-client", "helix-common", "helix-database", @@ -2754,8 +2746,7 @@ dependencies = [ "axum 0.7.4", "ethereum-consensus", "fiber", - "flate2", - "futures 0.3.28", + "futures", "helix-common", "helix-utils", "http 1.0.0", @@ -2767,8 +2758,6 @@ dependencies = [ "thiserror", "tokio", "tracing", - "tracing-appender", - "tracing-subscriber", "url", ] @@ -2776,7 +2765,7 @@ dependencies = [ name = "helix-cmd" version = "0.0.1" dependencies = [ - "futures 0.3.28", + "futures", "helix-api", "helix-common", "helix-database", @@ -2795,14 +2784,11 @@ name = "helix-common" version = "0.0.1" dependencies = [ "auto_impl", - "axum 0.7.4", "clap", - "deadpool-redis", "ethereum-consensus", "ethereum-types", "helix-utils", "merkle_proof", - "redis", "reqwest", "reth-primitives 0.1.0-alpha.10", "serde", @@ -2813,7 +2799,6 @@ dependencies = [ "ssz_types", "thiserror", "tokio", - "tokio-postgres", "tracing", "tree_hash 0.6.0", "tree_hash_derive", @@ -2825,7 +2810,6 @@ version = "0.0.1" dependencies = [ "async-trait", "bytes", - "chrono", "dashmap", "deadpool-postgres", "env_logger", @@ -2834,16 +2818,12 @@ dependencies = [ "hex", "rand", "refinery", - "reth-primitives 0.1.0-alpha.10", "serde", "serde_json", - "serial_test", "thiserror", "tokio", "tokio-postgres", - "tokio-stream", "tracing", - "tracing-test", ] [[package]] @@ -2856,13 +2836,10 @@ dependencies = [ "deadpool-redis", "ethereum-consensus", "futures-util", - "helix-beacon-client", "helix-common", "helix-database", "hex", - "hyper 1.1.0", "redis", - "reth-primitives 0.1.0-alpha.10", "serde", "serde_json", "thiserror", 
@@ -2875,28 +2852,18 @@ dependencies = [ name = "helix-housekeeper" version = "0.0.1" dependencies = [ - "async-trait", - "auto_impl", - "axum 0.7.4", - "deadpool-redis", "ethereum-consensus", "ethers", - "futures-util", "helix-beacon-client", "helix-common", "helix-database", "helix-datastore", "helix-utils", - "hyper 1.1.0", - "redis", "reth-primitives 0.1.0-alpha.10", - "serde", "serde_json", - "socket2 0.4.9", "thiserror", "tokio", "tracing", - "tracing-subscriber", "uuid 1.7.0", ] @@ -2934,12 +2901,9 @@ dependencies = [ "helix-utils", "hex", "num-format", - "reqwest", "serde", - "serde_json", "tokio", "tracing", - "url", ] [[package]] @@ -3665,7 +3629,7 @@ checksum = "f8d3038e23466858569c2d30a537f691fa0d53b51626630ae08262943e3bbb8b" dependencies = [ "assert-json-diff", "colored", - "futures 0.3.28", + "futures", "hyper 0.14.27", "log", "rand", @@ -4194,7 +4158,7 @@ version = "0.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e9567389417feee6ce15dd6527a8a1ecac205ef62c2932bcf3d9f6fc5b78b414" dependencies = [ - "futures 0.3.28", + "futures", "rustc_version 0.4.0", ] @@ -5588,7 +5552,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "538c30747ae860d6fb88330addbbd3e0ddbe46d662d032855596d8a8ca260611" dependencies = [ "dashmap", - "futures 0.3.28", + "futures", "lazy_static", "log", "parking_lot", @@ -6704,29 +6668,6 @@ dependencies = [ "tracing-log", ] -[[package]] -name = "tracing-test" -version = "0.2.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3a2c0ff408fe918a94c428a3f2ad04e4afd5c95bbc08fcf868eff750c15728a4" -dependencies = [ - "lazy_static", - "tracing-core", - "tracing-subscriber", - "tracing-test-macro", -] - -[[package]] -name = "tracing-test-macro" -version = "0.2.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "258bc1c4f8e2e73a977812ab339d503e6feeb92700f6d07a6de4d321522d5c08" -dependencies = [ - "lazy_static", - "quote", - 
"syn 1.0.109", -] - [[package]] name = "tree_hash" version = "0.5.2" @@ -7335,7 +7276,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7999f5f4217fe3818726b66257a4475f71e74ffd190776ad053fa159e50737f5" dependencies = [ "async_io_stream", - "futures 0.3.28", + "futures", "js-sys", "log", "pharos", diff --git a/Cargo.toml b/Cargo.toml index d0c43b8..507ce70 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -60,7 +60,6 @@ tokio-postgres = "0.7.10" deadpool-postgres = "0.11.0" refinery = { version = "0.8", features = ["tokio-postgres"] } bytes = "1.5.0" -chrono = "0.4.19" # Ethereum Types ethereum-consensus = { git = "https://github.com/ralexstokes/ethereum-consensus", rev = "fc049504a200926c8bd5f0fbd3f9696c6c6f699d" } @@ -74,7 +73,6 @@ tracing-appender = "0.2.2" # Testing and Mocking mockito = "1.1.1" serial_test = "1.0.0" -tracing-test = "0.2.3" env_logger = "0.9" # Misc diff --git a/crates/api/Cargo.toml b/crates/api/Cargo.toml index 64eb41e..f0c0159 100644 --- a/crates/api/Cargo.toml +++ b/crates/api/Cargo.toml @@ -54,7 +54,6 @@ thiserror = { workspace = true } tracing = { workspace = true } tracing-subscriber = { workspace = true } uuid = { workspace = true } -bytes = "1.5" moka = "0.9" [dev-dependencies] diff --git a/crates/beacon-client/Cargo.toml b/crates/beacon-client/Cargo.toml index 14104fe..d801a83 100644 --- a/crates/beacon-client/Cargo.toml +++ b/crates/beacon-client/Cargo.toml @@ -29,16 +29,13 @@ ethereum-consensus.workspace = true # Logging tracing.workspace = true -tracing-subscriber.workspace = true -tracing-appender.workspace = true # Testing and Mocking mockito.workspace = true # Misc auto_impl.workspace = true -flate2 = "1.0" thiserror.workspace = true # Broadcasting -fiber.workspace = true \ No newline at end of file +fiber.workspace = true diff --git a/crates/common/Cargo.toml b/crates/common/Cargo.toml index 94afe9b..7865a63 100644 --- a/crates/common/Cargo.toml +++ b/crates/common/Cargo.toml @@ -10,7 +10,6 @@ 
license.workspace = true helix-utils.workspace = true # Networking -axum.workspace = true reqwest.workspace = true # Serialization and Data Format @@ -26,10 +25,7 @@ ethereum-types = "0.14.1" ssz_types = "0.5.4" # DB -deadpool-redis.workspace = true -redis.workspace = true tokio.workspace = true -tokio-postgres.workspace = true # Misc auto_impl.workspace = true diff --git a/crates/database/Cargo.toml b/crates/database/Cargo.toml index 4bcbe64..a7e20fa 100644 --- a/crates/database/Cargo.toml +++ b/crates/database/Cargo.toml @@ -12,7 +12,6 @@ helix-common.workspace = true # Async and Networking async-trait.workspace = true tokio.workspace = true -tokio-stream.workspace = true # Serialization and Data Format serde.workspace = true @@ -23,15 +22,11 @@ tokio-postgres.workspace = true deadpool-postgres.workspace = true refinery.workspace = true bytes.workspace = true -chrono.workspace = true # Ethereum Types ethereum-consensus.workspace = true -reth-primitives.workspace = true # Testing -serial_test.workspace = true -tracing-test.workspace = true env_logger.workspace = true # Misc diff --git a/crates/datastore/Cargo.toml b/crates/datastore/Cargo.toml index 0f52f6c..bf15a04 100644 --- a/crates/datastore/Cargo.toml +++ b/crates/datastore/Cargo.toml @@ -7,20 +7,17 @@ repository.workspace = true license.workspace = true [dependencies] -helix-beacon-client.workspace = true helix-database.workspace = true helix-common.workspace = true # Async and Networking async-trait.workspace = true axum.workspace = true -hyper.workspace = true tokio.workspace = true tokio-stream.workspace = true futures-util = "0.3" # Serialization and Data Format -reth-primitives.workspace = true serde.workspace = true serde_json.workspace = true hex.workspace = true @@ -35,4 +32,4 @@ redis.workspace = true # Misc auto_impl.workspace = true thiserror.workspace = true -tracing.workspace = true \ No newline at end of file +tracing.workspace = true diff --git a/crates/housekeeper/Cargo.toml 
b/crates/housekeeper/Cargo.toml index d561519..f958d58 100644 --- a/crates/housekeeper/Cargo.toml +++ b/crates/housekeeper/Cargo.toml @@ -14,31 +14,19 @@ helix-common.workspace = true helix-utils.workspace = true # Async and Networking -async-trait.workspace = true -axum.workspace = true -hyper.workspace = true tokio.workspace = true -socket2 = "0.4" -futures-util = { version = "0.3", features = ["compat"] } - # Serialization and Data Format reth-primitives.workspace = true -serde.workspace = true serde_json.workspace = true # Ethereum Types ethereum-consensus.workspace = true -# DB -deadpool-redis.workspace = true -redis.workspace = true # Misc -auto_impl.workspace = true thiserror.workspace = true tracing.workspace = true -tracing-subscriber.workspace = true uuid.workspace = true ethers = "2.0.14" diff --git a/crates/website/Cargo.toml b/crates/website/Cargo.toml index cf5159b..dd5d3e5 100644 --- a/crates/website/Cargo.toml +++ b/crates/website/Cargo.toml @@ -7,22 +7,19 @@ repository.workspace = true license.workspace = true [dependencies] -helix-database = {workspace = true} -helix-common = {workspace = true} -helix-utils = {workspace = true} -helix-housekeeper = {workspace = true} -helix-beacon-client = {workspace = true} +helix-database = { workspace = true } +helix-common = { workspace = true } +helix-utils = { workspace = true } +helix-housekeeper = { workspace = true } +helix-beacon-client = { workspace = true } axum = { workspace = true } tokio = { workspace = true } tracing = { workspace = true } serde = { workspace = true } -serde_json = { workspace = true } ethereum-consensus = { workspace = true } deadpool-postgres = { workspace = true } -url = { workspace = true } async-trait = { workspace = true } hex = { workspace = true } -reqwest = { workspace = true } askama = "0.12" alloy-primitives = "0.7.7" From 5f73fdef1dac0036100baaf98191fb8bed7b5c0a Mon Sep 17 00:00:00 2001 From: ltitanb Date: Wed, 4 Dec 2024 14:41:45 +0000 Subject: [PATCH 18/39] small tidy 
up --- crates/api/Cargo.toml | 28 ++++++++++++++-------------- crates/beacon-client/Cargo.toml | 2 +- crates/website/Cargo.toml | 26 +++++++++++++------------- 3 files changed, 28 insertions(+), 28 deletions(-) diff --git a/crates/api/Cargo.toml b/crates/api/Cargo.toml index f0c0159..a160d57 100644 --- a/crates/api/Cargo.toml +++ b/crates/api/Cargo.toml @@ -16,18 +16,18 @@ helix-utils.workspace = true helix-housekeeper.workspace = true # Async and Networking -async-trait = { workspace = true } -axum = { workspace = true } -tower = { workspace = true } -futures = { workspace = true } -hyper = { workspace = true } -reqwest = { workspace = true } +async-trait.workspace = true +axum.workspace = true +tower.workspace = true +futures.workspace = true +hyper.workspace = true +reqwest.workspace = true reqwest-eventsource = "0.5" -tokio = { workspace = true } +tokio.workspace = true tokio-tungstenite = "0.16" -tokio-stream = { workspace = true } -tower-http = { workspace = true } -url = { workspace = true } +tokio-stream.workspace = true +tower-http.workspace = true +url.workspace = true tonic = "0.10" prost = "0.12" @@ -50,10 +50,10 @@ mockito.workspace = true # Misc auto_impl.workspace = true rand = "0.8" -thiserror = { workspace = true } -tracing = { workspace = true } -tracing-subscriber = { workspace = true } -uuid = { workspace = true } +thiserror.workspace = true +tracing.workspace = true +tracing-subscriber.workspace = true +uuid.workspace = true moka = "0.9" [dev-dependencies] diff --git a/crates/beacon-client/Cargo.toml b/crates/beacon-client/Cargo.toml index d801a83..28d76ef 100644 --- a/crates/beacon-client/Cargo.toml +++ b/crates/beacon-client/Cargo.toml @@ -13,7 +13,7 @@ helix-utils.workspace = true # Async and Networking axum.workspace = true async-trait.workspace = true -futures = { workspace = true } +futures.workspace = true http.workspace = true reqwest.workspace = true reqwest-eventsource = "0.5" diff --git a/crates/website/Cargo.toml 
b/crates/website/Cargo.toml index dd5d3e5..7addbde 100644 --- a/crates/website/Cargo.toml +++ b/crates/website/Cargo.toml @@ -7,19 +7,19 @@ repository.workspace = true license.workspace = true [dependencies] -helix-database = { workspace = true } -helix-common = { workspace = true } -helix-utils = { workspace = true } -helix-housekeeper = { workspace = true } -helix-beacon-client = { workspace = true } -axum = { workspace = true } -tokio = { workspace = true } -tracing = { workspace = true } -serde = { workspace = true } -ethereum-consensus = { workspace = true } -deadpool-postgres = { workspace = true } -async-trait = { workspace = true } -hex = { workspace = true } +helix-database.workspace = true +helix-common.workspace = true +helix-utils.workspace = true +helix-housekeeper.workspace = true +helix-beacon-client.workspace = true +axum.workspace = true +tokio.workspace = true +tracing.workspace = true +serde.workspace = true +ethereum-consensus.workspace = true +deadpool-postgres.workspace = true +async-trait.workspace = true +hex.workspace = true askama = "0.12" alloy-primitives = "0.7.7" From 07e5df873abd44352275c0905d54c133d2a0ba13 Mon Sep 17 00:00:00 2001 From: ltitanb <163874448+ltitanb@users.noreply.github.com> Date: Thu, 5 Dec 2024 10:55:57 +0000 Subject: [PATCH 19/39] feat: metrics (#57) * metrics * comments --- Cargo.lock | 34 +- Cargo.toml | 3 + .../builder/simulator/optimistic_simulator.rs | 10 +- .../src/builder/simulator/rpc_simulator.rs | 8 +- crates/api/src/gossiper/grpc_gossiper.rs | 117 +++++- crates/api/src/middleware/metrics.rs | 34 ++ crates/api/src/middleware/mod.rs | 3 + crates/api/src/router.rs | 28 +- crates/cmd/src/main.rs | 3 +- crates/common/Cargo.toml | 5 +- crates/common/src/lib.rs | 1 + crates/common/src/metrics.rs | 356 ++++++++++++++++++ .../src/postgres/postgres_db_service.rs | 214 ++++++++--- crates/datastore/src/redis/redis_cache.rs | 222 +++++++++-- 14 files changed, 919 insertions(+), 119 deletions(-) create mode 100644 
crates/api/src/middleware/metrics.rs create mode 100644 crates/common/src/metrics.rs diff --git a/Cargo.lock b/Cargo.lock index 8c6759f..4ce5361 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2266,9 +2266,9 @@ dependencies = [ [[package]] name = "eyre" -version = "0.6.11" +version = "0.6.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b6267a1fa6f59179ea4afc8e50fd8612a3cc60bc858f786ff877a4a8cb042799" +checksum = "7cd915d99f24784cdc19fd37ef22b97e3ff0ae756c7e492e9fbfe897d61e2aec" dependencies = [ "indenter", "once_cell", @@ -2787,8 +2787,11 @@ dependencies = [ "clap", "ethereum-consensus", "ethereum-types", + "eyre", "helix-utils", + "lazy_static", "merkle_proof", + "prometheus", "reqwest", "reth-primitives 0.1.0-alpha.10", "serde", @@ -3445,11 +3448,11 @@ checksum = "3f35c735096c0293d313e8f2a641627472b83d01b937177fe76e5e2708d31e0d" [[package]] name = "lazy_static" -version = "1.4.0" +version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" +checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe" dependencies = [ - "spin 0.5.2", + "spin 0.9.8", ] [[package]] @@ -4385,6 +4388,21 @@ dependencies = [ "unicode-ident", ] +[[package]] +name = "prometheus" +version = "0.13.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3d33c28a30771f7f96db69893f78b857f7450d7e0237e9c8fc6427a81bae7ed1" +dependencies = [ + "cfg-if", + "fnv", + "lazy_static", + "memchr", + "parking_lot", + "protobuf", + "thiserror", +] + [[package]] name = "proptest" version = "1.5.0" @@ -4459,6 +4477,12 @@ dependencies = [ "prost", ] +[[package]] +name = "protobuf" +version = "2.28.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "106dd99e98437432fed6519dedecfade6a06a73bb7b2a1e019fdd2bee5778d94" + [[package]] name = "pulldown-cmark" version = "0.9.6" diff --git a/Cargo.toml b/Cargo.toml 
index 507ce70..0479e9f 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -78,8 +78,11 @@ env_logger = "0.9" # Misc auto_impl = "1.0.1" backtrace = "0.3.69" +eyre = "0.6.12" thiserror = "1.0.49" uuid = { version = "1.5", features = ["fast-rng", "v4"] } +prometheus = "0.13.4" +lazy_static = "1.5.0" # Broadcasting fiber = { git = "https://github.com/chainbound/fiber-rs.git", tag = "v0.5.1" } diff --git a/crates/api/src/builder/simulator/optimistic_simulator.rs b/crates/api/src/builder/simulator/optimistic_simulator.rs index aa99811..2212303 100644 --- a/crates/api/src/builder/simulator/optimistic_simulator.rs +++ b/crates/api/src/builder/simulator/optimistic_simulator.rs @@ -4,10 +4,10 @@ use async_trait::async_trait; use ethereum_consensus::primitives::{BlsPublicKey, Hash32}; use reqwest::Client; use tokio::sync::{mpsc::Sender, RwLock}; -use tracing::{debug, error, info, warn}; +use tracing::{debug, error, warn}; use uuid::Uuid; -use helix_common::{simulator::BlockSimError, BuilderInfo}; +use helix_common::{metrics::SimulatorMetrics, simulator::BlockSimError, BuilderInfo}; use helix_database::DatabaseService; use helix_datastore::Auctioneer; @@ -101,6 +101,8 @@ impl OptimisticSimulator block_hash: &Hash32, reason: String, ) { + SimulatorMetrics::demotion_count(); + if let Err(err) = self.auctioneer.demote_builder(builder_public_key).await { *self.failsafe_triggered.write().await = true; error!( @@ -157,6 +159,8 @@ impl BlockSimulator for OptimisticSimulator< request_id: Uuid, ) -> Result { if self.should_process_optimistically(&request, builder_info).await { + SimulatorMetrics::sim_count(true); + debug!( request_id=%request_id, block_hash=%request.execution_payload.block_hash(), @@ -179,6 +183,8 @@ impl BlockSimulator for OptimisticSimulator< Ok(true) } else { + SimulatorMetrics::sim_count(false); + debug!( request_id=%request_id, block_hash=?request.execution_payload.block_hash(), diff --git a/crates/api/src/builder/simulator/rpc_simulator.rs 
b/crates/api/src/builder/simulator/rpc_simulator.rs index 1a537a6..4fb27c4 100644 --- a/crates/api/src/builder/simulator/rpc_simulator.rs +++ b/crates/api/src/builder/simulator/rpc_simulator.rs @@ -1,5 +1,5 @@ use async_trait::async_trait; -use helix_common::BuilderInfo; +use helix_common::{metrics::SimulatorMetrics, BuilderInfo}; use reqwest::{ header::{HeaderMap, HeaderValue, CONTENT_TYPE}, Client, Response, StatusCode, @@ -96,6 +96,8 @@ impl BlockSimulator for RpcSimulator { sim_result_saver_sender: Sender, request_id: Uuid, ) -> Result { + let timer = SimulatorMetrics::timer(); + let block_hash = request.execution_payload.block_hash().clone(); debug!( request_id = %request_id, @@ -106,7 +108,9 @@ impl BlockSimulator for RpcSimulator { match self.send_rpc_request(request, is_top_bid).await { Ok(response) => { + timer.stop_and_record(); let result = Self::process_rpc_response(response).await; + SimulatorMetrics::sim_status(result.is_ok()); // Send sim result to db processor task let db_info = @@ -119,7 +123,9 @@ impl BlockSimulator for RpcSimulator { result.map(|_| false) } Err(err) => { + timer.stop_and_discard(); error!(request_id = %request_id, err = ?err, "Error sending RPC request"); + SimulatorMetrics::sim_status(false); Err(BlockSimError::RpcError(err.to_string())) } } diff --git a/crates/api/src/gossiper/grpc_gossiper.rs b/crates/api/src/gossiper/grpc_gossiper.rs index 20d2914..9f86a9a 100644 --- a/crates/api/src/gossiper/grpc_gossiper.rs +++ b/crates/api/src/gossiper/grpc_gossiper.rs @@ -1,6 +1,8 @@ use std::{sync::Arc, time::Duration}; use async_trait::async_trait; +use helix_common::metrics::GossipMetrics; +use prost::Message; use tokio::{sync::mpsc::Sender, time::sleep}; use tonic::{transport::Channel, Request, Response, Status}; use tracing::error; @@ -23,6 +25,11 @@ use crate::{ use super::types::broadcast_cancellation::BroadcastCancellationParams; +const HEADER_ID: &str = "header"; +const PAYLOAD_ID: &str = "payload"; +const GET_PAYLOAD_ID: &str = 
"get_payload"; +const CANCELLATION_ID: &str = "cancel"; + #[derive(Clone)] pub struct GrpcGossiperClient { endpoint: String, @@ -65,6 +72,10 @@ impl GrpcGossiperClient { &self, request: grpc::BroadcastHeaderParams, ) -> Result<(), GossipError> { + let _timer = GossipMetrics::out_timer(HEADER_ID); + let size = request.encoded_len(); + GossipMetrics::out_size(HEADER_ID, size); + let request = Request::new(request); let client = { let client_guard = self.client.read().await; @@ -72,19 +83,27 @@ impl GrpcGossiperClient { }; if let Some(mut client) = client { - let result = tokio::time::timeout(Duration::from_secs(5), client.broadcast_header(request)).await; + let result = + tokio::time::timeout(Duration::from_secs(5), client.broadcast_header(request)) + .await; match result { - Ok(Ok(_)) => Ok(()), + Ok(Ok(_)) => { + GossipMetrics::out_count(HEADER_ID, true); + Ok(()) + } Ok(Err(err)) => { error!(err = %err, "Client call failed."); + GossipMetrics::out_count(HEADER_ID, false); return Err(GossipError::BroadcastError(err)); - }, + } Err(_) => { error!("Client call timed out."); + GossipMetrics::out_count(HEADER_ID, false); return Err(GossipError::TimeoutError); - }, + } } } else { + GossipMetrics::out_count(HEADER_ID, false); return Err(GossipError::ClientNotConnected) } } @@ -93,6 +112,10 @@ impl GrpcGossiperClient { &self, request: grpc::BroadcastPayloadParams, ) -> Result<(), GossipError> { + let _timer = GossipMetrics::out_timer(PAYLOAD_ID); + let size = request.encoded_len(); + GossipMetrics::out_size(PAYLOAD_ID, size); + let request = Request::new(request); let client = { let client_guard = self.client.read().await; @@ -100,19 +123,27 @@ impl GrpcGossiperClient { }; if let Some(mut client) = client { - let result = tokio::time::timeout(Duration::from_secs(5), client.broadcast_payload(request)).await; + let result = + tokio::time::timeout(Duration::from_secs(5), client.broadcast_payload(request)) + .await; match result { - Ok(Ok(_)) => Ok(()), + Ok(Ok(_)) => { + 
GossipMetrics::out_count(PAYLOAD_ID, true); + Ok(()) + } Ok(Err(err)) => { error!(err = %err, "Client call failed."); + GossipMetrics::out_count(PAYLOAD_ID, false); return Err(GossipError::BroadcastError(err)); - }, + } Err(_) => { error!("Client call timed out."); + GossipMetrics::out_count(PAYLOAD_ID, false); return Err(GossipError::TimeoutError); - }, + } } } else { + GossipMetrics::out_count(PAYLOAD_ID, false); return Err(GossipError::ClientNotConnected) } } @@ -121,6 +152,10 @@ impl GrpcGossiperClient { &self, request: grpc::BroadcastGetPayloadParams, ) -> Result<(), GossipError> { + let _timer = GossipMetrics::out_timer(GET_PAYLOAD_ID); + let size = request.encoded_len(); + GossipMetrics::out_size(GET_PAYLOAD_ID, size); + let request = Request::new(request); let client = { let client_guard = self.client.read().await; @@ -128,19 +163,27 @@ impl GrpcGossiperClient { }; if let Some(mut client) = client { - let result = tokio::time::timeout(Duration::from_secs(5), client.broadcast_get_payload(request)).await; + let result = + tokio::time::timeout(Duration::from_secs(5), client.broadcast_get_payload(request)) + .await; match result { - Ok(Ok(_)) => Ok(()), + Ok(Ok(_)) => { + GossipMetrics::out_count(GET_PAYLOAD_ID, true); + Ok(()) + } Ok(Err(err)) => { error!(err = %err, "Client call failed."); + GossipMetrics::out_count(GET_PAYLOAD_ID, false); return Err(GossipError::BroadcastError(err)); - }, + } Err(_) => { error!("Client call timed out."); + GossipMetrics::out_count(GET_PAYLOAD_ID, false); return Err(GossipError::TimeoutError); - }, + } } } else { + GossipMetrics::out_count(GET_PAYLOAD_ID, false); return Err(GossipError::ClientNotConnected) } } @@ -149,6 +192,10 @@ impl GrpcGossiperClient { &self, request: grpc::BroadcastCancellationParams, ) -> Result<(), GossipError> { + let _timer = GossipMetrics::out_timer(CANCELLATION_ID); + let size = request.encoded_len(); + GossipMetrics::out_size(CANCELLATION_ID, size); + let request = Request::new(request); let 
client = { let client_guard = self.client.read().await; @@ -156,19 +203,29 @@ impl GrpcGossiperClient { }; if let Some(mut client) = client { - let result = tokio::time::timeout(Duration::from_secs(5), client.broadcast_cancellation(request)).await; + let result = tokio::time::timeout( + Duration::from_secs(5), + client.broadcast_cancellation(request), + ) + .await; match result { - Ok(Ok(_)) => Ok(()), + Ok(Ok(_)) => { + GossipMetrics::out_count(CANCELLATION_ID, true); + Ok(()) + } Ok(Err(err)) => { error!(err = %err, "Client call failed."); + GossipMetrics::out_count(CANCELLATION_ID, false); return Err(GossipError::BroadcastError(err)); - }, + } Err(_) => { error!("Client call timed out."); + GossipMetrics::out_count(CANCELLATION_ID, false); return Err(GossipError::TimeoutError); - }, + } } } else { + GossipMetrics::out_count(CANCELLATION_ID, false); return Err(GossipError::ClientNotConnected) } } @@ -294,7 +351,12 @@ impl GossipService for GrpcGossiperService { &self, request: Request, ) -> Result, Status> { - let request = BroadcastHeaderParams::from_proto(request.into_inner()); + GossipMetrics::in_count(HEADER_ID); + let inner = request.into_inner(); + let size = inner.encoded_len(); + GossipMetrics::in_size(HEADER_ID, size); + + let request = BroadcastHeaderParams::from_proto(inner); if let Err(err) = self.builder_api_sender.send(GossipedMessage::Header(Box::new(request))).await { @@ -307,7 +369,12 @@ impl GossipService for GrpcGossiperService { &self, request: Request, ) -> Result, Status> { - let request = BroadcastPayloadParams::from_proto(request.into_inner()); + GossipMetrics::in_count(PAYLOAD_ID); + let inner = request.into_inner(); + let size = inner.encoded_len(); + GossipMetrics::in_size(PAYLOAD_ID, size); + + let request = BroadcastPayloadParams::from_proto(inner); if let Err(err) = self.builder_api_sender.send(GossipedMessage::Payload(Box::new(request))).await { @@ -320,7 +387,12 @@ impl GossipService for GrpcGossiperService { &self, request: 
Request, ) -> Result, Status> { - let request = BroadcastGetPayloadParams::from_proto(request.into_inner()); + GossipMetrics::in_count(GET_PAYLOAD_ID); + let inner = request.into_inner(); + let size = inner.encoded_len(); + GossipMetrics::in_size(GET_PAYLOAD_ID, size); + + let request = BroadcastGetPayloadParams::from_proto(inner); if let Err(err) = self.proposer_api_sender.send(GossipedMessage::GetPayload(Box::new(request))).await { @@ -333,7 +405,12 @@ impl GossipService for GrpcGossiperService { &self, request: Request, ) -> Result, Status> { - let request = BroadcastCancellationParams::from_proto(request.into_inner()); + GossipMetrics::in_count(CANCELLATION_ID); + let inner = request.into_inner(); + let size = inner.encoded_len(); + GossipMetrics::in_size(CANCELLATION_ID, size); + + let request = BroadcastCancellationParams::from_proto(inner); if let Err(err) = self.builder_api_sender.send(GossipedMessage::Cancellation(Box::new(request))).await { diff --git a/crates/api/src/middleware/metrics.rs b/crates/api/src/middleware/metrics.rs new file mode 100644 index 0000000..de2c0aa --- /dev/null +++ b/crates/api/src/middleware/metrics.rs @@ -0,0 +1,34 @@ +use axum::{ + body::{to_bytes, Body}, + extract::Request, + http::StatusCode, + middleware::Next, + response::{IntoResponse, Response}, +}; +use helix_common::metrics::ApiMetrics; + +use crate::builder::api::MAX_PAYLOAD_LENGTH; + +pub async fn metrics_middleware(req: Request, next: Next) -> Response { + let endpoint = req.uri().path().to_string(); + + ApiMetrics::count(&endpoint); + let _timer = ApiMetrics::timer(&endpoint); + + let (req_parts, req_body) = req.into_parts(); + + // we can probably remove the RequestBodyLimitLayer with this + let response = match to_bytes(req_body, MAX_PAYLOAD_LENGTH).await { + Ok(bytes) => { + ApiMetrics::size(&endpoint, bytes.len()); + + let req = Request::from_parts(req_parts, Body::from(bytes)); + next.run(req).await + } + Err(_) => return 
StatusCode::PAYLOAD_TOO_LARGE.into_response(), + }; + + ApiMetrics::status(&endpoint, response.status().as_str()); + + response +} diff --git a/crates/api/src/middleware/mod.rs b/crates/api/src/middleware/mod.rs index d47ff4f..f6d3a12 100644 --- a/crates/api/src/middleware/mod.rs +++ b/crates/api/src/middleware/mod.rs @@ -1 +1,4 @@ +pub mod metrics; pub mod rate_limiting; + +pub use metrics::metrics_middleware; diff --git a/crates/api/src/router.rs b/crates/api/src/router.rs index 4499c75..b150b03 100644 --- a/crates/api/src/router.rs +++ b/crates/api/src/router.rs @@ -1,6 +1,6 @@ use axum::{ error_handling::HandleErrorLayer, - http::{request, StatusCode}, + http::StatusCode, middleware, routing::{get, post}, Extension, Router, @@ -10,20 +10,27 @@ use helix_common::{Route, RouterConfig}; use helix_database::postgres::postgres_db_service::PostgresDatabaseService; use helix_datastore::redis::redis_cache::RedisCache; use hyper::HeaderMap; -use tracing::warn; use std::{collections::HashMap, sync::Arc, time::Duration}; use tower::{timeout::TimeoutLayer, BoxError, ServiceBuilder}; -use tower_http::limit::RequestBodyLimitLayer; -use tower_http::request_id::{MakeRequestUuid, PropagateRequestIdLayer, SetRequestIdLayer}; +use tower_http::{ + limit::RequestBodyLimitLayer, + request_id::{MakeRequestUuid, PropagateRequestIdLayer, SetRequestIdLayer}, +}; +use tracing::warn; use crate::{ builder::{ - api::{BuilderApi, MAX_PAYLOAD_LENGTH}, multi_simulator::MultiSimulator, optimistic_simulator::OptimisticSimulator + api::{BuilderApi, MAX_PAYLOAD_LENGTH}, + multi_simulator::MultiSimulator, + optimistic_simulator::OptimisticSimulator, }, constraints::api::ConstraintsApi, gossiper::grpc_gossiper::GrpcGossiperClientManager, - middleware::rate_limiting::rate_limit_by_ip::{ - rate_limit_by_ip, RateLimitState, RateLimitStateForRoute, + middleware::{ + metrics_middleware, + rate_limiting::rate_limit_by_ip::{ + rate_limit_by_ip, RateLimitState, RateLimitStateForRoute, + }, }, 
proposer::api::ProposerApi, relay_data::{ @@ -150,6 +157,8 @@ pub fn build_router( } } + router = router.layer(middleware::from_fn(metrics_middleware)); + // Add payload size limit router = router.layer(RequestBodyLimitLayer::new(MAX_PAYLOAD_LENGTH)); @@ -166,10 +175,7 @@ pub fn build_router( .get("x-request-id") .map(|v| v.to_str().unwrap_or_default()) .unwrap_or_default(); - warn!( - request_id = request_id, - "Request timed out {:?}", e - ); + warn!(request_id = request_id, "Request timed out {:?}", e); StatusCode::REQUEST_TIMEOUT })) .layer(TimeoutLayer::new(API_REQUEST_TIMEOUT)), diff --git a/crates/cmd/src/main.rs b/crates/cmd/src/main.rs index 310dd47..abf16bc 100644 --- a/crates/cmd/src/main.rs +++ b/crates/cmd/src/main.rs @@ -1,5 +1,5 @@ use helix_api::service::ApiService; -use helix_common::{LoggingConfig, RelayConfig}; +use helix_common::{metrics::start_metrics_server, LoggingConfig, RelayConfig}; use helix_database::postgres::postgres_db_service::PostgresDatabaseService; use helix_utils::set_panic_hook; use helix_website::website_service::WebsiteService; @@ -61,6 +61,7 @@ async fn run() { let mut handles = Vec::new(); let postgres_db = PostgresDatabaseService::from_relay_config(&config).await; + start_metrics_server(); // Try to run database migrations until they succeed loop { diff --git a/crates/common/Cargo.toml b/crates/common/Cargo.toml index 7865a63..07cf1b8 100644 --- a/crates/common/Cargo.toml +++ b/crates/common/Cargo.toml @@ -24,7 +24,7 @@ merkle_proof = { git = "https://github.com/sigp/lighthouse.git", tag = "v5.3.0" ethereum-types = "0.14.1" ssz_types = "0.5.4" -# DB +# Async tokio.workspace = true # Misc @@ -37,6 +37,9 @@ ssz_rs = { git = "https://github.com/ralexstokes/ssz-rs", rev = "1df4cd9", featu ] } sha2 = "0.10.8" tree_hash = "0.6.0" +prometheus.workspace = true +eyre.workspace = true +lazy_static.workspace = true [dev-dependencies] tree_hash = "0.6.0" diff --git a/crates/common/src/lib.rs b/crates/common/src/lib.rs index 
d8fc788..000fade 100644 --- a/crates/common/src/lib.rs +++ b/crates/common/src/lib.rs @@ -5,6 +5,7 @@ pub mod builder_info; pub mod chain_info; pub mod config; pub mod eth; +pub mod metrics; pub mod pending_block; pub mod proofs; pub mod proposer; diff --git a/crates/common/src/metrics.rs b/crates/common/src/metrics.rs new file mode 100644 index 0000000..695af43 --- /dev/null +++ b/crates/common/src/metrics.rs @@ -0,0 +1,356 @@ +use axum::{ + body::Body, + http::{header::CONTENT_TYPE, StatusCode}, + response::{IntoResponse, Response}, + routing::get, +}; +use eyre::bail; +use lazy_static::lazy_static; +use prometheus::{ + register_histogram, register_histogram_vec, register_int_counter, register_int_counter_vec, + Encoder, Histogram, HistogramTimer, HistogramVec, IntCounter, IntCounterVec, TextEncoder, +}; +use std::net::SocketAddr; +use tokio::net::TcpListener; +use tracing::{error, info, trace}; + +pub fn start_metrics_server() { + let port = + std::env::var("METRICS_PORT").map(|s| s.parse().expect("invalid port")).unwrap_or(9500); + tokio::spawn(MetricsProvider::new(port).run()); +} + +pub struct MetricsProvider { + port: u16, +} + +impl MetricsProvider { + pub fn new(port: u16) -> Self { + MetricsProvider { port } + } + + pub async fn run(self) -> eyre::Result<()> { + info!("Starting metrics server on port {}", self.port); + + let router = axum::Router::new() + .route("/metrics", get(handle_metrics)) + .route("/status", get(handle_status)); + let address = SocketAddr::from(([0, 0, 0, 0], self.port)); + let listener = TcpListener::bind(&address).await?; + + axum::serve(listener, router).await?; + + bail!("Metrics server stopped") + } +} + +async fn handle_status() -> Response { + trace!("Handling status request"); + + StatusCode::OK.into_response() +} + +async fn handle_metrics() -> Response { + trace!("Handling metrics request"); + + match prepare_metrics() { + Ok(response) => response, + Err(err) => { + error!("Failed to prepare metrics: {:?}", err); + 
StatusCode::INTERNAL_SERVER_ERROR.into_response() + } + } +} + +fn prepare_metrics() -> Result { + let metrics = prometheus::gather(); + let encoder = TextEncoder::new(); + let s = encoder.encode_to_string(&metrics)?; + + Response::builder() + .status(200) + .header(CONTENT_TYPE, encoder.format_type()) + .body(Body::from(s)) + .map_err(MetricsError::FailedBody) +} + +#[derive(Debug, thiserror::Error)] +enum MetricsError { + #[error("failed encoding metrics {0}")] + FailedEncoding(#[from] prometheus::Error), + + #[error("failed encoding body {0}")] + FailedBody(#[from] axum::http::Error), +} + +lazy_static! { + //////////////// API //////////////// + + /// Count for requests by API and endpoint + static ref REQUEST_COUNTS: IntCounterVec = + register_int_counter_vec!("request_count_total", "Count of requests", &["endpoint"]) + .unwrap(); + + /// Count for status codes by API and endpoint + static ref REQUEST_STATUS: IntCounterVec = + register_int_counter_vec!("request_status_total", "Count of status codes", &["endpoint", "http_status_code"]) + .unwrap(); + + /// Duration of request in seconds + static ref REQUEST_LATENCY: HistogramVec = register_histogram_vec!( + "request_latency_sec", + "Latency of requests", + &["endpoint"] + ) + .unwrap(); + + /// Request size in bytes + static ref REQUEST_SIZE: IntCounterVec = register_int_counter_vec!( + "request_size_bytes", + "Size of requests", + &["endpoint"] + ) + .unwrap(); + + //////////////// SIMULATOR //////////////// + static ref SIMULATOR_COUNTS: IntCounterVec = + register_int_counter_vec!("simulator_count_total", "Count of sim requests", &["is_optimistic"]) + .unwrap(); + + static ref SIMULATOR_STATUS: IntCounterVec = + register_int_counter_vec!("simulator_status_total", "Count of sim statuses", &["is_success"]) + .unwrap(); + + static ref SIMULATOR_LATENCY: Histogram = register_histogram!( + "sim_latency_sec", + "Latency of simulations", + ) + .unwrap(); + + static ref BUILDER_DEMOTION_COUNT: IntCounter = + 
register_int_counter!("builder_demotion_count_total", "Count of builder demotions") + .unwrap(); + + //////////////// GOSSIP //////////////// + + /// Received gossip messages count + static ref IN_GOSSIP_COUNTS: IntCounterVec = register_int_counter_vec!( + "in_gossip_count_total", + "Count of received gossip messages", + &["endpoint"] + ) + .unwrap(); + + + /// Received gossip size in bytes + static ref IN_GOSSIP_SIZE: IntCounterVec = register_int_counter_vec!( + "in_gossip_size_bytes", + "Size of received gossip messages", + &["endpoint"] + ) + .unwrap(); + + /// Sent gossip messages count + static ref OUT_GOSSIP_COUNTS: IntCounterVec = register_int_counter_vec!( + "out_gossip_count_total", + "Count of sent gossip messages", + &["endpoint", "is_success"] + ) + .unwrap(); + + /// Sent gossip latency + static ref OUT_GOSSIP_LATENCY: HistogramVec = register_histogram_vec!( + "out_gossip_latency_sec", + "Latency of sent gossip messages", + &["endpoint"] + ) + .unwrap(); + + /// Sent gossip size in bytes + static ref OUT_GOSSIP_SIZE: IntCounterVec = register_int_counter_vec!( + "out_gossip_size_bytes", + "Size of sent gossip messages", + &["endpoint"] + ) + .unwrap(); + + //////////////// DB //////////////// + static ref DB_COUNTS: IntCounterVec = register_int_counter_vec!( + "db_count_total", + "Count of db operations", + &["endpoint", "is_success"] + ) + .unwrap(); + + static ref DB_LATENCY: HistogramVec = register_histogram_vec!( + "db_latency_sec", + "Latency of db operations", + &["endpoint"] + ) + .unwrap(); + + + //////////////// REDIS //////////////// + static ref REDIS_COUNTS: IntCounterVec = register_int_counter_vec!( + "redis_count_total", + "Count of redis operations", + &["endpoint", "is_success"] + ) + .unwrap(); + + static ref REDIS_LATENCY: HistogramVec = register_histogram_vec!( + "redis_latency_sec", + "Latency of redis operations", + &["endpoint"] + ) + .unwrap(); +} + +pub struct ApiMetrics; + +impl ApiMetrics { + pub fn count(endpoint: &str) { + 
REQUEST_COUNTS.with_label_values(&[endpoint]).inc(); + } + pub fn status(endpoint: &str, status_code: &str) { + REQUEST_STATUS.with_label_values(&[endpoint, status_code]).inc(); + } + /// Records on drop + pub fn timer(endpoint: &str) -> HistogramTimer { + REQUEST_LATENCY.with_label_values(&[endpoint]).start_timer() + } + pub fn size(endpoint: &str, size: usize) { + REQUEST_SIZE.with_label_values(&[endpoint]).inc_by(size as u64); + } +} + +pub struct GossipMetrics; + +impl GossipMetrics { + pub fn in_count(endpoint: &str) { + IN_GOSSIP_COUNTS.with_label_values(&[endpoint]).inc(); + } + + pub fn in_size(endpoint: &str, size: usize) { + IN_GOSSIP_SIZE.with_label_values(&[endpoint]).inc_by(size as u64); + } + + pub fn out_count(endpoint: &str, is_success: bool) { + OUT_GOSSIP_COUNTS.with_label_values(&[endpoint, is_success.to_string().as_str()]).inc(); + } + + /// Records on drop + pub fn out_timer(endpoint: &str) -> HistogramTimer { + OUT_GOSSIP_LATENCY.with_label_values(&[endpoint]).start_timer() + } + + pub fn out_size(endpoint: &str, size: usize) { + OUT_GOSSIP_SIZE.with_label_values(&[endpoint]).inc_by(size as u64); + } +} + +pub struct DbMetrics; + +impl DbMetrics { + pub fn count(endpoint: &str, is_success: bool) { + DB_COUNTS.with_label_values(&[endpoint, is_success.to_string().as_str()]).inc(); + } + + pub fn latency(endpoint: &str) -> HistogramTimer { + DB_LATENCY.with_label_values(&[endpoint]).start_timer() + } +} + +pub struct DbMetricRecord<'a> { + endpoint: &'a str, + has_recorded: bool, + _timer: HistogramTimer, +} + +impl<'a> DbMetricRecord<'a> { + pub fn new(endpoint: &'a str) -> Self { + let timer = DbMetrics::latency(endpoint); + DbMetricRecord { has_recorded: false, _timer: timer, endpoint } + } + + pub fn record_success(&mut self) { + self.has_recorded = true; + DbMetrics::count(self.endpoint, true); + } + + pub fn record_failure(&mut self) { + self.has_recorded = true; + DbMetrics::count(self.endpoint, false); + } +} + +impl<'a> Drop for 
DbMetricRecord<'a> { + fn drop(&mut self) { + if !self.has_recorded { + self.record_failure(); + } + } +} + +pub struct RedisMetrics; + +impl RedisMetrics { + pub fn count(endpoint: &str, is_success: bool) { + REDIS_COUNTS.with_label_values(&[endpoint, is_success.to_string().as_str()]).inc(); + } + + pub fn latency(endpoint: &str) -> HistogramTimer { + REDIS_LATENCY.with_label_values(&[endpoint]).start_timer() + } +} + +pub struct RedisMetricRecord<'a> { + endpoint: &'a str, + has_recorded: bool, + _timer: HistogramTimer, +} + +impl<'a> RedisMetricRecord<'a> { + pub fn new(endpoint: &'a str) -> Self { + let timer = RedisMetrics::latency(endpoint); + RedisMetricRecord { has_recorded: false, _timer: timer, endpoint } + } + + pub fn record_success(&mut self) { + self.has_recorded = true; + RedisMetrics::count(self.endpoint, true); + } + + pub fn record_failure(&mut self) { + self.has_recorded = true; + RedisMetrics::count(self.endpoint, false); + } +} + +impl<'a> Drop for RedisMetricRecord<'a> { + fn drop(&mut self) { + if !self.has_recorded { + self.record_failure(); + } + } +} + +pub struct SimulatorMetrics; + +impl SimulatorMetrics { + pub fn sim_count(is_optimistic: bool) { + SIMULATOR_COUNTS.with_label_values(&[is_optimistic.to_string().as_str()]).inc(); + } + + pub fn sim_status(is_success: bool) { + SIMULATOR_STATUS.with_label_values(&[is_success.to_string().as_str()]).inc(); + } + + pub fn timer() -> HistogramTimer { + SIMULATOR_LATENCY.start_timer() + } + + pub fn demotion_count() { + BUILDER_DEMOTION_COUNT.inc(); + } +} diff --git a/crates/database/src/postgres/postgres_db_service.rs b/crates/database/src/postgres/postgres_db_service.rs index 1b40b11..100b1af 100644 --- a/crates/database/src/postgres/postgres_db_service.rs +++ b/crates/database/src/postgres/postgres_db_service.rs @@ -1,4 +1,3 @@ -use core::num; use std::{ collections::HashSet, ops::DerefMut, @@ -15,9 +14,17 @@ use helix_common::{ api::{ builder_api::BuilderGetValidatorsResponseEntry, 
data_api::BidFilters, proposer_api::ValidatorRegistrationInfo, - }, bid_submission::{ + }, + bid_submission::{ v2::header_submission::SignedHeaderSubmission, BidSubmission, BidTrace, SignedBidSubmission, - }, deneb::SignedValidatorRegistration, simulator::BlockSimError, validator_preferences, versioned_payload::PayloadAndBlobs, BuilderInfo, Filtering, GetHeaderTrace, GetPayloadTrace, GossipedHeaderTrace, GossipedPayloadTrace, HeaderSubmissionTrace, ProposerInfo, RelayConfig, SignedValidatorRegistrationEntry, SubmissionTrace, ValidatorPreferences, ValidatorSummary + }, + deneb::SignedValidatorRegistration, + metrics::DbMetricRecord, + simulator::BlockSimError, + versioned_payload::PayloadAndBlobs, + BuilderInfo, Filtering, GetHeaderTrace, GetPayloadTrace, GossipedHeaderTrace, + GossipedPayloadTrace, HeaderSubmissionTrace, ProposerInfo, RelayConfig, + SignedValidatorRegistrationEntry, SubmissionTrace, ValidatorPreferences, ValidatorSummary, }; use tokio_postgres::{types::ToSql, NoTls}; use tracing::{error, info}; @@ -147,6 +154,8 @@ impl PostgresDatabaseService { } pub async fn load_known_validators(&self) { + let mut record = DbMetricRecord::new("load_known_validators"); + let client = self.pool.get().await.unwrap(); let rows = client.query("SELECT * FROM known_validators", &[]).await.unwrap(); for row in rows { @@ -154,9 +163,13 @@ impl PostgresDatabaseService { parse_bytes_to_pubkey(row.get::<&str, &[u8]>("public_key")).unwrap(); self.known_validators_cache.insert(public_key); } + + record.record_success(); } pub async fn load_validator_registrations(&self) { + let mut record = DbMetricRecord::new("load_validator_registrations"); + match self.get_validator_registrations().await { Ok(entries) => { let num_entries = entries.len(); @@ -167,6 +180,7 @@ impl PostgresDatabaseService { ); }); info!("Loaded {} validator registrations", num_entries); + record.record_success(); } Err(e) => { error!("Error loading validator registrations: {}", e); @@ -213,6 +227,8 @@ impl 
PostgresDatabaseService { &self, entries: &[SignedValidatorRegistrationEntry], ) -> Result<(), DatabaseError> { + let mut record = DbMetricRecord::new("save_validator_registrations"); + let mut client = self.pool.get().await?; let mut sorted_entries = entries.to_vec(); @@ -373,6 +389,7 @@ impl PostgresDatabaseService { transaction.commit().await?; } + record.record_success(); Ok(()) } } @@ -408,6 +425,8 @@ impl DatabaseService for PostgresDatabaseService { pool_name: Option, user_agent: Option, ) -> Result<(), DatabaseError> { + let mut record = DbMetricRecord::new("save_validator_registration"); + let registration = registration_info.registration.message.clone(); if let Some(entry) = self.validator_registration_cache.get(®istration.public_key) { @@ -481,6 +500,7 @@ impl DatabaseService for PostgresDatabaseService { transaction.commit().await?; + record.record_success(); Ok(()) } @@ -490,6 +510,8 @@ impl DatabaseService for PostgresDatabaseService { pool_name: Option, user_agent: Option, ) -> Result<(), DatabaseError> { + let mut record = DbMetricRecord::new("save_validator_registrations"); + entries.retain(|entry| { if let Some(existing_entry) = self.validator_registration_cache.get(&entry.registration.message.public_key) @@ -516,6 +538,7 @@ impl DatabaseService for PostgresDatabaseService { ); } + record.record_success(); Ok(()) } @@ -524,6 +547,8 @@ impl DatabaseService for PostgresDatabaseService { validator_keys: &[BlsPublicKey], trusted_builders: &[String], ) -> Result<(), DatabaseError> { + let mut record = DbMetricRecord::new("update_trusted_builders"); + let client = self.pool.get().await?; client .execute( @@ -535,6 +560,7 @@ impl DatabaseService for PostgresDatabaseService { ) .await?; + record.record_success(); Ok(()) } @@ -558,6 +584,8 @@ impl DatabaseService for PostgresDatabaseService { &self, pub_key: BlsPublicKey, ) -> Result { + let mut record = DbMetricRecord::new("get_validator_registration"); + match self .pool .get() @@ -585,33 +613,42 @@ impl 
DatabaseService for PostgresDatabaseService { .await? { rows if rows.is_empty() => Err(DatabaseError::ValidatorRegistrationNotFound), - rows => parse_row(rows.first().unwrap()), + rows => { + record.record_success(); + parse_row(rows.first().unwrap()) + }, } } async fn get_validator_registrations( &self, ) -> Result, DatabaseError> { - parse_rows( - self.pool - .get() - .await? - .query( - " - SELECT * FROM validator_registrations - INNER JOIN validator_preferences - ON validator_registrations.public_key = validator_preferences.public_key - ", - &[], - ) - .await?, - ) + let mut record = DbMetricRecord::new("get_validator_registrations"); + + let rows = self + .pool + .get() + .await? + .query( + " + SELECT * FROM validator_registrations + INNER JOIN validator_preferences + ON validator_registrations.public_key = validator_preferences.public_key + ", + &[], + ) + .await?; + + record.record_success(); + parse_rows(rows) } async fn get_validator_registrations_for_pub_keys( &self, pub_keys: Vec, ) -> Result, DatabaseError> { + let mut record = DbMetricRecord::new("get_validator_registrations_for_pub_keys"); + let client = self.pool.get().await.map_err(DatabaseError::from)?; // Constructing the query @@ -635,7 +672,10 @@ impl DatabaseService for PostgresDatabaseService { let params_slice: Vec<&(dyn ToSql + Sync)> = params.iter().map(|b| b.as_ref() as &(dyn ToSql + Sync)).collect(); - parse_rows(client.query(&stmt, ¶ms_slice).await.map_err(DatabaseError::from)?) 
+ let rows = client.query(&stmt, ¶ms_slice).await.map_err(DatabaseError::from)?; + + record.record_success(); + parse_rows(rows) } async fn get_validator_registration_timestamp( @@ -649,6 +689,8 @@ impl DatabaseService for PostgresDatabaseService { &self, proposer_duties: Vec, ) -> Result<(), DatabaseError> { + let mut record = DbMetricRecord::new("set_proposer_duties"); + let mut client = self.pool.get().await?; let transaction = client.transaction().await?; @@ -709,34 +751,41 @@ impl DatabaseService for PostgresDatabaseService { transaction.commit().await?; + record.record_success(); Ok(()) } async fn get_proposer_duties( &self, ) -> Result, DatabaseError> { - parse_rows( - self.pool - .get() - .await? - .query( - " + let mut record = DbMetricRecord::new("get_proposer_duties"); + + let rows = self + .pool + .get() + .await? + .query( + " SELECT * FROM proposer_duties INNER JOIN validator_registrations ON proposer_duties.public_key = validator_registrations.public_key INNER JOIN validator_preferences ON proposer_duties.public_key = validator_preferences.public_key ", - &[], - ) - .await?, - ) + &[], + ) + .await?; + + record.record_success(); + parse_rows(rows) } async fn set_known_validators( &self, known_validators: Vec, ) -> Result<(), DatabaseError> { + let mut record = DbMetricRecord::new("set_known_validators"); + info!("Known validators: current cache size: {:?}", self.known_validators_cache.len()); let mut client = self.pool.get().await?; @@ -800,6 +849,7 @@ impl DatabaseService for PostgresDatabaseService { transaction.commit().await?; + record.record_success(); Ok(()) } @@ -807,6 +857,8 @@ impl DatabaseService for PostgresDatabaseService { &self, public_keys: Vec, ) -> Result, DatabaseError> { + let mut record = DbMetricRecord::new("check_known_validators"); + let client = self.pool.get().await?; let mut pub_keys = HashSet::new(); @@ -829,6 +881,7 @@ impl DatabaseService for PostgresDatabaseService { } } + record.record_success(); Ok(pub_keys) } @@ 
-836,6 +889,8 @@ impl DatabaseService for PostgresDatabaseService { &self, api_key: &str, ) -> Result, DatabaseError> { + let mut record = DbMetricRecord::new("get_validator_pool_name"); + let client = self.pool.get().await?; if self.validator_pool_cache.is_empty() { @@ -871,6 +926,7 @@ impl DatabaseService for PostgresDatabaseService { self.validator_pool_cache.insert(api_key.to_string(), name.clone()); + record.record_success(); Ok(Some(name)) } @@ -882,6 +938,8 @@ impl DatabaseService for PostgresDatabaseService { message_received: u64, payload_fetched: u64, ) -> Result<(), DatabaseError> { + let mut record = DbMetricRecord::new("save_too_late_get_payload"); + let region_id = self.region; self.pool .get() @@ -903,6 +961,8 @@ impl DatabaseService for PostgresDatabaseService { ], ) .await?; + + record.record_success(); Ok(()) } @@ -913,6 +973,8 @@ impl DatabaseService for PostgresDatabaseService { latency_trace: &GetPayloadTrace, user_agent: Option, ) -> Result<(), DatabaseError> { + let mut record = DbMetricRecord::new("save_delivered_payload"); + let region_id = self.region; let mut client = self.pool.get().await?; let transaction = client.transaction().await?; @@ -1064,6 +1126,8 @@ impl DatabaseService for PostgresDatabaseService { } transaction.commit().await?; + + record.record_success(); Ok(()) } @@ -1073,6 +1137,8 @@ impl DatabaseService for PostgresDatabaseService { trace: Arc, optimistic_version: i16, ) -> Result<(), DatabaseError> { + let mut record = DbMetricRecord::new("store_block_submission"); + let region_id = self.region; let mut client = self.pool.get().await?; let transaction = client.transaction().await?; @@ -1142,6 +1208,7 @@ impl DatabaseService for PostgresDatabaseService { transaction.commit().await?; + record.record_success(); Ok(()) } @@ -1150,6 +1217,8 @@ impl DatabaseService for PostgresDatabaseService { builder_pub_key: &BlsPublicKey, builder_info: &BuilderInfo, ) -> Result<(), DatabaseError> { + let mut record = 
DbMetricRecord::new("store_builder_info"); + self.pool .get() .await? @@ -1171,6 +1240,7 @@ impl DatabaseService for PostgresDatabaseService { ) .await?; + record.record_success(); Ok(()) } @@ -1178,12 +1248,15 @@ impl DatabaseService for PostgresDatabaseService { &self, builders: &[BuilderInfoDocument], ) -> Result<(), DatabaseError> { + let mut record = DbMetricRecord::new("store_builders_info"); + // PERF: this is not the most performant approach but it is expected // to add just a few builders only at startup for builder in builders { self.store_builder_info(&builder.pub_key, &builder.builder_info).await?; } + record.record_success(); Ok(()) } @@ -1191,6 +1264,8 @@ impl DatabaseService for PostgresDatabaseService { &self, builder_pub_key: &BlsPublicKey, ) -> Result { + let mut record = DbMetricRecord::new("get_builder_info"); + match self .pool .get() @@ -1207,19 +1282,30 @@ impl DatabaseService for PostgresDatabaseService { rows if rows.is_empty() => { Err(DatabaseError::BuilderInfoNotFound { public_key: builder_pub_key.clone() }) } - rows => parse_row(rows.first().unwrap()), + rows => { + record.record_success(); + parse_row(rows.first().unwrap()) + } } } async fn get_all_builder_infos(&self) -> Result, DatabaseError> { - parse_rows(self.pool.get().await?.query("SELECT * FROM builder_info", &[]).await?) 
+ let mut record = DbMetricRecord::new("get_all_builder_infos"); + + let rows = self.pool.get().await?.query("SELECT * FROM builder_info", &[]).await?; + + record.record_success(); + parse_rows(rows) } async fn check_builder_api_key(&self, api_key: &str) -> Result { + let mut record = DbMetricRecord::new("check_builder_api_key"); + let client = self.pool.get().await?; let rows = client.query("SELECT * FROM builder_info WHERE api_key = $1", &[&(api_key)]).await?; + record.record_success(); Ok(!rows.is_empty()) } @@ -1229,6 +1315,8 @@ impl DatabaseService for PostgresDatabaseService { block_hash: &Hash32, reason: String, ) -> Result<(), DatabaseError> { + let mut record = DbMetricRecord::new("db_demote_builder"); + let mut client = self.pool.get().await?; let transaction = client.transaction().await?; transaction @@ -1260,6 +1348,7 @@ impl DatabaseService for PostgresDatabaseService { transaction.commit().await?; + record.record_success(); Ok(()) } @@ -1268,6 +1357,8 @@ impl DatabaseService for PostgresDatabaseService { block_hash: ByteVector<32>, block_sim_result: Result<(), BlockSimError>, ) -> Result<(), DatabaseError> { + let mut record = DbMetricRecord::new("save_simulation_result"); + if let Err(e) = block_sim_result { self.pool .get() @@ -1283,6 +1374,8 @@ impl DatabaseService for PostgresDatabaseService { ) .await?; } + + record.record_success(); Ok(()) } @@ -1291,6 +1384,8 @@ impl DatabaseService for PostgresDatabaseService { filters: &BidFilters, validator_preferences: Arc, ) -> Result, DatabaseError> { + let mut record = DbMetricRecord::new("get_bids"); + let filters = PgBidFilters::from(filters); let mut query = String::from(" @@ -1370,12 +1465,15 @@ impl DatabaseService for PostgresDatabaseService { )); params.push(Box::new(filtering)); param_index += 1; - } + } let params_refs: Vec<&(dyn ToSql + Sync)> = params.iter().map(|p| &**p as &(dyn ToSql + Sync)).collect(); - parse_rows(self.pool.get().await?.query(&query, ¶ms_refs[..]).await?) 
+ let rows = self.pool.get().await?.query(&query, ¶ms_refs[..]).await?; + + record.record_success(); + parse_rows(rows) } async fn get_delivered_payloads( @@ -1383,6 +1481,8 @@ impl DatabaseService for PostgresDatabaseService { filters: &BidFilters, validator_preferences: Arc, ) -> Result, DatabaseError> { + let mut record = DbMetricRecord::new("get_delivered_payloads"); + let filters = PgBidFilters::from(filters); let mut query = String::from( " @@ -1497,7 +1597,9 @@ impl DatabaseService for PostgresDatabaseService { let params_refs: Vec<&(dyn ToSql + Sync)> = params.iter().map(|p| &**p as &(dyn ToSql + Sync)).collect(); - parse_rows(self.pool.get().await?.query(&query, ¶ms_refs[..]).await?) + let rows = self.pool.get().await?.query(&query, ¶ms_refs[..]).await?; + record.record_success(); + parse_rows(rows) } async fn save_get_header_call( @@ -1509,6 +1611,8 @@ impl DatabaseService for PostgresDatabaseService { trace: GetHeaderTrace, user_agent: Option, ) -> Result<(), DatabaseError> { + let mut record = DbMetricRecord::new("save_get_header_call"); + let region_id = self.region; let mut client = self.pool.get().await?; @@ -1553,6 +1657,7 @@ impl DatabaseService for PostgresDatabaseService { transaction.commit().await?; + record.record_success(); Ok(()) } @@ -1563,6 +1668,8 @@ impl DatabaseService for PostgresDatabaseService { error: String, trace: GetPayloadTrace, ) -> Result<(), DatabaseError> { + let mut record = DbMetricRecord::new("save_failed_get_payload"); + let region_id = self.region; let mut client = self.pool.get().await?; @@ -1603,6 +1710,7 @@ impl DatabaseService for PostgresDatabaseService { transaction.commit().await?; + record.record_success(); Ok(()) } @@ -1611,6 +1719,8 @@ impl DatabaseService for PostgresDatabaseService { submission: Arc, trace: Arc, ) -> Result<(), DatabaseError> { + let mut record = DbMetricRecord::new("store_header_submission"); + let region_id = self.region; let mut client = self.pool.get().await?; let transaction = 
client.transaction().await?; @@ -1663,6 +1773,7 @@ impl DatabaseService for PostgresDatabaseService { transaction.commit().await?; + record.record_success(); Ok(()) } @@ -1671,6 +1782,8 @@ impl DatabaseService for PostgresDatabaseService { block_hash: ByteVector<32>, trace: Arc, ) -> Result<(), DatabaseError> { + let mut record = DbMetricRecord::new("save_gossiped_header_trace"); + let region_id = self.region; self.pool.get().await?.execute( @@ -1689,6 +1802,8 @@ impl DatabaseService for PostgresDatabaseService { &(trace.auctioneer_update as i64), ], ).await?; + + record.record_success(); Ok(()) } @@ -1697,6 +1812,8 @@ impl DatabaseService for PostgresDatabaseService { block_hash: ByteVector<32>, trace: Arc, ) -> Result<(), DatabaseError> { + let mut record = DbMetricRecord::new("save_gossiped_payload_trace"); + let region_id = self.region; self.pool.get().await?.execute( @@ -1714,21 +1831,26 @@ impl DatabaseService for PostgresDatabaseService { &(trace.auctioneer_update as i64), ], ).await?; + + record.record_success(); Ok(()) } async fn get_trusted_proposers(&self) -> Result, DatabaseError> { - parse_rows( - self.pool - .get() - .await? - .query( - " - SELECT * FROM trusted_proposers - ", - &[], - ) - .await?, - ) + let mut record = DbMetricRecord::new("get_trusted_proposers"); + let rows = self + .pool + .get() + .await? 
+ .query( + " + SELECT * FROM trusted_proposers + ", + &[], + ) + .await?; + + record.record_success(); + parse_rows(rows) } } diff --git a/crates/datastore/src/redis/redis_cache.rs b/crates/datastore/src/redis/redis_cache.rs index c7c68d3..0e4b1c9 100644 --- a/crates/datastore/src/redis/redis_cache.rs +++ b/crates/datastore/src/redis/redis_cache.rs @@ -14,6 +14,7 @@ use helix_common::{ constraints_api::{SignedDelegation, SignedRevocation}, }, bid_submission::{v2::header_submission::SignedHeaderSubmission, BidSubmission}, + metrics::RedisMetricRecord, pending_block::PendingBlock, proofs::SignedConstraintsWithProofData, versioned_payload::PayloadAndBlobs, @@ -511,10 +512,14 @@ impl Auctioneer for RedisCache { &self, pub_key: BlsPublicKey, ) -> Result, AuctioneerError> { + let mut record = RedisMetricRecord::new("get_validator_delegations"); + let key = get_delegations_key(&pub_key); let delegations = self.get(&key).await.map_err(AuctioneerError::RedisError)?.unwrap_or_default(); + + record.record_success(); Ok(delegations) } @@ -522,6 +527,8 @@ impl Auctioneer for RedisCache { &self, signed_delegations: Vec, ) -> Result<(), AuctioneerError> { + let mut record = RedisMetricRecord::new("save_validator_delegations"); + let len = signed_delegations.len(); for signed_delegation in signed_delegations { let key = get_delegations_key(&signed_delegation.message.validator_pubkey); @@ -541,6 +548,7 @@ impl Auctioneer for RedisCache { trace!(len, "saved delegations to cache"); + record.record_success(); Ok(()) } @@ -548,6 +556,8 @@ impl Auctioneer for RedisCache { &self, signed_revocations: Vec, ) -> Result<(), AuctioneerError> { + let mut record = RedisMetricRecord::new("revoke_validator_delegations"); + for signed_revocation in &signed_revocations { let key = get_delegations_key(&signed_revocation.message.validator_pubkey); @@ -568,6 +578,7 @@ impl Auctioneer for RedisCache { .map_err(AuctioneerError::RedisError)?; } + record.record_success(); Ok(()) } @@ -576,6 +587,8 @@ 
impl Auctioneer for RedisCache { slot: u64, constraints: SignedConstraintsWithProofData, ) -> Result<(), AuctioneerError> { + let mut record = RedisMetricRecord::new("save_constraints"); + let key = get_constraints_key(slot); // Get the existing constraints from the cache or create new constraints. @@ -587,15 +600,23 @@ impl Auctioneer for RedisCache { // Save the constraints to the cache. self.set(&key, &prev_constraints, Some(CONSTRAINTS_CACHE_EXPIRY_S)) .await - .map_err(AuctioneerError::RedisError) + .map_err(AuctioneerError::RedisError)?; + + record.record_success(); + Ok(()) } async fn get_constraints( &self, slot: u64, ) -> Result>, AuctioneerError> { + let mut record = RedisMetricRecord::new("get_constraints"); + let key = get_constraints_key(slot); - self.get(&key).await.map_err(AuctioneerError::RedisError) + let constraints = self.get(&key).await.map_err(AuctioneerError::RedisError)?; + + record.record_success(); + Ok(constraints) } async fn save_inclusion_proof( @@ -605,10 +626,15 @@ impl Auctioneer for RedisCache { bid_block_hash: &Hash32, inclusion_proof: &InclusionProofs, ) -> Result<(), AuctioneerError> { + let mut record = RedisMetricRecord::new("save_inclusion_proof"); + let key = get_inclusion_proof_key(slot, proposer_pub_key, bid_block_hash); self.set(&key, inclusion_proof, Some(CONSTRAINTS_CACHE_EXPIRY_S)) .await - .map_err(AuctioneerError::RedisError) + .map_err(AuctioneerError::RedisError)?; + + record.record_success(); + Ok(()) } async fn get_inclusion_proof( @@ -617,12 +643,23 @@ impl Auctioneer for RedisCache { proposer_pub_key: &BlsPublicKey, bid_block_hash: &Hash32, ) -> Result, AuctioneerError> { + let mut record = RedisMetricRecord::new("get_inclusion_proof"); + let key = get_inclusion_proof_key(slot, proposer_pub_key, bid_block_hash); - self.get(&key).await.map_err(AuctioneerError::RedisError) + let inclusion_proof = self.get(&key).await.map_err(AuctioneerError::RedisError)?; + + record.record_success(); + Ok(inclusion_proof) } async 
fn get_last_slot_delivered(&self) -> Result, AuctioneerError> { - self.get(LAST_SLOT_DELIVERED_KEY).await.map_err(AuctioneerError::RedisError) + let mut record = RedisMetricRecord::new("get_last_slot_delivered"); + + let last_slot_delivered = + self.get(LAST_SLOT_DELIVERED_KEY).await.map_err(AuctioneerError::RedisError)?; + + record.record_success(); + Ok(last_slot_delivered) } async fn check_and_set_last_slot_and_hash_delivered( @@ -630,6 +667,8 @@ impl Auctioneer for RedisCache { slot: u64, hash: &Hash32, ) -> Result<(), AuctioneerError> { + let mut record = RedisMetricRecord::new("check_and_set_last_slot_and_hash_delivered"); + let last_slot_delivered_res = self.get_last_slot_delivered().await?; if let Some(last_slot_delivered) = last_slot_delivered_res { @@ -648,6 +687,8 @@ impl Auctioneer for RedisCache { } None => return Err(AuctioneerError::UnexpectedValueType), } + + record.record_success(); return Ok(()) } } @@ -669,7 +710,10 @@ impl Auctioneer for RedisCache { .arg(hash_value) .ignore(); - Ok(pipe.query_async(&mut conn).await.map_err(RedisCacheError::from)?) + pipe.query_async(&mut conn).await.map_err(RedisCacheError::from)?; + + record.record_success(); + Ok(()) } async fn get_best_bid( @@ -678,8 +722,12 @@ impl Auctioneer for RedisCache { parent_hash: &Hash32, proposer_pub_key: &BlsPublicKey, ) -> Result, AuctioneerError> { + let mut record = RedisMetricRecord::new("get_best_bid"); + let key = get_cache_get_header_response_key(slot, parent_hash, proposer_pub_key); let wrapped_bid: Option = self.get(&key).await?; + + record.record_success(); Ok(wrapped_bid.map(|wrapped_bid| wrapped_bid.bid)) } @@ -698,8 +746,13 @@ impl Auctioneer for RedisCache { block_hash: &Hash32, execution_payload: &PayloadAndBlobs, ) -> Result<(), AuctioneerError> { + let mut record = RedisMetricRecord::new("save_execution_payload"); + let key = get_execution_payload_key(slot, proposer_pub_key, block_hash); - Ok(self.set(&key, &execution_payload, Some(BID_CACHE_EXPIRY_S)).await?) 
+ self.set(&key, &execution_payload, Some(BID_CACHE_EXPIRY_S)).await?; + + record.record_success(); + Ok(()) } async fn get_execution_payload( @@ -708,8 +761,13 @@ impl Auctioneer for RedisCache { proposer_pub_key: &BlsPublicKey, block_hash: &Hash32, ) -> Result, AuctioneerError> { + let mut record = RedisMetricRecord::new("get_execution_payload"); + let key = get_execution_payload_key(slot, proposer_pub_key, block_hash); - Ok(self.get(&key).await?) + let execution_payload = self.get(&key).await?; + + record.record_success(); + Ok(execution_payload) } async fn get_bid_trace( @@ -718,17 +776,27 @@ impl Auctioneer for RedisCache { proposer_pub_key: &BlsPublicKey, block_hash: &Hash32, ) -> Result, AuctioneerError> { + let mut record = RedisMetricRecord::new("get_bid_trace"); + let key = get_cache_bid_trace_key(slot, proposer_pub_key, block_hash); - Ok(self.get(&key).await?) + let bid_trace = self.get(&key).await?; + + record.record_success(); + Ok(bid_trace) } async fn save_bid_trace(&self, bid_trace: &BidTrace) -> Result<(), AuctioneerError> { + let mut record = RedisMetricRecord::new("save_bid_trace"); + let key = get_cache_bid_trace_key( bid_trace.slot, &bid_trace.proposer_public_key, &bid_trace.block_hash, ); - Ok(self.set(&key, &bid_trace, Some(BID_CACHE_EXPIRY_S)).await?) + self.set(&key, &bid_trace, Some(BID_CACHE_EXPIRY_S)).await?; + + record.record_success(); + Ok(()) } async fn get_builder_latest_payload_received_at( @@ -738,8 +806,13 @@ impl Auctioneer for RedisCache { parent_hash: &Hash32, proposer_pub_key: &BlsPublicKey, ) -> Result, AuctioneerError> { + let mut record = RedisMetricRecord::new("get_builder_latest_payload_received_at"); + let key = get_builder_latest_bid_time_key(slot, parent_hash, proposer_pub_key); - Ok(self.hget(&key, &format!("{builder_pub_key:?}")).await?) 
+ let received_at = self.hget(&key, &format!("{builder_pub_key:?}")).await?; + + record.record_success(); + Ok(received_at) } /// This function performs three operations: @@ -755,6 +828,8 @@ impl Auctioneer for RedisCache { received_at: u128, builder_bid: &SignedBuilderBid, ) -> Result<(), AuctioneerError> { + let mut record = RedisMetricRecord::new("save_builder_bid"); + let mut conn = self.pool.get().await.map_err(RedisCacheError::from)?; let mut pipe = redis::pipe(); @@ -797,7 +872,10 @@ impl Auctioneer for RedisCache { .expire(&key_latest_bids_value, BID_CACHE_EXPIRY_S) .ignore(); - Ok(pipe.query_async(&mut conn).await.map_err(RedisCacheError::from)?) + pipe.query_async(&mut conn).await.map_err(RedisCacheError::from)?; + + record.record_success(); + Ok(()) } /// The `save_bid_and_update_top_bid` function performs several key operations: @@ -815,10 +893,13 @@ impl Auctioneer for RedisCache { state: &mut SaveBidAndUpdateTopBidResponse, signing_context: &RelaySigningContext, ) -> Result, AuctioneerError> { + let mut record = RedisMetricRecord::new("save_bid_and_update_top_bid"); + // Exit early if cancellations aren't enabled and the bid is below the floor. let is_bid_above_floor = submission.bid_trace().value > floor_value; if !cancellations_enabled && !is_bid_above_floor { - return Ok(None) + record.record_success(); + return Ok(None); } // Save the execution payload @@ -852,6 +933,7 @@ impl Auctioneer for RedisCache { ) .await?; + record.record_success(); Ok(Some((builder_bid, cloned_submission.payload_and_blobs()))) } @@ -861,8 +943,13 @@ impl Auctioneer for RedisCache { parent_hash: &Hash32, proposer_pub_key: &BlsPublicKey, ) -> Result, AuctioneerError> { + let mut record = RedisMetricRecord::new("get_top_bid_value"); + let key = get_top_bid_value_key(slot, parent_hash, proposer_pub_key); - Ok(self.get(&key).await?) 
+ let top_bid_value = self.get(&key).await?; + + record.record_success(); + Ok(top_bid_value) } async fn get_builder_latest_value( @@ -872,8 +959,13 @@ impl Auctioneer for RedisCache { proposer_pub_key: &BlsPublicKey, builder_pub_key: &BlsPublicKey, ) -> Result, AuctioneerError> { + let mut record = RedisMetricRecord::new("get_builder_latest_value"); + let key = get_builder_latest_bid_value_key(slot, parent_hash, proposer_pub_key); - Ok(self.hget(&key, &format!("{builder_pub_key:?}")).await?) + let builder_latest_value = self.hget(&key, &format!("{builder_pub_key:?}")).await?; + + record.record_success(); + Ok(builder_latest_value) } async fn get_floor_bid_value( @@ -882,8 +974,13 @@ impl Auctioneer for RedisCache { parent_hash: &Hash32, proposer_pub_key: &BlsPublicKey, ) -> Result, AuctioneerError> { + let mut record = RedisMetricRecord::new("get_floor_bid_value"); + let key = get_floor_bid_value_key(slot, parent_hash, proposer_pub_key); - Ok(self.get(&key).await?) + let floor_bid_value = self.get(&key).await?; + + record.record_success(); + Ok(floor_bid_value) } async fn delete_builder_bid( @@ -893,6 +990,8 @@ impl Auctioneer for RedisCache { proposer_pub_key: &BlsPublicKey, builder_pub_key: &BlsPublicKey, ) -> Result<(), AuctioneerError> { + let mut record = RedisMetricRecord::new("delete_builder_bid"); + // Delete the value let key_latest_value = get_builder_latest_bid_value_key(slot, parent_hash, proposer_pub_key); @@ -911,41 +1010,55 @@ impl Auctioneer for RedisCache { .await? .unwrap_or(U256::ZERO); - Ok(self - .update_top_bid( - &mut state, - &builder_bids, - slot, - parent_hash, - proposer_pub_key, - floor_value, - ) - .await?) 
+ self.update_top_bid( + &mut state, + &builder_bids, + slot, + parent_hash, + proposer_pub_key, + floor_value, + ) + .await?; + + record.record_success(); + Ok(()) } async fn get_builder_info( &self, builder_pub_key: &BlsPublicKey, ) -> Result { - self.hget(BUILDER_INFO_KEY, &format!("{builder_pub_key:?}")) + let mut record = RedisMetricRecord::new("get_builder_info"); + let builder_info = self + .hget(BUILDER_INFO_KEY, &format!("{builder_pub_key:?}")) .await? - .ok_or(AuctioneerError::BuilderNotFound { pub_key: builder_pub_key.clone() }) + .ok_or(AuctioneerError::BuilderNotFound { pub_key: builder_pub_key.clone() })?; + + record.record_success(); + Ok(builder_info) } async fn demote_builder(&self, builder_pub_key: &BlsPublicKey) -> Result<(), AuctioneerError> { + let mut record = RedisMetricRecord::new("demote_builder"); let mut builder_info = self.get_builder_info(builder_pub_key).await?; if !builder_info.is_optimistic { return Ok(()) } builder_info.is_optimistic = false; - Ok(self.hset(BUILDER_INFO_KEY, &format!("{builder_pub_key:?}"), &builder_info).await?) + self.hset(BUILDER_INFO_KEY, &format!("{builder_pub_key:?}"), &builder_info).await?; + + record.record_success(); + Ok(()) } async fn update_builder_infos( &self, builder_infos: Vec, ) -> Result<(), AuctioneerError> { + let mut record = RedisMetricRecord::new("update_builder_infos"); + if builder_infos.is_empty() { + record.record_success(); return Ok(()) } @@ -967,6 +1080,7 @@ impl Auctioneer for RedisCache { } } + record.record_success(); Ok(()) } @@ -977,8 +1091,12 @@ impl Auctioneer for RedisCache { parent_hash: &Hash32, proposer_pub_key: &BlsPublicKey, ) -> Result { + let mut record = RedisMetricRecord::new("seen_or_insert_block_hash"); let key = get_seen_block_hashes_key(slot, parent_hash, proposer_pub_key); - Ok(self.seen_or_add(&key, block_hash).await?) 
+ let seen = self.seen_or_add(&key, block_hash).await?; + + record.record_success(); + Ok(seen) } async fn save_signed_builder_bid_and_update_top_bid( @@ -990,9 +1108,12 @@ impl Auctioneer for RedisCache { floor_value: U256, state: &mut SaveBidAndUpdateTopBidResponse, ) -> Result<(), AuctioneerError> { + let mut record = RedisMetricRecord::new("save_signed_builder_bid_and_update_top_bid"); + // Exit early if cancellations aren't enabled and the bid is below the floor. let is_bid_above_floor = builder_bid.value() > floor_value; if !cancellations_enabled && !is_bid_above_floor { + record.record_success(); return Ok(()) } @@ -1035,6 +1156,7 @@ impl Auctioneer for RedisCache { // TODO: the floor may have raised but we will exit early here. state.top_bid_value = builder_bids.values().max().cloned().unwrap_or(U256::ZERO); if state.top_bid_value == state.prev_top_bid_value { + record.record_success(); return Ok(()) } @@ -1054,6 +1176,7 @@ impl Auctioneer for RedisCache { // Handle floor value updates only if needed. // Only non-cancellable bids above the floor should set a new floor. if cancellations_enabled || !is_bid_above_floor { + record.record_success(); return Ok(()) } self.set_new_floor( @@ -1066,6 +1189,7 @@ impl Auctioneer for RedisCache { .await?; state.set_latency_update_floor(); + record.record_success(); Ok(()) } @@ -1073,8 +1197,12 @@ impl Auctioneer for RedisCache { &self, block_hash: &Hash32, ) -> Result, AuctioneerError> { + let mut record = RedisMetricRecord::new("get_header_tx_root"); let key = get_header_tx_root_key(block_hash); - Ok(self.get(&key).await?) 
+ let tx_root = self.get(&key).await?; + + record.record_success(); + Ok(tx_root) } async fn save_header_submission_and_update_top_bid( @@ -1086,9 +1214,12 @@ impl Auctioneer for RedisCache { state: &mut SaveBidAndUpdateTopBidResponse, signing_context: &RelaySigningContext, ) -> Result, AuctioneerError> { + let mut record = RedisMetricRecord::new("save_header_submission_and_update_top_bid"); + // Exit early if cancellations aren't enabled and the bid is below the floor. let is_bid_above_floor = submission.value() > floor_value; if !cancellations_enabled && !is_bid_above_floor { + record.record_success(); return Ok(None) } @@ -1116,6 +1247,7 @@ impl Auctioneer for RedisCache { ) .await?; + record.record_success(); Ok(Some(builder_bid)) } @@ -1123,6 +1255,8 @@ impl Auctioneer for RedisCache { &self, proposer_whitelist: Vec, ) -> Result<(), AuctioneerError> { + let mut record = RedisMetricRecord::new("update_trusted_proposers"); + // get keys let proposer_keys: Vec = proposer_whitelist.iter().map(|proposer| format!("{:?}", proposer.pub_key)).collect(); @@ -1145,6 +1279,7 @@ impl Auctioneer for RedisCache { } } + record.record_success(); Ok(()) } @@ -1152,9 +1287,13 @@ impl Auctioneer for RedisCache { &self, proposer_pub_key: &BlsPublicKey, ) -> Result { + let mut record = RedisMetricRecord::new("is_trusted_proposer"); + let key_str = format!("{proposer_pub_key:?}"); let proposer_info: Option = self.hget(PROPOSER_WHITELIST_KEY, &key_str).await?; + + record.record_success(); Ok(proposer_info.is_some()) } @@ -1162,6 +1301,8 @@ impl Auctioneer for RedisCache { &self, primev_proposers: &[BlsPublicKey], ) -> Result<(), AuctioneerError> { + let mut record = RedisMetricRecord::new("update_primev_proposers"); + // get keys let proposer_keys: Vec = primev_proposers.iter().map(|proposer| format!("{:?}", proposer)).collect(); @@ -1184,6 +1325,7 @@ impl Auctioneer for RedisCache { } } + record.record_success(); Ok(()) } @@ -1191,9 +1333,14 @@ impl Auctioneer for RedisCache { 
&self, proposer_pub_key: &BlsPublicKey, ) -> Result { + let mut record = RedisMetricRecord::new("is_primev_proposer"); + let key_str = format!("{proposer_pub_key:?}"); let proposer_info: Option = self.hget(PRIMEV_PROPOSERS_KEY, &key_str).await?; - Ok(proposer_info.is_some()) + let is_primev = proposer_info.is_some(); + + record.record_success(); + Ok(is_primev) } async fn save_pending_block_header( @@ -1203,6 +1350,8 @@ impl Auctioneer for RedisCache { block_hash: &Hash32, timestamp_ms: u64, ) -> Result<(), AuctioneerError> { + let mut record = RedisMetricRecord::new("save_pending_block_header"); + let builder_key = get_pending_block_builder_key(builder_pub_key); self.add(builder_key.as_str(), format!("{block_hash:?}")).await?; @@ -1210,6 +1359,7 @@ impl Auctioneer for RedisCache { let entries = vec![("slot", slot), ("header_received", timestamp_ms)]; self.hset_multiple_not_exists(key.as_str(), &entries, PENDING_BLOCK_EXPIRY_S).await?; + record.record_success(); Ok(()) } @@ -1220,6 +1370,8 @@ impl Auctioneer for RedisCache { block_hash: &Hash32, timestamp_ms: u64, ) -> Result<(), AuctioneerError> { + let mut record = RedisMetricRecord::new("save_pending_block_payload"); + let builder_key = get_pending_block_builder_key(builder_pub_key); self.add(builder_key.as_str(), format!("{block_hash:?}")).await?; @@ -1227,16 +1379,20 @@ impl Auctioneer for RedisCache { let entries = vec![("slot", slot), ("payload_received", timestamp_ms)]; self.hset_multiple_not_exists(key.as_str(), &entries, PENDING_BLOCK_EXPIRY_S).await?; + record.record_success(); Ok(()) } async fn get_pending_blocks(&self) -> Result, AuctioneerError> { + let mut record = RedisMetricRecord::new("get_pending_blocks"); + let mut pending_blocks: Vec = Vec::new(); let redis_builder_infos: Option> = self.hgetall(BUILDER_INFO_KEY).await?; if redis_builder_infos.is_none() { + record.record_success(); return Ok(pending_blocks) } @@ -1293,6 +1449,8 @@ impl Auctioneer for RedisCache { 
self.remove(builder_key.as_str(), expired).await?; } } + + record.record_success(); Ok(pending_blocks) } From cf3f471316f253fdcf18443442dafc465b0d1154 Mon Sep 17 00:00:00 2001 From: ltitanb Date: Thu, 5 Dec 2024 11:01:37 +0000 Subject: [PATCH 20/39] fix build --- Cargo.lock | 1 + crates/common/Cargo.toml | 1 + 2 files changed, 2 insertions(+) diff --git a/Cargo.lock b/Cargo.lock index 4ce5361..ab953bc 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2784,6 +2784,7 @@ name = "helix-common" version = "0.0.1" dependencies = [ "auto_impl", + "axum 0.7.4", "clap", "ethereum-consensus", "ethereum-types", diff --git a/crates/common/Cargo.toml b/crates/common/Cargo.toml index 07cf1b8..ca8cb71 100644 --- a/crates/common/Cargo.toml +++ b/crates/common/Cargo.toml @@ -25,6 +25,7 @@ ethereum-types = "0.14.1" ssz_types = "0.5.4" # Async +axum.workspace = true tokio.workspace = true # Misc From 55569333c9ee910707ba8e5f5192f13be8cd4ebf Mon Sep 17 00:00:00 2001 From: ltitanb Date: Thu, 5 Dec 2024 11:20:53 +0000 Subject: [PATCH 21/39] fix ci --- .github/workflows/build.yml | 3 +- crates/api/src/builder/api.rs | 75 +++++++++++-------- crates/api/src/builder/simulator/mod.rs | 2 +- .../src/builder/simulator/multi_simulator.rs | 17 +---- crates/api/src/builder/tests.rs | 2 +- crates/api/src/gossiper/error.rs | 1 - crates/api/src/gossiper/grpc_gossiper.rs | 16 ++-- crates/api/src/proposer/api.rs | 63 ++++++++-------- crates/api/src/relay_data/api.rs | 6 +- crates/api/src/relay_data/error.rs | 2 +- crates/beacon-client/src/beacon_client.rs | 2 +- crates/common/src/config.rs | 8 +- .../src/postgres/postgres_db_service_tests.rs | 37 ++------- crates/datastore/src/redis/redis_cache.rs | 2 +- 14 files changed, 108 insertions(+), 128 deletions(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 74613bd..efdacd0 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -11,7 +11,7 @@ env: jobs: build: - runs-on: self-hosted + runs-on: ubuntu-latest 
steps: - uses: actions/checkout@v2 @@ -42,4 +42,3 @@ jobs: DOCKER_BUILDKIT=1 docker tag helix:${{ env.IMAGE_NAME }} $REGISTRY_URL/helix:${{ env.IMAGE_NAME }} DOCKER_BUILDKIT=1 docker login https://$REGISTRY_URL/ -u "$DOCKER_REG_USR" -p "$DOCKER_REG_PW" DOCKER_BUILDKIT=1 docker push $REGISTRY_URL/helix:${{ env.IMAGE_NAME }} - diff --git a/crates/api/src/builder/api.rs b/crates/api/src/builder/api.rs index 72d573b..bc3fc8b 100644 --- a/crates/api/src/builder/api.rs +++ b/crates/api/src/builder/api.rs @@ -307,10 +307,10 @@ where headers: HeaderMap, req: Request, ) -> Result { - let request_id = Uuid::parse_str(headers - .get("x-request-id") - .map(|v| v.to_str().unwrap_or_default()) - .unwrap_or_default()).unwrap(); + let request_id = Uuid::parse_str( + headers.get("x-request-id").map(|v| v.to_str().unwrap_or_default()).unwrap_or_default(), + ) + .unwrap(); let mut trace = SubmissionTrace { receive: get_nanos_timestamp()?, ..Default::default() }; let (head_slot, next_duty) = api.curr_slot_info.read().await.clone(); @@ -336,7 +336,7 @@ where return Err(BuilderApiError::SubmissionForPastSlot { current_slot: head_slot, submission_slot: payload.slot(), - }); + }) } // Verify that we have a validator connected for this slot @@ -346,7 +346,7 @@ where block_hash = ?block_hash, "could not find slot duty" ); - return Err(BuilderApiError::ProposerDutyNotFound); + return Err(BuilderApiError::ProposerDutyNotFound) } let next_duty = next_duty.unwrap(); @@ -362,7 +362,12 @@ where // Fetch the next payload attributes and validate basic information let payload_attributes = api - .fetch_payload_attributes(payload.slot(), payload.parent_hash(), &block_hash, &request_id) + .fetch_payload_attributes( + payload.slot(), + payload.parent_hash(), + &block_hash, + &request_id, + ) .await?; // Handle duplicates. 
@@ -429,7 +434,7 @@ where Ok(Some(slot)) => { if payload.slot() <= slot { debug!(request_id = %request_id, "payload already delivered"); - return Err(BuilderApiError::PayloadAlreadyDelivered); + return Err(BuilderApiError::PayloadAlreadyDelivered) } } Ok(None) => {} @@ -570,10 +575,10 @@ where headers: HeaderMap, req: Request, ) -> Result { - let request_id = Uuid::parse_str(headers - .get("x-request-id") - .map(|v| v.to_str().unwrap_or_default()) - .unwrap_or_default()).unwrap(); + let request_id = Uuid::parse_str( + headers.get("x-request-id").map(|v| v.to_str().unwrap_or_default()).unwrap_or_default(), + ) + .unwrap(); let mut trace = HeaderSubmissionTrace { receive: get_nanos_timestamp()?, ..Default::default() }; let (head_slot, next_duty) = api.curr_slot_info.read().await.clone(); @@ -610,7 +615,7 @@ where return Err(BuilderApiError::SubmissionForPastSlot { current_slot: head_slot, submission_slot: payload.slot(), - }); + }) } // Verify that we have a validator connected for this slot @@ -620,13 +625,18 @@ where block_hash = ?block_hash, "could not find slot duty" ); - return Err(BuilderApiError::ProposerDutyNotFound); + return Err(BuilderApiError::ProposerDutyNotFound) } let next_duty = next_duty.unwrap(); // Fetch the next payload attributes and validate basic information let payload_attributes = api - .fetch_payload_attributes(payload.slot(), payload.parent_hash(), &block_hash, &request_id) + .fetch_payload_attributes( + payload.slot(), + payload.parent_hash(), + &block_hash, + &request_id, + ) .await?; // Fetch builder info @@ -713,7 +723,7 @@ where Ok(Some(slot)) => { if payload.slot() <= slot { debug!(request_id = %request_id, "payload already delivered"); - return Err(BuilderApiError::PayloadAlreadyDelivered); + return Err(BuilderApiError::PayloadAlreadyDelivered) } } Ok(None) => {} @@ -814,10 +824,10 @@ where headers: HeaderMap, req: Request, ) -> Result { - let request_id = Uuid::parse_str(headers - .get("x-request-id") - .map(|v| 
v.to_str().unwrap_or_default()) - .unwrap_or_default()).unwrap(); + let request_id = Uuid::parse_str( + headers.get("x-request-id").map(|v| v.to_str().unwrap_or_default()).unwrap_or_default(), + ) + .unwrap(); let now = SystemTime::now(); let mut trace = SubmissionTrace { receive: get_nanos_from(now)?, ..Default::default() }; let (head_slot, next_duty) = api.curr_slot_info.read().await.clone(); @@ -879,13 +889,18 @@ where block_hash = ?block_hash, "could not find slot duty" ); - return Err(BuilderApiError::ProposerDutyNotFound); + return Err(BuilderApiError::ProposerDutyNotFound) } let next_duty = next_duty.unwrap(); // Fetch the next payload attributes and validate basic information let payload_attributes = api - .fetch_payload_attributes(payload.slot(), payload.parent_hash(), &block_hash, &request_id) + .fetch_payload_attributes( + payload.slot(), + payload.parent_hash(), + &block_hash, + &request_id, + ) .await?; // Fetch builder info @@ -923,7 +938,7 @@ where Ok(Some(slot)) => { if payload.slot() <= slot { debug!(request_id = %request_id, "payload already delivered"); - return Err(BuilderApiError::PayloadAlreadyDelivered); + return Err(BuilderApiError::PayloadAlreadyDelivered) } } Ok(None) => {} @@ -1033,10 +1048,10 @@ where headers: HeaderMap, Json(mut signed_cancellation): Json, ) -> Result { - let request_id = Uuid::parse_str(headers - .get("x-request-id") - .map(|v| v.to_str().unwrap_or_default()) - .unwrap_or_default()).unwrap(); + let request_id = Uuid::parse_str( + headers.get("x-request-id").map(|v| v.to_str().unwrap_or_default()).unwrap_or_default(), + ) + .unwrap(); let (head_slot, _next_duty) = api.curr_slot_info.read().await.clone(); let slot = signed_cancellation.message.slot; @@ -1066,7 +1081,7 @@ where Ok(Some(del_slot)) => { if slot <= del_slot { debug!(request_id = %request_id, "payload already delivered"); - return Err(BuilderApiError::PayloadAlreadyDelivered); + return Err(BuilderApiError::PayloadAlreadyDelivered) } } Ok(None) => {} @@ 
-1172,7 +1187,7 @@ where Ok(Some(slot)) => { if req.slot <= slot { debug!(request_id = %request_id, "payload already delivered"); - return; + return } } Ok(None) => {} @@ -1282,7 +1297,7 @@ where Ok(Some(slot)) => { if req.slot <= slot { debug!(request_id = %request_id, "payload already delivered"); - return; + return } } Ok(None) => {} diff --git a/crates/api/src/builder/simulator/mod.rs b/crates/api/src/builder/simulator/mod.rs index 92c1ba0..0b4407d 100644 --- a/crates/api/src/builder/simulator/mod.rs +++ b/crates/api/src/builder/simulator/mod.rs @@ -1,6 +1,6 @@ pub mod mock_simulator; -pub mod optimistic_simulator; pub mod multi_simulator; +pub mod optimistic_simulator; mod optimistic_simulator_tests; pub mod rpc_simulator; pub mod traits; diff --git a/crates/api/src/builder/simulator/multi_simulator.rs b/crates/api/src/builder/simulator/multi_simulator.rs index 7e9f3b9..f587e1d 100644 --- a/crates/api/src/builder/simulator/multi_simulator.rs +++ b/crates/api/src/builder/simulator/multi_simulator.rs @@ -1,11 +1,11 @@ use axum::async_trait; use helix_common::{simulator::BlockSimError, BuilderInfo}; -use tokio::sync::mpsc::Sender; -use uuid::Uuid; use std::sync::{ atomic::{AtomicUsize, Ordering}, Arc, }; +use tokio::sync::mpsc::Sender; +use uuid::Uuid; use crate::builder::DbInfo; @@ -19,10 +19,7 @@ pub struct MultiSimulator { impl MultiSimulator { pub fn new(simulators: Vec) -> Self { - Self { - simulators, - next_index: Arc::new(AtomicUsize::new(0)), - } + Self { simulators, next_index: Arc::new(AtomicUsize::new(0)) } } pub fn clone_for_async(&self) -> Self { @@ -52,13 +49,7 @@ impl BlockSimulator for MultiSimulator { // Process the request with the selected simulator simulator - .process_request( - request, - builder_info, - is_top_bid, - sim_result_saver_sender, - request_id, - ) + .process_request(request, builder_info, is_top_bid, sim_result_saver_sender, request_id) .await } } diff --git a/crates/api/src/builder/tests.rs b/crates/api/src/builder/tests.rs 
index 073fe74..d9f1d4e 100644 --- a/crates/api/src/builder/tests.rs +++ b/crates/api/src/builder/tests.rs @@ -14,7 +14,7 @@ use core::panic; use ethereum_consensus::{ builder::{SignedValidatorRegistration, ValidatorRegistration}, configs::mainnet::CAPELLA_FORK_EPOCH, - deneb::Transaction, Withdrawal, + deneb::Withdrawal, phase0::mainnet::SLOTS_PER_EPOCH, primitives::{BlsPublicKey, BlsSignature}, ssz::{self, prelude::*}, diff --git a/crates/api/src/gossiper/error.rs b/crates/api/src/gossiper/error.rs index 26b0356..23099b0 100644 --- a/crates/api/src/gossiper/error.rs +++ b/crates/api/src/gossiper/error.rs @@ -9,7 +9,6 @@ pub enum GossipError { #[error("Failed to reconnect")] ReconnectFailed, // Add other error common as needed - #[error("Broadcast timed out")] TimeoutError, } diff --git a/crates/api/src/gossiper/grpc_gossiper.rs b/crates/api/src/gossiper/grpc_gossiper.rs index 9f86a9a..1ca7dab 100644 --- a/crates/api/src/gossiper/grpc_gossiper.rs +++ b/crates/api/src/gossiper/grpc_gossiper.rs @@ -94,12 +94,12 @@ impl GrpcGossiperClient { Ok(Err(err)) => { error!(err = %err, "Client call failed."); GossipMetrics::out_count(HEADER_ID, false); - return Err(GossipError::BroadcastError(err)); + return Err(GossipError::BroadcastError(err)) } Err(_) => { error!("Client call timed out."); GossipMetrics::out_count(HEADER_ID, false); - return Err(GossipError::TimeoutError); + return Err(GossipError::TimeoutError) } } } else { @@ -134,12 +134,12 @@ impl GrpcGossiperClient { Ok(Err(err)) => { error!(err = %err, "Client call failed."); GossipMetrics::out_count(PAYLOAD_ID, false); - return Err(GossipError::BroadcastError(err)); + return Err(GossipError::BroadcastError(err)) } Err(_) => { error!("Client call timed out."); GossipMetrics::out_count(PAYLOAD_ID, false); - return Err(GossipError::TimeoutError); + return Err(GossipError::TimeoutError) } } } else { @@ -174,12 +174,12 @@ impl GrpcGossiperClient { Ok(Err(err)) => { error!(err = %err, "Client call failed."); 
GossipMetrics::out_count(GET_PAYLOAD_ID, false); - return Err(GossipError::BroadcastError(err)); + return Err(GossipError::BroadcastError(err)) } Err(_) => { error!("Client call timed out."); GossipMetrics::out_count(GET_PAYLOAD_ID, false); - return Err(GossipError::TimeoutError); + return Err(GossipError::TimeoutError) } } } else { @@ -216,12 +216,12 @@ impl GrpcGossiperClient { Ok(Err(err)) => { error!(err = %err, "Client call failed."); GossipMetrics::out_count(CANCELLATION_ID, false); - return Err(GossipError::BroadcastError(err)); + return Err(GossipError::BroadcastError(err)) } Err(_) => { error!("Client call timed out."); GossipMetrics::out_count(CANCELLATION_ID, false); - return Err(GossipError::TimeoutError); + return Err(GossipError::TimeoutError) } } } else { diff --git a/crates/api/src/proposer/api.rs b/crates/api/src/proposer/api.rs index 1b61efc..358ac9b 100644 --- a/crates/api/src/proposer/api.rs +++ b/crates/api/src/proposer/api.rs @@ -17,16 +17,13 @@ use ethereum_consensus::{ phase0::mainnet::SLOTS_PER_EPOCH, primitives::BlsPublicKey, ssz::prelude::*, - types::mainnet::{ - ExecutionPayloadHeader, ExecutionPayloadHeaderRef, SignedBeaconBlock, - SignedBlindedBeaconBlock, - }, + types::mainnet::{ExecutionPayloadHeader, SignedBeaconBlock, SignedBlindedBeaconBlock}, }; -use reth_primitives::kzg; use tokio::{ sync::{ - mpsc::{self, error::SendError, Receiver, Sender}, oneshot, RwLock + mpsc::{self, error::SendError, Receiver, Sender}, + oneshot, RwLock, }, time::{sleep, Instant}, }; @@ -170,10 +167,10 @@ where return Err(ProposerApiError::EmptyRequest) } - let request_id = Uuid::parse_str(headers - .get("x-request-id") - .map(|v| v.to_str().unwrap_or_default()) - .unwrap_or_default()).unwrap(); + let request_id = Uuid::parse_str( + headers.get("x-request-id").map(|v| v.to_str().unwrap_or_default()).unwrap_or_default(), + ) + .unwrap(); let mut trace = RegisterValidatorsTrace { receive: get_nanos_timestamp()?, ..Default::default() }; @@ -326,14 +323,18 @@ 
where // Bulk write registrations to db tokio::spawn(async move { - // Add validator preferences to each registration let mut valid_registrations_infos = Vec::new(); for reg in valid_registrations { let mut preferences = validator_preferences.clone(); - if proposer_api.auctioneer.is_primev_proposer(®.message.public_key).await.unwrap_or_default() { + if proposer_api + .auctioneer + .is_primev_proposer(®.message.public_key) + .await + .unwrap_or_default() + { preferences.trusted_builders = Some(vec!["PrimevBuilder".to_string()]); } @@ -385,10 +386,10 @@ where return Err(ProposerApiError::ServiceUnavailableError) } - let request_id = Uuid::parse_str(headers - .get("x-request-id") - .map(|v| v.to_str().unwrap_or_default()) - .unwrap_or_default()).unwrap(); + let request_id = Uuid::parse_str( + headers.get("x-request-id").map(|v| v.to_str().unwrap_or_default()).unwrap_or_default(), + ) + .unwrap(); let mut trace = GetHeaderTrace { receive: get_nanos_timestamp()?, ..Default::default() }; @@ -651,10 +652,10 @@ where req: Request, ) -> Result { let mut trace = GetPayloadTrace { receive: get_nanos_timestamp()?, ..Default::default() }; - let request_id = Uuid::parse_str(headers - .get("x-request-id") - .map(|v| v.to_str().unwrap_or_default()) - .unwrap_or_default()).unwrap(); + let request_id = Uuid::parse_str( + headers.get("x-request-id").map(|v| v.to_str().unwrap_or_default()).unwrap_or_default(), + ) + .unwrap(); let user_agent = headers.get("user-agent").and_then(|v| v.to_str().ok()).map(|v| v.to_string()); @@ -842,7 +843,9 @@ where return Err(err) } - if let Err(err) = self.validate_block_equality(&mut versioned_payload, &signed_blinded_block, request_id) { + if let Err(err) = + self.validate_block_equality(&mut versioned_payload, &signed_blinded_block, request_id) + { error!( %request_id, error = %err, @@ -932,12 +935,11 @@ where }); if !is_trusted_proposer { - if let Ok(_) = rx.await { info!(request_id = %request_id, trace = ?trace, "Payload published and saved!") } 
else { error!(request_id = %request_id, "Error in beacon client publishing"); - return Err(ProposerApiError::InternalServerError); + return Err(ProposerApiError::InternalServerError) } // Calculate the remaining time needed to reach the target propagation duration. @@ -1110,7 +1112,8 @@ where let provided_header = body.execution_payload_header(); let local_header = - match try_execution_header_from_payload(&mut local_versioned_payload.execution_payload) { + match try_execution_header_from_payload(&mut local_versioned_payload.execution_payload) + { Ok(header) => header, Err(err) => { error!( @@ -1118,7 +1121,7 @@ where error = %err, "error converting execution payload to header", ); - return Err(err.into()); + return Err(err.into()) } }; @@ -1127,21 +1130,21 @@ where let provided_header = provided_header.bellatrix().ok_or(ProposerApiError::PayloadTypeMismatch)?; if local_header != *provided_header { - return Err(ProposerApiError::BlindedBlockAndPayloadHeaderMismatch); + return Err(ProposerApiError::BlindedBlockAndPayloadHeaderMismatch) } } ExecutionPayloadHeader::Capella(local_header) => { let provided_header = provided_header.capella().ok_or(ProposerApiError::PayloadTypeMismatch)?; if local_header != *provided_header { - return Err(ProposerApiError::BlindedBlockAndPayloadHeaderMismatch); + return Err(ProposerApiError::BlindedBlockAndPayloadHeaderMismatch) } } ExecutionPayloadHeader::Deneb(local_header) => { let provided_header = provided_header.deneb().ok_or(ProposerApiError::PayloadTypeMismatch)?; if local_header != *provided_header { - return Err(ProposerApiError::BlindedBlockAndPayloadHeaderMismatch); + return Err(ProposerApiError::BlindedBlockAndPayloadHeaderMismatch) } let local_kzg_commitments = local_versioned_payload @@ -1155,7 +1158,7 @@ where .ok_or(ProposerApiError::BlobKzgCommitmentsMismatch)?; if local_kzg_commitments != provided_kzg_commitments { - return Err(ProposerApiError::BlobKzgCommitmentsMismatch); + return 
Err(ProposerApiError::BlobKzgCommitmentsMismatch) } } } @@ -1291,7 +1294,7 @@ where payload.signed_blinded_beacon_block, &mut trace, &payload.request_id, - None, + None, ) .await { diff --git a/crates/api/src/relay_data/api.rs b/crates/api/src/relay_data/api.rs index 59a0589..e393ae7 100644 --- a/crates/api/src/relay_data/api.rs +++ b/crates/api/src/relay_data/api.rs @@ -50,7 +50,7 @@ impl DataApi { } if params.limit.is_some() && params.limit.unwrap() > 200 { - return Err(DataApiError::LimitReached{limit: 200}); + return Err(DataApiError::LimitReached { limit: 200 }) } if params.limit.is_none() { @@ -58,7 +58,7 @@ impl DataApi { } if params.limit.is_some() && params.limit.unwrap() > 200 { - return Err(DataApiError::LimitReached{limit: 200}); + return Err(DataApiError::LimitReached { limit: 200 }) } if params.limit.is_none() { @@ -108,7 +108,7 @@ impl DataApi { } if params.limit.is_some() && params.limit.unwrap() > 500 { - return Err(DataApiError::LimitReached{limit: 500}); + return Err(DataApiError::LimitReached { limit: 500 }) } if params.limit.is_none() { diff --git a/crates/api/src/relay_data/error.rs b/crates/api/src/relay_data/error.rs index f3a136c..89053ac 100644 --- a/crates/api/src/relay_data/error.rs +++ b/crates/api/src/relay_data/error.rs @@ -26,7 +26,7 @@ impl IntoResponse for DataApiError { "need to query for specific slot or block_hash or block_number or builder_pubkey", ) .into_response(), - DataApiError::LimitReached{limit} => { + DataApiError::LimitReached { limit } => { (StatusCode::BAD_REQUEST, format!("maximum limit is {limit}")).into_response() } DataApiError::InternalServerError => { diff --git a/crates/beacon-client/src/beacon_client.rs b/crates/beacon-client/src/beacon_client.rs index 438807a..b5ec91b 100644 --- a/crates/beacon-client/src/beacon_client.rs +++ b/crates/beacon-client/src/beacon_client.rs @@ -1,4 +1,4 @@ -use std::{sync::Arc, time::Duration, vec}; +use std::{sync::Arc, time::Duration}; use async_trait::async_trait; use 
ethereum_consensus::{primitives::Root, ssz}; diff --git a/crates/common/src/config.rs b/crates/common/src/config.rs index 2925637..15e84dd 100644 --- a/crates/common/src/config.rs +++ b/crates/common/src/config.rs @@ -436,13 +436,11 @@ fn test_config() { let mut config = RelayConfig::default(); config.redis.url = "redis://localhost:6379".to_string(); - config.simulators = vec![ - SimulatorConfig { - url: "http://localhost:8080".to_string() - }]; + config.simulators = vec![SimulatorConfig { url: "http://localhost:8080".to_string() }]; config.beacon_clients.push(BeaconClientConfig { url: Url::parse("http://localhost:8080").unwrap(), - gossip_blobs_enabled: false, }); + gossip_blobs_enabled: false, + }); config.broadcasters.push(BroadcasterConfig::BeaconClient(BeaconClientConfig { url: Url::parse("http://localhost:8080").unwrap(), gossip_blobs_enabled: false, diff --git a/crates/database/src/postgres/postgres_db_service_tests.rs b/crates/database/src/postgres/postgres_db_service_tests.rs index d04682c..57204bc 100644 --- a/crates/database/src/postgres/postgres_db_service_tests.rs +++ b/crates/database/src/postgres/postgres_db_service_tests.rs @@ -127,11 +127,7 @@ mod tests { let registration = get_randomized_signed_validator_registration(); db_service - .save_validator_registration( - registration.clone(), - Some("test".to_string()), - None, - ) + .save_validator_registration(registration.clone(), Some("test".to_string()), None) .await .unwrap(); sleep(Duration::from_secs(5)).await; @@ -160,11 +156,7 @@ mod tests { .collect::>(); db_service - .save_validator_registrations( - registrations.clone(), - Some("test".to_string()), - None, - ) + .save_validator_registrations(registrations.clone(), Some("test".to_string()), None) .await .unwrap(); sleep(Duration::from_secs(5)).await; @@ -195,11 +187,7 @@ mod tests { .collect::>(); db_service - .save_validator_registrations( - registrations.clone(), - Some("test".to_string()), - None, - ) + 
.save_validator_registrations(registrations.clone(), Some("test".to_string()), None) .await .unwrap(); @@ -239,11 +227,7 @@ mod tests { let registration = get_randomized_signed_validator_registration(); db_service - .save_validator_registration( - registration.clone(), - Some("test".to_string()), - None, - ) + .save_validator_registration(registration.clone(), Some("test".to_string()), None) .await .unwrap(); @@ -264,11 +248,7 @@ mod tests { for i in 0..10 { let registration = get_randomized_signed_validator_registration(); db_service - .save_validator_registration( - registration.clone(), - Some("test".to_string()), - None, - ) + .save_validator_registration(registration.clone(), Some("test".to_string()), None) .await .unwrap(); @@ -602,12 +582,7 @@ mod tests { PayloadAndBlobs { execution_payload: execution_payload.clone(), blobs_bundle: None }; db_service - .save_delivered_payload( - &bid_trace, - Arc::new(payload_and_blobs), - &latency_trace, - None, - ) + .save_delivered_payload(&bid_trace, Arc::new(payload_and_blobs), &latency_trace, None) .await?; Ok(()) } diff --git a/crates/datastore/src/redis/redis_cache.rs b/crates/datastore/src/redis/redis_cache.rs index 0e4b1c9..637d190 100644 --- a/crates/datastore/src/redis/redis_cache.rs +++ b/crates/datastore/src/redis/redis_cache.rs @@ -899,7 +899,7 @@ impl Auctioneer for RedisCache { let is_bid_above_floor = submission.bid_trace().value > floor_value; if !cancellations_enabled && !is_bid_above_floor { record.record_success(); - return Ok(None); + return Ok(None) } // Save the execution payload From 27ff2aa0241bb97b4fd3563931f08dac56247d8a Mon Sep 17 00:00:00 2001 From: ltitanb Date: Thu, 5 Dec 2024 12:06:24 +0000 Subject: [PATCH 22/39] clippy --- Cargo.lock | 342 ++++++++++++++++-- crates/api/src/constraints/api.rs | 5 +- crates/api/src/gossiper/grpc_gossiper.rs | 24 +- crates/api/src/proposer/api.rs | 59 ++- crates/common/src/proofs.rs | 2 +- .../src/postgres/postgres_db_u256_parsing.rs | 2 + 
crates/datastore/src/redis/redis_cache.rs | 2 + crates/housekeeper/src/housekeeper.rs | 12 +- 8 files changed, 354 insertions(+), 94 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index ab953bc..20f3b9f 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -509,9 +509,9 @@ dependencies = [ [[package]] name = "async-trait" -version = "0.1.73" +version = "0.1.83" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bc00ceb34980c03614e35a3a4e218276a0a824e911d07651cd0d858a51e8c0f0" +checksum = "721cae7de5c34fbb2acd27e21e6d2cf7b886dce0c27388d46c4e6c47ea4318dd" dependencies = [ "proc-macro2", "quote", @@ -1670,6 +1670,17 @@ dependencies = [ "winapi", ] +[[package]] +name = "displaydoc" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.87", +] + [[package]] name = "dunce" version = "1.0.4" @@ -2400,9 +2411,9 @@ checksum = "00b0228411908ca8685dba7fc2cdd70ec9990a6e753e89b6ac91a84c40fbaf4b" [[package]] name = "form_urlencoded" -version = "1.2.0" +version = "1.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a62bc1cf6f830c2ec14a513a9fb124d0a213a629668a4186f329db21fe045652" +checksum = "e13624c2627564efccf4934284bdd98cbaa14e79b0b5a141218e507b3a823456" dependencies = [ "percent-encoding", ] @@ -3168,6 +3179,124 @@ dependencies = [ "cc", ] +[[package]] +name = "icu_collections" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "db2fa452206ebee18c4b5c2274dbf1de17008e874b4dc4f0aea9d01ca79e4526" +dependencies = [ + "displaydoc", + "yoke", + "zerofrom", + "zerovec", +] + +[[package]] +name = "icu_locid" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "13acbb8371917fc971be86fc8057c41a64b521c184808a698c02acc242dbf637" +dependencies = [ + "displaydoc", + "litemap", + 
"tinystr", + "writeable", + "zerovec", +] + +[[package]] +name = "icu_locid_transform" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "01d11ac35de8e40fdeda00d9e1e9d92525f3f9d887cdd7aa81d727596788b54e" +dependencies = [ + "displaydoc", + "icu_locid", + "icu_locid_transform_data", + "icu_provider", + "tinystr", + "zerovec", +] + +[[package]] +name = "icu_locid_transform_data" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fdc8ff3388f852bede6b579ad4e978ab004f139284d7b28715f773507b946f6e" + +[[package]] +name = "icu_normalizer" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "19ce3e0da2ec68599d193c93d088142efd7f9c5d6fc9b803774855747dc6a84f" +dependencies = [ + "displaydoc", + "icu_collections", + "icu_normalizer_data", + "icu_properties", + "icu_provider", + "smallvec", + "utf16_iter", + "utf8_iter", + "write16", + "zerovec", +] + +[[package]] +name = "icu_normalizer_data" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f8cafbf7aa791e9b22bec55a167906f9e1215fd475cd22adfcf660e03e989516" + +[[package]] +name = "icu_properties" +version = "1.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "93d6020766cfc6302c15dbbc9c8778c37e62c14427cb7f6e601d849e092aeef5" +dependencies = [ + "displaydoc", + "icu_collections", + "icu_locid_transform", + "icu_properties_data", + "icu_provider", + "tinystr", + "zerovec", +] + +[[package]] +name = "icu_properties_data" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "67a8effbc3dd3e4ba1afa8ad918d5684b8868b3b26500753effea8d2eed19569" + +[[package]] +name = "icu_provider" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6ed421c8a8ef78d3e2dbc98a973be2f3770cb42b606e3ab18d6237c4dfde68d9" +dependencies = [ + "displaydoc", + 
"icu_locid", + "icu_provider_macros", + "stable_deref_trait", + "tinystr", + "writeable", + "yoke", + "zerofrom", + "zerovec", +] + +[[package]] +name = "icu_provider_macros" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1ec89e9337638ecdc08744df490b221a7399bf8d164eb52a665454e60e075ad6" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.87", +] + [[package]] name = "ident_case" version = "1.0.1" @@ -3176,12 +3305,23 @@ checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39" [[package]] name = "idna" -version = "0.4.0" +version = "1.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7d20d6b07bfbc108882d88ed8e37d39636dcc260e15e30c45e6ba089610b917c" +checksum = "686f825264d630750a544639377bae737628043f20d38bbc029e8f29ea968a7e" dependencies = [ - "unicode-bidi", - "unicode-normalization", + "idna_adapter", + "smallvec", + "utf8_iter", +] + +[[package]] +name = "idna_adapter" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "daca1df1c957320b2cf139ac61e7bd64fed304c5040df000a745aa1de3b4ef71" +dependencies = [ + "icu_normalizer", + "icu_properties", ] [[package]] @@ -3507,6 +3647,12 @@ version = "0.4.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "57bcfdad1b858c2db7c38303a6d2ad4dfaf5eb53dfeb0910128b2c26d6158503" +[[package]] +name = "litemap" +version = "0.7.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4ee93343901ab17bd981295f2cf0026d4ad018c7c31ba84549a4ddbb47a45104" + [[package]] name = "lock_api" version = "0.4.10" @@ -3730,7 +3876,7 @@ dependencies = [ "proc-macro2", "quote", "syn 1.0.109", - "synstructure", + "synstructure 0.12.6", ] [[package]] @@ -3799,11 +3945,10 @@ dependencies = [ [[package]] name = "num-bigint" -version = "0.4.4" +version = "0.4.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"608e7659b5c3d7cba262d894801b9ec9d00de989e8a82bd4bef91d08da45cdc0" +checksum = "a5e44f723f1133c9deac646763579fdb3ac745e418f2a7af9cd0c431da1f20b9" dependencies = [ - "autocfg", "num-integer", "num-traits", ] @@ -3835,11 +3980,10 @@ dependencies = [ [[package]] name = "num-integer" -version = "0.1.45" +version = "0.1.46" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "225d3389fb3509a24c93f5c29eb6bde2586b98d9f016636dff58d7c6f7569cd9" +checksum = "7969661fd2958a5cb096e56c8e1ad0444ac2bbcd0061bd28660485a44879858f" dependencies = [ - "autocfg", "num-traits", ] @@ -3868,9 +4012,9 @@ dependencies = [ [[package]] name = "num-traits" -version = "0.2.16" +version = "0.2.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f30b0abd723be7e2ffca1272140fac1a2f084c77ec3e123c192b66af1ee9e6c2" +checksum = "071dfc062690e90b734c0b2273ce72ad0ffa95f0c74596bc250dcfd960262841" dependencies = [ "autocfg", "libm", @@ -4131,9 +4275,9 @@ dependencies = [ [[package]] name = "percent-encoding" -version = "2.3.0" +version = "2.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9b2a4787296e9989611394c33f193f676704af1686e70b8f8033ab5ba9a35a94" +checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e" [[package]] name = "pest" @@ -5779,9 +5923,9 @@ dependencies = [ [[package]] name = "smallvec" -version = "1.11.0" +version = "1.13.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "62bb4feee49fdd9f707ef802e22365a35de4b7b299de4763d44bfea899442ff9" +checksum = "3c5e1a9a646d36c3599cd173a41282daf47c44583ad367b8e6837255952e5c67" dependencies = [ "arbitrary", "serde", @@ -5924,6 +6068,12 @@ dependencies = [ "typenum", ] +[[package]] +name = "stable_deref_trait" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3" + [[package]] name = "static_assertions" 
version = "1.1.0" @@ -6104,6 +6254,17 @@ dependencies = [ "unicode-xid", ] +[[package]] +name = "synstructure" +version = "0.13.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c8af7666ab7b6390ab78131fb5b0fce11d6b7a6951602017c35fa82800708971" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.87", +] + [[package]] name = "system-configuration" version = "0.5.1" @@ -6269,6 +6430,16 @@ dependencies = [ "crunchy", ] +[[package]] +name = "tinystr" +version = "0.7.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9117f5d4db391c1cf6927e7bea3db74b9a1c1add8f7eda9ffd5364f40f57b82f" +dependencies = [ + "displaydoc", + "zerovec", +] + [[package]] name = "tinyvec" version = "1.6.0" @@ -6610,11 +6781,10 @@ checksum = "b6bc1c9ce2b5135ac7f93c72918fc37feb872bdc6a5533a8b85eb4b86bfdae52" [[package]] name = "tracing" -version = "0.1.37" +version = "0.1.41" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8ce8c33a8d48bd45d624a6e523445fd21ec13d3653cd51f681abf67418f54eb8" +checksum = "784e0ac535deb450455cbfa28a6f0df145ea1bb7ae51b821cf5e7927fdcfbdd0" dependencies = [ - "cfg-if", "log", "pin-project-lite", "tracing-attributes", @@ -6635,9 +6805,9 @@ dependencies = [ [[package]] name = "tracing-attributes" -version = "0.1.26" +version = "0.1.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f4f31f56159e98206da9efd823404b79b6ef3143b4a7ab76e67b1751b25a4ab" +checksum = "395ae124c09f9e6918a2310af6038fba074bcf474ac352496d5910dd59a2226d" dependencies = [ "proc-macro2", "quote", @@ -6646,9 +6816,9 @@ dependencies = [ [[package]] name = "tracing-core" -version = "0.1.31" +version = "0.1.33" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0955b8137a1df6f1a2e9a37d8a6656291ff0297c1a97c24e0d8425fe2312f79a" +checksum = "e672c95779cf947c5311f83787af4fa8fffd12fb27e4993211a84bdfd9610f9c" dependencies = [ "once_cell", "valuable", @@ -6894,9 
+7064,9 @@ checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1" [[package]] name = "url" -version = "2.4.1" +version = "2.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "143b538f18257fac9cad154828a57c6bf5157e1aa604d4816b5995bf6de87ae5" +checksum = "32f8b686cadd1473f4bd0117a5d28d36b1ade384ea9b5069a1c40aefed7fda60" dependencies = [ "form_urlencoded", "idna", @@ -6909,6 +7079,18 @@ version = "0.7.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "09cc8ee72d2a9becf2f2febe0205bbed8fc6615b7cb429ad062dc7b7ddd036a9" +[[package]] +name = "utf16_iter" +version = "1.0.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c8232dd3cdaed5356e0f716d285e4b40b932ac434100fe9b7e0e8e935b9e6246" + +[[package]] +name = "utf8_iter" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6c140620e7ffbb22c2dee59cafe6084a59b5ffc27a8859a5f0d494b5d52b6be" + [[package]] name = "utf8parse" version = "0.2.1" @@ -6989,19 +7171,20 @@ checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" [[package]] name = "wasm-bindgen" -version = "0.2.87" +version = "0.2.97" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7706a72ab36d8cb1f80ffbf0e071533974a60d0a308d01a5d0375bf60499a342" +checksum = "d15e63b4482863c109d70a7b8706c1e364eb6ea449b201a76c5b89cedcec2d5c" dependencies = [ "cfg-if", + "once_cell", "wasm-bindgen-macro", ] [[package]] name = "wasm-bindgen-backend" -version = "0.2.87" +version = "0.2.97" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5ef2b6d3c510e9625e5fe6f509ab07d66a760f0885d858736483c32ed7809abd" +checksum = "8d36ef12e3aaca16ddd3f67922bc63e48e953f126de60bd33ccc0101ef9998cd" dependencies = [ "bumpalo", "log", @@ -7026,9 +7209,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro" -version = "0.2.87" +version = "0.2.97" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "dee495e55982a3bd48105a7b947fd2a9b4a8ae3010041b9e0faab3f9cd028f1d" +checksum = "705440e08b42d3e4b36de7d66c944be628d579796b8090bfa3471478a2260051" dependencies = [ "quote", "wasm-bindgen-macro-support", @@ -7036,9 +7219,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro-support" -version = "0.2.87" +version = "0.2.97" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "54681b18a46765f095758388f2d0cf16eb8d4169b639ab575a8f5693af210c7b" +checksum = "98c9ae5a76e46f4deecd0f0255cc223cfa18dc9b261213b8aa0c7b36f61b3f1d" dependencies = [ "proc-macro2", "quote", @@ -7049,9 +7232,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-shared" -version = "0.2.87" +version = "0.2.97" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ca6ad05a4870b2bf5fe995117d3728437bd27d7cd5f06f13c17443ef369775a1" +checksum = "6ee99da9c5ba11bd675621338ef6fa52296b76b83305e9b6e5c77d4c286d6d49" [[package]] name = "wasm-streams" @@ -7294,6 +7477,18 @@ dependencies = [ "windows-sys 0.48.0", ] +[[package]] +name = "write16" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d1890f4022759daae28ed4fe62859b1236caebfc61ede2f63ed4e695f3f6d936" + +[[package]] +name = "writeable" +version = "0.5.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e9df38ee2d2c3c5948ea468a8406ff0db0b29ae1ffde1bcf20ef305bcc95c51" + [[package]] name = "ws_stream_wasm" version = "0.7.4" @@ -7337,6 +7532,51 @@ version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "09041cd90cf85f7f8b2df60c646f853b7f535ce68f85244eb6731cf89fa498ec" +[[package]] +name = "yoke" +version = "0.7.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "120e6aef9aa629e3d4f52dc8cc43a015c7724194c97dfaf45180d2daf2b77f40" +dependencies = [ + "serde", + "stable_deref_trait", + "yoke-derive", + 
"zerofrom", +] + +[[package]] +name = "yoke-derive" +version = "0.7.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2380878cad4ac9aac1e2435f3eb4020e8374b5f13c296cb75b4620ff8e229154" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.87", + "synstructure 0.13.1", +] + +[[package]] +name = "zerofrom" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cff3ee08c995dee1859d998dea82f7374f2826091dd9cd47def953cae446cd2e" +dependencies = [ + "zerofrom-derive", +] + +[[package]] +name = "zerofrom-derive" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "595eed982f7d355beb85837f651fa22e90b3c044842dc7f2c2842c086f295808" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.87", + "synstructure 0.13.1", +] + [[package]] name = "zeroize" version = "1.8.1" @@ -7357,6 +7597,28 @@ dependencies = [ "syn 2.0.87", ] +[[package]] +name = "zerovec" +version = "0.10.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "aa2b893d79df23bfb12d5461018d408ea19dfafe76c2c7ef6d4eba614f8ff079" +dependencies = [ + "yoke", + "zerofrom", + "zerovec-derive", +] + +[[package]] +name = "zerovec-derive" +version = "0.10.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6eafa6dfb17584ea3e2bd6e76e0cc15ad7af12b09abdd1ca55961bed9b1063c6" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.87", +] + [[package]] name = "zip" version = "0.6.6" diff --git a/crates/api/src/constraints/api.rs b/crates/api/src/constraints/api.rs index 5519290..16426aa 100644 --- a/crates/api/src/constraints/api.rs +++ b/crates/api/src/constraints/api.rs @@ -201,9 +201,8 @@ where api.constraints_handle.send_constraints(constraint.clone()); // Decode the constraints and generate proof data. 
- let constraints_with_proofs = SignedConstraintsWithProofData::try_from(constraint).map_err(|err| { - error!(request_id = %request_id, "Failed to decode constraints transactions and generate proof data"); - err + let constraints_with_proofs = SignedConstraintsWithProofData::try_from(constraint).inspect_err(|err| { + error!(%err, %request_id, "Failed to decode constraints transactions and generate proof data"); })?; // Finally add the constraints to the redis cache diff --git a/crates/api/src/gossiper/grpc_gossiper.rs b/crates/api/src/gossiper/grpc_gossiper.rs index 1ca7dab..cf5f761 100644 --- a/crates/api/src/gossiper/grpc_gossiper.rs +++ b/crates/api/src/gossiper/grpc_gossiper.rs @@ -94,17 +94,17 @@ impl GrpcGossiperClient { Ok(Err(err)) => { error!(err = %err, "Client call failed."); GossipMetrics::out_count(HEADER_ID, false); - return Err(GossipError::BroadcastError(err)) + Err(GossipError::BroadcastError(err)) } Err(_) => { error!("Client call timed out."); GossipMetrics::out_count(HEADER_ID, false); - return Err(GossipError::TimeoutError) + Err(GossipError::TimeoutError) } } } else { GossipMetrics::out_count(HEADER_ID, false); - return Err(GossipError::ClientNotConnected) + Err(GossipError::ClientNotConnected) } } @@ -134,17 +134,17 @@ impl GrpcGossiperClient { Ok(Err(err)) => { error!(err = %err, "Client call failed."); GossipMetrics::out_count(PAYLOAD_ID, false); - return Err(GossipError::BroadcastError(err)) + Err(GossipError::BroadcastError(err)) } Err(_) => { error!("Client call timed out."); GossipMetrics::out_count(PAYLOAD_ID, false); - return Err(GossipError::TimeoutError) + Err(GossipError::TimeoutError) } } } else { GossipMetrics::out_count(PAYLOAD_ID, false); - return Err(GossipError::ClientNotConnected) + Err(GossipError::ClientNotConnected) } } @@ -174,17 +174,17 @@ impl GrpcGossiperClient { Ok(Err(err)) => { error!(err = %err, "Client call failed."); GossipMetrics::out_count(GET_PAYLOAD_ID, false); - return Err(GossipError::BroadcastError(err)) 
+ Err(GossipError::BroadcastError(err)) } Err(_) => { error!("Client call timed out."); GossipMetrics::out_count(GET_PAYLOAD_ID, false); - return Err(GossipError::TimeoutError) + Err(GossipError::TimeoutError) } } } else { GossipMetrics::out_count(GET_PAYLOAD_ID, false); - return Err(GossipError::ClientNotConnected) + Err(GossipError::ClientNotConnected) } } @@ -216,17 +216,17 @@ impl GrpcGossiperClient { Ok(Err(err)) => { error!(err = %err, "Client call failed."); GossipMetrics::out_count(CANCELLATION_ID, false); - return Err(GossipError::BroadcastError(err)) + Err(GossipError::BroadcastError(err)) } Err(_) => { error!("Client call timed out."); GossipMetrics::out_count(CANCELLATION_ID, false); - return Err(GossipError::TimeoutError) + Err(GossipError::TimeoutError) } } } else { GossipMetrics::out_count(CANCELLATION_ID, false); - return Err(GossipError::ClientNotConnected) + Err(GossipError::ClientNotConnected) } } } diff --git a/crates/api/src/proposer/api.rs b/crates/api/src/proposer/api.rs index 358ac9b..603bb41 100644 --- a/crates/api/src/proposer/api.rs +++ b/crates/api/src/proposer/api.rs @@ -927,15 +927,13 @@ where ) .await; - if !is_trusted_proposer { - if let Err(_) = tx.send(()) { - error!(request_id = %request_id_clone, "Error sending beacon client response, receiver dropped"); - } + if !is_trusted_proposer && tx.send(()).is_err() { + error!(request_id = %request_id_clone, "Error sending beacon client response, receiver dropped"); } }); if !is_trusted_proposer { - if let Ok(_) = rx.await { + if (rx.await).is_ok() { info!(request_id = %request_id, trace = ?trace, "Payload published and saved!") } else { error!(request_id = %request_id, "Error in beacon client publishing"); @@ -1280,34 +1278,31 @@ where /// Will process new gossiped messages from async fn process_gossiped_info(&self, mut recveiver: Receiver) { while let Some(msg) = recveiver.recv().await { - match msg { - GossipedMessage::GetPayload(payload) => { - let api_clone = self.clone(); - 
tokio::spawn(async move { - let mut trace = GetPayloadTrace { - receive: get_nanos_timestamp().unwrap_or_default(), - ..Default::default() - }; - debug!(request_id = %payload.request_id, "processing gossiped payload"); - match api_clone - ._get_payload( - payload.signed_blinded_beacon_block, - &mut trace, - &payload.request_id, - None, - ) - .await - { - Ok(_get_payload_response) => { - debug!(request_id = %payload.request_id, "gossiped payload processed"); - } - Err(err) => { - error!(request_id = %payload.request_id, error = %err, "error processing gossiped payload"); - } + if let GossipedMessage::GetPayload(payload) = msg { + let api_clone = self.clone(); + tokio::spawn(async move { + let mut trace = GetPayloadTrace { + receive: get_nanos_timestamp().unwrap_or_default(), + ..Default::default() + }; + debug!(request_id = %payload.request_id, "processing gossiped payload"); + match api_clone + ._get_payload( + payload.signed_blinded_beacon_block, + &mut trace, + &payload.request_id, + None, + ) + .await + { + Ok(_get_payload_response) => { + debug!(request_id = %payload.request_id, "gossiped payload processed"); } - }); - } - _ => {} + Err(err) => { + error!(request_id = %payload.request_id, error = %err, "error processing gossiped payload"); + } + } + }); } } } diff --git a/crates/common/src/proofs.rs b/crates/common/src/proofs.rs index 7388cc4..2a71715 100644 --- a/crates/common/src/proofs.rs +++ b/crates/common/src/proofs.rs @@ -61,7 +61,7 @@ pub struct ConstraintsMessage { impl SignableBLS for ConstraintsMessage { fn digest(&self) -> [u8; 32] { let mut hasher = Sha256::new(); - hasher.update(&self.pubkey.to_vec()); + hasher.update(self.pubkey.to_vec()); hasher.update(self.slot.to_le_bytes()); hasher.update((self.top as u8).to_le_bytes()); for tx in self.transactions.iter() { diff --git a/crates/database/src/postgres/postgres_db_u256_parsing.rs b/crates/database/src/postgres/postgres_db_u256_parsing.rs index 9d4f36d..5e97037 100644 --- 
a/crates/database/src/postgres/postgres_db_u256_parsing.rs +++ b/crates/database/src/postgres/postgres_db_u256_parsing.rs @@ -1,3 +1,5 @@ +#![allow(clippy::doc_lazy_continuation)] + use bytes::BufMut; use ethereum_consensus::primitives::U256; diff --git a/crates/datastore/src/redis/redis_cache.rs b/crates/datastore/src/redis/redis_cache.rs index 637d190..6f65243 100644 --- a/crates/datastore/src/redis/redis_cache.rs +++ b/crates/datastore/src/redis/redis_cache.rs @@ -1,3 +1,5 @@ +#![allow(dependency_on_unit_never_type_fallback)] // TODO: temp fix, needs to be fixed before updating to the 2024 edition + use crate::redis::utils::get_constraints_key; use std::collections::{HashMap, HashSet}; diff --git a/crates/housekeeper/src/housekeeper.rs b/crates/housekeeper/src/housekeeper.rs index a93960c..23f0948 100644 --- a/crates/housekeeper/src/housekeeper.rs +++ b/crates/housekeeper/src/housekeeper.rs @@ -531,7 +531,7 @@ impl async fn primev_update(&self) -> Result<(), HousekeeperError> { let primev_config = self.config.primev_config.as_ref().unwrap(); - let primev_builders = get_registered_primev_builders(&primev_config).await; + let primev_builders = get_registered_primev_builders(primev_config).await; for builder_pubkey in primev_builders { self.db .store_builder_info( @@ -557,11 +557,11 @@ impl /// Determine if the trusted proposers should be refreshed for the given slot. /// /// This function checks two conditions: - /// 1. If the `head_slot` is exactly divisible by `TRUSTED_PROPOSERS_UPDATE_FREQ`, - /// it will return `true` to trigger a trusted proposer update. - /// 2. If the distance between the current `head_slot` and the last slot for which - /// the trusted proposers was refreshed (`refreshed_trusted_proposers_slot`) is greater than or - /// equal to `TRUSTED_PROPOSERS_UPDATE_FREQ`, it will also return `true`. + /// 1. If the `head_slot` is exactly divisible by `TRUSTED_PROPOSERS_UPDATE_FREQ`, it will + /// return `true` to trigger a trusted proposer update. 
+ /// 2. If the distance between the current `head_slot` and the last slot for which the trusted + /// proposers was refreshed (`refreshed_trusted_proposers_slot`) is greater than or equal to + /// `TRUSTED_PROPOSERS_UPDATE_FREQ`, it will also return `true`. async fn should_update_trusted_proposers( self: &SharedHousekeeper, head_slot: u64, From 19710f19ad3fb01277aab10ab6b07d5cac5dd50f Mon Sep 17 00:00:00 2001 From: ltitanb Date: Thu, 5 Dec 2024 12:06:39 +0000 Subject: [PATCH 23/39] param index fix --- crates/database/src/postgres/postgres_db_service.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/database/src/postgres/postgres_db_service.rs b/crates/database/src/postgres/postgres_db_service.rs index 100b1af..a64dd63 100644 --- a/crates/database/src/postgres/postgres_db_service.rs +++ b/crates/database/src/postgres/postgres_db_service.rs @@ -1456,6 +1456,7 @@ impl DatabaseService for PostgresDatabaseService { if let Some(block_hash) = filters.block_hash() { query.push_str(&format!(" AND block_submission.block_hash = ${}", param_index)); params.push(Box::new(block_hash)); + param_index += 1; } if let Some(filtering) = filtering { @@ -1464,7 +1465,6 @@ impl DatabaseService for PostgresDatabaseService { param_index )); params.push(Box::new(filtering)); - param_index += 1; } let params_refs: Vec<&(dyn ToSql + Sync)> = From 5eb7deced2514c4382ae7d164caa97453c3fd87f Mon Sep 17 00:00:00 2001 From: ltitanb Date: Thu, 5 Dec 2024 13:14:59 +0000 Subject: [PATCH 24/39] rm doc lint --- .github/workflows/lint.yml | 7 ------- 1 file changed, 7 deletions(-) diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index bfe430b..703727e 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -52,10 +52,3 @@ jobs: - name: Rustfmt run: cargo +nightly-2023-06-01 fmt --all --check working-directory: ./repos/${{ secrets.REPO_NAME }} - - # Check Documentation - - name: Docs - run: cargo doc --workspace --all-features --no-deps - 
env: - RUSTDOCFLAGS: --cfg docsrs --show-type-layout --generate-link-to-definition --enable-index-page -Zunstable-options -D warnings - working-directory: ./repos/${{ secrets.REPO_NAME }} From b9a3d4132dbc0aee6f9a36350c87d163410aa110 Mon Sep 17 00:00:00 2001 From: ltitanb Date: Thu, 5 Dec 2024 18:12:11 +0000 Subject: [PATCH 25/39] extract uuid --- Cargo.lock | 1 + crates/api/src/builder/api.rs | 26 +++++++++----------------- crates/api/src/proposer/api.rs | 21 ++++++++------------- crates/api/src/router.rs | 9 ++++----- crates/utils/Cargo.toml | 3 ++- crates/utils/src/lib.rs | 11 +++++++++++ 6 files changed, 35 insertions(+), 36 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 20f3b9f..7faf78f 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2897,6 +2897,7 @@ dependencies = [ "tracing", "tree_hash 0.6.0", "tree_hash_derive", + "uuid 1.7.0", ] [[package]] diff --git a/crates/api/src/builder/api.rs b/crates/api/src/builder/api.rs index bc3fc8b..638ad1c 100644 --- a/crates/api/src/builder/api.rs +++ b/crates/api/src/builder/api.rs @@ -62,7 +62,7 @@ use helix_common::{ use helix_database::DatabaseService; use helix_datastore::{types::SaveBidAndUpdateTopBidResponse, Auctioneer}; use helix_housekeeper::{ChainUpdate, PayloadAttributesUpdate, SlotUpdate}; -use helix_utils::{get_payload_attributes_key, has_reached_fork}; +use helix_utils::{extract_request_id, get_payload_attributes_key, has_reached_fork}; use serde::Deserialize; @@ -307,10 +307,8 @@ where headers: HeaderMap, req: Request, ) -> Result { - let request_id = Uuid::parse_str( - headers.get("x-request-id").map(|v| v.to_str().unwrap_or_default()).unwrap_or_default(), - ) - .unwrap(); + let request_id = extract_request_id(&headers); + let mut trace = SubmissionTrace { receive: get_nanos_timestamp()?, ..Default::default() }; let (head_slot, next_duty) = api.curr_slot_info.read().await.clone(); @@ -575,10 +573,8 @@ where headers: HeaderMap, req: Request, ) -> Result { - let request_id = Uuid::parse_str( - 
headers.get("x-request-id").map(|v| v.to_str().unwrap_or_default()).unwrap_or_default(), - ) - .unwrap(); + let request_id = extract_request_id(&headers); + let mut trace = HeaderSubmissionTrace { receive: get_nanos_timestamp()?, ..Default::default() }; let (head_slot, next_duty) = api.curr_slot_info.read().await.clone(); @@ -824,10 +820,8 @@ where headers: HeaderMap, req: Request, ) -> Result { - let request_id = Uuid::parse_str( - headers.get("x-request-id").map(|v| v.to_str().unwrap_or_default()).unwrap_or_default(), - ) - .unwrap(); + let request_id = extract_request_id(&headers); + let now = SystemTime::now(); let mut trace = SubmissionTrace { receive: get_nanos_from(now)?, ..Default::default() }; let (head_slot, next_duty) = api.curr_slot_info.read().await.clone(); @@ -1048,10 +1042,8 @@ where headers: HeaderMap, Json(mut signed_cancellation): Json, ) -> Result { - let request_id = Uuid::parse_str( - headers.get("x-request-id").map(|v| v.to_str().unwrap_or_default()).unwrap_or_default(), - ) - .unwrap(); + let request_id = extract_request_id(&headers); + let (head_slot, _next_duty) = api.curr_slot_info.read().await.clone(); let slot = signed_cancellation.message.slot; diff --git a/crates/api/src/proposer/api.rs b/crates/api/src/proposer/api.rs index 603bb41..43c5893 100644 --- a/crates/api/src/proposer/api.rs +++ b/crates/api/src/proposer/api.rs @@ -48,7 +48,10 @@ use helix_common::{ use helix_database::DatabaseService; use helix_datastore::{error::AuctioneerError, Auctioneer}; use helix_housekeeper::{ChainUpdate, SlotUpdate}; -use helix_utils::signing::{verify_signed_builder_message, verify_signed_consensus_message}; +use helix_utils::{ + extract_request_id, + signing::{verify_signed_builder_message, verify_signed_consensus_message}, +}; use crate::{ gossiper::{ @@ -167,10 +170,7 @@ where return Err(ProposerApiError::EmptyRequest) } - let request_id = Uuid::parse_str( - headers.get("x-request-id").map(|v| v.to_str().unwrap_or_default()).unwrap_or_default(), 
- ) - .unwrap(); + let request_id = extract_request_id(&headers); let mut trace = RegisterValidatorsTrace { receive: get_nanos_timestamp()?, ..Default::default() }; @@ -386,10 +386,7 @@ where return Err(ProposerApiError::ServiceUnavailableError) } - let request_id = Uuid::parse_str( - headers.get("x-request-id").map(|v| v.to_str().unwrap_or_default()).unwrap_or_default(), - ) - .unwrap(); + let request_id = extract_request_id(&headers); let mut trace = GetHeaderTrace { receive: get_nanos_timestamp()?, ..Default::default() }; @@ -651,11 +648,9 @@ where headers: HeaderMap, req: Request, ) -> Result { + let request_id = extract_request_id(&headers); + let mut trace = GetPayloadTrace { receive: get_nanos_timestamp()?, ..Default::default() }; - let request_id = Uuid::parse_str( - headers.get("x-request-id").map(|v| v.to_str().unwrap_or_default()).unwrap_or_default(), - ) - .unwrap(); let user_agent = headers.get("user-agent").and_then(|v| v.to_str().ok()).map(|v| v.to_string()); diff --git a/crates/api/src/router.rs b/crates/api/src/router.rs index b150b03..ba65970 100644 --- a/crates/api/src/router.rs +++ b/crates/api/src/router.rs @@ -9,6 +9,7 @@ use helix_beacon_client::{beacon_client::BeaconClient, multi_beacon_client::Mult use helix_common::{Route, RouterConfig}; use helix_database::postgres::postgres_db_service::PostgresDatabaseService; use helix_datastore::redis::redis_cache::RedisCache; +use helix_utils::extract_request_id; use hyper::HeaderMap; use std::{collections::HashMap, sync::Arc, time::Duration}; use tower::{timeout::TimeoutLayer, BoxError, ServiceBuilder}; @@ -171,11 +172,9 @@ pub fn build_router( router = router.layer( ServiceBuilder::new() .layer(HandleErrorLayer::new(|headers: HeaderMap, e: BoxError| async move { - let request_id = headers - .get("x-request-id") - .map(|v| v.to_str().unwrap_or_default()) - .unwrap_or_default(); - warn!(request_id = request_id, "Request timed out {:?}", e); + let request_id = extract_request_id(&headers); + + 
warn!(%request_id, "Request timed out {:?}", e); StatusCode::REQUEST_TIMEOUT })) .layer(TimeoutLayer::new(API_REQUEST_TIMEOUT)), diff --git a/crates/utils/Cargo.toml b/crates/utils/Cargo.toml index b29ca48..e05ea66 100644 --- a/crates/utils/Cargo.toml +++ b/crates/utils/Cargo.toml @@ -23,4 +23,5 @@ reqwest.workspace = true axum.workspace = true backtrace.workspace = true -tracing.workspace = true \ No newline at end of file +tracing.workspace = true +uuid.workspace = true diff --git a/crates/utils/src/lib.rs b/crates/utils/src/lib.rs index 7ad443f..c050178 100644 --- a/crates/utils/src/lib.rs +++ b/crates/utils/src/lib.rs @@ -13,8 +13,10 @@ use ethereum_consensus::{ phase0::mainnet::SLOTS_PER_EPOCH, ssz::{self, prelude::SimpleSerialize}, }; +use http::HeaderMap; use reth_primitives::{proofs, Address}; use tracing::{error, info}; +use uuid::Uuid; pub mod request_encoding; pub mod serde; @@ -123,3 +125,12 @@ pub fn save_to_file(path: String, json: String) { // Write the JSON string to the file file.write_all(json.as_bytes()).expect("Failed to write JSON to file"); } + +// Returns request id from header if exists otherwise returns a random one +pub fn extract_request_id(headers: &HeaderMap) -> Uuid { + headers + .get("x-request-id") + .and_then(|v| v.to_str().ok()) + .and_then(|v| Uuid::parse_str(v).ok()) + .unwrap_or(Uuid::new_v4()) +} From ee914461f977243a506c8e75dea127454c5ca18c Mon Sep 17 00:00:00 2001 From: ltitanb Date: Thu, 5 Dec 2024 19:33:29 +0000 Subject: [PATCH 26/39] remove test skip --- Cargo.lock | 1 + crates/datastore/Cargo.toml | 3 + crates/datastore/src/redis/redis_cache.rs | 76 ++++++++++++++--------- 3 files changed, 50 insertions(+), 30 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 7faf78f..f7c50a5 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2857,6 +2857,7 @@ dependencies = [ "redis", "serde", "serde_json", + "serial_test", "thiserror", "tokio", "tokio-stream", diff --git a/crates/datastore/Cargo.toml b/crates/datastore/Cargo.toml index 
bf15a04..a8e06a1 100644 --- a/crates/datastore/Cargo.toml +++ b/crates/datastore/Cargo.toml @@ -33,3 +33,6 @@ redis.workspace = true auto_impl.workspace = true thiserror.workspace = true tracing.workspace = true + +[dev-dependencies] +serial_test.workspace = true diff --git a/crates/datastore/src/redis/redis_cache.rs b/crates/datastore/src/redis/redis_cache.rs index 6f65243..0613f03 100644 --- a/crates/datastore/src/redis/redis_cache.rs +++ b/crates/datastore/src/redis/redis_cache.rs @@ -1514,6 +1514,7 @@ mod tests { use super::*; use ethereum_consensus::clock::get_current_unix_time_in_nanos; use helix_common::capella::{self, ExecutionPayloadHeader}; + use serial_test::serial; use serde::{Deserialize, Serialize}; @@ -1541,12 +1542,14 @@ mod tests { /// Reference: https://redis.io/kb/doc/1hcec8xg9w/how-can-i-install-redis-on-docker #[tokio::test] + #[serial] async fn test_new() { let result = RedisCache::new("redis://127.0.0.1/", Vec::new()).await; assert!(result.is_ok()); } #[tokio::test] + #[serial] async fn test_get_and_set_object() { let cache = RedisCache::new("redis://127.0.0.1/", Vec::new()).await.unwrap(); cache.clear_cache().await.unwrap(); @@ -1570,7 +1573,7 @@ mod tests { } #[tokio::test] - #[ignore = "TODO: to fix"] + #[serial] async fn test_hget_and_hset_object() { let cache = RedisCache::new("redis://127.0.0.1/", Vec::new()).await.unwrap(); cache.clear_cache().await.unwrap(); @@ -1590,7 +1593,7 @@ mod tests { } #[tokio::test] - #[ignore = "TODO: to fix"] + #[serial] async fn test_hgetall() { let cache = RedisCache::new("redis://127.0.0.1/", Vec::new()).await.unwrap(); cache.clear_cache().await.unwrap(); @@ -1617,7 +1620,7 @@ mod tests { } #[tokio::test] - #[ignore = "TODO: to fix"] + #[serial] async fn test_lrange() { let cache = RedisCache::new("redis://127.0.0.1/", Vec::new()).await.unwrap(); cache.clear_cache().await.unwrap(); @@ -1636,6 +1639,7 @@ mod tests { } #[tokio::test] + #[serial] async fn test_rpush() { let cache = 
RedisCache::new("redis://127.0.0.1/", Vec::new()).await.unwrap(); cache.clear_cache().await.unwrap(); @@ -1646,6 +1650,7 @@ mod tests { } #[tokio::test] + #[serial] async fn test_clear_key() { let cache = RedisCache::new("redis://127.0.0.1/", Vec::new()).await.unwrap(); cache.clear_cache().await.unwrap(); @@ -1666,7 +1671,7 @@ mod tests { } #[tokio::test] - #[ignore = "TODO: to fix"] + #[serial] async fn test_get_new_builder_bids() { let cache = RedisCache::new("redis://127.0.0.1/", Vec::new()).await.unwrap(); cache.clear_cache().await.unwrap(); @@ -1699,6 +1704,7 @@ mod tests { } #[tokio::test] + #[serial] async fn test_update_top_bid() { let cache = RedisCache::new("redis://127.0.0.1/", Vec::new()).await.unwrap(); cache.clear_cache().await.unwrap(); @@ -1828,6 +1834,7 @@ mod tests { /// ####################################################################### #[tokio::test] + #[serial] async fn test_get_and_check_last_slot_and_hash_delivered() { let cache = RedisCache::new("redis://127.0.0.1/", Vec::new()).await.unwrap(); cache.clear_cache().await.unwrap(); @@ -1846,7 +1853,7 @@ mod tests { } #[tokio::test] - #[ignore = "TODO: to fix"] + #[serial] async fn test_set_past_slot() { let cache = RedisCache::new("redis://127.0.0.1/", Vec::new()).await.unwrap(); cache.clear_cache().await.unwrap(); @@ -1866,6 +1873,7 @@ mod tests { } #[tokio::test] + #[serial] async fn test_set_same_slot_different_hash() { let cache = RedisCache::new("redis://127.0.0.1/", Vec::new()).await.unwrap(); cache.clear_cache().await.unwrap(); @@ -1883,7 +1891,7 @@ mod tests { } #[tokio::test] - #[ignore = "TODO: to fix"] + #[serial] async fn test_set_same_slot_no_hash() { let cache = RedisCache::new("redis://127.0.0.1/", Vec::new()).await.unwrap(); cache.clear_cache().await.unwrap(); @@ -1900,7 +1908,7 @@ mod tests { } #[tokio::test] - #[ignore = "TODO: to fix"] + #[serial] async fn test_get_and_set_best_bid() { let cache = RedisCache::new("redis://127.0.0.1/", Vec::new()).await.unwrap(); 
cache.clear_cache().await.unwrap(); @@ -1912,10 +1920,12 @@ mod tests { let mut capella_bid = capella::SignedBuilderBid::default(); capella_bid.message.value = U256::from(1999); let best_bid = SignedBuilderBid::Capella(capella_bid, None); + let wrapper = + SignedBuilderBidWrapper::new(best_bid.clone(), slot, proposer_pub_key.clone(), 0); // Save the best bid let key = get_cache_get_header_response_key(slot, &parent_hash, &proposer_pub_key); - let set_result = cache.set(&key, &best_bid, None).await; + let set_result = cache.set(&key, &wrapper, None).await; assert!(set_result.is_ok(), "Failed to set best bid in cache"); // Test: Get the best bid @@ -1929,6 +1939,7 @@ mod tests { } #[tokio::test] + #[serial] async fn test_get_and_save_execution_payload() { let cache = RedisCache::new("redis://127.0.0.1/", Vec::new()).await.unwrap(); cache.clear_cache().await.unwrap(); @@ -1971,7 +1982,7 @@ mod tests { } #[tokio::test] - #[ignore = "TODO: to fix"] + #[serial] async fn test_save_builder_bid_and_get_latest_payload_received_at() { let cache = RedisCache::new("redis://127.0.0.1/", Vec::new()).await.unwrap(); cache.clear_cache().await.unwrap(); @@ -2008,10 +2019,14 @@ mod tests { // Validate: the SignedBuilderBid object is correctly set let key_latest_bid = get_latest_bid_by_builder_key(slot, &parent_hash, &proposer_pub_key, &builder_pub_key); - let fetched_bid: Result, _> = cache.get(&key_latest_bid).await; + let fetched_bid: Result, _> = + cache.get(&key_latest_bid).await; assert!(fetched_bid.is_ok(), "Failed to fetch the latest bid"); + + let fetched_bid = fetched_bid.unwrap().unwrap().bid; + assert_eq!( - fetched_bid.unwrap().unwrap().block_hash(), + fetched_bid.block_hash(), builder_bid.block_hash(), "Mismatch in saved builder bid" ); @@ -2088,7 +2103,7 @@ mod tests { } #[tokio::test] - #[ignore = "TODO: to fix"] + #[serial] async fn test_get_builder_info() { let cache = RedisCache::new("redis://127.0.0.1/", Vec::new()).await.unwrap(); 
cache.clear_cache().await.unwrap(); @@ -2122,7 +2137,7 @@ mod tests { } #[tokio::test] - #[ignore = "TODO: to fix"] + #[serial] async fn test_get_trusted_proposers_and_update_trusted_proposers() { let cache = RedisCache::new("redis://127.0.0.1/", Vec::new()).await.unwrap(); cache.clear_cache().await.unwrap(); @@ -2194,7 +2209,7 @@ mod tests { } #[tokio::test] - #[ignore = "TODO: to fix"] + #[serial] async fn test_demote_optimistic_builder() { let cache = RedisCache::new("redis://127.0.0.1/", Vec::new()).await.unwrap(); cache.clear_cache().await.unwrap(); @@ -2222,7 +2237,7 @@ mod tests { } #[tokio::test] - #[ignore = "TODO: to fix"] + #[serial] async fn test_delete_builder_bid() { let cache = RedisCache::new("redis://127.0.0.1/", Vec::new()).await.unwrap(); cache.clear_cache().await.unwrap(); @@ -2322,7 +2337,7 @@ mod tests { } #[tokio::test] - #[ignore = "TODO: to fix"] + #[serial] async fn test_no_cancellation_bid_below_floor() { let (cache, submission, floor_value, received_at) = setup_save_and_update_test().await; let mut state = SaveBidAndUpdateTopBidResponse::default(); @@ -2343,7 +2358,7 @@ mod tests { } #[tokio::test] - #[ignore = "TODO: to fix"] + #[serial] async fn test_no_cancellation_bid_above_floor() { let (cache, mut submission, floor_value, received_at) = setup_save_and_update_test().await; let mut state = SaveBidAndUpdateTopBidResponse::default(); @@ -2377,7 +2392,7 @@ mod tests { } #[tokio::test] - #[ignore = "TODO: to fix"] + #[serial] async fn test_cancellation_bid_below_floor() { let (cache, mut submission, floor_value, received_at) = setup_save_and_update_test().await; let mut state = SaveBidAndUpdateTopBidResponse::default(); @@ -2411,7 +2426,7 @@ mod tests { } #[tokio::test] - #[ignore = "TODO: to fix"] + #[serial] async fn test_cancellation_bid_above_floor() { let (cache, mut submission, floor_value, received_at) = setup_save_and_update_test().await; let mut state = SaveBidAndUpdateTopBidResponse::default(); @@ -2445,7 +2460,7 @@ mod tests { 
} #[tokio::test] - #[ignore = "TODO: to fix"] + #[serial] async fn test_no_cancellation_bid_above_floor_but_not_top() { let (cache, mut submission, floor_value, received_at) = setup_save_and_update_test().await; @@ -2521,7 +2536,7 @@ mod tests { } #[tokio::test] - #[ignore = "TODO: to fix"] + #[serial] async fn test_seen_or_insert_block_hash() { let cache = RedisCache::new("redis://127.0.0.1/", Vec::new()).await.unwrap(); cache.clear_cache().await.unwrap(); @@ -2581,7 +2596,7 @@ mod tests { } #[tokio::test] - #[ignore = "TODO: to fix"] + #[serial] async fn test_can_aquire_lock() { let cache = RedisCache::new("redis://127.0.0.1/", Vec::new()).await.unwrap(); cache.clear_cache().await.unwrap(); @@ -2589,7 +2604,7 @@ mod tests { } #[tokio::test] - #[ignore = "TODO: to fix"] + #[serial] async fn test_others_cant_aquire_lock_if_held() { let cache = RedisCache::new("redis://127.0.0.1/", Vec::new()).await.unwrap(); cache.clear_cache().await.unwrap(); @@ -2598,6 +2613,7 @@ mod tests { } #[tokio::test] + #[serial] async fn test_can_renew_lock() { let cache = RedisCache::new("redis://127.0.0.1/", Vec::new()).await.unwrap(); cache.clear_cache().await.unwrap(); @@ -2606,7 +2622,7 @@ mod tests { } #[tokio::test] - #[ignore = "TODO: to fix"] + #[serial] async fn test_others_cannot_renew() { let cache = RedisCache::new("redis://127.0.0.1/", Vec::new()).await.unwrap(); cache.clear_cache().await.unwrap(); @@ -2616,7 +2632,7 @@ mod tests { } #[tokio::test] - #[ignore = "TODO: to fix"] + #[serial] async fn test_pending_blocks() { let cache = RedisCache::new("redis://127.0.0.1/", Vec::new()).await.unwrap(); cache.clear_cache().await.unwrap(); @@ -2653,7 +2669,7 @@ mod tests { } #[tokio::test] - #[ignore = "TODO: to fix"] + #[serial] async fn test_pending_blocks_multiple() { let cache = RedisCache::new("redis://127.0.0.1/", Vec::new()).await.unwrap(); cache.clear_cache().await.unwrap(); @@ -2717,7 +2733,7 @@ mod tests { } #[tokio::test] - #[ignore = "TODO: to fix"] + #[serial] async fn 
test_pending_blocks_no_header() { let cache = RedisCache::new("redis://127.0.0.1/", Vec::new()).await.unwrap(); cache.clear_cache().await.unwrap(); @@ -2752,7 +2768,7 @@ mod tests { } #[tokio::test] - #[ignore = "TODO: to fix"] + #[serial] async fn test_pending_blocks_no_payload() { let cache = RedisCache::new("redis://127.0.0.1/", Vec::new()).await.unwrap(); cache.clear_cache().await.unwrap(); @@ -2787,7 +2803,7 @@ mod tests { } #[tokio::test] - #[ignore = "TODO: to fix"] + #[serial] async fn test_pending_blocks_dublicate_payload() { let cache = RedisCache::new("redis://127.0.0.1/", Vec::new()).await.unwrap(); cache.clear_cache().await.unwrap(); @@ -2829,7 +2845,7 @@ mod tests { } #[tokio::test] - #[ignore = "TODO: to fix"] + #[serial] async fn test_kill_switch() { let cache = RedisCache::new("redis://127.0.0.1/", Vec::new()).await.unwrap(); cache.clear_cache().await.unwrap(); From 5c6cae3ec2d058741845ab9b107913f8ed1d3830 Mon Sep 17 00:00:00 2001 From: ltitanb Date: Fri, 6 Dec 2024 10:41:46 +0000 Subject: [PATCH 27/39] add registry --- crates/common/src/metrics.rs | 118 +++++++++++++++++++++++------------ 1 file changed, 77 insertions(+), 41 deletions(-) diff --git a/crates/common/src/metrics.rs b/crates/common/src/metrics.rs index 695af43..56f3d46 100644 --- a/crates/common/src/metrics.rs +++ b/crates/common/src/metrics.rs @@ -7,8 +7,9 @@ use axum::{ use eyre::bail; use lazy_static::lazy_static; use prometheus::{ - register_histogram, register_histogram_vec, register_int_counter, register_int_counter_vec, - Encoder, Histogram, HistogramTimer, HistogramVec, IntCounter, IntCounterVec, TextEncoder, + register_histogram_vec_with_registry, register_histogram_with_registry, + register_int_counter_vec_with_registry, register_int_counter_with_registry, Encoder, Histogram, + HistogramTimer, HistogramVec, IntCounter, IntCounterVec, Registry, TextEncoder, }; use std::net::SocketAddr; use tokio::net::TcpListener; @@ -56,14 +57,14 @@ async fn handle_metrics() -> Response { 
match prepare_metrics() { Ok(response) => response, Err(err) => { - error!("Failed to prepare metrics: {:?}", err); + error!(?err, "failed to prepare metrics"); StatusCode::INTERNAL_SERVER_ERROR.into_response() } } } fn prepare_metrics() -> Result { - let metrics = prometheus::gather(); + let metrics = RELAY_METRICS_REGISTRY.gather(); let encoder = TextEncoder::new(); let s = encoder.encode_to_string(&metrics)?; @@ -84,124 +85,159 @@ enum MetricsError { } lazy_static! { + static ref RELAY_METRICS_REGISTRY: Registry = + Registry::new_custom(Some("helix".to_string()), None).unwrap(); + //////////////// API //////////////// /// Count for requests by API and endpoint - static ref REQUEST_COUNTS: IntCounterVec = - register_int_counter_vec!("request_count_total", "Count of requests", &["endpoint"]) - .unwrap(); + static ref REQUEST_COUNTS: IntCounterVec = register_int_counter_vec_with_registry!( + "request_count_total", + "Count of requests", + &["endpoint"], + &RELAY_METRICS_REGISTRY + ) + .unwrap(); /// Count for status codes by API and endpoint static ref REQUEST_STATUS: IntCounterVec = - register_int_counter_vec!("request_status_total", "Count of status codes", &["endpoint", "http_status_code"]) - .unwrap(); + register_int_counter_vec_with_registry!( + "request_status_total", + "Count of status codes", + &["endpoint", "http_status_code"], + &RELAY_METRICS_REGISTRY + ) + .unwrap(); /// Duration of request in seconds - static ref REQUEST_LATENCY: HistogramVec = register_histogram_vec!( + static ref REQUEST_LATENCY: HistogramVec = register_histogram_vec_with_registry!( "request_latency_sec", "Latency of requests", - &["endpoint"] + &["endpoint"], + &RELAY_METRICS_REGISTRY ) .unwrap(); /// Request size in bytes - static ref REQUEST_SIZE: IntCounterVec = register_int_counter_vec!( + static ref REQUEST_SIZE: IntCounterVec = register_int_counter_vec_with_registry!( "request_size_bytes", "Size of requests", - &["endpoint"] + &["endpoint"], + &RELAY_METRICS_REGISTRY ) 
.unwrap(); //////////////// SIMULATOR //////////////// - static ref SIMULATOR_COUNTS: IntCounterVec = - register_int_counter_vec!("simulator_count_total", "Count of sim requests", &["is_optimistic"]) - .unwrap(); + static ref SIMULATOR_COUNTS: IntCounterVec = register_int_counter_vec_with_registry!( + "simulator_count_total", + "Count of sim requests", + &["is_optimistic"], + &RELAY_METRICS_REGISTRY + ) + .unwrap(); - static ref SIMULATOR_STATUS: IntCounterVec = - register_int_counter_vec!("simulator_status_total", "Count of sim statuses", &["is_success"]) - .unwrap(); + static ref SIMULATOR_STATUS: IntCounterVec = register_int_counter_vec_with_registry!( + "simulator_status_total", + "Count of sim statuses", + &["is_success"], + &RELAY_METRICS_REGISTRY + ) + .unwrap(); - static ref SIMULATOR_LATENCY: Histogram = register_histogram!( + static ref SIMULATOR_LATENCY: Histogram = register_histogram_with_registry!( "sim_latency_sec", "Latency of simulations", + &RELAY_METRICS_REGISTRY ) .unwrap(); - static ref BUILDER_DEMOTION_COUNT: IntCounter = - register_int_counter!("builder_demotion_count_total", "Count of builder demotions") - .unwrap(); + static ref BUILDER_DEMOTION_COUNT: IntCounter = register_int_counter_with_registry!( + "builder_demotion_count_total", + "Count of builder demotions", + &RELAY_METRICS_REGISTRY + ) + .unwrap(); //////////////// GOSSIP //////////////// /// Received gossip messages coutn - static ref IN_GOSSIP_COUNTS: IntCounterVec = register_int_counter_vec!( + static ref IN_GOSSIP_COUNTS: IntCounterVec = register_int_counter_vec_with_registry!( "in_gossip_count_total", "Count of received gossip messages", - &["endpoint"] + &["endpoint"], + &RELAY_METRICS_REGISTRY ) .unwrap(); /// Received gossip size in bytes - static ref IN_GOSSIP_SIZE: IntCounterVec = register_int_counter_vec!( + static ref IN_GOSSIP_SIZE: IntCounterVec = register_int_counter_vec_with_registry!( "in_gossip_size_bytes", "Size of receivedgossip messages", - &["endpoint"] + 
&["endpoint"], + &RELAY_METRICS_REGISTRY ) .unwrap(); /// Sent gossip messages count - static ref OUT_GOSSIP_COUNTS: IntCounterVec = register_int_counter_vec!( + static ref OUT_GOSSIP_COUNTS: IntCounterVec = register_int_counter_vec_with_registry!( "out_gossip_count_total", "Count of sent gossip messages", - &["endpoint", "is_success"] + &["endpoint", "is_success"], + &RELAY_METRICS_REGISTRY ) .unwrap(); /// Sent gossip latency - static ref OUT_GOSSIP_LATENCY: HistogramVec = register_histogram_vec!( + static ref OUT_GOSSIP_LATENCY: HistogramVec = register_histogram_vec_with_registry!( "out_gossip_latency_sec", "Latency of sent gossip messages", - &["endpoint"] + &["endpoint"], + &RELAY_METRICS_REGISTRY ) .unwrap(); /// Sent gossip size in bytes - static ref OUT_GOSSIP_SIZE: IntCounterVec = register_int_counter_vec!( + static ref OUT_GOSSIP_SIZE: IntCounterVec = register_int_counter_vec_with_registry!( "out_gossip_size_bytes", "Size of sent gossip messages", - &["endpoint"] + &["endpoint"], + &RELAY_METRICS_REGISTRY ) .unwrap(); //////////////// DB //////////////// - static ref DB_COUNTS: IntCounterVec = register_int_counter_vec!( + static ref DB_COUNTS: IntCounterVec = register_int_counter_vec_with_registry!( "db_count_total", "Count of db operations", - &["endpoint", "is_success"] + &["endpoint", "is_success"], + &RELAY_METRICS_REGISTRY ) .unwrap(); - static ref DB_LATENCY: HistogramVec = register_histogram_vec!( + static ref DB_LATENCY: HistogramVec = register_histogram_vec_with_registry!( "db_latency_sec", "Latency of db operations", - &["endpoint"] + &["endpoint"], + &RELAY_METRICS_REGISTRY ) .unwrap(); //////////////// REDIS //////////////// - static ref REDIS_COUNTS: IntCounterVec = register_int_counter_vec!( + static ref REDIS_COUNTS: IntCounterVec = register_int_counter_vec_with_registry!( "redis_count_total", "Count of redis operations", - &["endpoint", "is_success"] + &["endpoint", "is_success"], + &RELAY_METRICS_REGISTRY ) .unwrap(); - static ref 
REDIS_LATENCY: HistogramVec = register_histogram_vec!( + static ref REDIS_LATENCY: HistogramVec = register_histogram_vec_with_registry!( "redis_latency_sec", "Latency of redis operations", - &["endpoint"] + &["endpoint"], + &RELAY_METRICS_REGISTRY ) .unwrap(); } From 84f0f2979f4929088eb661e1d516d5bad88ad1c6 Mon Sep 17 00:00:00 2001 From: ltitanb Date: Fri, 6 Dec 2024 15:49:27 +0000 Subject: [PATCH 28/39] builder api logs --- crates/api/src/builder/api.rs | 584 ++++++------------ .../src/builder/simulator/mock_simulator.rs | 2 - .../src/builder/simulator/multi_simulator.rs | 6 +- .../builder/simulator/optimistic_simulator.rs | 42 +- .../simulator/optimistic_simulator_tests.rs | 21 +- .../src/builder/simulator/rpc_simulator.rs | 7 +- .../src/builder/simulator/simulator_tests.rs | 16 +- crates/api/src/builder/simulator/traits.rs | 2 - crates/api/src/builder/tests.rs | 21 +- 9 files changed, 231 insertions(+), 470 deletions(-) diff --git a/crates/api/src/builder/api.rs b/crates/api/src/builder/api.rs index 638ad1c..2558a52 100644 --- a/crates/api/src/builder/api.rs +++ b/crates/api/src/builder/api.rs @@ -34,10 +34,10 @@ use tokio::{ mpsc::{self, error::SendError, Receiver, Sender}, RwLock, }, - time::{self, Instant}, + time::{self}, }; use tokio_stream::wrappers::BroadcastStream; -use tracing::{debug, error, info, warn}; +use tracing::{debug, error, info, warn, Instrument}; use uuid::Uuid; use helix_common::{ @@ -167,10 +167,7 @@ where let api_clone = api.clone(); tokio::spawn(async move { if let Err(err) = api_clone.housekeep(slot_update_subscription.clone()).await { - error!( - error = %err, - "BuilderApi. housekeep task encountered an error", - ); + error!(%err, "BuilderApi. housekeep task encountered an error"); } }); @@ -195,13 +192,13 @@ where /// This endpoint returns a list of signed constraints for a given `slot`. 
/// /// Implements this API: + #[tracing::instrument(skip_all, fields(slot = slot.slot))] pub async fn constraints( Extension(api): Extension>>, Query(slot): Query, ) -> Result { let slot = slot.slot; - - info!(slot, "builder requested constraints for slot"); + info!("builder requested constraints for slot"); let head_slot = api.curr_slot_info.read().await.0; @@ -216,7 +213,7 @@ where .map(|data| data.signed_constraints) .collect::>(); - info!(slot, len = constraints.len(), "returning constraints to builder"); + info!(len = constraints.len(), "returning constraints to builder"); Ok(Json(constraints)) } Ok(None) => { @@ -224,7 +221,7 @@ where Ok(Json(vec![])) // Return an empty vector if no delegations found } Err(err) => { - warn!(error=%err, "Failed to get constraints"); + warn!(%err, "Failed to get constraints"); Err(BuilderApiError::AuctioneerError(err)) } } @@ -233,6 +230,7 @@ where /// This endpoint returns a stream of signed constraints for a given `slot`. /// /// Implements this API: + #[tracing::instrument(skip_all)] pub async fn constraints_stream( Extension(api): Extension>>, ) -> Sse>> { @@ -263,6 +261,7 @@ where /// at the provided `slot`. The delegations are returned as a list of BLS pubkeys. 
/// /// Implements this API: + #[tracing::instrument(skip_all, fields(slot = slot.slot))] pub async fn delegations( Extension(api): Extension>>, Query(slot): Query, @@ -270,7 +269,7 @@ where let slot = slot.slot; let Some(duty_bytes) = &*api.proposer_duties_response.read().await else { - warn!(slot, "delegations -- could not find slot duty"); + warn!("could not find slot duty"); return Err(BuilderApiError::ProposerDutyNotFound); }; let Ok(proposer_duties) = @@ -302,35 +301,25 @@ where /// /// Implements this API: /// Implements this API: + #[tracing::instrument(skip_all, fields(id =% extract_request_id(&headers)))] pub async fn submit_block( Extension(api): Extension>>, headers: HeaderMap, req: Request, ) -> Result { - let request_id = extract_request_id(&headers); - let mut trace = SubmissionTrace { receive: get_nanos_timestamp()?, ..Default::default() }; let (head_slot, next_duty) = api.curr_slot_info.read().await.clone(); - debug!( - request_id = %request_id, - event = "submit_block", - head_slot = head_slot, - timestamp_request_start = trace.receive, - ); + debug!(head_slot, timestamp_request_start = trace.receive); // Decode the incoming request body into a payload - let (payload, is_cancellations_enabled) = - decode_payload(req, &mut trace, &request_id).await?; + let (payload, is_cancellations_enabled) = decode_payload(req, &mut trace).await?; let block_hash = payload.message().block_hash.clone(); // Verify the payload is for the current slot if payload.slot() <= head_slot { - debug!( - request_id = %request_id, - block_hash = ?block_hash, - "submission is for a past slot", - ); + debug!(?block_hash, "submission is for a past slot"); + return Err(BuilderApiError::SubmissionForPastSlot { current_slot: head_slot, submission_slot: payload.slot(), @@ -339,33 +328,22 @@ where // Verify that we have a validator connected for this slot if next_duty.is_none() { - warn!( - request_id = %request_id, - block_hash = ?block_hash, - "could not find slot duty" - ); + 
warn!(?block_hash, "could not find slot duty"); return Err(BuilderApiError::ProposerDutyNotFound) } let next_duty = next_duty.unwrap(); info!( - request_id = %request_id, - event = "submit_block", slot = payload.slot(), builder_pub_key = ?payload.builder_public_key(), block_value = %payload.value(), - block_hash = ?block_hash, + ?block_hash, "payload decoded", ); // Fetch the next payload attributes and validate basic information let payload_attributes = api - .fetch_payload_attributes( - payload.slot(), - payload.parent_hash(), - &block_hash, - &request_id, - ) + .fetch_payload_attributes(payload.slot(), payload.parent_hash(), &block_hash) .await?; // Handle duplicates. @@ -375,7 +353,6 @@ where payload.slot(), payload.parent_hash(), payload.proposer_public_key(), - &request_id, ) .await { @@ -386,8 +363,7 @@ where // with a valid header, which would block subsequent submissions with the same // header and valid payload. debug!( - request_id = %request_id, - block_hash = ?block_hash, + ?block_hash, builder_pub_key = ?payload.builder_public_key(), "block hash already seen" ); @@ -405,7 +381,6 @@ where payload.builder_public_key(), payload.value(), is_cancellations_enabled, - &request_id, ) .await?; trace.floor_bid_checks = get_nanos_timestamp()?; @@ -417,9 +392,8 @@ where if !api.check_if_trusted_builder(&next_duty, &builder_info).await { let proposer_trusted_builders = next_duty.entry.preferences.trusted_builders.unwrap(); debug!( - request_id = %request_id, builder_pub_key = ?payload.builder_public_key(), - proposer_trusted_builders = ?proposer_trusted_builders, + ?proposer_trusted_builders, "builder not in proposer trusted builders list", ); return Err(BuilderApiError::BuilderNotInProposersTrustedList { @@ -431,13 +405,13 @@ where match api.auctioneer.get_last_slot_delivered().await { Ok(Some(slot)) => { if payload.slot() <= slot { - debug!(request_id = %request_id, "payload already delivered"); + debug!("payload already delivered"); return 
Err(BuilderApiError::PayloadAlreadyDelivered) } } Ok(None) => {} Err(err) => { - error!(request_id = %request_id, error = %err, "failed to get last slot delivered"); + error!(%err, "failed to get last slot delivered"); } } @@ -449,7 +423,7 @@ where &payload_attributes, &api.chain_info, ) { - warn!(request_id = %request_id, error = %err, "failed sanity check"); + warn!(%err, "failed sanity check"); return Err(err) } trace.pre_checks = get_nanos_timestamp()?; @@ -460,7 +434,6 @@ where next_duty, &builder_info, &mut trace, - &request_id, &payload_attributes, ) .await?; @@ -480,42 +453,29 @@ where payload.value() <= max_block_value_to_verify }); if should_verify_and_save_proofs { - if let Err(err) = - api.verify_and_save_inclusion_proofs(&payload, constraints, &request_id).await + if let Err(err) = api.verify_and_save_inclusion_proofs(&payload, constraints).await { - warn!(request_id = %request_id, error = %err, "failed to verify and save inclusion proofs"); + warn!(%err, "failed to verify and save inclusion proofs"); return Err(err) } } else { - info!( - request_id = %request_id, - block_value = %payload.value(), - "block value is greater than max value to verify, inclusion proof verification and saving is skipped", - ); + info!(block_value = %payload.value(), "block value is greater than max value to verify, inclusion proof verification and saving is skipped"); } } else { - info!(%request_id, "no constraints found for slot, proof verification is not needed"); + info!("no constraints found for slot, proof verification is not needed"); } // If cancellations are enabled, then abort now if there is a later submission if is_cancellations_enabled { - if let Err(err) = - api.check_for_later_submissions(&payload, trace.receive, &request_id).await - { - warn!(request_id = %request_id, error = %err, "already processing later submission"); + if let Err(err) = api.check_for_later_submissions(&payload, trace.receive).await { + warn!(%err, "already processing later submission"); 
return Err(err) } } // Save bid to auctioneer match api - .save_bid_to_auctioneer( - &payload, - &mut trace, - is_cancellations_enabled, - floor_bid_value, - &request_id, - ) + .save_bid_to_auctioneer(&payload, &mut trace, is_cancellations_enabled, floor_bid_value) .await? { // If the bid was succesfully saved then we gossip the header and payload to all other @@ -527,7 +487,6 @@ where builder_bid, is_cancellations_enabled, trace.receive, - &request_id, ) .await; } @@ -537,8 +496,7 @@ where // Log some final info trace.request_finish = get_nanos_timestamp()?; debug!( - request_id = %request_id, - trace = ?trace, + ?trace, request_duration_ns = trace.request_finish.saturating_sub(trace.receive), "submit_block request finished" ); @@ -550,64 +508,52 @@ where }; // Save submission to db. - tokio::spawn(async move { - if let Err(err) = api - .db - .store_block_submission(payload, Arc::new(trace), optimistic_version as i16) - .await - { - error!( - error = %err, - "failed to store block submission", - ) + tokio::spawn( + async move { + if let Err(err) = api + .db + .store_block_submission(payload, Arc::new(trace), optimistic_version as i16) + .await + { + error!(%err, "failed to store block submission") + } } - }); + .in_current_span(), + ); Ok(StatusCode::OK) } /// Handles the submission of a new payload header by performing various checks and /// verifications before saving the headre to the auctioneer. 
+ #[tracing::instrument(skip_all, fields(id =% extract_request_id(&headers)))] pub async fn submit_header( Extension(api): Extension>>, headers: HeaderMap, req: Request, ) -> Result { - let request_id = extract_request_id(&headers); - let mut trace = HeaderSubmissionTrace { receive: get_nanos_timestamp()?, ..Default::default() }; let (head_slot, next_duty) = api.curr_slot_info.read().await.clone(); - debug!( - request_id = %request_id, - event = "submit_header", - head_slot = head_slot, - timestamp_request_start = trace.receive, - ); + debug!(head_slot, timestamp_request_start = trace.receive,); // Decode the incoming request body into a payload let (mut payload, is_cancellations_enabled) = - decode_header_submission(req, &mut trace, &request_id).await?; + decode_header_submission(req, &mut trace).await?; let block_hash = payload.block_hash().clone(); info!( - request_id = %request_id, - event = "submit_header", slot = payload.slot(), builder_pub_key = ?payload.builder_public_key(), block_value = %payload.value(), - block_hash = ?block_hash, + ?block_hash, "header submission decoded", ); // Verify the payload is for the current slot if payload.slot() <= head_slot { - debug!( - request_id = %request_id, - block_hash = ?block_hash, - "submission is for a past slot", - ); + debug!(?block_hash, "submission is for a past slot",); return Err(BuilderApiError::SubmissionForPastSlot { current_slot: head_slot, submission_slot: payload.slot(), @@ -616,23 +562,14 @@ where // Verify that we have a validator connected for this slot if next_duty.is_none() { - warn!( - request_id = %request_id, - block_hash = ?block_hash, - "could not find slot duty" - ); + warn!(?block_hash, "could not find slot duty"); return Err(BuilderApiError::ProposerDutyNotFound) } let next_duty = next_duty.unwrap(); // Fetch the next payload attributes and validate basic information let payload_attributes = api - .fetch_payload_attributes( - payload.slot(), - payload.parent_hash(), - &block_hash, - 
&request_id, - ) + .fetch_payload_attributes(payload.slot(), payload.parent_hash(), &block_hash) .await?; // Fetch builder info @@ -640,8 +577,8 @@ where // Submit header can only be processed optimistically. // Make sure that the builder has enough collateral to cover the submission. - if let Err(err) = api.check_builder_collateral(&payload, &builder_info, &request_id).await { - warn!(request_id = %request_id, error = %err, "builder has insufficient collateral"); + if let Err(err) = api.check_builder_collateral(&payload, &builder_info).await { + warn!(%err, "builder has insufficient collateral"); return Err(err) } @@ -652,7 +589,6 @@ where payload.slot(), payload.parent_hash(), payload.proposer_public_key(), - &request_id, ) .await { @@ -662,12 +598,7 @@ where // This mitigates the risk of someone sending an invalid payload // with a valid header, which would block subsequent submissions with the same // header and valid payload. - debug!( - request_id = %request_id, - block_hash = ?block_hash, - builder_pub_key = ?payload.builder_public_key(), - "block hash already seen" - ); + debug!(?block_hash, builder_pub_key = ?payload.builder_public_key(), "block hash already seen"); } _ => return Err(err), } @@ -675,7 +606,7 @@ where // Discard any OptimisticV2 submissions if the proposer has regional filtering enabled if next_duty.entry.preferences.filtering.is_regional() { - warn!(request_id = %request_id, "proposer has regional filtering, discarding optimistic v2 submission"); + warn!("proposer has regional filtering, discarding optimistic v2 submission"); return Err(BuilderApiError::V2SubmissionsInvalidIfProposerRequiresRegionalFiltering) } @@ -687,7 +618,7 @@ where &payload_attributes, &api.chain_info, ) { - warn!(request_id = %request_id, error = %err, "failed sanity check"); + warn!(%err, "failed sanity check"); return Err(err) } @@ -695,7 +626,6 @@ where if !api.check_if_trusted_builder(&next_duty, &builder_info).await { let proposer_trusted_builders = 
next_duty.entry.preferences.trusted_builders.unwrap(); debug!( - request_id = %request_id, builder_pub_key = ?payload.builder_public_key(), proposer_trusted_builders = ?proposer_trusted_builders, "builder not in proposer trusted builders list", @@ -709,7 +639,7 @@ where // Verify the payload signature if let Err(err) = payload.verify_signature(&api.chain_info.context) { - warn!(request_id = %request_id, error = %err, "failed to verify signature"); + warn!(%err, "failed to verify signature"); return Err(BuilderApiError::SignatureVerificationFailed) } trace.signature = get_nanos_timestamp()?; @@ -718,13 +648,13 @@ where match api.auctioneer.get_last_slot_delivered().await { Ok(Some(slot)) => { if payload.slot() <= slot { - debug!(request_id = %request_id, "payload already delivered"); + debug!("payload already delivered"); return Err(BuilderApiError::PayloadAlreadyDelivered) } } Ok(None) => {} Err(err) => { - error!(request_id = %request_id, error = %err, "failed to get last slot delivered"); + error!(%err, "failed to get last slot delivered"); } } @@ -739,7 +669,6 @@ where payload.builder_public_key(), payload.value(), is_cancellations_enabled, - &request_id, ) .await?; trace.floor_bid_checks = get_nanos_timestamp()?; @@ -751,7 +680,6 @@ where &mut trace, is_cancellations_enabled, floor_bid_value, - &request_id, ) .await? 
{ @@ -761,7 +689,6 @@ where payload.bid_trace(), is_cancellations_enabled, trace.receive, - &request_id, ) .await; } @@ -771,8 +698,7 @@ where // Log some final info trace.request_finish = get_nanos_timestamp()?; info!( - request_id = %request_id, - trace = ?trace, + ?trace, request_duration_ns = trace.request_finish.saturating_sub(trace.receive), "submit_header request finished" ); @@ -784,22 +710,26 @@ where payload.builder_public_key(), payload.block_hash(), trace.receive / 1_000_000, // convert to ms - ).await + ) + .await .map_err(|err| { - error!(request_id = %request_id, error = %err, "failed to save pending block header"); + error!(%err, "failed to save pending block header"); BuilderApiError::AuctioneerError(err) })?; // Save submission to db let db = api.db.clone(); - tokio::spawn(async move { - if let Err(err) = db.store_header_submission(payload, Arc::new(trace)).await { - error!( - error = %err, - "failed to store header submission", - ) + tokio::spawn( + async move { + if let Err(err) = db.store_header_submission(payload, Arc::new(trace)).await { + error!( + %err, + "failed to store header submission", + ) + } } - }); + .in_current_span(), + ); Ok(StatusCode::OK) } @@ -814,33 +744,25 @@ where /// 5. Simulates the block to validate the payment. /// 6. Saves the bid to auctioneer and db. /// - /// Implements this API: TODO: point to gattaca spec. rename? 
+ /// Implements this API: https://docs.titanrelay.xyz/builders/builder-integration#optimistic-v2 + #[tracing::instrument(skip_all, fields(id =% extract_request_id(&headers)))] pub async fn submit_block_v2( Extension(api): Extension>>, headers: HeaderMap, req: Request, ) -> Result { - let request_id = extract_request_id(&headers); - let now = SystemTime::now(); let mut trace = SubmissionTrace { receive: get_nanos_from(now)?, ..Default::default() }; let (head_slot, next_duty) = api.curr_slot_info.read().await.clone(); - debug!( - request_id = %request_id, - event = "submit_block_v2", - head_slot = head_slot, - timestamp_request_start = trace.receive, - ); + debug!(head_slot, timestamp_request_start = trace.receive); // Decode the incoming request body into a payload - let (payload, _) = decode_payload(req, &mut trace, &request_id).await?; + let (payload, _) = decode_payload(req, &mut trace).await?; let builder_pub_key = payload.builder_public_key().clone(); let block_hash = payload.message().block_hash.clone(); info!( - request_id = %request_id, - event = "submit_block_v2", slot = payload.slot(), builder_pub_key = ?builder_pub_key, block_value = %payload.value(), @@ -855,19 +777,16 @@ where payload.builder_public_key(), payload.block_hash(), trace.receive / 1_000_000, // convert to ms - ).await + ) + .await .map_err(|err| { - error!(request_id = %request_id, error = %err, "failed to save pending block header"); + error!(%err, "failed to save pending block header"); BuilderApiError::AuctioneerError(err) })?; // Verify the payload is for the current slot if payload.slot() <= head_slot { - debug!( - request_id = %request_id, - block_hash = ?block_hash, - "submission is for a past slot", - ); + debug!(?block_hash, "submission is for a past slot",); return Err(BuilderApiError::SubmissionForPastSlot { current_slot: head_slot, submission_slot: payload.slot(), @@ -878,23 +797,14 @@ where // Note: in `submit_block_v2` we have to do this check after decoding // so we can send 
a `PayloadReceived` message. if next_duty.is_none() { - warn!( - request_id = %request_id, - block_hash = ?block_hash, - "could not find slot duty" - ); + warn!(?block_hash, "could not find slot duty"); return Err(BuilderApiError::ProposerDutyNotFound) } let next_duty = next_duty.unwrap(); // Fetch the next payload attributes and validate basic information let payload_attributes = api - .fetch_payload_attributes( - payload.slot(), - payload.parent_hash(), - &block_hash, - &request_id, - ) + .fetch_payload_attributes(payload.slot(), payload.parent_hash(), &block_hash) .await?; // Fetch builder info @@ -902,14 +812,14 @@ where // submit_block_v2 can only be processed optimistically. // Make sure that the builder has enough collateral to cover the submission. - if let Err(err) = api.check_builder_collateral(&payload, &builder_info, &request_id).await { - warn!(request_id = %request_id, error = %err, "builder has insufficient collateral"); + if let Err(err) = api.check_builder_collateral(&payload, &builder_info).await { + warn!(%err, "builder has insufficient collateral"); return Err(err) } // Discard any OptimisticV2 submissions if the proposer has regional filtering enabled if next_duty.entry.preferences.filtering.is_regional() { - warn!(request_id = %request_id, "proposer has regional filtering enabled, discarding optimistic v2 submission"); + warn!("proposer has regional filtering enabled, discarding optimistic v2 submission"); return Err(BuilderApiError::V2SubmissionsInvalidIfProposerRequiresRegionalFiltering) } @@ -917,7 +827,6 @@ where if !api.check_if_trusted_builder(&next_duty, &builder_info).await { let proposer_trusted_builders = next_duty.entry.preferences.trusted_builders.unwrap(); warn!( - request_id = %request_id, builder_pub_key = ?payload.builder_public_key(), proposer_trusted_builders = ?proposer_trusted_builders, "builder not in proposer trusted builders list", @@ -931,18 +840,18 @@ where match api.auctioneer.get_last_slot_delivered().await { 
Ok(Some(slot)) => { if payload.slot() <= slot { - debug!(request_id = %request_id, "payload already delivered"); + debug!("payload already delivered"); return Err(BuilderApiError::PayloadAlreadyDelivered) } } Ok(None) => {} Err(err) => { - error!(request_id = %request_id, error = %err, "failed to get last slot delivered"); + error!(%err, "failed to get last slot delivered"); } } // Check for tx root against header received - if let Err(err) = api.check_tx_root_against_header(&payload, &request_id).await { + if let Err(err) = api.check_tx_root_against_header(&payload).await { match err { // Could have just received the payload before the header BuilderApiError::MissingTransactionsRoot => {} @@ -951,7 +860,7 @@ where return Err(BuilderApiError::AuctioneerError(err)) } _ => { - api.demote_builder(&builder_pub_key, &block_hash, &err, &request_id).await; + api.demote_builder(&builder_pub_key, &block_hash, &err).await; return Err(err) } } @@ -965,7 +874,7 @@ where &payload_attributes, &api.chain_info, ) { - warn!(request_id = %request_id, error = %err, "failed sanity check"); + warn!(%err, "failed sanity check"); return Err(err) } trace.pre_checks = get_nanos_timestamp()?; @@ -976,7 +885,6 @@ where next_duty, &builder_info, &mut trace, - &request_id, &payload_attributes, ) .await @@ -984,7 +892,7 @@ where Ok(val) => val, Err(err) => { // Any invalid submission for optimistic v2 results in a demotion. 
- api.demote_builder(&builder_pub_key, &block_hash, &err, &request_id).await; + api.demote_builder(&builder_pub_key, &block_hash, &err).await; return Err(err) } }; @@ -1000,36 +908,35 @@ where ) .await { - error!(request_id = %request_id, error = %err, "failed to save execution payload"); + error!(%err, "failed to save execution payload"); return Err(BuilderApiError::AuctioneerError(err)) } trace.auctioneer_update = get_nanos_timestamp()?; // Gossip to other relays - api.gossip_payload(&payload, payload.payload_and_blobs(), &request_id).await; + api.gossip_payload(&payload, payload.payload_and_blobs()).await; // Log some final info trace.request_finish = get_nanos_timestamp()?; debug!( - request_id = %request_id, - trace = ?trace, + ?trace, request_duration_ns = trace.request_finish.saturating_sub(trace.receive), "sumbit_block_v2 request finished" ); // Save submission to db - tokio::spawn(async move { - if let Err(err) = api - .db - .store_block_submission(payload, Arc::new(trace), OptimisticVersion::V2 as i16) - .await - { - error!( - error = %err, - "failed to store block submission", - ) + tokio::spawn( + async move { + if let Err(err) = api + .db + .store_block_submission(payload, Arc::new(trace), OptimisticVersion::V2 as i16) + .await + { + error!(%err, "failed to store block submission") + } } - }); + .in_current_span(), + ); Ok(StatusCode::OK) } @@ -1037,34 +944,29 @@ where /// Handles the cancellation of a bid for a builder. Builders currently cached bid in the /// auctioneer is deleted, the top bid is recalculated, and the cancellation is gossiped to /// all other relays. 
+ #[tracing::instrument(skip_all, fields(id))] pub async fn cancel_bid( Extension(api): Extension>>, headers: HeaderMap, Json(mut signed_cancellation): Json, ) -> Result { let request_id = extract_request_id(&headers); + tracing::Span::current().record("id", &request_id.to_string()); let (head_slot, _next_duty) = api.curr_slot_info.read().await.clone(); let slot = signed_cancellation.message.slot; - info!( - request_id = %request_id, - event = "cancel_bid", - head_slot = head_slot, - ); + info!(head_slot, "processing cancellation"); // Verify the cancellation is for the current slot if slot <= head_slot { - debug!( - request_id = %request_id, - "cancellation is for a past slot", - ); + debug!("cancellation is for a past slot",); } // Verify the payload signature if let Err(err) = signed_cancellation.verify_signature(&api.chain_info.context) { - warn!(request_id = %request_id, error = %err, "failed to verify signature"); + warn!(%err, "failed to verify signature"); return Err(BuilderApiError::SignatureVerificationFailed) } @@ -1072,13 +974,13 @@ where match api.auctioneer.get_last_slot_delivered().await { Ok(Some(del_slot)) => { if slot <= del_slot { - debug!(request_id = %request_id, "payload already delivered"); + debug!("payload already delivered"); return Err(BuilderApiError::PayloadAlreadyDelivered) } } Ok(None) => {} Err(err) => { - error!(request_id = %request_id, error = %err, "failed to get last slot delivered"); + error!(%err, "failed to get last slot delivered"); } } @@ -1092,11 +994,7 @@ where ) .await { - error!( - request_id = %request_id, - error = %err, - "Failed processing cancellable bid below floor. Could not delete builder bid.", - ); + error!(%err, "Failed processing cancellable bid below floor. 
Could not delete builder bid."); return Err(BuilderApiError::InternalError) } @@ -1105,6 +1003,7 @@ where Ok(StatusCode::OK) } + #[tracing::instrument(skip_all)] pub async fn get_top_bid( Extension(api): Extension>>, headers: HeaderMap, @@ -1116,7 +1015,7 @@ where Ok(true) => {} Ok(false) => return Err(BuilderApiError::InvalidApiKey), Err(err) => { - error!(error = %err, "failed to check api key"); + error!(%err, "failed to check api key"); return Err(BuilderApiError::InternalError) } }, @@ -1135,13 +1034,9 @@ where S: BlockSimulator + 'static, G: GossipClientTrait + 'static, { + #[tracing::instrument(skip_all, fields(id = %Uuid::new_v4()))] pub async fn process_gossiped_header(&self, req: BroadcastHeaderParams) { - let request_id = Uuid::new_v4(); - debug!( - request_id = %request_id, - block_hash = ?req.signed_builder_bid.block_hash(), - "received gossiped header", - ); + debug!(block_hash = ?req.signed_builder_bid.block_hash(), "received gossiped header"); let mut trace = GossipedHeaderTrace { on_receive: req.on_receive, @@ -1152,10 +1047,7 @@ where // Verify that the gossiped header is not for a past slot let (head_slot, _) = self.curr_slot_info.read().await.clone(); if req.slot <= head_slot { - debug!( - request_id = %request_id, - "received gossiped header for a past slot", - ); + debug!("received gossiped header for a past slot"); return } @@ -1166,7 +1058,6 @@ where req.slot, &req.parent_hash, &req.proposer_pub_key, - &request_id, ) .await .is_err() @@ -1178,13 +1069,13 @@ where match self.auctioneer.get_last_slot_delivered().await { Ok(Some(slot)) => { if req.slot <= slot { - debug!(request_id = %request_id, "payload already delivered"); + debug!("payload already delivered"); return } } Ok(None) => {} Err(err) => { - error!(request_id = %request_id, error = %err, "failed to get last slot delivered"); + error!(%err, "failed to get last slot delivered"); } } @@ -1197,13 +1088,12 @@ where &req.builder_pub_key, req.signed_builder_bid.value(), 
req.is_cancellations_enabled, - &request_id, ) .await { Ok(floor_bid_value) => floor_bid_value, Err(err) => { - warn!(request_id = %request_id, error = %err, "bid is below floor"); + warn!(%err, "bid is below floor"); return } }; @@ -1224,13 +1114,13 @@ where ) .await { - warn!(request_id = %request_id, error = %err, "failed to save header bid"); + warn!(%err, "failed to save header bid"); return } trace.auctioneer_update = get_nanos_timestamp().unwrap_or_default(); - debug!(request_id = %request_id, "succesfully saved gossiped header"); + debug!("succesfully saved gossiped header"); // Save latency trace to db let db = self.db.clone(); @@ -1239,21 +1129,14 @@ where .save_gossiped_header_trace(req.bid_trace.block_hash.clone(), Arc::new(trace)) .await { - error!( - error = %err, - "failed to store gossiped header trace", - ) + error!(%err, "failed to store gossiped header trace") } }); } + #[tracing::instrument(skip_all, fields(id = %Uuid::new_v4()))] pub async fn process_gossiped_payload(&self, req: BroadcastPayloadParams) { - let request_id = Uuid::new_v4(); - debug!( - request_id = %request_id, - block_hash = ?req.execution_payload.execution_payload.block_hash(), - "received gossiped payload", - ); + debug!(block_hash = ?req.execution_payload.execution_payload.block_hash(), "received gossiped payload"); let mut trace = GossipedPayloadTrace { receive: get_nanos_timestamp().unwrap_or_default(), @@ -1271,16 +1154,13 @@ where ) .await { - error!(request_id = %request_id, error = %err, "failed to save pending block header"); + error!(%err, "failed to save pending block header"); } // Verify that the gossiped payload is not for a past slot let (head_slot, _) = self.curr_slot_info.read().await.clone(); if req.slot <= head_slot { - debug!( - request_id = %request_id, - "received gossiped payload for a past slot", - ); + debug!("received gossiped payload for a past slot"); return } @@ -1288,13 +1168,13 @@ where match self.auctioneer.get_last_slot_delivered().await { 
Ok(Some(slot)) => { if req.slot <= slot { - debug!(request_id = %request_id, "payload already delivered"); + debug!("payload already delivered"); return } } Ok(None) => {} Err(err) => { - error!(request_id = %request_id, error = %err, "failed to get last slot delivered"); + error!(%err, "failed to get last slot delivered"); } } @@ -1311,13 +1191,13 @@ where ) .await { - error!(request_id = %request_id, error = %err, "failed to save execution payload"); + error!(%err, "failed to save execution payload"); return } trace.auctioneer_update = get_nanos_timestamp().unwrap_or_default(); - debug!(request_id = %request_id, "succesfully saved gossiped payload"); + debug!("succesfully saved gossiped payload"); // Save gossiped payload trace to db let db = self.db.clone(); @@ -1329,21 +1209,16 @@ where ) .await { - error!( - error = %err, - "failed to store gossiped payload trace", - ) + error!(%err, "failed to store gossiped payload trace") } }); } /// Processes a gossiped cancellation message. No need to verify the signature as the message /// is gossiped internally and verification has been performed upstream. 
+ #[tracing::instrument(skip_all, fields(id = %req.request_id))] pub async fn process_gossiped_cancellation(&self, req: BroadcastCancellationParams) { - debug!( - request_id = %req.request_id, - "received gossiped cancellation", - ); + debug!("received gossiped cancellation",); let (head_slot, _) = self.curr_slot_info.read().await.clone(); @@ -1351,22 +1226,19 @@ where // Verify the cancellation is for the current slot if slot <= head_slot { - warn!( - request_id = %req.request_id, - "cancellation is for a past slot", - ); + warn!("cancellation is for a past slot",); } // Verify payload has not already been delivered match self.auctioneer.get_last_slot_delivered().await { Ok(Some(del_slot)) => { if slot <= del_slot { - debug!(request_id = %req.request_id, "payload already delivered"); + debug!("payload already delivered"); } } Ok(None) => {} Err(err) => { - error!(request_id = %req.request_id, error = %err, "failed to get last slot delivered"); + error!(%err, "failed to get last slot delivered"); } } @@ -1380,11 +1252,7 @@ where ) .await { - error!( - request_id = %req.request_id, - error = %err, - "Failed processing cancellable bid below floor. Could not delete builder bid.", - ); + error!(%err, "Failed processing cancellable bid below floor. 
Could not delete builder bid."); } } @@ -1423,17 +1291,10 @@ where builder_bid: SignedBuilderBid, is_cancellations_enabled: bool, on_receive: u64, - request_id: &Uuid, ) { - self.gossip_header( - builder_bid, - payload.bid_trace(), - is_cancellations_enabled, - on_receive, - request_id, - ) - .await; - self.gossip_payload(payload, execution_payload, request_id).await; + self.gossip_header(builder_bid, payload.bid_trace(), is_cancellations_enabled, on_receive) + .await; + self.gossip_payload(payload, execution_payload).await; } async fn gossip_header( @@ -1442,7 +1303,6 @@ where bid_trace: &BidTrace, is_cancellations_enabled: bool, on_receive: u64, - request_id: &Uuid, ) { let params = BroadcastHeaderParams { signed_builder_bid: builder_bid, @@ -1455,7 +1315,7 @@ where on_receive, }; if let Err(err) = self.gossiper.broadcast_header(params).await { - error!(request_id = %request_id, error = %err, "failed to broadcast header"); + error!(%err, "failed to broadcast header"); } } @@ -1463,7 +1323,6 @@ where &self, payload: &SignedBidSubmission, execution_payload: PayloadAndBlobs, - request_id: &Uuid, ) { let params = BroadcastPayloadParams { execution_payload, @@ -1471,7 +1330,7 @@ where proposer_pub_key: payload.proposer_public_key().clone(), }; if let Err(err) = self.gossiper.broadcast_payload(params).await { - error!(request_id = %request_id, error = %err, "failed to broadcast payload"); + error!(%err, "failed to broadcast payload"); } } @@ -1507,12 +1366,11 @@ where next_duty: BuilderGetValidatorsResponseEntry, builder_info: &BuilderInfo, trace: &mut SubmissionTrace, - request_id: &Uuid, payload_attributes: &PayloadAttributesUpdate, ) -> Result<(Arc, bool), BuilderApiError> { // Verify the payload signature if let Err(err) = payload.verify_signature(&self.chain_info.context) { - warn!(request_id = %request_id, error = %err, "failed to verify signature"); + warn!(%err, "failed to verify signature"); return Err(BuilderApiError::SignatureVerificationFailed) } 
trace.signature = get_nanos_timestamp()?; @@ -1525,7 +1383,6 @@ where builder_info, trace, next_duty.entry, - request_id, payload_attributes, ) .await?; @@ -1543,7 +1400,6 @@ where slot: u64, parent_hash: &Hash32, proposer_public_key: &BlsPublicKey, - request_id: &Uuid, ) -> Result<(), BuilderApiError> { match self .auctioneer @@ -1552,11 +1408,11 @@ where { Ok(false) => Ok(()), Ok(true) => { - debug!(request_id = %request_id, block_hash = ?block_hash, "duplicate block hash"); + debug!(?block_hash, "duplicate block hash"); Err(BuilderApiError::DuplicateBlockHash { block_hash: block_hash.clone() }) } Err(err) => { - warn!(request_id = %request_id, err = %err, "failed to call seen_or_insert_block_hash"); + error!(%err, "failed to call seen_or_insert_block_hash"); Err(BuilderApiError::InternalError) } } @@ -1565,20 +1421,19 @@ where async fn check_tx_root_against_header( &self, payload: &SignedBidSubmission, - request_id: &Uuid, ) -> Result<(), BuilderApiError> { match self.auctioneer.get_header_tx_root(payload.block_hash()).await { Ok(Some(expected_tx_root)) => { let tx_root = match payload.transactions_root() { Some(tx_root) => tx_root, None => { - warn!(request_id = %request_id, "no tx root found in payload"); + warn!("no tx root found in payload"); return Err(BuilderApiError::MissingTransactions) } }; if expected_tx_root != tx_root { - warn!(request_id = %request_id, "tx root mismatch"); + warn!("tx root mismatch"); return Err(BuilderApiError::TransactionsRootMismatch { got: Hash32::try_from(tx_root.as_ref()).unwrap(), expected: Hash32::try_from(expected_tx_root.as_ref()).unwrap(), @@ -1586,11 +1441,11 @@ where } } Ok(None) => { - warn!(request_id = %request_id, "no tx root found for block hash"); + warn!("no tx root found for block hash"); return Err(BuilderApiError::MissingTransactionsRoot) } Err(err) => { - error!(request_id = %request_id, error = %err, "failed to get tx root"); + error!(%err, "failed to get tx root"); return 
Err(BuilderApiError::AuctioneerError(err)) } }; @@ -1612,14 +1467,13 @@ where builder_public_key: &BlsPublicKey, value: U256, is_cancellations_enabled: bool, - request_id: &Uuid, ) -> Result { let floor_bid_value = match self.auctioneer.get_floor_bid_value(slot, parent_hash, proposer_public_key).await { Ok(floor_value) => floor_value.unwrap_or(U256::ZERO), Err(err) => { - error!(request_id = %request_id, error = %err, "Failed to get floor bid value"); + error!(%err, "Failed to get floor bid value"); return Err(BuilderApiError::InternalError) } }; @@ -1627,7 +1481,7 @@ where // Ignore floor bid checks if this builder pubkey is part of the // `skip_floor_bid_builder_pubkeys` config. if self.relay_config.skip_floor_bid_builder_pubkeys.contains(builder_public_key) { - debug!(%request_id, ?builder_public_key, "skipping floor bid checks for submission"); + debug!(?builder_public_key, "skipping floor bid checks for submission"); return Ok(floor_bid_value) } @@ -1635,22 +1489,18 @@ where let is_bid_at_or_below_floor = value <= floor_bid_value; if is_cancellations_enabled && is_bid_below_floor { - debug!(request_id = %request_id, "submission below floor bid value, with cancellation"); + debug!("submission below floor bid value, with cancellation"); if let Err(err) = self .auctioneer .delete_builder_bid(slot, parent_hash, proposer_public_key, builder_public_key) .await { - error!( - request_id = %request_id, - error = %err, - "Failed processing cancellable bid below floor. Could not delete builder bid.", - ); + error!(%err, "Failed processing cancellable bid below floor. 
Could not delete builder bid."); return Err(BuilderApiError::InternalError) } return Err(BuilderApiError::BidBelowFloor) } else if !is_cancellations_enabled && is_bid_at_or_below_floor { - debug!(request_id = %request_id, "submission at or below floor bid value, without cancellation"); + debug!("submission at or below floor bid value, without cancellation"); return Err(BuilderApiError::BidBelowFloor) } Ok(floor_bid_value) @@ -1696,7 +1546,6 @@ where builder_info: &BuilderInfo, trace: &mut SubmissionTrace, registration_info: ValidatorRegistrationInfo, - request_id: &Uuid, payload_attributes: &PayloadAttributesUpdate, ) -> Result { let mut is_top_bid = false; @@ -1708,14 +1557,14 @@ where Ok(top_bid_value) => { let top_bid_value = top_bid_value.unwrap_or(U256::ZERO); is_top_bid = payload.value() > top_bid_value; - debug!(request_id = %request_id, top_bid_value = ?top_bid_value, new_bid_is_top_bid = is_top_bid); + debug!(?top_bid_value, new_bid_is_top_bid = is_top_bid); } Err(err) => { - error!(request_id = %request_id, error = %err, "failed to get top bid value from auctioneer"); + error!(%err, "failed to get top bid value from auctioneer"); } } - debug!(request_id = %request_id, timestamp_before_validation = get_nanos_timestamp()?); + debug!(timestamp_before_validation = get_nanos_timestamp()?); let sim_request = BlockSimRequest::new( registration_info.registration.message.gas_limit, @@ -1725,31 +1574,25 @@ where ); let result = self .simulator - .process_request( - sim_request, - builder_info, - is_top_bid, - self.db_sender.clone(), - *request_id, - ) + .process_request(sim_request, builder_info, is_top_bid, self.db_sender.clone()) .await; match result { Ok(sim_optimistic) => { - debug!(request_id = %request_id, "block simulation successful"); + debug!("block simulation successful"); trace.simulation = get_nanos_timestamp()?; - debug!(request_id = %request_id, sim_latency = trace.simulation.saturating_sub(trace.signature)); + debug!(sim_latency = 
trace.simulation.saturating_sub(trace.signature)); Ok(sim_optimistic) } Err(err) => match &err { BlockSimError::BlockValidationFailed(reason) => { - warn!(request_id = %request_id, error = %reason, "block validation failed"); + warn!(err = %reason, "block validation failed"); Err(BuilderApiError::BlockValidationError(err)) } _ => { - error!(request_id = %request_id, error = %err, "error simulating block"); + error!(%err, "error simulating block"); Err(BuilderApiError::InternalError) } }, @@ -1762,7 +1605,6 @@ where trace: &mut SubmissionTrace, is_cancellations_enabled: bool, floor_bid_value: U256, - request_id: &Uuid, ) -> Result, BuilderApiError> { let mut update_bid_result = SaveBidAndUpdateTopBidResponse::default(); @@ -1781,18 +1623,13 @@ where Ok(Some((builder_bid, execution_payload))) => { // Log the results of the bid submission trace.auctioneer_update = get_nanos_timestamp()?; - log_save_bid_info( - &update_bid_result, - trace.simulation, - trace.auctioneer_update, - request_id, - ); + log_save_bid_info(&update_bid_result, trace.simulation, trace.auctioneer_update); Ok(Some((builder_bid, execution_payload))) } Ok(None) => Ok(None), Err(err) => { - error!(request_id = %request_id, error = %err, "could not save bid and update top bids"); + error!(%err, "could not save bid and update top bids"); Err(BuilderApiError::AuctioneerError(err)) } } @@ -1805,14 +1642,13 @@ where proposer_pub_key: &BlsPublicKey, bid_block_hash: &Hash32, inclusion_proof: &InclusionProofs, - request_id: &Uuid, ) -> Result<(), BuilderApiError> { if let Err(err) = self .auctioneer .save_inclusion_proof(slot, proposer_pub_key, bid_block_hash, inclusion_proof) .await { - error!(request_id = %request_id, error = %err, "failed to save inclusion proof"); + error!(%err, "failed to save inclusion proof"); return Err(BuilderApiError::InternalError) } Ok(()) @@ -1824,7 +1660,6 @@ where trace: &mut HeaderSubmissionTrace, is_cancellations_enabled: bool, floor_bid_value: U256, - request_id: &Uuid, ) 
-> Result, BuilderApiError> { let mut update_bid_result = SaveBidAndUpdateTopBidResponse::default(); match self @@ -1846,14 +1681,13 @@ where &update_bid_result, trace.floor_bid_checks, trace.auctioneer_update, - request_id, ); Ok(Some(builder_bid)) } Ok(None) => Ok(None), Err(err) => { - error!(request_id = %request_id, error = %err, "could not save header submission and update top bid"); + error!(%err, "could not save header submission and update top bid"); Err(BuilderApiError::AuctioneerError(err)) } } @@ -1864,23 +1698,22 @@ where slot: u64, parent_hash: &Hash32, block_hash: &Hash32, - request_id: &Uuid, ) -> Result { let payload_attributes_key = get_payload_attributes_key(parent_hash, slot); let payload_attributes = self.payload_attributes.read().await.get(&payload_attributes_key).cloned().ok_or_else( || { - warn!( - request_id = %request_id, - block_hash = ?block_hash, - "payload attributes not yet known" - ); + warn!(?block_hash, "payload attributes not yet known"); BuilderApiError::PayloadAttributesNotYetKnown }, )?; if payload_attributes.slot != slot { - warn!(request_id = %request_id, "payload attributes slot mismatch with payload attributes"); + warn!( + got = slot, + expected = payload_attributes.slot, + "payload attributes slot mismatch with payload attributes" + ); return Err(BuilderApiError::PayloadSlotMismatchWithPayloadAttributes { got: slot, expected: payload_attributes.slot, @@ -1899,7 +1732,6 @@ where &self, payload: &impl BidSubmission, on_receive: u64, - request_id: &Uuid, ) -> Result<(), BuilderApiError> { match self .auctioneer @@ -1918,7 +1750,7 @@ where } Ok(None) => {} Err(err) => { - error!(request_id = %request_id, error = %err, "failed to get last slot delivered"); + error!(%err, "failed to get last slot delivered"); } } Ok(()) @@ -1933,11 +1765,9 @@ where &self, payload: &impl BidSubmission, builder_info: &BuilderInfo, - request_id: &Uuid, ) -> Result<(), BuilderApiError> { if !builder_info.is_optimistic { warn!( - request_id = 
%request_id, builder=%payload.builder_public_key(), "builder is not optimistic" ); @@ -1946,7 +1776,6 @@ where }) } else if builder_info.collateral < payload.value() { warn!( - request_id = %request_id, builder=?payload.builder_public_key(), collateral=%builder_info.collateral, collateral_required=%payload.value(), @@ -1981,36 +1810,18 @@ where async fn demote_builder( &self, - builder_pub_key: &BlsPublicKey, + builder: &BlsPublicKey, block_hash: &Hash32, err: &BuilderApiError, - request_id: &Uuid, ) { - error!( - request_id = %request_id, - error = %err, - builder_pub_key = ?builder_pub_key, - "verification failed for submit_block_v2. Demoting builder!", - ); + error!(%err, %builder, "verification failed for submit_block_v2. Demoting builder!"); - if let Err(err) = self.auctioneer.demote_builder(builder_pub_key).await { - error!( - builder=%builder_pub_key, - err=%err, - request_id=%request_id, - "Failed to demote builder in auctioneer" - ); + if let Err(err) = self.auctioneer.demote_builder(builder).await { + error!(%err, %builder, "failed to demote builder in auctioneer"); } - if let Err(err) = - self.db.db_demote_builder(builder_pub_key, block_hash, err.to_string()).await - { - error!( - builder=%builder_pub_key, - err=%err, - request_id=%request_id, - "Failed to demote builder in database" - ); + if let Err(err) = self.db.db_demote_builder(builder, block_hash, err.to_string()).await { + error!(%err, %builder, "Failed to demote builder in database"); } } @@ -2019,7 +1830,6 @@ where &self, payload: &SignedBidSubmission, constraints: Vec, - request_id: &Uuid, ) -> Result<(), BuilderApiError> { let transactions_root: B256 = payload .transactions() @@ -2036,9 +1846,9 @@ where let constraints_proofs: Vec<_> = constraints.iter().map(|c| &c.proof_data).collect(); verify_multiproofs(constraints_proofs.as_slice(), proofs, transactions_root).map_err( - |e| { - error!(error = %e, "failed to verify inclusion proofs"); - BuilderApiError::InclusionProofVerificationFailed(e) 
+ |err| { + error!(%err, "failed to verify inclusion proofs"); + BuilderApiError::InclusionProofVerificationFailed(err) }, )?; @@ -2048,10 +1858,9 @@ where payload.proposer_public_key(), payload.block_hash(), proofs, - request_id, ) .await?; - info!(%request_id, "inclusion proofs verified and saved to auctioneer"); + info!("inclusion proofs verified and saved to auctioneer"); Ok(()) } } @@ -2107,7 +1916,7 @@ where match serde_json::to_vec(&response) { Ok(duty_bytes) => *self.proposer_duties_response.write().await = Some(duty_bytes), Err(err) => { - error!(error = %err, "failed to serialize proposer duties to JSON"); + error!(%err, "failed to serialize proposer duties to JSON"); *self.proposer_duties_response.write().await = None; } } @@ -2154,7 +1963,6 @@ where pub async fn decode_payload( req: Request, trace: &mut SubmissionTrace, - request_id: &Uuid, ) -> Result<(SignedBidSubmission, bool), BuilderApiError> { // Extract the query parameters let is_cancellations_enabled = req @@ -2213,7 +2021,7 @@ pub async fn decode_payload( Ok(payload) => payload, Err(err) => { // Fallback to JSON - warn!(request_id = %request_id, error = %err, "Failed to decode payload using SSZ; falling back to JSON"); + warn!(%err, "failed to decode payload using SSZ; falling back to JSON"); serde_json::from_slice(&body_bytes)? 
} } @@ -2223,7 +2031,6 @@ pub async fn decode_payload( trace.decode = get_nanos_timestamp()?; debug!( - request_id = %request_id, timestamp_after_decoding = trace.decode, decode_latency_ns = trace.decode.saturating_sub(trace.receive), builder_pub_key = ?payload.builder_public_key(), @@ -2325,7 +2132,6 @@ async fn push_top_bids(mut socket: WebSocket, auctionee pub async fn decode_header_submission( req: Request, trace: &mut HeaderSubmissionTrace, - request_id: &Uuid, ) -> Result<(SignedHeaderSubmission, bool), BuilderApiError> { // Extract the query parameters let is_cancellations_enabled = req @@ -2365,7 +2171,7 @@ pub async fn decode_header_submission( Ok(header) => header, Err(err) => { // Fallback to JSON - warn!(request_id = %request_id, error = %err, "Failed to decode header using SSZ; falling back to JSON"); + warn!(%err, "Failed to decode header using SSZ; falling back to JSON"); serde_json::from_slice(&body_bytes)? } } @@ -2375,8 +2181,7 @@ pub async fn decode_header_submission( trace.decode = get_nanos_timestamp()?; debug!( - request_id = %request_id, - timestamp_after_decoding = Instant::now().elapsed().as_nanos(), + timestamp_after_decoding = trace.decode, decode_latency_ns = trace.decode.saturating_sub(trace.receive), builder_pub_key = ?header.builder_public_key(), block_hash = ?header.block_hash(), @@ -2500,10 +2305,8 @@ fn log_save_bid_info( update_bid_result: &SaveBidAndUpdateTopBidResponse, bid_update_start: u64, bid_update_finish: u64, - request_id: &Uuid, ) { debug!( - request_id = %request_id, bid_update_latency = bid_update_finish.saturating_sub(bid_update_start), was_bid_saved_in = update_bid_result.was_bid_saved, was_top_bid_updated = update_bid_result.was_top_bid_updated, @@ -2515,7 +2318,7 @@ fn log_save_bid_info( ); if update_bid_result.was_bid_saved { - debug!(request_id = %request_id, eligible_at = bid_update_finish); + debug!(eligible_at = bid_update_finish); } } @@ -2594,8 +2397,6 @@ mod tests { HeaderValue, Uri, }; - use uuid::Uuid; - 
async fn build_test_request(payload: Vec, is_gzip: bool, is_ssz: bool) -> Request { let mut req = Request::new(Body::from(payload)); *req.uri_mut() = Uri::from_static("/some_path?cancellations=1"); @@ -2616,10 +2417,6 @@ mod tests { SubmissionTrace::default() } - async fn create_test_uuid() -> Uuid { - Uuid::new_v4() - } - #[tokio::test] async fn test_decode_json_payload() { let json_payload: Vec = vec![]; @@ -2882,9 +2679,8 @@ mod tests { let payload = vec![0u8; MAX_PAYLOAD_LENGTH + 1]; let req = build_test_request(payload, false, false).await; let mut trace = create_test_submission_trace().await; - let request_id = create_test_uuid().await; - let result = decode_payload(req, &mut trace, &request_id).await; + let result = decode_payload(req, &mut trace).await; match result { Ok(_) => panic!("Should have failed"), Err(err) => match err { diff --git a/crates/api/src/builder/simulator/mock_simulator.rs b/crates/api/src/builder/simulator/mock_simulator.rs index 16dcd32..b558302 100644 --- a/crates/api/src/builder/simulator/mock_simulator.rs +++ b/crates/api/src/builder/simulator/mock_simulator.rs @@ -2,7 +2,6 @@ use async_trait::async_trait; use tokio::sync::mpsc::Sender; use helix_common::{simulator::BlockSimError, BuilderInfo}; -use uuid::Uuid; use crate::builder::{traits::BlockSimulator, BlockSimRequest, DbInfo}; @@ -23,7 +22,6 @@ impl BlockSimulator for MockSimulator { _builder_info: &BuilderInfo, _is_top_bid: bool, _sim_result_saver_sender: Sender, - _request_id: Uuid, ) -> Result { Ok(true) } diff --git a/crates/api/src/builder/simulator/multi_simulator.rs b/crates/api/src/builder/simulator/multi_simulator.rs index f587e1d..b217ea7 100644 --- a/crates/api/src/builder/simulator/multi_simulator.rs +++ b/crates/api/src/builder/simulator/multi_simulator.rs @@ -5,7 +5,6 @@ use std::sync::{ Arc, }; use tokio::sync::mpsc::Sender; -use uuid::Uuid; use crate::builder::DbInfo; @@ -35,7 +34,6 @@ impl BlockSimulator for MultiSimulator { builder_info: &BuilderInfo, 
is_top_bid: bool, sim_result_saver_sender: Sender, - request_id: Uuid, ) -> Result { // Load balancing: round-robin selection let index = self @@ -48,8 +46,6 @@ impl BlockSimulator for MultiSimulator { let simulator = &self.simulators[index]; // Process the request with the selected simulator - simulator - .process_request(request, builder_info, is_top_bid, sim_result_saver_sender, request_id) - .await + simulator.process_request(request, builder_info, is_top_bid, sim_result_saver_sender).await } } diff --git a/crates/api/src/builder/simulator/optimistic_simulator.rs b/crates/api/src/builder/simulator/optimistic_simulator.rs index 2212303..27088f8 100644 --- a/crates/api/src/builder/simulator/optimistic_simulator.rs +++ b/crates/api/src/builder/simulator/optimistic_simulator.rs @@ -4,8 +4,7 @@ use async_trait::async_trait; use ethereum_consensus::primitives::{BlsPublicKey, Hash32}; use reqwest::Client; use tokio::sync::{mpsc::Sender, RwLock}; -use tracing::{debug, error, warn}; -use uuid::Uuid; +use tracing::{debug, error, warn, Instrument}; use helix_common::{metrics::SimulatorMetrics, simulator::BlockSimError, BuilderInfo}; use helix_database::DatabaseService; @@ -58,22 +57,14 @@ impl OptimisticSimulator is_top_bid: bool, sim_result_saver_sender: Sender, builder_info: BuilderInfo, - request_id: Uuid, ) -> Result<(), BlockSimError> { if let Err(err) = self .simulator - .process_request( - request.clone(), - &builder_info, - is_top_bid, - sim_result_saver_sender, - request_id, - ) + .process_request(request.clone(), &builder_info, is_top_bid, sim_result_saver_sender) .await { if builder_info.is_optimistic { warn!( - request_id=%request_id, builder=%request.message.builder_public_key, block_hash=%request.execution_payload.block_hash(), err=%err, @@ -156,37 +147,37 @@ impl BlockSimulator for OptimisticSimulator< builder_info: &BuilderInfo, is_top_bid: bool, sim_result_saver_sender: Sender, - request_id: Uuid, ) -> Result { if 
self.should_process_optimistically(&request, builder_info).await { SimulatorMetrics::sim_count(true); debug!( - request_id=%request_id, block_hash=%request.execution_payload.block_hash(), "optimistically processing request" ); let cloned_self = self.clone_for_async(); let builder_info = builder_info.clone(); - tokio::spawn(async move { - cloned_self - .handle_simulation( - request, - is_top_bid, - sim_result_saver_sender, - builder_info, - request_id, - ) - .await - }); + tokio::spawn( + async move { + cloned_self + .handle_simulation( + request, + is_top_bid, + sim_result_saver_sender, + builder_info, + ) + .await + } + .in_current_span(), + ); Ok(true) } else { SimulatorMetrics::sim_count(false); debug!( - request_id=%request_id, + block_hash=?request.execution_payload.block_hash(), block_parent_hash=?request.execution_payload.parent_hash(), block_number=%request.execution_payload.block_number(), @@ -198,7 +189,6 @@ impl BlockSimulator for OptimisticSimulator< is_top_bid, sim_result_saver_sender, builder_info.clone(), - request_id, ) .await .map(|_| false) diff --git a/crates/api/src/builder/simulator/optimistic_simulator_tests.rs b/crates/api/src/builder/simulator/optimistic_simulator_tests.rs index 7b8c736..fae91e2 100644 --- a/crates/api/src/builder/simulator/optimistic_simulator_tests.rs +++ b/crates/api/src/builder/simulator/optimistic_simulator_tests.rs @@ -24,7 +24,6 @@ mod simulator_tests { use reth_primitives::hex; use serde_json::json; use std::sync::{atomic::AtomicBool, Arc}; - use uuid::Uuid; // ++++ HELPERS ++++ fn get_optimistic_simulator( @@ -109,9 +108,8 @@ mod simulator_tests { builder_demoted.clone(), ); - let result = simulator - .process_request(get_sim_req(), &builder_info, true, sim_res_sender, Uuid::new_v4()) - .await; + let result = + simulator.process_request(get_sim_req(), &builder_info, true, sim_res_sender).await; // give the simulator time to process the request tokio::time::sleep(tokio::time::Duration::from_millis(100)).await; @@ 
-140,9 +138,8 @@ mod simulator_tests { builder_demoted.clone(), ); - let result = simulator - .process_request(get_sim_req(), &builder_info, true, sim_res_sender, Uuid::new_v4()) - .await; + let result = + simulator.process_request(get_sim_req(), &builder_info, true, sim_res_sender).await; // give the simulator time to process the request tokio::time::sleep(tokio::time::Duration::from_millis(100)).await; @@ -171,9 +168,8 @@ mod simulator_tests { builder_demoted.clone(), ); - let result = simulator - .process_request(get_sim_req(), &builder_info, true, sim_res_sender, Uuid::new_v4()) - .await; + let result = + simulator.process_request(get_sim_req(), &builder_info, true, sim_res_sender).await; // give the simulator time to process the request tokio::time::sleep(tokio::time::Duration::from_millis(100)).await; @@ -202,9 +198,8 @@ mod simulator_tests { builder_demoted.clone(), ); - let result = simulator - .process_request(get_sim_req(), &builder_info, true, sim_res_sender, Uuid::new_v4()) - .await; + let result = + simulator.process_request(get_sim_req(), &builder_info, true, sim_res_sender).await; // give the simulator time to process the request tokio::time::sleep(tokio::time::Duration::from_millis(100)).await; diff --git a/crates/api/src/builder/simulator/rpc_simulator.rs b/crates/api/src/builder/simulator/rpc_simulator.rs index 4fb27c4..2bf1425 100644 --- a/crates/api/src/builder/simulator/rpc_simulator.rs +++ b/crates/api/src/builder/simulator/rpc_simulator.rs @@ -9,7 +9,6 @@ use tokio::sync::mpsc::Sender; use tracing::{debug, error}; use helix_common::simulator::BlockSimError; -use uuid::Uuid; use crate::builder::{traits::BlockSimulator, BlockSimRequest, DbInfo}; @@ -94,14 +93,12 @@ impl BlockSimulator for RpcSimulator { _builder_info: &BuilderInfo, is_top_bid: bool, sim_result_saver_sender: Sender, - request_id: Uuid, ) -> Result { let timer = SimulatorMetrics::timer(); let block_hash = request.execution_payload.block_hash().clone(); debug!( - request_id = 
%request_id, - block_hash = %block_hash, + %block_hash, builder_pub_key = %request.message.builder_public_key, "RpcSimulator::process_request", ); @@ -124,7 +121,7 @@ impl BlockSimulator for RpcSimulator { } Err(err) => { timer.stop_and_discard(); - error!(request_id = %request_id, err = ?err, "Error sending RPC request"); + error!(?err, "Error sending RPC request"); SimulatorMetrics::sim_status(false); Err(BlockSimError::RpcError(err.to_string())) } diff --git a/crates/api/src/builder/simulator/simulator_tests.rs b/crates/api/src/builder/simulator/simulator_tests.rs index 653a0cb..0ef3ad7 100644 --- a/crates/api/src/builder/simulator/simulator_tests.rs +++ b/crates/api/src/builder/simulator/simulator_tests.rs @@ -16,7 +16,6 @@ use reqwest::Client; use reth_primitives::hex; use serde_json::json; use std::sync::Arc; -use uuid::Uuid; // ++++ HELPERS ++++ fn get_simulator(endpoint: &str) -> RpcSimulator { @@ -65,9 +64,8 @@ async fn test_process_request_ok() { let (sim_res_sender, mut sim_res_receiver) = tokio::sync::mpsc::channel(100); let simulator = get_simulator(&server.url()); let builder_info = BuilderInfo::default(); - let result = simulator - .process_request(get_sim_req(), &builder_info, true, sim_res_sender, Uuid::new_v4()) - .await; + let result = + simulator.process_request(get_sim_req(), &builder_info, true, sim_res_sender).await; mock.assert(); assert!(result.is_ok()); @@ -98,9 +96,8 @@ async fn test_process_request_error() { let (sim_res_sender, _sim_res_receiver) = tokio::sync::mpsc::channel(100); let simulator = get_simulator(&server.url()); let builder_info = BuilderInfo::default(); - let result = simulator - .process_request(get_sim_req(), &builder_info, true, sim_res_sender, Uuid::new_v4()) - .await; + let result = + simulator.process_request(get_sim_req(), &builder_info, true, sim_res_sender).await; mock.assert(); assert!(result.is_err()); @@ -130,9 +127,8 @@ async fn test_process_request_validation_failed() { let (sim_res_sender, 
_sim_res_receiver) = tokio::sync::mpsc::channel(100); let simulator = get_simulator(&server.url()); let builder_info = BuilderInfo::default(); - let result = simulator - .process_request(get_sim_req(), &builder_info, true, sim_res_sender, Uuid::new_v4()) - .await; + let result = + simulator.process_request(get_sim_req(), &builder_info, true, sim_res_sender).await; mock.assert(); assert!(result.is_err()); diff --git a/crates/api/src/builder/simulator/traits.rs b/crates/api/src/builder/simulator/traits.rs index a27b664..75a3d89 100644 --- a/crates/api/src/builder/simulator/traits.rs +++ b/crates/api/src/builder/simulator/traits.rs @@ -1,7 +1,6 @@ use async_trait::async_trait; use helix_common::{simulator::BlockSimError, BuilderInfo}; use tokio::sync::mpsc::Sender; -use uuid::Uuid; use crate::builder::{BlockSimRequest, DbInfo}; @@ -14,6 +13,5 @@ pub trait BlockSimulator: Send + Sync + Clone { builder_info: &BuilderInfo, is_top_bid: bool, sim_result_saver_sender: Sender, - request_id: Uuid, ) -> Result; } diff --git a/crates/api/src/builder/tests.rs b/crates/api/src/builder/tests.rs index d9f1d4e..8edae8f 100644 --- a/crates/api/src/builder/tests.rs +++ b/crates/api/src/builder/tests.rs @@ -465,10 +465,9 @@ async fn test_header_submission_decoding_json_capella() { load_bytes(current_dir.to_str().expect("Failed to convert path to string")); let mut header_submission_trace = HeaderSubmissionTrace::default(); - let uuid = uuid::Uuid::new_v4(); let request = generate_request(false, false, false, &req_payload_bytes); let decoded_submission = - decode_header_submission(request, &mut header_submission_trace, &uuid).await.unwrap(); + decode_header_submission(request, &mut header_submission_trace).await.unwrap(); assert_eq!(decoded_submission.0.slot(), 5552306); assert!(matches!( @@ -505,10 +504,10 @@ async fn test_header_submission_decoding_ssz_capella() { load_bytes(current_dir.to_str().expect("Failed to convert path to string")); let mut header_submission_trace = 
HeaderSubmissionTrace::default(); - let uuid = uuid::Uuid::new_v4(); + let request = generate_request(false, false, true, &req_payload_bytes); let decoded_submission = - decode_header_submission(request, &mut header_submission_trace, &uuid).await.unwrap(); + decode_header_submission(request, &mut header_submission_trace).await.unwrap(); assert!(matches!(decoded_submission.0, SignedHeaderSubmission::Capella(_))); assert!(decoded_submission.0.commitments().is_none()); @@ -529,10 +528,10 @@ async fn test_header_submission_decoding_ssz_deneb() { load_bytes(current_dir.to_str().expect("Failed to convert path to string")); let mut header_submission_trace = HeaderSubmissionTrace::default(); - let uuid = uuid::Uuid::new_v4(); + let request = generate_request(false, false, true, &req_payload_bytes); let decoded_submission = - decode_header_submission(request, &mut header_submission_trace, &uuid).await.unwrap(); + decode_header_submission(request, &mut header_submission_trace).await.unwrap(); assert!(matches!(decoded_submission.0, SignedHeaderSubmission::Deneb(_))); assert!(decoded_submission.0.commitments().is_some()); @@ -553,9 +552,8 @@ async fn test_signed_bid_submission_decoding_capella() { load_bytes(current_dir.to_str().expect("Failed to convert path to string")); let mut submission_trace = SubmissionTrace::default(); - let uuid = uuid::Uuid::new_v4(); let request = generate_request(false, false, false, &req_payload_bytes); - let decoded_submission = decode_payload(request, &mut submission_trace, &uuid).await.unwrap(); + let decoded_submission = decode_payload(request, &mut submission_trace).await.unwrap(); assert_eq!(decoded_submission.0.message().slot, 5552306); assert!(matches!(decoded_submission.0.execution_payload(), ExecutionPayload::Capella(_))); @@ -574,9 +572,8 @@ async fn test_signed_bid_submission_decoding_capella_gzip() { load_bytes(current_dir.to_str().expect("Failed to convert path to string")); let mut submission_trace = SubmissionTrace::default(); - 
let uuid = uuid::Uuid::new_v4(); let request = generate_request(false, true, false, &req_payload_bytes); - let decoded_submission = decode_payload(request, &mut submission_trace, &uuid).await.unwrap(); + let decoded_submission = decode_payload(request, &mut submission_trace).await.unwrap(); assert_eq!(decoded_submission.0.message().slot, 5552306); assert!(matches!(decoded_submission.0.execution_payload(), ExecutionPayload::Capella(_))); @@ -595,10 +592,8 @@ async fn test_signed_bid_submission_decoding_deneb() { load_bytes(current_dir.to_str().expect("Failed to convert path to string")); let mut submission_trace = SubmissionTrace::default(); - let uuid = uuid::Uuid::new_v4(); let request = generate_request(false, false, false, &req_payload_bytes); - let (decoded_submission, _) = - decode_payload(request, &mut submission_trace, &uuid).await.unwrap(); + let (decoded_submission, _) = decode_payload(request, &mut submission_trace).await.unwrap(); assert_eq!(decoded_submission.message().slot, 5552306); assert!(matches!(decoded_submission.execution_payload(), ExecutionPayload::Deneb(_))); From 4bcbbb22ab0272344c65a419f39513faa8b50fcc Mon Sep 17 00:00:00 2001 From: ltitanb Date: Fri, 6 Dec 2024 16:33:05 +0000 Subject: [PATCH 29/39] clippy --- crates/api/src/builder/api.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/api/src/builder/api.rs b/crates/api/src/builder/api.rs index 2558a52..5f28785 100644 --- a/crates/api/src/builder/api.rs +++ b/crates/api/src/builder/api.rs @@ -951,7 +951,7 @@ where Json(mut signed_cancellation): Json, ) -> Result { let request_id = extract_request_id(&headers); - tracing::Span::current().record("id", &request_id.to_string()); + tracing::Span::current().record("id", request_id.to_string()); let (head_slot, _next_duty) = api.curr_slot_info.read().await.clone(); From 7765caa0dd3ff86ffc9e43ebe450ba5288de5123 Mon Sep 17 00:00:00 2001 From: ltitanb Date: Fri, 6 Dec 2024 18:46:32 +0000 Subject: [PATCH 30/39] utc functions 
--- Cargo.lock | 1 + crates/api/src/builder/api.rs | 79 +++++++------------ crates/api/src/constraints/api.rs | 43 ++++------ crates/api/src/proposer/api.rs | 66 ++++++---------- crates/api/src/proposer/tests.rs | 13 ++- crates/common/src/validator.rs | 10 +-- crates/database/Cargo.toml | 1 + .../src/postgres/postgres_db_service.rs | 3 +- .../src/postgres/postgres_db_service_tests.rs | 11 +-- crates/housekeeper/src/chain_event_updater.rs | 21 ++--- crates/housekeeper/src/housekeeper.rs | 10 +-- crates/utils/src/lib.rs | 20 +++++ 12 files changed, 112 insertions(+), 166 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index f7c50a5..fe7c62c 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2830,6 +2830,7 @@ dependencies = [ "env_logger", "ethereum-consensus", "helix-common", + "helix-utils", "hex", "rand", "refinery", diff --git a/crates/api/src/builder/api.rs b/crates/api/src/builder/api.rs index 5f28785..d68ddfd 100644 --- a/crates/api/src/builder/api.rs +++ b/crates/api/src/builder/api.rs @@ -1,9 +1,4 @@ -use std::{ - collections::HashMap, - io::Read, - sync::Arc, - time::{Duration, SystemTime, UNIX_EPOCH}, -}; +use std::{collections::HashMap, io::Read, sync::Arc, time::Duration}; use axum::{ body::{to_bytes, Body}, @@ -62,7 +57,7 @@ use helix_common::{ use helix_database::DatabaseService; use helix_datastore::{types::SaveBidAndUpdateTopBidResponse, Auctioneer}; use helix_housekeeper::{ChainUpdate, PayloadAttributesUpdate, SlotUpdate}; -use helix_utils::{extract_request_id, get_payload_attributes_key, has_reached_fork}; +use helix_utils::{extract_request_id, get_payload_attributes_key, has_reached_fork, utcnow_ns}; use serde::Deserialize; @@ -307,7 +302,7 @@ where headers: HeaderMap, req: Request, ) -> Result { - let mut trace = SubmissionTrace { receive: get_nanos_timestamp()?, ..Default::default() }; + let mut trace = SubmissionTrace { receive: utcnow_ns(), ..Default::default() }; let (head_slot, next_duty) = api.curr_slot_info.read().await.clone(); 
debug!(head_slot, timestamp_request_start = trace.receive); @@ -383,7 +378,7 @@ where is_cancellations_enabled, ) .await?; - trace.floor_bid_checks = get_nanos_timestamp()?; + trace.floor_bid_checks = utcnow_ns(); // Fetch builder info let builder_info = api.fetch_builder_info(payload.builder_public_key()).await; @@ -426,7 +421,7 @@ where warn!(%err, "failed sanity check"); return Err(err) } - trace.pre_checks = get_nanos_timestamp()?; + trace.pre_checks = utcnow_ns(); let (payload, was_simulated_optimistically) = api .verify_submitted_block( @@ -494,7 +489,7 @@ where } // Log some final info - trace.request_finish = get_nanos_timestamp()?; + trace.request_finish = utcnow_ns(); debug!( ?trace, request_duration_ns = trace.request_finish.saturating_sub(trace.receive), @@ -532,8 +527,7 @@ where headers: HeaderMap, req: Request, ) -> Result { - let mut trace = - HeaderSubmissionTrace { receive: get_nanos_timestamp()?, ..Default::default() }; + let mut trace = HeaderSubmissionTrace { receive: utcnow_ns(), ..Default::default() }; let (head_slot, next_duty) = api.curr_slot_info.read().await.clone(); debug!(head_slot, timestamp_request_start = trace.receive,); @@ -635,14 +629,14 @@ where }) } - trace.pre_checks = get_nanos_timestamp()?; + trace.pre_checks = utcnow_ns(); // Verify the payload signature if let Err(err) = payload.verify_signature(&api.chain_info.context) { warn!(%err, "failed to verify signature"); return Err(BuilderApiError::SignatureVerificationFailed) } - trace.signature = get_nanos_timestamp()?; + trace.signature = utcnow_ns(); // Verify payload has not already been delivered match api.auctioneer.get_last_slot_delivered().await { @@ -671,7 +665,7 @@ where is_cancellations_enabled, ) .await?; - trace.floor_bid_checks = get_nanos_timestamp()?; + trace.floor_bid_checks = utcnow_ns(); // Save bid to auctioneer match api @@ -696,7 +690,7 @@ where } // Log some final info - trace.request_finish = get_nanos_timestamp()?; + trace.request_finish = utcnow_ns(); 
info!( ?trace, request_duration_ns = trace.request_finish.saturating_sub(trace.receive), @@ -751,8 +745,7 @@ where headers: HeaderMap, req: Request, ) -> Result { - let now = SystemTime::now(); - let mut trace = SubmissionTrace { receive: get_nanos_from(now)?, ..Default::default() }; + let mut trace = SubmissionTrace { receive: utcnow_ns(), ..Default::default() }; let (head_slot, next_duty) = api.curr_slot_info.read().await.clone(); debug!(head_slot, timestamp_request_start = trace.receive); @@ -877,7 +870,7 @@ where warn!(%err, "failed sanity check"); return Err(err) } - trace.pre_checks = get_nanos_timestamp()?; + trace.pre_checks = utcnow_ns(); let (payload, _) = match api .verify_submitted_block( @@ -911,13 +904,13 @@ where error!(%err, "failed to save execution payload"); return Err(BuilderApiError::AuctioneerError(err)) } - trace.auctioneer_update = get_nanos_timestamp()?; + trace.auctioneer_update = utcnow_ns(); // Gossip to other relays api.gossip_payload(&payload, payload.payload_and_blobs()).await; // Log some final info - trace.request_finish = get_nanos_timestamp()?; + trace.request_finish = utcnow_ns(); debug!( ?trace, request_duration_ns = trace.request_finish.saturating_sub(trace.receive), @@ -1040,7 +1033,7 @@ where let mut trace = GossipedHeaderTrace { on_receive: req.on_receive, - on_gossip_receive: get_nanos_timestamp().unwrap_or_default(), + on_gossip_receive: utcnow_ns(), ..Default::default() }; @@ -1098,7 +1091,7 @@ where } }; - trace.pre_checks = get_nanos_timestamp().unwrap_or_default(); + trace.pre_checks = utcnow_ns(); // Save header to auctioneer let mut update_bid_result = SaveBidAndUpdateTopBidResponse::default(); @@ -1118,7 +1111,7 @@ where return } - trace.auctioneer_update = get_nanos_timestamp().unwrap_or_default(); + trace.auctioneer_update = utcnow_ns(); debug!("succesfully saved gossiped header"); @@ -1138,10 +1131,7 @@ where pub async fn process_gossiped_payload(&self, req: BroadcastPayloadParams) { debug!(block_hash = 
?req.execution_payload.execution_payload.block_hash(), "received gossiped payload"); - let mut trace = GossipedPayloadTrace { - receive: get_nanos_timestamp().unwrap_or_default(), - ..Default::default() - }; + let mut trace = GossipedPayloadTrace { receive: utcnow_ns(), ..Default::default() }; // Save gossiped payload to auctioneer in case it was sent to diffent region than the header if let Err(err) = self @@ -1178,7 +1168,7 @@ where } } - trace.pre_checks = get_nanos_timestamp().unwrap_or_default(); + trace.pre_checks = utcnow_ns(); // Save payload to auctioneer if let Err(err) = self @@ -1195,7 +1185,7 @@ where return } - trace.auctioneer_update = get_nanos_timestamp().unwrap_or_default(); + trace.auctioneer_update = utcnow_ns(); debug!("succesfully saved gossiped payload"); @@ -1373,7 +1363,7 @@ where warn!(%err, "failed to verify signature"); return Err(BuilderApiError::SignatureVerificationFailed) } - trace.signature = get_nanos_timestamp()?; + trace.signature = utcnow_ns(); // Simulate the submission let payload = Arc::new(payload); @@ -1564,7 +1554,7 @@ where } } - debug!(timestamp_before_validation = get_nanos_timestamp()?); + debug!(timestamp_before_validation = utcnow_ns()); let sim_request = BlockSimRequest::new( registration_info.registration.message.gas_limit, @@ -1581,7 +1571,7 @@ where Ok(sim_optimistic) => { debug!("block simulation successful"); - trace.simulation = get_nanos_timestamp()?; + trace.simulation = utcnow_ns(); debug!(sim_latency = trace.simulation.saturating_sub(trace.signature)); Ok(sim_optimistic) @@ -1622,7 +1612,7 @@ where { Ok(Some((builder_bid, execution_payload))) => { // Log the results of the bid submission - trace.auctioneer_update = get_nanos_timestamp()?; + trace.auctioneer_update = utcnow_ns(); log_save_bid_info(&update_bid_result, trace.simulation, trace.auctioneer_update); Ok(Some((builder_bid, execution_payload))) @@ -1676,7 +1666,7 @@ where { Ok(Some(builder_bid)) => { // Log the results of the bid submission - 
trace.auctioneer_update = get_nanos_timestamp()?; + trace.auctioneer_update = utcnow_ns(); log_save_bid_info( &update_bid_result, trace.floor_bid_checks, @@ -2029,7 +2019,7 @@ pub async fn decode_payload( serde_json::from_slice(&body_bytes)? }; - trace.decode = get_nanos_timestamp()?; + trace.decode = utcnow_ns(); debug!( timestamp_after_decoding = trace.decode, decode_latency_ns = trace.decode.saturating_sub(trace.receive), @@ -2179,7 +2169,7 @@ pub async fn decode_header_submission( serde_json::from_slice(&body_bytes)? }; - trace.decode = get_nanos_timestamp()?; + trace.decode = utcnow_ns(); debug!( timestamp_after_decoding = trace.decode, decode_latency_ns = trace.decode.saturating_sub(trace.receive), @@ -2375,19 +2365,6 @@ async fn process_db_additions( } } -fn get_nanos_timestamp() -> Result { - SystemTime::now() - .duration_since(UNIX_EPOCH) - .map(|d| d.as_nanos() as u64) - .map_err(|_| BuilderApiError::InternalError) -} - -fn get_nanos_from(now: SystemTime) -> Result { - now.duration_since(UNIX_EPOCH) - .map(|d| d.as_nanos() as u64) - .map_err(|_| BuilderApiError::InternalError) -} - #[cfg(test)] mod tests { use super::*; diff --git a/crates/api/src/constraints/api.rs b/crates/api/src/constraints/api.rs index 16426aa..ecf516a 100644 --- a/crates/api/src/constraints/api.rs +++ b/crates/api/src/constraints/api.rs @@ -16,12 +16,11 @@ use helix_common::{ }; use helix_datastore::Auctioneer; use helix_housekeeper::{ChainUpdate, SlotUpdate}; -use helix_utils::signing::{verify_signed_message, COMMIT_BOOST_DOMAIN}; -use std::{ - collections::HashSet, - sync::Arc, - time::{SystemTime, UNIX_EPOCH}, +use helix_utils::{ + signing::{verify_signed_message, COMMIT_BOOST_DOMAIN}, + utcnow_ns, }; +use std::{self, collections::HashSet, sync::Arc}; use tokio::{ sync::{ broadcast, @@ -109,8 +108,7 @@ where req: Request, ) -> Result { let request_id = Uuid::new_v4(); - let mut trace = - ConstraintSubmissionTrace { receive: get_nanos_timestamp()?, ..Default::default() }; + let 
mut trace = ConstraintSubmissionTrace { receive: utcnow_ns(), ..Default::default() }; // Decode the incoming request body into a payload. let signed_constraints = @@ -214,7 +212,7 @@ where } // Log some final info - trace.request_finish = get_nanos_timestamp()?; + trace.request_finish = utcnow_ns(); trace!( request_id = %request_id, trace = ?trace, @@ -233,8 +231,7 @@ where req: Request, ) -> Result { let request_id = Uuid::new_v4(); - let mut trace = - ConstraintSubmissionTrace { receive: get_nanos_timestamp()?, ..Default::default() }; + let mut trace = ConstraintSubmissionTrace { receive: utcnow_ns(), ..Default::default() }; info!( request_id = %request_id, @@ -264,7 +261,7 @@ where return Err(ConstraintsApiError::InvalidDelegation) } }; - trace.decode = get_nanos_timestamp()?; + trace.decode = utcnow_ns(); for delegation in &signed_delegations { if let Err(e) = verify_signed_message( @@ -278,7 +275,7 @@ where return Err(ConstraintsApiError::InvalidSignature) }; } - trace.verify_signature = get_nanos_timestamp()?; + trace.verify_signature = utcnow_ns(); // Store the delegation in the database tokio::spawn(async move { @@ -288,7 +285,7 @@ where }); // Log some final info - trace.request_finish = get_nanos_timestamp()?; + trace.request_finish = utcnow_ns(); trace!( request_id = %request_id, trace = ?trace, @@ -307,8 +304,7 @@ where req: Request, ) -> Result { let request_id = Uuid::new_v4(); - let mut trace = - ConstraintSubmissionTrace { receive: get_nanos_timestamp()?, ..Default::default() }; + let mut trace = ConstraintSubmissionTrace { receive: utcnow_ns(), ..Default::default() }; info!( request_id = %request_id, @@ -338,7 +334,7 @@ where return Err(ConstraintsApiError::InvalidRevocation) } }; - trace.decode = get_nanos_timestamp()?; + trace.decode = utcnow_ns(); for revocation in &signed_revocations { if let Err(e) = verify_signed_message( @@ -352,7 +348,7 @@ where return Err(ConstraintsApiError::InvalidSignature) }; } - trace.verify_signature = 
get_nanos_timestamp()?; + trace.verify_signature = utcnow_ns(); // Store the delegation in the database tokio::spawn(async move { @@ -363,7 +359,7 @@ where }); // Log some final info - trace.request_finish = get_nanos_timestamp()?; + trace.request_finish = utcnow_ns(); info!( request_id = %request_id, trace = ?trace, @@ -395,7 +391,7 @@ where .await { Ok(()) => { - trace.auctioneer_update = get_nanos_timestamp()?; + trace.auctioneer_update = utcnow_ns(); info!( request_id = %request_id, timestamp_after_auctioneer = Instant::now().elapsed().as_nanos(), @@ -517,7 +513,7 @@ pub async fn decode_constraints_submission( serde_json::from_slice(&body_bytes)? }; - trace.decode = get_nanos_timestamp()?; + trace.decode = utcnow_ns(); info!( request_id = %request_id, timestamp_after_decoding = Instant::now().elapsed().as_nanos(), @@ -527,10 +523,3 @@ pub async fn decode_constraints_submission( Ok(constraints.to_vec()) } - -fn get_nanos_timestamp() -> Result { - SystemTime::now() - .duration_since(UNIX_EPOCH) - .map(|d| d.as_nanos() as u64) - .map_err(|_| ConstraintsApiError::InternalError) -} diff --git a/crates/api/src/proposer/api.rs b/crates/api/src/proposer/api.rs index 43c5893..fc191ad 100644 --- a/crates/api/src/proposer/api.rs +++ b/crates/api/src/proposer/api.rs @@ -1,7 +1,4 @@ -use std::{ - sync::Arc, - time::{Duration, SystemTime, UNIX_EPOCH}, -}; +use std::{sync::Arc, time::Duration}; use axum::{ body::{to_bytes, Body}, @@ -51,6 +48,7 @@ use helix_housekeeper::{ChainUpdate, SlotUpdate}; use helix_utils::{ extract_request_id, signing::{verify_signed_builder_message, verify_signed_consensus_message}, + utcnow_ms, utcnow_ns, }; use crate::{ @@ -172,8 +170,7 @@ where let request_id = extract_request_id(&headers); - let mut trace = - RegisterValidatorsTrace { receive: get_nanos_timestamp()?, ..Default::default() }; + let mut trace = RegisterValidatorsTrace { receive: utcnow_ns(), ..Default::default() }; // Get optional api key from headers let api_key = 
headers.get("x-api-key").and_then(|key| key.to_str().ok()); @@ -355,7 +352,7 @@ where } }); - trace.registrations_complete = get_nanos_timestamp()?; + trace.registrations_complete = utcnow_ns(); info!( request_id = %request_id, @@ -388,7 +385,7 @@ where let request_id = extract_request_id(&headers); - let mut trace = GetHeaderTrace { receive: get_nanos_timestamp()?, ..Default::default() }; + let mut trace = GetHeaderTrace { receive: utcnow_ns(), ..Default::default() }; let (head_slot, duty) = proposer_api.curr_slot_info.read().await.clone(); debug!( @@ -426,7 +423,7 @@ where return Err(err) } }; - trace.validation_complete = get_nanos_timestamp()?; + trace.validation_complete = utcnow_ns(); let user_agent = headers.get("user-agent").and_then(|v| v.to_str().ok()).map(|v| v.to_string()); @@ -436,7 +433,7 @@ where .auctioneer .get_best_bid(bid_request.slot, &bid_request.parent_hash, &bid_request.public_key) .await; - trace.best_bid_fetched = get_nanos_timestamp()?; + trace.best_bid_fetched = utcnow_ns(); info!(request_id = %request_id, trace = ?trace, "best bid fetched"); match get_best_bid_res { @@ -498,7 +495,7 @@ where Path(GetHeaderParams { slot, parent_hash, public_key }): Path, ) -> Result { let request_id = Uuid::new_v4(); - let mut trace = GetHeaderTrace { receive: get_nanos_timestamp()?, ..Default::default() }; + let mut trace = GetHeaderTrace { receive: utcnow_ns(), ..Default::default() }; let (head_slot, _) = *proposer_api.curr_slot_info.read().await; debug!( @@ -526,14 +523,14 @@ where warn!(request_id = %request_id, err = %err, "invalid bid request time"); return Err(err) } - trace.validation_complete = get_nanos_timestamp()?; + trace.validation_complete = utcnow_ns(); // Get best bid from auctioneer let get_best_bid_res = proposer_api .auctioneer .get_best_bid(bid_request.slot, &bid_request.parent_hash, &bid_request.public_key) .await; - trace.best_bid_fetched = get_nanos_timestamp()?; + trace.best_bid_fetched = utcnow_ns(); info!(request_id = 
%request_id, trace = ?trace, "best bid fetched"); let user_agent = @@ -650,7 +647,7 @@ where ) -> Result { let request_id = extract_request_id(&headers); - let mut trace = GetPayloadTrace { receive: get_nanos_timestamp()?, ..Default::default() }; + let mut trace = GetPayloadTrace { receive: utcnow_ns(), ..Default::default() }; let user_agent = headers.get("user-agent").and_then(|v| v.to_str().ok()).map(|v| v.to_string()); @@ -747,7 +744,7 @@ where warn!(request_id = %request_id, error = %err, "invalid proposal coordinate"); return Err(err) } - trace.proposer_index_validated = get_nanos_timestamp()?; + trace.proposer_index_validated = utcnow_ns(); let proposer_public_key = slot_duty.entry.registration.message.public_key; if let Err(err) = self.verify_signed_blinded_block_signature( @@ -759,7 +756,7 @@ where warn!(request_id = %request_id, error = %err, "invalid signature"); return Err(ProposerApiError::InvalidSignature(err)) } - trace.signature_validated = get_nanos_timestamp()?; + trace.signature_validated = utcnow_ns(); // Get execution payload from auctioneer let payload_result = self @@ -786,7 +783,7 @@ where } }; info!(request_id = %request_id, "found payload for blinded signed block"); - trace.payload_fetched = get_nanos_timestamp()?; + trace.payload_fetched = utcnow_ns(); // Check if get_payload has already been called if let Err(err) = self @@ -849,7 +846,7 @@ where return Err(err) } - trace.validation_complete = get_nanos_timestamp()?; + trace.validation_complete = utcnow_ns(); let unblinded_payload = match unblind_beacon_block(&signed_blinded_block, &versioned_payload) { @@ -899,7 +896,7 @@ where error!(request_id = %request_id_clone, error = %err, "error publishing block"); }; - trace_clone.beacon_client_broadcast = get_nanos_timestamp().unwrap_or_default(); + trace_clone.beacon_client_broadcast = utcnow_ns(); // Broadcast payload to all broadcasters self_clone.broadcast_signed_block( @@ -907,10 +904,10 @@ where Some(BroadcastValidation::Gossip), 
&request_id_clone, ); - trace_clone.broadcaster_block_broadcast = get_nanos_timestamp().unwrap_or_default(); + trace_clone.broadcaster_block_broadcast = utcnow_ns(); // While we wait for the block to propagate, we also store the payload information - trace_clone.on_deliver_payload = get_nanos_timestamp().unwrap_or_default(); + trace_clone.on_deliver_payload = utcnow_ns(); self_clone .save_delivered_payload_info( payload_clone, @@ -940,7 +937,7 @@ where // `TARGET_GET_PAYLOAD_PROPAGATION_DURATION_MS` to allow the block to // propagate through the network. let elapsed_since_propagate_start_ms = - (get_nanos_timestamp()?.saturating_sub(trace.beacon_client_broadcast)) / 1_000_000; + (utcnow_ns().saturating_sub(trace.beacon_client_broadcast)) / 1_000_000; let remaining_sleep_ms = self .relay_config .target_get_payload_propagation_duration_ms @@ -963,7 +960,7 @@ where }; // Return response - info!(request_id = %request_id, trace = ?trace, timestamp = get_nanos_timestamp()?, "delivering payload"); + info!(request_id = %request_id, trace = ?trace, timestamp = utcnow_ns(), "delivering payload"); Ok(get_payload_response) } } @@ -1032,7 +1029,7 @@ where /// /// Returns how many ms we are into the slot if ok. fn validate_bid_request_time(&self, bid_request: &BidRequest) -> Result { - let curr_timestamp_ms = get_millis_timestamp()? 
as i64; + let curr_timestamp_ms = utcnow_ms() as i64; let slot_start_timestamp = self.chain_info.genesis_time_in_secs + (bid_request.slot * self.chain_info.seconds_per_slot); let ms_into_slot = curr_timestamp_ms.saturating_sub((slot_start_timestamp * 1000) as i64); @@ -1276,10 +1273,7 @@ where if let GossipedMessage::GetPayload(payload) = msg { let api_clone = self.clone(); tokio::spawn(async move { - let mut trace = GetPayloadTrace { - receive: get_nanos_timestamp().unwrap_or_default(), - ..Default::default() - }; + let mut trace = GetPayloadTrace { receive: utcnow_ns(), ..Default::default() }; debug!(request_id = %payload.request_id, "processing gossiped payload"); match api_clone ._get_payload( @@ -1320,7 +1314,7 @@ where let mut last_error: Option = None; let mut first_try = true; // Try at least once to cover case where get_payload is called too late. - while first_try || get_millis_timestamp()? < slot_cutoff_millis { + while first_try || utcnow_ms() < slot_cutoff_millis { match self.auctioneer.get_execution_payload(slot, pub_key, block_hash).await { Ok(Some(versioned_payload)) => return Ok(versioned_payload), Ok(None) => { @@ -1511,20 +1505,6 @@ fn calculate_slot_time_info( (ms_into_slot, duration_until_slot_start) } -pub fn get_nanos_timestamp() -> Result { - SystemTime::now() - .duration_since(UNIX_EPOCH) - .map(|d| d.as_nanos() as u64) - .map_err(|_| ProposerApiError::InternalServerError) -} - -fn get_millis_timestamp() -> Result { - SystemTime::now() - .duration_since(UNIX_EPOCH) - .map(|d| d.as_millis() as u64) - .map_err(|_| ProposerApiError::InternalServerError) -} - fn get_consensus_version(block: &SignedBeaconBlock) -> ethereum_consensus::Fork { match block { SignedBeaconBlock::Phase0(_) => ethereum_consensus::Fork::Phase0, diff --git a/crates/api/src/proposer/tests.rs b/crates/api/src/proposer/tests.rs index 677d099..e401628 100644 --- a/crates/api/src/proposer/tests.rs +++ b/crates/api/src/proposer/tests.rs @@ -4,8 +4,8 @@ use ethereum_consensus::{ 
signing::compute_signing_root, }; use helix_common::chain_info::ChainInfo; +use helix_utils::utcnow_sec; use rand::thread_rng; -use std::time::{SystemTime, UNIX_EPOCH}; pub fn gen_signed_vr() -> SignedValidatorRegistration { let mut rng = thread_rng(); @@ -15,7 +15,7 @@ pub fn gen_signed_vr() -> SignedValidatorRegistration { let mut vr = ValidatorRegistration { fee_recipient: Default::default(), gas_limit: 0, - timestamp: SystemTime::now().duration_since(UNIX_EPOCH).unwrap().as_secs(), + timestamp: utcnow_sec(), public_key: pk, }; @@ -33,10 +33,7 @@ mod proposer_api_tests { // +++ IMPORTS +++ use crate::{ gossiper::{mock_gossiper::MockGossiper, types::GossipedMessage}, - proposer::{ - api::{get_nanos_timestamp, ProposerApi}, - PATH_GET_PAYLOAD, PATH_PROPOSER_API, - }, + proposer::{api::ProposerApi, PATH_GET_PAYLOAD, PATH_PROPOSER_API}, test_utils::proposer_api_app, }; @@ -69,7 +66,7 @@ mod proposer_api_tests { use helix_database::MockDatabaseService; use helix_datastore::MockAuctioneer; use helix_housekeeper::{ChainUpdate, PayloadAttributesUpdate, SlotUpdate}; - use helix_utils::signing::verify_signed_consensus_message; + use helix_utils::{signing::verify_signed_consensus_message, utcnow_ns}; use serial_test::serial; use std::{sync::Arc, time::Duration}; use tokio::{ @@ -237,7 +234,7 @@ mod proposer_api_tests { fn calculate_current_slot() -> u64 { let genesis_time_in_secs: u64 = ChainInfo::for_mainnet().genesis_time_in_secs; let seconds_per_slot: u64 = ChainInfo::for_mainnet().seconds_per_slot; - let request_time_in_ns = get_nanos_timestamp().unwrap(); + let request_time_in_ns = utcnow_ns(); let current_time_in_secs = request_time_in_ns / 1_000_000_000; let time_since_genesis = current_time_in_secs.saturating_sub(genesis_time_in_secs); diff --git a/crates/common/src/validator.rs b/crates/common/src/validator.rs index bb37d74..b48cc32 100644 --- a/crates/common/src/validator.rs +++ b/crates/common/src/validator.rs @@ -1,10 +1,9 @@ -use std::time::{SystemTime, 
UNIX_EPOCH}; - use ethereum_consensus::{ phase0::Validator, primitives::{BlsPublicKey, Gwei, ValidatorIndex}, serde::as_str, }; +use helix_utils::utcnow_ms; use serde::{Deserialize, Serialize}; use crate::api::proposer_api::ValidatorRegistrationInfo; @@ -51,12 +50,7 @@ impl SignedValidatorRegistrationEntry { pool_name: Option, user_agent: Option, ) -> Self { - Self { - registration_info, - inserted_at: SystemTime::now().duration_since(UNIX_EPOCH).unwrap().as_millis() as u64, - pool_name, - user_agent, - } + Self { registration_info, inserted_at: utcnow_ms(), pool_name, user_agent } } pub fn public_key(&self) -> &BlsPublicKey { diff --git a/crates/database/Cargo.toml b/crates/database/Cargo.toml index a7e20fa..86e7cd4 100644 --- a/crates/database/Cargo.toml +++ b/crates/database/Cargo.toml @@ -8,6 +8,7 @@ license.workspace = true [dependencies] helix-common.workspace = true +helix-utils.workspace = true # Async and Networking async-trait.workspace = true diff --git a/crates/database/src/postgres/postgres_db_service.rs b/crates/database/src/postgres/postgres_db_service.rs index a64dd63..b28debb 100644 --- a/crates/database/src/postgres/postgres_db_service.rs +++ b/crates/database/src/postgres/postgres_db_service.rs @@ -26,6 +26,7 @@ use helix_common::{ GossipedPayloadTrace, HeaderSubmissionTrace, ProposerInfo, RelayConfig, SignedValidatorRegistrationEntry, SubmissionTrace, ValidatorPreferences, ValidatorSummary, }; +use helix_utils::utcnow_ms; use tokio_postgres::{types::ToSql, NoTls}; use tracing::{error, info}; @@ -1330,7 +1331,7 @@ impl DatabaseService for PostgresDatabaseService { ) .await?; - let timestamp = SystemTime::now().duration_since(UNIX_EPOCH).unwrap().as_millis() as u64; + let timestamp = utcnow_ms(); transaction .execute( " diff --git a/crates/database/src/postgres/postgres_db_service_tests.rs b/crates/database/src/postgres/postgres_db_service_tests.rs index 57204bc..bd96d9b 100644 --- a/crates/database/src/postgres/postgres_db_service_tests.rs +++ 
b/crates/database/src/postgres/postgres_db_service_tests.rs @@ -18,14 +18,9 @@ mod tests { versioned_payload::PayloadAndBlobs, Filtering, GetPayloadTrace, HeaderSubmissionTrace, SubmissionTrace, ValidatorSummary, }; + use helix_utils::utcnow_sec; use rand::{seq::SliceRandom, thread_rng, Rng}; - use std::{ - default::Default, - ops::DerefMut, - str::FromStr, - sync::Arc, - time::{Duration, SystemTime, UNIX_EPOCH}, - }; + use std::{default::Default, ops::DerefMut, str::FromStr, sync::Arc, time::Duration}; use tokio::time::sleep; use deadpool_postgres::{Config, ManagerConfig, Pool, RecyclingMethod}; @@ -93,7 +88,7 @@ mod tests { fn get_randomized_signed_validator_registration() -> ValidatorRegistrationInfo { let mut rng = rand::thread_rng(); - let timestamp = SystemTime::now().duration_since(UNIX_EPOCH).unwrap().as_secs(); + let timestamp = utcnow_sec(); let gas_limit = 0; let key = SecretKey::random(&mut rng).unwrap(); let signature = key.sign("message".as_bytes()); diff --git a/crates/housekeeper/src/chain_event_updater.rs b/crates/housekeeper/src/chain_event_updater.rs index 8e1b29c..0321c46 100644 --- a/crates/housekeeper/src/chain_event_updater.rs +++ b/crates/housekeeper/src/chain_event_updater.rs @@ -1,8 +1,4 @@ -use std::{ - collections::HashMap, - sync::Arc, - time::{Duration, SystemTime, UNIX_EPOCH}, -}; +use std::{collections::HashMap, sync::Arc, time::Duration}; use ethereum_consensus::{ configs::goerli::CAPELLA_FORK_EPOCH, deneb::Withdrawal, primitives::Bytes32, @@ -20,7 +16,7 @@ use helix_common::{ chain_info::ChainInfo, }; use helix_database::DatabaseService; -use helix_utils::{get_payload_attributes_key, has_reached_fork}; +use helix_utils::{get_payload_attributes_key, has_reached_fork, utcnow_sec}; // Do not accept slots more than 60 seconds in the future const MAX_DISTANCE_FOR_FUTURE_SLOT: u64 = 60; @@ -154,13 +150,12 @@ impl ChainEventUpdater { info!(head_slot = slot, "Processing slot",); // Validate this isn't a faulty head slot - if let 
Ok(current_timestamp) = SystemTime::now().duration_since(UNIX_EPOCH) { - let slot_timestamp = - self.chain_info.genesis_time_in_secs + (slot * self.chain_info.seconds_per_slot); - if slot_timestamp > current_timestamp.as_secs() + MAX_DISTANCE_FOR_FUTURE_SLOT { - warn!(head_slot = slot, "slot is too far in the future",); - return - } + + let slot_timestamp = + self.chain_info.genesis_time_in_secs + (slot * self.chain_info.seconds_per_slot); + if slot_timestamp > utcnow_sec() + MAX_DISTANCE_FOR_FUTURE_SLOT { + warn!(head_slot = slot, "slot is too far in the future",); + return } // Log any missed slots diff --git a/crates/housekeeper/src/housekeeper.rs b/crates/housekeeper/src/housekeeper.rs index 23f0948..a598971 100644 --- a/crates/housekeeper/src/housekeeper.rs +++ b/crates/housekeeper/src/housekeeper.rs @@ -1,8 +1,4 @@ -use std::{ - collections::HashMap, - sync::Arc, - time::{Duration, SystemTime}, -}; +use std::{collections::HashMap, sync::Arc, time::Duration}; use ethereum_consensus::primitives::BlsPublicKey; use ethers::{ @@ -11,6 +7,7 @@ use ethers::{ providers::{Http, Provider}, types::U256, }; +use helix_utils::utcnow_ms; use reth_primitives::{constants::EPOCH_SLOTS, revm_primitives::HashSet}; use std::convert::TryFrom; use tokio::{ @@ -360,8 +357,7 @@ impl /// /// DB entries are also removed if they have been waiting for over 45 seconds. 
async fn demote_builders_with_expired_pending_blocks(&self) -> Result<(), HousekeeperError> { - let current_time = - SystemTime::now().duration_since(SystemTime::UNIX_EPOCH).unwrap().as_millis() as u64; + let current_time = utcnow_ms(); let mut demoted_builders = HashSet::new(); diff --git a/crates/utils/src/lib.rs b/crates/utils/src/lib.rs index c050178..8961f0c 100644 --- a/crates/utils/src/lib.rs +++ b/crates/utils/src/lib.rs @@ -4,6 +4,7 @@ use std::{ io::Write, panic, path::Path, + time::{SystemTime, UNIX_EPOCH}, }; use ::serde::de; @@ -134,3 +135,22 @@ pub fn extract_request_id(headers: &HeaderMap) -> Uuid { .and_then(|v| Uuid::parse_str(v).ok()) .unwrap_or(Uuid::new_v4()) } + +//// TIME //// + +/// Seconds +pub fn utcnow_sec() -> u64 { + SystemTime::now().duration_since(UNIX_EPOCH).unwrap().as_secs() +} +/// Millis +pub fn utcnow_ms() -> u64 { + SystemTime::now().duration_since(UNIX_EPOCH).unwrap().as_millis() as u64 +} +/// Micros +pub fn utcnow_us() -> u64 { + SystemTime::now().duration_since(UNIX_EPOCH).unwrap().as_micros() as u64 +} +/// Nanos +pub fn utcnow_ns() -> u64 { + SystemTime::now().duration_since(UNIX_EPOCH).unwrap().as_nanos() as u64 +} From f5e222ee177f689f89ddd72151312b229bf3b534 Mon Sep 17 00:00:00 2001 From: ltitanb Date: Fri, 6 Dec 2024 19:11:18 +0000 Subject: [PATCH 31/39] clippy --- crates/utils/src/lib.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/utils/src/lib.rs b/crates/utils/src/lib.rs index 8961f0c..40b5af6 100644 --- a/crates/utils/src/lib.rs +++ b/crates/utils/src/lib.rs @@ -136,7 +136,7 @@ pub fn extract_request_id(headers: &HeaderMap) -> Uuid { .unwrap_or(Uuid::new_v4()) } -//// TIME //// +////// TIME ////// /// Seconds pub fn utcnow_sec() -> u64 { From f26f2577cbeeedb546997c924a244c0ecaa83df7 Mon Sep 17 00:00:00 2001 From: ltitanb Date: Fri, 6 Dec 2024 19:36:01 +0000 Subject: [PATCH 32/39] copy traces --- crates/api/src/builder/api.rs | 17 +++++++---------- crates/api/src/builder/types.rs | 8 
++++---- crates/api/src/proposer/api.rs | 7 +++---- crates/common/src/traces/builder_api_trace.rs | 10 ++++++---- .../common/src/traces/constraint_api_trace.rs | 4 +++- crates/common/src/traces/proposer_api.rs | 8 +++++--- crates/database/src/mock_database_service.rs | 8 ++++---- .../src/postgres/postgres_db_service.rs | 8 ++++---- .../src/postgres/postgres_db_service_tests.rs | 4 ++-- crates/database/src/traits.rs | 8 ++++---- 10 files changed, 42 insertions(+), 40 deletions(-) diff --git a/crates/api/src/builder/api.rs b/crates/api/src/builder/api.rs index 5f28785..4520496 100644 --- a/crates/api/src/builder/api.rs +++ b/crates/api/src/builder/api.rs @@ -510,10 +510,8 @@ where // Save submission to db. tokio::spawn( async move { - if let Err(err) = api - .db - .store_block_submission(payload, Arc::new(trace), optimistic_version as i16) - .await + if let Err(err) = + api.db.store_block_submission(payload, trace, optimistic_version as i16).await { error!(%err, "failed to store block submission") } @@ -721,7 +719,7 @@ where let db = api.db.clone(); tokio::spawn( async move { - if let Err(err) = db.store_header_submission(payload, Arc::new(trace)).await { + if let Err(err) = db.store_header_submission(payload, trace).await { error!( %err, "failed to store header submission", @@ -929,7 +927,7 @@ where async move { if let Err(err) = api .db - .store_block_submission(payload, Arc::new(trace), OptimisticVersion::V2 as i16) + .store_block_submission(payload, trace, OptimisticVersion::V2 as i16) .await { error!(%err, "failed to store block submission") @@ -1125,9 +1123,8 @@ where // Save latency trace to db let db = self.db.clone(); tokio::spawn(async move { - if let Err(err) = db - .save_gossiped_header_trace(req.bid_trace.block_hash.clone(), Arc::new(trace)) - .await + if let Err(err) = + db.save_gossiped_header_trace(req.bid_trace.block_hash.clone(), trace).await { error!(%err, "failed to store gossiped header trace") } @@ -1205,7 +1202,7 @@ where if let Err(err) = db 
.save_gossiped_payload_trace( req.execution_payload.execution_payload.block_hash().clone(), - Arc::new(trace), + trace, ) .await { diff --git a/crates/api/src/builder/types.rs b/crates/api/src/builder/types.rs index 5bdcd37..5fd9fb9 100644 --- a/crates/api/src/builder/types.rs +++ b/crates/api/src/builder/types.rs @@ -9,10 +9,10 @@ use helix_common::{ #[derive(Clone)] pub enum DbInfo { - NewSubmission(Arc, Arc, OptimisticVersion), - NewHeaderSubmission(Arc, Arc), - GossipedHeader { block_hash: ByteVector<32>, trace: Arc }, - GossipedPayload { block_hash: ByteVector<32>, trace: Arc }, + NewSubmission(Arc, SubmissionTrace, OptimisticVersion), + NewHeaderSubmission(Arc, HeaderSubmissionTrace), + GossipedHeader { block_hash: ByteVector<32>, trace: GossipedHeaderTrace }, + GossipedPayload { block_hash: ByteVector<32>, trace: GossipedPayloadTrace }, SimulationResult { block_hash: ByteVector<32>, block_sim_result: Result<(), BlockSimError> }, } diff --git a/crates/api/src/proposer/api.rs b/crates/api/src/proposer/api.rs index 43c5893..244a134 100644 --- a/crates/api/src/proposer/api.rs +++ b/crates/api/src/proposer/api.rs @@ -883,7 +883,7 @@ where let self_clone = self.clone(); let unblinded_payload_clone = unblinded_payload.clone(); let request_id_clone = *request_id; - let mut trace_clone = trace.clone(); + let mut trace_clone = *trace; let payload_clone = payload.clone(); tokio::spawn(async move { @@ -916,7 +916,7 @@ where payload_clone, &signed_blinded_block, &proposer_public_key, - &trace_clone, + trace_clone, &request_id_clone, user_agent, ) @@ -1369,7 +1369,7 @@ where payload: Arc, signed_blinded_block: &SignedBlindedBeaconBlock, proposer_public_key: &BlsPublicKey, - trace: &GetPayloadTrace, + trace: GetPayloadTrace, request_id: &Uuid, user_agent: Option, ) { @@ -1394,7 +1394,6 @@ where }; let db = self.db.clone(); - let trace = trace.clone(); let request_id = *request_id; tokio::spawn(async move { if let Err(err) = diff --git 
a/crates/common/src/traces/builder_api_trace.rs b/crates/common/src/traces/builder_api_trace.rs index 123d891..7419a02 100644 --- a/crates/common/src/traces/builder_api_trace.rs +++ b/crates/common/src/traces/builder_api_trace.rs @@ -1,4 +1,6 @@ -#[derive(Clone, Default, Debug, serde::Serialize, serde::Deserialize)] +use serde::{Deserialize, Serialize}; + +#[derive(Debug, Clone, Copy, Default, Serialize, Deserialize)] pub struct SubmissionTrace { pub receive: u64, pub decode: u64, @@ -10,7 +12,7 @@ pub struct SubmissionTrace { pub request_finish: u64, } -#[derive(Clone, Default, Debug, serde::Serialize, serde::Deserialize)] +#[derive(Debug, Clone, Copy, Default, Serialize, Deserialize)] pub struct HeaderSubmissionTrace { pub receive: u64, pub decode: u64, @@ -21,14 +23,14 @@ pub struct HeaderSubmissionTrace { pub request_finish: u64, } -#[derive(Clone, Default, Debug, serde::Serialize, serde::Deserialize)] +#[derive(Debug, Clone, Copy, Default, Serialize, Deserialize)] pub struct GossipedPayloadTrace { pub receive: u64, pub pre_checks: u64, pub auctioneer_update: u64, } -#[derive(Clone, Default, Debug, serde::Serialize, serde::Deserialize)] +#[derive(Debug, Clone, Copy, Default, Serialize, Deserialize)] pub struct GossipedHeaderTrace { pub on_receive: u64, pub on_gossip_receive: u64, diff --git a/crates/common/src/traces/constraint_api_trace.rs b/crates/common/src/traces/constraint_api_trace.rs index aa08b5e..5e0b9d8 100644 --- a/crates/common/src/traces/constraint_api_trace.rs +++ b/crates/common/src/traces/constraint_api_trace.rs @@ -1,4 +1,6 @@ -#[derive(Clone, Default, Debug, serde::Serialize, serde::Deserialize)] +use serde::{Deserialize, Serialize}; + +#[derive(Debug, Clone, Copy, Default, Serialize, Deserialize)] pub struct ConstraintSubmissionTrace { pub receive: u64, pub decode: u64, diff --git a/crates/common/src/traces/proposer_api.rs b/crates/common/src/traces/proposer_api.rs index 32f25c1..9f21ac9 100644 --- a/crates/common/src/traces/proposer_api.rs 
+++ b/crates/common/src/traces/proposer_api.rs @@ -1,17 +1,19 @@ -#[derive(Debug, Default)] +use serde::{Deserialize, Serialize}; + +#[derive(Debug, Clone, Copy, Default)] pub struct RegisterValidatorsTrace { pub receive: u64, pub registrations_complete: u64, } -#[derive(Debug, Default, Clone)] +#[derive(Debug, Clone, Copy, Default)] pub struct GetHeaderTrace { pub receive: u64, pub validation_complete: u64, pub best_bid_fetched: u64, } -#[derive(Debug, Default, serde::Serialize, serde::Deserialize, Clone)] +#[derive(Debug, Clone, Copy, Default, Serialize, Deserialize)] pub struct GetPayloadTrace { pub receive: u64, pub proposer_index_validated: u64, diff --git a/crates/database/src/mock_database_service.rs b/crates/database/src/mock_database_service.rs index 6630622..e15b3d4 100644 --- a/crates/database/src/mock_database_service.rs +++ b/crates/database/src/mock_database_service.rs @@ -150,7 +150,7 @@ impl DatabaseService for MockDatabaseService { async fn store_block_submission( &self, _submission: Arc, - _trace: Arc, + _trace: SubmissionTrace, _optimistic_version: i16, ) -> Result<(), DatabaseError> { Ok(()) @@ -252,7 +252,7 @@ impl DatabaseService for MockDatabaseService { async fn store_header_submission( &self, _submission: Arc, - _trace: Arc, + _trace: HeaderSubmissionTrace, ) -> Result<(), DatabaseError> { Ok(()) } @@ -260,7 +260,7 @@ impl DatabaseService for MockDatabaseService { async fn save_gossiped_header_trace( &self, _block_hash: ByteVector<32>, - _trace: Arc, + _trace: GossipedHeaderTrace, ) -> Result<(), DatabaseError> { Ok(()) } @@ -268,7 +268,7 @@ impl DatabaseService for MockDatabaseService { async fn save_gossiped_payload_trace( &self, _block_hash: ByteVector<32>, - _trace: Arc, + _trace: GossipedPayloadTrace, ) -> Result<(), DatabaseError> { Ok(()) } diff --git a/crates/database/src/postgres/postgres_db_service.rs b/crates/database/src/postgres/postgres_db_service.rs index a64dd63..8cb8151 100644 --- 
a/crates/database/src/postgres/postgres_db_service.rs +++ b/crates/database/src/postgres/postgres_db_service.rs @@ -1134,7 +1134,7 @@ impl DatabaseService for PostgresDatabaseService { async fn store_block_submission( &self, submission: Arc, - trace: Arc, + trace: SubmissionTrace, optimistic_version: i16, ) -> Result<(), DatabaseError> { let mut record = DbMetricRecord::new("store_block_submission"); @@ -1717,7 +1717,7 @@ impl DatabaseService for PostgresDatabaseService { async fn store_header_submission( &self, submission: Arc, - trace: Arc, + trace: HeaderSubmissionTrace, ) -> Result<(), DatabaseError> { let mut record = DbMetricRecord::new("store_header_submission"); @@ -1780,7 +1780,7 @@ impl DatabaseService for PostgresDatabaseService { async fn save_gossiped_header_trace( &self, block_hash: ByteVector<32>, - trace: Arc, + trace: GossipedHeaderTrace, ) -> Result<(), DatabaseError> { let mut record = DbMetricRecord::new("save_gossiped_header_trace"); @@ -1810,7 +1810,7 @@ impl DatabaseService for PostgresDatabaseService { async fn save_gossiped_payload_trace( &self, block_hash: ByteVector<32>, - trace: Arc, + trace: GossipedPayloadTrace, ) -> Result<(), DatabaseError> { let mut record = DbMetricRecord::new("save_gossiped_payload_trace"); diff --git a/crates/database/src/postgres/postgres_db_service_tests.rs b/crates/database/src/postgres/postgres_db_service_tests.rs index 57204bc..d39298b 100644 --- a/crates/database/src/postgres/postgres_db_service_tests.rs +++ b/crates/database/src/postgres/postgres_db_service_tests.rs @@ -492,7 +492,7 @@ mod tests { }; db_service - .store_block_submission(Arc::new(signed_bid_submission), Arc::new(submission_trace), 0) + .store_block_submission(Arc::new(signed_bid_submission), submission_trace, 0) .await?; Ok(()) } @@ -691,7 +691,7 @@ mod tests { db_service .store_header_submission( Arc::new(signed_bid_submission), - Arc::new(HeaderSubmissionTrace::default()), + HeaderSubmissionTrace::default(), ) .await?; Ok(()) diff --git 
a/crates/database/src/traits.rs b/crates/database/src/traits.rs index c9db776..06690a0 100644 --- a/crates/database/src/traits.rs +++ b/crates/database/src/traits.rs @@ -105,7 +105,7 @@ pub trait DatabaseService: Send + Sync + Clone { async fn store_block_submission( &self, submission: Arc, - trace: Arc, + trace: SubmissionTrace, optimistic_version: i16, ) -> Result<(), DatabaseError>; @@ -175,19 +175,19 @@ pub trait DatabaseService: Send + Sync + Clone { async fn store_header_submission( &self, submission: Arc, - trace: Arc, + trace: HeaderSubmissionTrace, ) -> Result<(), DatabaseError>; async fn save_gossiped_header_trace( &self, block_hash: ByteVector<32>, - trace: Arc, + trace: GossipedHeaderTrace, ) -> Result<(), DatabaseError>; async fn save_gossiped_payload_trace( &self, block_hash: ByteVector<32>, - trace: Arc, + trace: GossipedPayloadTrace, ) -> Result<(), DatabaseError>; async fn get_trusted_proposers(&self) -> Result, DatabaseError>; From dd02d6e48b08af78e8ca38fb8dec794f2f8748f0 Mon Sep 17 00:00:00 2001 From: ltitanb Date: Fri, 6 Dec 2024 20:21:12 +0000 Subject: [PATCH 33/39] add license --- LICENSE-APACHE | 201 +++++++++++++++++++++++++++++++++++++++++++++++++ LICENSE-MIT | 21 ++++++ 2 files changed, 222 insertions(+) create mode 100644 LICENSE-APACHE create mode 100644 LICENSE-MIT diff --git a/LICENSE-APACHE b/LICENSE-APACHE new file mode 100644 index 0000000..c7b48f0 --- /dev/null +++ b/LICENSE-APACHE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. 
+ + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of 
the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2024 Gattaca + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/LICENSE-MIT b/LICENSE-MIT new file mode 100644 index 0000000..f71715f --- /dev/null +++ b/LICENSE-MIT @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2024 Gattaca + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. \ No newline at end of file From e75401e0b86374306fc8567bfe102b8eb20a6da8 Mon Sep 17 00:00:00 2001 From: ltitanb <163874448+ltitanb@users.noreply.github.com> Date: Sat, 7 Dec 2024 13:27:36 +0000 Subject: [PATCH 34/39] Create SECURITY.md (#64) --- SECURITY.md | 39 +++++++++++++++++++++++++++++++++++++++ 1 file changed, 39 insertions(+) create mode 100644 SECURITY.md diff --git a/SECURITY.md b/SECURITY.md new file mode 100644 index 0000000..a92cee3 --- /dev/null +++ b/SECURITY.md @@ -0,0 +1,39 @@ +# Security Policy + +## Reporting a Vulnerability + +If you believe you have found a security vulnerability, please report it to us responsibly at security@gattaca.com + + +**Please DO NOT report security vulnerabilities through public GitHub issues.** + +In your report, please include the following information: +- Description of the vulnerability +- Steps to reproduce the issue +- Potential impact +- Any possible mitigations + + +## Bug Bounty Program + +We maintain a bug bounty program to reward security researchers who help us identify and fix vulnerabilities. 
+ + +### Scope + +In scope in `main` branch: +- Main application code +- API endpoints +- Authentication systems + + +### Rewards + +Bounties are awarded based on severity, impact and likelihood: + +| Severity | Maximum | Example | +|----------|------------:|----------------------------------------------------------------------------------| +| Low | $1,000 USD | A bug that causes the relay to stop serving headers | +| Medium | $5,000 USD | A bug that causes the relay to go offline | +| High | $15,000 USD | A bug that causes proposers to miss several slots | +| Critical | $25,000 USD | A bug that causes an untrusted proposer to access an invalid unblinded payload | From d187ea4a04d37438ff6c75f592a3aed7770727cc Mon Sep 17 00:00:00 2001 From: ltitanb Date: Mon, 9 Dec 2024 11:26:50 +0000 Subject: [PATCH 35/39] fix path --- crates/api/src/middleware/metrics.rs | 18 ++++++++++++++++-- .../rate_limiting/rate_limit_by_ip.rs | 7 +++++-- crates/common/src/api/mod.rs | 8 ++++---- 3 files changed, 25 insertions(+), 8 deletions(-) diff --git a/crates/api/src/middleware/metrics.rs b/crates/api/src/middleware/metrics.rs index de2c0aa..1d32256 100644 --- a/crates/api/src/middleware/metrics.rs +++ b/crates/api/src/middleware/metrics.rs @@ -5,12 +5,23 @@ use axum::{ middleware::Next, response::{IntoResponse, Response}, }; -use helix_common::metrics::ApiMetrics; +use helix_common::{ + api::{PATH_BUILDER_API, PATH_CONSTRAINTS_API, PATH_DATA_API, PATH_PROPOSER_API}, + metrics::ApiMetrics, +}; use crate::builder::api::MAX_PAYLOAD_LENGTH; +use super::rate_limiting::rate_limit_by_ip::replace_dynamic_routes; + pub async fn metrics_middleware(req: Request, next: Next) -> Response { - let endpoint = req.uri().path().to_string(); + let endpoint = req.uri().path(); + + if !SUPPORTED_PATHS.iter().any(|path| endpoint.starts_with(path)) { + return next.run(req).await; + } + + let endpoint = replace_dynamic_routes(endpoint).to_string(); ApiMetrics::count(&endpoint); let _timer = 
ApiMetrics::timer(&endpoint); @@ -32,3 +43,6 @@ pub async fn metrics_middleware(req: Request, next: Next) -> Response { response } + +const SUPPORTED_PATHS: [&str; 4] = + [PATH_BUILDER_API, PATH_PROPOSER_API, PATH_DATA_API, PATH_CONSTRAINTS_API]; diff --git a/crates/api/src/middleware/rate_limiting/rate_limit_by_ip.rs b/crates/api/src/middleware/rate_limiting/rate_limit_by_ip.rs index 614a544..e23375d 100644 --- a/crates/api/src/middleware/rate_limiting/rate_limit_by_ip.rs +++ b/crates/api/src/middleware/rate_limiting/rate_limit_by_ip.rs @@ -33,10 +33,13 @@ impl RateLimitState { } // TODO: feels a bit hacky, maybe there's a better way to do this? -fn replace_dynamic_routes(route: &str) -> &str { - // Currently, the only dynamic route is /eth/v1/builder/header/:slot/:parent_hash/:pubkey +pub fn replace_dynamic_routes(route: &str) -> &str { + // Currently, the only dynamic route is /eth/v1/builder/header/:slot/:parent_hash/:pubkey and + // /eth/v1/builder/header_with_proofs/:slot/:parent_hash/:pubkey if route.starts_with("/eth/v1/builder/header") { "/eth/v1/builder/header/:slot/:parent_hash/:pubkey" + } else if route.starts_with("/eth/v1/builder/header_with_proofs") { + "/eth/v1/builder/header_with_proofs/:slot/:parent_hash/:pubkey" } else { route } diff --git a/crates/common/src/api/mod.rs b/crates/common/src/api/mod.rs index 8b78973..2633f8d 100644 --- a/crates/common/src/api/mod.rs +++ b/crates/common/src/api/mod.rs @@ -3,7 +3,7 @@ pub mod constraints_api; pub mod data_api; pub mod proposer_api; -pub(crate) const PATH_BUILDER_API: &str = "/relay/v1/builder"; +pub const PATH_BUILDER_API: &str = "/relay/v1/builder"; pub(crate) const PATH_GET_VALIDATORS: &str = "/validators"; pub(crate) const PATH_SUBMIT_BLOCK: &str = "/blocks"; @@ -16,7 +16,7 @@ pub(crate) const PATH_BUILDER_CONSTRAINTS: &str = "/constraints"; pub(crate) const PATH_BUILDER_CONSTRAINTS_STREAM: &str = "/constraints_stream"; pub(crate) const PATH_BUILDER_DELEGATIONS: &str = "/delegations"; -pub(crate) 
const PATH_PROPOSER_API: &str = "/eth/v1/builder"; +pub const PATH_PROPOSER_API: &str = "/eth/v1/builder"; pub(crate) const PATH_STATUS: &str = "/status"; pub(crate) const PATH_REGISTER_VALIDATORS: &str = "/validators"; @@ -25,13 +25,13 @@ pub(crate) const PATH_GET_PAYLOAD: &str = "/blinded_blocks"; pub(crate) const PATH_GET_HEADER_WITH_PROOFS: &str = "/header_with_proofs/:slot/:parent_hash/:pubkey"; -pub(crate) const PATH_DATA_API: &str = "/relay/v1/data"; +pub const PATH_DATA_API: &str = "/relay/v1/data"; pub(crate) const PATH_PROPOSER_PAYLOAD_DELIVERED: &str = "/bidtraces/proposer_payload_delivered"; pub(crate) const PATH_BUILDER_BIDS_RECEIVED: &str = "/bidtraces/builder_blocks_received"; pub(crate) const PATH_VALIDATOR_REGISTRATION: &str = "/validator_registration"; -pub(crate) const PATH_CONSTRAINTS_API: &str = "/constraints/v1"; +pub const PATH_CONSTRAINTS_API: &str = "/constraints/v1"; pub(crate) const PATH_SUBMIT_BUILDER_CONSTRAINTS: &str = "/builder/constraints"; pub(crate) const PATH_DELEGATE_SUBMISSION_RIGHTS: &str = "/builder/delegate"; From b43dbd169c8c6a2e5e84de2802c05b5fd4042f8f Mon Sep 17 00:00:00 2001 From: ltitanb Date: Mon, 9 Dec 2024 11:32:56 +0000 Subject: [PATCH 36/39] lint --- crates/api/src/middleware/metrics.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/api/src/middleware/metrics.rs b/crates/api/src/middleware/metrics.rs index 1d32256..1678e0d 100644 --- a/crates/api/src/middleware/metrics.rs +++ b/crates/api/src/middleware/metrics.rs @@ -18,7 +18,7 @@ pub async fn metrics_middleware(req: Request, next: Next) -> Response { let endpoint = req.uri().path(); if !SUPPORTED_PATHS.iter().any(|path| endpoint.starts_with(path)) { - return next.run(req).await; + return next.run(req).await } let endpoint = replace_dynamic_routes(endpoint).to_string(); From 54bfbd1c60d89196fa8fcb5412efa6d2a59078a4 Mon Sep 17 00:00:00 2001 From: gd <90608901+gd-0@users.noreply.github.com> Date: Mon, 9 Dec 2024 16:54:33 +0000 Subject: 
[PATCH 37/39] Update SECURITY.md --- SECURITY.md | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/SECURITY.md b/SECURITY.md index a92cee3..0339444 100644 --- a/SECURITY.md +++ b/SECURITY.md @@ -33,7 +33,6 @@ Bounties are awarded based on severity, impact and likelihood: | Severity | Maximum | Example | |----------|------------:|----------------------------------------------------------------------------------| -| Low | $1,000 USD | A bug that causes the relay to stop serving headers | -| Medium | $5,000 USD | A bug that causes the relay to go offline | +| Medium | $1,000 USD | A bug that causes the relay to go offline | | High | $15,000 USD | A bug that causes proposers to miss several slots | | Critical | $25,000 USD | A bug that causes an untrusted proposer to access an invalid unblinded payload | From f8457b2d9e8eecb3ae548d7bdd5fbc1f8fd98ba6 Mon Sep 17 00:00:00 2001 From: ltitanb <163874448+ltitanb@users.noreply.github.com> Date: Wed, 11 Dec 2024 15:22:37 +0000 Subject: [PATCH 38/39] chore: remove build action (#68) --- .github/workflows/build.yml | 44 ------------------------------------- 1 file changed, 44 deletions(-) delete mode 100644 .github/workflows/build.yml diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml deleted file mode 100644 index efdacd0..0000000 --- a/.github/workflows/build.yml +++ /dev/null @@ -1,44 +0,0 @@ -name: Build - -on: - push: - branches: [main, develop, "release/**"] - pull_request: - branches: [main, develop, "release/**"] - -env: - CARGO_TERM_COLOR: always - -jobs: - build: - runs-on: ubuntu-latest - - steps: - - uses: actions/checkout@v2 - with: - fetch-depth: "0" - path: ./repos/${{ secrets.REPO_NAME }} - ref: ${{ github.ref }} - - - name: Set Image Name - run: | - if [ ${{ github.ref }} == 'refs/heads/main' ]; then - echo "IMAGE_NAME=latest" >> $GITHUB_ENV; - else - BRANCH_NAME=$(echo ${{ github.ref }} | sed 's:.*/::'); - echo "IMAGE_NAME=$BRANCH_NAME" >> $GITHUB_ENV; - fi - - - name: Run 
Dockerfile - env: - DOCKER_REG_USR: ${{ secrets.DOCKER_REG_USR }} - DOCKER_REG_PW: ${{ secrets.DOCKER_REG_PW }} - AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} - AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} - REGISTRY_URL: ${{ secrets.REGISTRY_URL }} - REPO_NAME: ${{ secrets.REPO_NAME }} - run: | - DOCKER_BUILDKIT=1 docker build -t helix:${{ env.IMAGE_NAME }} --build-arg AWS_ACCESS_KEY_ID=$AWS_ACCESS_KEY_ID --build-arg AWS_SECRET_ACCESS_KEY=$AWS_SECRET_ACCESS_KEY --build-arg REPO_NAME=$REPO_NAME -f ./repos/$REPO_NAME/Dockerfile . - DOCKER_BUILDKIT=1 docker tag helix:${{ env.IMAGE_NAME }} $REGISTRY_URL/helix:${{ env.IMAGE_NAME }} - DOCKER_BUILDKIT=1 docker login https://$REGISTRY_URL/ -u "$DOCKER_REG_USR" -p "$DOCKER_REG_PW" - DOCKER_BUILDKIT=1 docker push $REGISTRY_URL/helix:${{ env.IMAGE_NAME }} From 781c5266e42326bb65bf559f11edb06dcf81709b Mon Sep 17 00:00:00 2001 From: David <3173957+0ex-d@users.noreply.github.com> Date: Wed, 18 Dec 2024 11:23:23 +0000 Subject: [PATCH 39/39] setup --- .gitignore | 2 ++ README.md | 54 +++++++++++++++++++++++++++++++++++++++++ config.yml | 70 ++++++++++++++++++++++++++++++++++++++++++++++++++++++ 3 files changed, 126 insertions(+) create mode 100644 config.yml diff --git a/.gitignore b/.gitignore index c2c144f..f7e6626 100644 --- a/.gitignore +++ b/.gitignore @@ -39,3 +39,5 @@ db-tools/ # Coverage report lcov.info + +logs/ diff --git a/README.md b/README.md index 16543ca..6fd1583 100644 --- a/README.md +++ b/README.md @@ -77,6 +77,60 @@ The graphs illustrate a marked reduction in latency across several operational s In addition to the improvements made in internal processing efficiency, using the OptimisticV2 implementation has resulted in significantly lower network latencies from our builders. +## How to run +The relay can be run locally for development, or configured for staging and production environments. 
Depending on the setup, you may want to leverage Docker for building and running the service, along with a Postgres database, Redis instance. + +#### Local setup +```bash +# Build the image +$ docker build -t helix_mev_relayer -f local.Dockerfile . + +# Run the container +$ docker run --name helix_mev_relayer helix_mev_relayer +``` + +#### Staging or Production-Ready setup +AWS configuration is required as a cloud storage option for [sccache](https://github.com/mozilla/sccache.git) (a rust wrapper for caching builds for faster development). +For environments closer to production, you can use the provided [Dockerfile](./Dockerfile). In these environments, [sccache](https://github.com/mozilla/sccache.git) can be configured to store build artifacts in AWS S3 for faster incremental builds. You must supply your AWS credentials as build arguments: + +```bash +$ docker build \ + --build-arg AWS_ACCESS_KEY_ID= \ + --build-arg AWS_SECRET_ACCESS_KEY= \ + --build-arg REPO_NAME= \ + -t helix_mev_relayer . + +$ docker run --name helix_mev_relayer helix_mev_relayer +``` + +### Configuration + +[config.yml](./config.yml) contains options for the relay. + +### Databases +The relay relies on postgres database for persistent storage. Ensure you point the `config.postgres` settings to a database with `timescaledb` extension installed and enabled. + +For Redis set `config.redis.url` to a connection url to a running redis instance. + +### The simulator service +`config.simulators`: should be an RPC endpoint for the payload validator. This service is responsible for sending block requests to the relay for validation. + +### Beacon client +Beacon clients provide APIs for validators to register and propose blocks. You can point the relay to one or multiple beacon clients using `config.beacon_clients`. Examples: [Prysm](https://github.com/prysmaticlabs/prysm.git), [Lighthouse](https://github.com/sigp/lighthouse.git). 
+ +It is the **Consensus Layer** (CL) of the blockchain and is dependent on an **Execution Layer** (EL). + +Each beacon client must have **HTTP API** enabled and requires a working EL client like [geth](https://github.com/ethereum/go-ethereum.git) or [reth](https://github.com/paradigmxyz/reth.git) (Rust ecosystem). Make sure both CL and EL are synchronized and accessible to the relay. + +### Logging +Logs are tailed in `config.logging`. when there is a panic call by `panic::set_hook` logging is done in **{config.dir_path}/crash.log**. + +You can also add a webhook to log `panic!` messages to discord using the `config.discord_webhook_url`. Note this uses `reqwest::blocking` under the hood. + +### Additional Notes +- Ensure that your environment is properly set up before starting the relay (e.g., databases running, AWS credentials ready if using sccache). +- For best performance in production, always verify that your beacon and execution clients are properly synced and that the relay has the appropriate network permissions. 
+ ## Future Work ### In-Memory Auctioneer diff --git a/config.yml b/config.yml new file mode 100644 index 0000000..492d80d --- /dev/null +++ b/config.yml @@ -0,0 +1,70 @@ +network_config: Sepolia +website: + enabled: false + port: 8080 + listen_address: 0.0.0.0 + show_config_details: false + network_name: "" + relay_url: "" + relay_pubkey: "" + link_beaconchain: "" + link_etherscan: "" + link_data_api: "" +postgres: + hostname: localhost + port: 5434 + db_name: helix_mev_relayer + user: postgres + password: postgres + region: 0 + region_name: "" +redis: + url: redis://localhost:6379 +broadcasters: + - !BeaconClient + url: http://localhost:8545 + gossip_blobs_enabled: false +simulators: + - url: http://localhost:8545 +beacon_clients: + - url: http://localhost:8545 + gossip_blobs_enabled: false +relays: [] +builders: [] +logging: !File + dir_path: logs + file_name: log +validator_preferences: + filtering: regional + trusted_builders: null + header_delay: true + gossip_blobs: false +router_config: + enabled_routes: + - route: GetValidators + rate_limit: null + - route: SubmitBlock + rate_limit: null + - route: SubmitBlockOptimistic + rate_limit: null + - route: ValidatorRegistration + rate_limit: null + - route: GetHeader + rate_limit: + limit_duration_ms: 12 + max_requests: 3 + - route: GetPayload + rate_limit: null + - route: ProposerPayloadDelivered + rate_limit: null + - route: RegisterValidators + rate_limit: null + - route: Status + rate_limit: null +target_get_payload_propagation_duration_ms: 0 +constraints_api_config: + check_constraints_signature: true + max_block_value_to_verify_wei: null +primev_config: null +skip_floor_bid_builder_pubkeys: [] +discord_webhook_url: null