4 changes: 2 additions & 2 deletions crates/bid-scraper/src/bid_scraper.rs
@@ -9,7 +9,7 @@ use crate::bloxroute_ws_publisher::{
use crate::config::{NamedPublisherConfig, PublisherConfig};
use crate::get_timestamp_f64;
use crate::headers_publisher::{HeadersPublisherService, RelayHeadersPublisherConfig};
-use crate::types::{BlockBid, PublisherType};
+use crate::types::{PublisherType, ScrapedRelayBlockBid};
use crate::ultrasound_ws_publisher::{
UltrasoundWsConnectionHandler, UltrasoundWsPublisher, UltrasoundWsPublisherConfig,
};
@@ -255,7 +255,7 @@ impl BidSender2BestBidValueSink {

impl BestBidValueSink for BidSender2BestBidValueSink {
fn send(&self, bid: BestBidValue) {
-let bid = BlockBid {
+let bid = ScrapedRelayBlockBid {
seen_time: get_timestamp_f64(),
publisher_name: self.name.clone(),
publisher_type: PublisherType::ExternalWs,
6 changes: 3 additions & 3 deletions crates/bid-scraper/src/bid_scraper_client.rs
@@ -9,12 +9,12 @@ use std::{sync::Arc, time::Duration};
use tokio_util::sync::CancellationToken;
use tracing::info;

-use crate::types::BlockBid;
+use crate::types::ScrapedRelayBlockBid;

/// Sink for scraped bids.
pub trait ScrapedBidsObs: Send + Sync {
/// Be careful, we don't assume any kind of filtering here so bid may contain our own bids.
-fn update_new_bid(&self, bid: BlockBid);
+fn update_new_bid(&self, bid: ScrapedRelayBlockBid);
}

/// NNG subscriber with infinite retries.
@@ -63,7 +63,7 @@ async fn run_nng_subscriber(

loop {
let msg = tokio::time::timeout(timeout, nng_reader.receive()).await??;
-let block_bid: BlockBid = serde_json::from_slice(msg.body())?;
+let block_bid: ScrapedRelayBlockBid = serde_json::from_slice(msg.body())?;
obs.update_new_bid(block_bid);
}
}
8 changes: 4 additions & 4 deletions crates/bid-scraper/src/bid_sender.rs
@@ -4,11 +4,11 @@ use runng::{protocol::Pub0, SendSocket};
use tokio_util::sync::CancellationToken;
use tracing::error;

-use crate::types::BlockBid;
+use crate::types::ScrapedRelayBlockBid;

/// Trait for sending scraped bids.
pub trait BidSender: Send + Sync {
fn send(&self, bid: BlockBid) -> Result<(), BidSenderError>;
fn send(&self, bid: ScrapedRelayBlockBid) -> Result<(), BidSenderError>;
}

/// Implementation of BidSender that publishes the bids to the network using NNG.
@@ -33,7 +33,7 @@ impl NNGBidSender {
}

impl BidSender for NNGBidSender {
-fn send(&self, bid: BlockBid) -> Result<(), BidSenderError> {
+fn send(&self, bid: ScrapedRelayBlockBid) -> Result<(), BidSenderError> {
match serde_json::to_vec(&bid) {
Ok(data) => {
if let Err(err) = self.nng_publisher_socket.send(&data) {
@@ -73,7 +73,7 @@ impl BidSenderCanceller {
}

impl BidSender for BidSenderCanceller {
-fn send(&self, bid: BlockBid) -> Result<(), BidSenderError> {
+fn send(&self, bid: ScrapedRelayBlockBid) -> Result<(), BidSenderError> {
let res = self.bid_sender.send(bid);
if let Err(err) = &res {
match err {
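Note on the pattern above: `BidSenderCanceller` wraps another `BidSender` and trips a `CancellationToken` when a send fails fatally, so the rest of the scraper can shut down. A minimal standalone sketch of that decorator shape, assuming `tokio_util`; the trait and error type below are simplified stand-ins, not the crate's own:

```rust
use tokio_util::sync::CancellationToken;

#[derive(Debug)]
enum SendError {
    Fatal,
    Transient,
}

trait Sender: Send + Sync {
    fn send(&self, payload: Vec<u8>) -> Result<(), SendError>;
}

/// Decorator: forwards to the inner sender and cancels the token on fatal errors.
struct CancellingSender<S> {
    inner: S,
    cancellation: CancellationToken,
}

impl<S: Sender> Sender for CancellingSender<S> {
    fn send(&self, payload: Vec<u8>) -> Result<(), SendError> {
        let res = self.inner.send(payload);
        if matches!(res, Err(SendError::Fatal)) {
            // Wake up every task waiting on cancellation.cancelled().
            self.cancellation.cancel();
        }
        res
    }
}
```

The inner sender stays unaware of shutdown policy, which keeps the transport code and the cancellation logic separate.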
8 changes: 4 additions & 4 deletions crates/bid-scraper/src/bids_publisher.rs
@@ -5,7 +5,7 @@ use crate::{
CfgWithSimpleRelayPublisherConfig, Service, ServiceInner, SimpleRelayPublisherConfig,
},
slot,
-types::{BlockBid, PublisherType},
+types::{PublisherType, ScrapedRelayBlockBid},
DynResult, RPC_TIMEOUT,
};
use alloy_primitives::{Address, BlockHash, U256};
@@ -70,7 +70,7 @@ impl Service<RelayBidsPublisherConfig> for BidsPublisherService {
self,
relay_name: String,
relay_endpoint: String,
-bids_seen: Arc<Mutex<LruCache<BlockBid, ()>>>,
+bids_seen: Arc<Mutex<LruCache<ScrapedRelayBlockBid, ()>>>,
client: Arc<reqwest::Client>,
) {
let mut new_bids = 0;
@@ -153,7 +153,7 @@ impl BidsPublisherService {
relay_name: &str,
relay_endpoint: &str,
client: &reqwest::Client,
-) -> DynResult<Vec<BlockBid>> {
+) -> DynResult<Vec<ScrapedRelayBlockBid>> {
debug!("Getting bids for relay {relay_name}");
let block_number = self.inner().last_block_number + 1;
let url = format!(
@@ -175,7 +175,7 @@

let mut bids = Vec::with_capacity(json_bids.len());
for json_bid in json_bids.iter_mut() {
-let bid = BlockBid {
+let bid = ScrapedRelayBlockBid {
publisher_name: self.name.clone(),
publisher_type: PublisherType::RelayBids,
builder_pubkey: Some(BlsPublicKey::from_str(
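The `bids_seen` cache above is an `LruCache<ScrapedRelayBlockBid, ()>` used as a bounded dedup set; because the bid type's `Hash`/`PartialEq` ignore `seen_time` (see `types/bid.rs` further down), re-scraped copies of the same bid collapse into one entry. A minimal sketch of that pattern, assuming the `lru` crate; the `Bid` type here is an illustrative stand-in, not the crate's:

```rust
use std::num::NonZeroUsize;

use lru::LruCache;

/// Simplified stand-in for ScrapedRelayBlockBid: Hash/Eq ignore seen_time.
#[derive(Clone)]
struct Bid {
    block_hash: [u8; 32],
    value: u128,
    seen_time: f64, // locally added metadata, excluded from identity
}

impl PartialEq for Bid {
    fn eq(&self, other: &Self) -> bool {
        self.block_hash == other.block_hash && self.value == other.value
    }
}
impl Eq for Bid {}
impl std::hash::Hash for Bid {
    fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
        self.block_hash.hash(state);
        self.value.hash(state);
    }
}

fn main() {
    // Bounded "set": LruCache<K, ()> evicts the oldest entries at capacity.
    let mut seen: LruCache<Bid, ()> = LruCache::new(NonZeroUsize::new(4096).unwrap());

    let bid = Bid { block_hash: [1; 32], value: 42, seen_time: 1.0 };
    let same_bid_later = Bid { seen_time: 2.0, ..bid.clone() };

    // put() returns the previous value for an equal key, so Some(()) means "already seen".
    assert!(seen.put(bid, ()).is_none());            // new bid -> publish it
    assert!(seen.put(same_bid_later, ()).is_some()); // duplicate -> skip
}
```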
4 changes: 2 additions & 2 deletions crates/bid-scraper/src/bin/bid-scraper-test-client.rs
@@ -4,14 +4,14 @@ use alloy_primitives::utils::format_ether;
use bid_scraper::{
bid_scraper_client::{run_nng_subscriber_with_retries, ScrapedBidsObs},
code_from_rbuilder::{setup_tracing_subscriber, LoggerConfig},
-types::BlockBid,
+types::ScrapedRelayBlockBid,
};
use tokio::signal::ctrl_c;
use tokio_util::sync::CancellationToken;

struct ScrapedBidsPrinter {}
impl ScrapedBidsObs for ScrapedBidsPrinter {
-fn update_new_bid(&self, bid: BlockBid) {
+fn update_new_bid(&self, bid: ScrapedRelayBlockBid) {
println!(
"New bid {:?} ({:?}) Block {:?} val {:?}",
bid.publisher_name,
8 changes: 4 additions & 4 deletions crates/bid-scraper/src/bloxroute_ws_publisher.rs
@@ -1,7 +1,7 @@
use crate::{
code_from_rbuilder::EnvOrValue,
get_timestamp_f64,
-types::{BlockBid, PublisherType},
+types::{PublisherType, ScrapedRelayBlockBid},
ws_publisher::{ConnectionHandler, Service},
DynResult, RPC_TIMEOUT,
};
@@ -59,7 +59,7 @@ impl BloxrouteWsConnectionHandler {
Self { cfg, name }
}

-fn parse_bid(&self, json_bid: &serde_json::Value) -> DynResult<Option<BlockBid>> {
+fn parse_bid(&self, json_bid: &serde_json::Value) -> DynResult<Option<ScrapedRelayBlockBid>> {
let parsed = match serde_json::from_value::<BloxrouteWsBid>(json_bid.clone()) {
Ok(bid) => bid,
Err(error) => {
@@ -70,7 +70,7 @@

let relay_name = format!("bloxroute-{}", parsed.relay_type);

-let bid = BlockBid {
+let bid = ScrapedRelayBlockBid {
publisher_name: self.name.clone(),
publisher_type: PublisherType::BloxrouteWs,
builder_pubkey: Some(BlsPublicKey::from_str(&parsed.builder_pubkey)?),
@@ -133,7 +133,7 @@ impl ConnectionHandler for BloxrouteWsConnectionHandler {
Ok(())
}

-fn parse(&self, message: Message) -> eyre::Result<Option<BlockBid>> {
+fn parse(&self, message: Message) -> eyre::Result<Option<ScrapedRelayBlockBid>> {
match message {
Message::Text(data) => {
let json_bid: serde_json::Value =
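For context on the `parse` signature above: `Ok(None)` means "well-formed message that just isn't a usable bid", while `Err` is reserved for genuinely broken input; callers skip `None` and surface errors. A standalone sketch of that contract on a plain JSON string, assuming `serde_json` and `eyre`; the `Bid` type here is illustrative only:

```rust
use eyre::WrapErr;
use serde::Deserialize;

#[derive(Debug, Deserialize)]
struct Bid {
    block_number: u64,
    value: String,
}

/// Ok(Some(bid)) -> usable bid, Ok(None) -> ignore the message, Err -> broken input.
fn parse_bid(text: &str) -> eyre::Result<Option<Bid>> {
    let json: serde_json::Value =
        serde_json::from_str(text).wrap_err("message is not valid JSON")?;

    // Messages without the expected payload are skipped, not treated as errors.
    match serde_json::from_value::<Bid>(json) {
        Ok(bid) => Ok(Some(bid)),
        Err(_) => Ok(None),
    }
}

fn main() -> eyre::Result<()> {
    assert!(parse_bid(r#"{"block_number": 1, "value": "100"}"#)?.is_some());
    assert!(parse_bid(r#"{"hello": "world"}"#)?.is_none());
    assert!(parse_bid("not json").is_err());
    Ok(())
}
```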
8 changes: 4 additions & 4 deletions crates/bid-scraper/src/headers_publisher.rs
@@ -6,7 +6,7 @@ use crate::{
CfgWithSimpleRelayPublisherConfig, Service, ServiceInner, SimpleRelayPublisherConfig,
},
slot,
-types::{BlockBid, PublisherType},
+types::{PublisherType, ScrapedRelayBlockBid},
DynResult, REQUEST_TIMEOUT, RPC_TIMEOUT,
};
use alloy_primitives::{Address, BlockHash, U256};
@@ -73,7 +73,7 @@ impl Service<RelayHeadersPublisherConfig> for HeadersPublisherService {
self,
relay_name: String,
relay_endpoint: String,
-headers_seen: Arc<Mutex<LruCache<BlockBid, ()>>>,
+headers_seen: Arc<Mutex<LruCache<ScrapedRelayBlockBid, ()>>>,
client: Arc<reqwest::Client>,
) {
let header = match self.get_header(&relay_name, &relay_endpoint, &client).await {
@@ -190,7 +190,7 @@ impl HeadersPublisherService {
relay_name: &str,
relay_endpoint: &str,
client: &reqwest::Client,
-) -> DynResult<Option<BlockBid>> {
+) -> DynResult<Option<ScrapedRelayBlockBid>> {
debug!("Getting header for relay {relay_name}");

let (next_slot, last_block_hash, next_validator_pubkey) = {
@@ -228,7 +228,7 @@

let msg = &json_header["data"]["message"];

-let header = BlockBid {
+let header = ScrapedRelayBlockBid {
publisher_name: self.name.clone(),
publisher_type: PublisherType::RelayHeaders,
relay_name: relay_name.to_string(),
7 changes: 4 additions & 3 deletions crates/bid-scraper/src/relay_api_publisher.rs
@@ -10,7 +10,8 @@ use tokio_util::sync::CancellationToken;
use tracing::{error, info};

use crate::{
-bid_sender::BidSender, get_timestamp_f64, slot, types::BlockBid, REQUEST_TIMEOUT, RPC_TIMEOUT,
+bid_sender::BidSender, get_timestamp_f64, slot, types::ScrapedRelayBlockBid, REQUEST_TIMEOUT,
+RPC_TIMEOUT,
};

#[derive(Debug, Clone, Deserialize)]
@@ -96,7 +97,7 @@ pub trait Service<CfgType: CfgWithSimpleRelayPublisherConfig>: Clone + Sized + S
self,
relay_name: String,
relay_endpoint: String,
-bids_seen: Arc<Mutex<LruCache<BlockBid, ()>>>,
+bids_seen: Arc<Mutex<LruCache<ScrapedRelayBlockBid, ()>>>,
client: Arc<reqwest::Client>,
);

@@ -186,7 +187,7 @@ pub trait Service<CfgType: CfgWithSimpleRelayPublisherConfig>: Clone + Sized + S
return Ok(());
}

-let headers_seen: Arc<Mutex<LruCache<BlockBid, ()>>> =
+let headers_seen: Arc<Mutex<LruCache<ScrapedRelayBlockBid, ()>>> =
Arc::new(Mutex::new(LruCache::new(NonZeroUsize::new(4096).unwrap())));
let client = Arc::new(
reqwest::Client::builder()
12 changes: 6 additions & 6 deletions crates/bid-scraper/src/types/bid.rs
@@ -42,7 +42,7 @@ impl PublisherType {
/// `Hash` - we voluntarily omit `seen_time` (metadata that we add) and `relay_time` (not hashable and we don't care about it)
#[derive(Debug, Clone, Derivative, Serialize, Deserialize)]
#[derivative(Hash, PartialEq, Eq)]
-pub struct BlockBid {
+pub struct ScrapedRelayBlockBid {
// time when the bids-publisher saw & sent it.
#[derivative(PartialEq = "ignore")]
#[derivative(Hash = "ignore")]
@@ -93,9 +93,9 @@ mod tests {
}
}
// TODO: derive `Arbitrary` instead
-impl Arbitrary for BlockBid {
+impl Arbitrary for ScrapedRelayBlockBid {
type Parameters = ();
-type Strategy = proptest::strategy::BoxedStrategy<BlockBid>;
+type Strategy = proptest::strategy::BoxedStrategy<ScrapedRelayBlockBid>;
fn arbitrary_with(_: Self::Parameters) -> Self::Strategy {
any::<(
(
@@ -138,7 +138,7 @@ mod tests {
gas_used,
optimistic_submission,
)| {
-BlockBid {
+ScrapedRelayBlockBid {
seen_time,
publisher_name,
publisher_type,
@@ -164,7 +164,7 @@

proptest! {
#[test]
-fn bid_equality((bid, other_bid, other_seen_time) in any::<(BlockBid, BlockBid, f64)>()) {
+fn bid_equality((bid, other_bid, other_seen_time) in any::<(ScrapedRelayBlockBid, ScrapedRelayBlockBid, f64)>()) {
let mut equivalent_bid = bid.clone();
equivalent_bid.seen_time = other_seen_time;
prop_assert_eq!(&bid, &equivalent_bid);
@@ -173,7 +173,7 @@
}

#[test]
-fn bid_hashing((bid, other_bid, other_seen_time, other_relay_time) in any::<(BlockBid, BlockBid, f64, Option<f64>)>()) {
+fn bid_hashing((bid, other_bid, other_seen_time, other_relay_time) in any::<(ScrapedRelayBlockBid, ScrapedRelayBlockBid, f64, Option<f64>)>()) {
let mut equivalent_bid = bid.clone();
equivalent_bid.seen_time = other_seen_time;
equivalent_bid.relay_time = other_relay_time;
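The equality semantics this file relies on come from the `derivative` crate: `#[derivative(Hash, PartialEq, Eq)]` on the struct plus `#[derivative(PartialEq = "ignore")]` and `#[derivative(Hash = "ignore")]` on the metadata fields, so `seen_time` (and `relay_time`) never affect identity. A cut-down sketch of the same pattern, assuming the `derivative` crate; the struct is an illustration, not the real bid type:

```rust
use derivative::Derivative;

#[derive(Debug, Clone, Derivative)]
#[derivative(Hash, PartialEq, Eq)]
struct Bid {
    /// Local metadata: excluded from Hash and PartialEq.
    #[derivative(PartialEq = "ignore")]
    #[derivative(Hash = "ignore")]
    seen_time: f64,
    relay_name: String,
    value: u128,
}

fn main() {
    let a = Bid { seen_time: 1.0, relay_name: "relay".into(), value: 7 };
    let b = Bid { seen_time: 2.0, ..a.clone() };
    // Same bid despite different seen_time.
    assert_eq!(a, b);
}
```

This is what lets the LRU caches in the publishers treat re-scraped bids with different local timestamps as duplicates.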
6 changes: 3 additions & 3 deletions crates/bid-scraper/src/types/mod.rs
@@ -1,7 +1,7 @@
use crate::get_timestamp_f64;

pub mod bid;
-pub use bid::{BlockBid, PublisherType};
+pub use bid::{PublisherType, ScrapedRelayBlockBid};

mod bid_update;
pub use bid_update::TopBidUpdate;
@@ -11,8 +11,8 @@ pub fn block_bid_from_update(
relay_name: &str,
publisher_name: &str,
publisher_type: PublisherType,
-) -> BlockBid {
-BlockBid {
+) -> ScrapedRelayBlockBid {
+ScrapedRelayBlockBid {
publisher_name: publisher_name.to_owned(),
publisher_type: publisher_type.to_owned(),
builder_pubkey: Some(update.builder_pubkey),
4 changes: 2 additions & 2 deletions crates/bid-scraper/src/ultrasound_ws_publisher.rs
@@ -1,5 +1,5 @@
use crate::{
-types::{block_bid_from_update, BlockBid, PublisherType, TopBidUpdate},
+types::{block_bid_from_update, PublisherType, ScrapedRelayBlockBid, TopBidUpdate},
ws_publisher::{ConnectionHandler, Service},
};
use eyre::{eyre, Context};
@@ -63,7 +63,7 @@ impl ConnectionHandler for UltrasoundWsConnectionHandler {
Ok(())
}

-fn parse(&self, message: Message) -> eyre::Result<Option<BlockBid>> {
+fn parse(&self, message: Message) -> eyre::Result<Option<ScrapedRelayBlockBid>> {
match message {
Message::Binary(data) => {
let update = TopBidUpdate::from_ssz_bytes(&data)
4 changes: 2 additions & 2 deletions crates/bid-scraper/src/ws_publisher.rs
@@ -1,6 +1,6 @@
use std::sync::Arc;

-use crate::{bid_sender::BidSender, types::BlockBid, RPC_TIMEOUT};
+use crate::{bid_sender::BidSender, types::ScrapedRelayBlockBid, RPC_TIMEOUT};
use eyre::{eyre, Context};
use futures::stream::{SplitSink, SplitStream};
use futures_util::{SinkExt, StreamExt};
@@ -25,7 +25,7 @@ pub trait ConnectionHandler {
read: &mut SplitStream<WebSocketStream<MaybeTlsStream<TcpStream>>>,
) -> eyre::Result<()>;
/// No need to handle ping/pong. Only any accepted data.
-fn parse(&self, message: Message) -> eyre::Result<Option<BlockBid>> {
+fn parse(&self, message: Message) -> eyre::Result<Option<ScrapedRelayBlockBid>> {
}
pub struct Service<ConnectionHandlerType: 'static> {
handler: ConnectionHandlerType,
8 changes: 0 additions & 8 deletions crates/rbuilder/benches/benchmarks/mev_boost.rs
@@ -8,7 +8,6 @@ use rbuilder::mev_boost::{
BLSBlockSigner,
};
use reth::primitives::SealedBlock;
-use reth_chainspec::SEPOLIA;
use reth_primitives::kzg::Blob;
use ssz::Encode;
use std::{fs, path::PathBuf, sync::Arc};
@@ -78,7 +77,6 @@ fn bench_mevboost_sign(c: &mut Criterion) {
)));
}

-let chain_spec = SEPOLIA.clone();
let payload = generator.create_payload_attribute_data();

let mut group = c.benchmark_group("MEV-Boost Sign block for relay");
@@ -89,9 +87,6 @@
let _ = sign_block_for_relay(
&signer,
&sealed_block,
-&blobs,
-&Vec::new(),
-&chain_spec,
&payload,
H384::default(),
U256::default(),
@@ -117,9 +112,6 @@
let _ = sign_block_for_relay(
&signer,
&sealed_block_deneb,
-&blobs,
-&Vec::new(),
-&chain_spec,
&payload,
H384::default(),
U256::default(),
2 changes: 1 addition & 1 deletion crates/rbuilder/benches/benchmarks/txpool_fetcher.rs
@@ -32,7 +32,7 @@ async fn txpool_receive_util(count: u32) {

let provider = ProviderBuilder::new()
.wallet(wallet)
-.on_http(anvil.endpoint().parse().unwrap());
+.connect_http(anvil.endpoint().parse().unwrap());

let alice = anvil.addresses()[0];
let eip1559_est = provider.estimate_eip1559_fees().await.unwrap();
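The provider setup in this benchmark now uses `connect_http` instead of `on_http`. A minimal sketch of the updated builder call against a throwaway Anvil node, assuming the `alloy` meta-crate with node bindings enabled plus `tokio` and `eyre` (the repo may pull these APIs from separate `alloy-*` crates), with the wallet step omitted for brevity:

```rust
use alloy::node_bindings::Anvil;
use alloy::providers::{Provider, ProviderBuilder};

#[tokio::main]
async fn main() -> eyre::Result<()> {
    // Requires the `anvil` binary (Foundry) on PATH; spawns a throwaway local node.
    let anvil = Anvil::new().spawn();

    // connect_http replaces the on_http call used before this PR.
    let provider = ProviderBuilder::new().connect_http(anvil.endpoint().parse()?);

    let fees = provider.estimate_eip1559_fees().await?;
    println!(
        "max fee: {} / priority fee: {}",
        fees.max_fee_per_gas, fees.max_priority_fee_per_gas
    );
    Ok(())
}
```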
4 changes: 2 additions & 2 deletions
@@ -460,7 +460,7 @@ mod tests {
];
for (idx, (chunk_idx, chunk_txs_block_idx, expected)) in cases.into_iter().enumerate() {
let got = find_allowed_range(block_len, chunk_idx, &chunk_txs_block_idx);
-assert_eq!(expected, got, "Test index: {}", idx);
+assert_eq!(expected, got, "Test index: {idx}");
}
}

@@ -474,7 +474,7 @@
for expected_result in expected {
let got_result = got
.get(&expected_result.order)
-.unwrap_or_else(|| panic!("Order not found: {:?}", expected_result));
+.unwrap_or_else(|| panic!("Order not found: {expected_result:?}"));
assert_eq!(expected_result, *got_result);
}
}
2 changes: 1 addition & 1 deletion crates/rbuilder/src/beacon_api_client/mod.rs
@@ -85,6 +85,6 @@ mod tests {
// validate that the stream is not empty
// TODO: add timeout
let event = stream.next().await.unwrap().unwrap();
-print!("{:?}", event);
+print!("{event:?}");
}
}
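The last two files only migrate `format!`-style macros from positional arguments to inline captured identifiers, available since Rust 2021. A tiny illustration that both forms produce identical output:

```rust
fn main() {
    let idx = 3;
    let expected_result = ("order", 42);

    // Positional arguments (old style) and inline captures (new style) print the same.
    assert_eq!(format!("Test index: {}", idx), format!("Test index: {idx}"));
    assert_eq!(
        format!("Order not found: {:?}", expected_result),
        format!("Order not found: {expected_result:?}")
    );
}
```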