feat: Handle proofs events in the indexer (#219)
undercover-cactus authored Oct 17, 2024
1 parent 659259b commit f73fdf8
Showing 20 changed files with 176 additions and 24 deletions.
3 changes: 2 additions & 1 deletion api-augment/dist/interfaces/lookup.js

Some generated files are not rendered by default.

2 changes: 1 addition & 1 deletion api-augment/dist/interfaces/lookup.js.map

Large diffs are not rendered by default.

3 changes: 2 additions & 1 deletion api-augment/dist/types/interfaces/augment-api-events.d.ts
@@ -2032,10 +2032,11 @@ declare module "@polkadot/api-base/types/events" {
**/
ProofAccepted: AugmentedEvent<
ApiType,
[provider: H256, proof: PalletProofsDealerProof],
[provider: H256, proof: PalletProofsDealerProof, lastTickProven: u32],
{
provider: H256;
proof: PalletProofsDealerProof;
lastTickProven: u32;
}
>;
/**
1 change: 1 addition & 0 deletions api-augment/dist/types/interfaces/lookup.d.ts
@@ -1592,6 +1592,7 @@ declare const _default: {
ProofAccepted: {
provider: string;
proof: string;
lastTickProven: string;
};
NewChallengeSeed: {
challengesTicker: string;
1 change: 1 addition & 0 deletions api-augment/dist/types/interfaces/types-lookup.d.ts
@@ -2035,6 +2035,7 @@ declare module "@polkadot/types/lookup" {
readonly asProofAccepted: {
readonly provider: H256;
readonly proof: PalletProofsDealerProof;
readonly lastTickProven: u32;
} & Struct;
readonly isNewChallengeSeed: boolean;
readonly asNewChallengeSeed: {
4 changes: 2 additions & 2 deletions api-augment/src/interfaces/augment-api-events.ts
@@ -1566,8 +1566,8 @@ declare module "@polkadot/api-base/types/events" {
**/
ProofAccepted: AugmentedEvent<
ApiType,
[provider: H256, proof: PalletProofsDealerProof],
{ provider: H256; proof: PalletProofsDealerProof }
[provider: H256, proof: PalletProofsDealerProof, lastTickProven: u32],
{ provider: H256; proof: PalletProofsDealerProof; lastTickProven: u32 }
>;
/**
* A provider was marked as slashable and their challenge deadline was forcefully pushed.
3 changes: 2 additions & 1 deletion api-augment/src/interfaces/lookup.ts
@@ -1612,7 +1612,8 @@ export default {
},
ProofAccepted: {
provider: "H256",
proof: "PalletProofsDealerProof"
proof: "PalletProofsDealerProof",
lastTickProven: "u32"
},
NewChallengeSeed: {
challengesTicker: "u32",
1 change: 1 addition & 0 deletions api-augment/src/interfaces/types-lookup.ts
@@ -2129,6 +2129,7 @@ declare module "@polkadot/types/lookup" {
readonly asProofAccepted: {
readonly provider: H256;
readonly proof: PalletProofsDealerProof;
readonly lastTickProven: u32;
} & Struct;
readonly isNewChallengeSeed: boolean;
readonly asNewChallengeSeed: {
2 changes: 1 addition & 1 deletion api-augment/storagehub.json

Large diffs are not rendered by default.

@@ -3,6 +3,8 @@ CREATE TABLE bsp (
id SERIAL PRIMARY KEY,
account VARCHAR NOT NULL,
capacity NUMERIC(20, 0) NOT NULL,
stake NUMERIC(38, 0) NOT NULL DEFAULT 0,
last_tick_proven BIGINT NOT NULL DEFAULT 0,
created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP
);
@@ -17,4 +19,4 @@ CREATE TABLE bsp_multiaddress (
);

-- Create index on bsp_id for faster lookups
CREATE INDEX idx_bsp_multiaddress_bsp_id ON bsp_multiaddress(bsp_id);
CREATE INDEX idx_bsp_multiaddress_bsp_id ON bsp_multiaddress(bsp_id);
41 changes: 41 additions & 0 deletions client/indexer-db/src/models/bsp.rs
@@ -16,6 +16,8 @@ pub struct Bsp {
pub id: i32,
pub account: String,
pub capacity: BigDecimal,
pub stake: BigDecimal,
pub last_tick_proven: i64,
pub created_at: NaiveDateTime,
pub updated_at: NaiveDateTime,
pub onchain_bsp_id: String,
@@ -38,12 +40,14 @@ impl Bsp {
capacity: BigDecimal,
multiaddresses: Vec<MultiAddress>,
onchain_bsp_id: String,
stake: BigDecimal,
) -> Result<Self, diesel::result::Error> {
let bsp = diesel::insert_into(bsp::table)
.values((
bsp::account.eq(account),
bsp::capacity.eq(capacity),
bsp::onchain_bsp_id.eq(onchain_bsp_id),
bsp::stake.eq(stake),
))
.returning(Bsp::as_select())
.get_result(conn)
@@ -90,4 +94,41 @@ impl Bsp {
.await?;
Ok(())
}

pub async fn get_by_onchain_bsp_id<'a>(
conn: &mut DbConnection<'a>,
onchain_bsp_id: String,
) -> Result<Self, diesel::result::Error> {
let bsp = bsp::table
.filter(bsp::onchain_bsp_id.eq(onchain_bsp_id))
.first(conn)
.await?;
Ok(bsp)
}

pub async fn update_stake<'a>(
conn: &mut DbConnection<'a>,
onchain_bsp_id: String,
stake: BigDecimal,
) -> Result<(), diesel::result::Error> {
diesel::update(bsp::table)
.filter(bsp::onchain_bsp_id.eq(onchain_bsp_id))
.set(bsp::stake.eq(stake))
.execute(conn)
.await?;
Ok(())
}

pub async fn update_last_tick_proven<'a>(
conn: &mut DbConnection<'a>,
onchain_bsp_id: String,
last_tick_proven: i64,
) -> Result<(), diesel::result::Error> {
diesel::update(bsp::table)
.filter(bsp::onchain_bsp_id.eq(onchain_bsp_id))
.set(bsp::last_tick_proven.eq(last_tick_proven))
.execute(conn)
.await?;
Ok(())
}
}
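
As a usage sketch (not part of the commit), the two new columns can be read back through the helper introduced above. The function name bsp_proof_status and the exact crate paths are assumptions based on the imports visible elsewhere in this diff (shc_indexer_db re-exporting the Bsp model and the DbConnection alias).

// Illustrative sketch only: read the new `stake` and `last_tick_proven` values
// for a BSP via the helper added in this commit.
use bigdecimal::BigDecimal;
use shc_indexer_db::{models::Bsp, DbConnection};

pub async fn bsp_proof_status(
    conn: &mut DbConnection<'_>,
    onchain_bsp_id: String,
) -> Result<(BigDecimal, i64), diesel::result::Error> {
    // `get_by_onchain_bsp_id`, `stake` and `last_tick_proven` are all introduced by this commit.
    let bsp = Bsp::get_by_onchain_bsp_id(conn, onchain_bsp_id).await?;
    Ok((bsp.stake, bsp.last_tick_proven))
}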
2 changes: 2 additions & 0 deletions client/indexer-db/src/schema.rs
@@ -5,6 +5,8 @@ diesel::table! {
id -> Int4,
account -> Varchar,
capacity -> Numeric,
stake -> Numeric,
last_tick_proven -> Int8,
created_at -> Timestamp,
updated_at -> Timestamp,
onchain_bsp_id -> Varchar,
62 changes: 56 additions & 6 deletions client/indexer-service/src/handler.rs
@@ -9,13 +9,15 @@ use sc_client_api::{BlockBackend, BlockchainEvents};
use sp_core::H256;
use sp_runtime::traits::Header;

use pallet_storage_providers_runtime_api::StorageProvidersApi;
use shc_actors_framework::actor::{Actor, ActorEventLoop};
use shc_common::blockchain_utils::EventsRetrievalError;
use shc_common::{
blockchain_utils::get_events_at_block,
types::{BlockNumber, ParachainClient},
};
use shc_indexer_db::{models::*, DbConnection, DbPool};
use sp_api::ProvideRuntimeApi;
use storage_hub_runtime::RuntimeEvent;

pub(crate) const LOG_TARGET: &str = "indexer-service";
@@ -105,7 +107,7 @@ impl IndexerService {
ServiceState::update(conn, block_number as i64).await?;

for ev in block_events {
self.index_event(conn, &ev.event).await?;
self.index_event(conn, &ev.event, block_hash).await?;
}

Ok(())
@@ -120,6 +122,7 @@
&'b self,
conn: &mut DbConnection<'a>,
event: &RuntimeEvent,
block_hash: H256,
) -> Result<(), diesel::result::Error> {
match event {
RuntimeEvent::BucketNfts(event) => self.index_bucket_nfts_event(conn, event).await?,
@@ -130,7 +133,9 @@
RuntimeEvent::ProofsDealer(event) => {
self.index_proofs_dealer_event(conn, event).await?
}
RuntimeEvent::Providers(event) => self.index_providers_event(conn, event).await?,
RuntimeEvent::Providers(event) => {
self.index_providers_event(conn, event, block_hash).await?
}
RuntimeEvent::Randomness(event) => self.index_randomness_event(conn, event).await?,
// Runtime events that we're not interested in.
// We add them here instead of directly matching (_ => {})
@@ -301,13 +306,24 @@

async fn index_proofs_dealer_event<'a, 'b: 'a>(
&'b self,
_conn: &mut DbConnection<'a>,
conn: &mut DbConnection<'a>,
event: &pallet_proofs_dealer::Event<storage_hub_runtime::Runtime>,
) -> Result<(), diesel::result::Error> {
match event {
pallet_proofs_dealer::Event::MutationsApplied { .. } => {}
pallet_proofs_dealer::Event::NewChallenge { .. } => {}
pallet_proofs_dealer::Event::ProofAccepted { .. } => {}
pallet_proofs_dealer::Event::ProofAccepted {
provider,
proof: _proof,
last_tick_proven,
} => {
Bsp::update_last_tick_proven(
conn,
provider.to_string(),
(*last_tick_proven).into(),
)
.await?;
}
pallet_proofs_dealer::Event::NewChallengeSeed { .. } => {}
pallet_proofs_dealer::Event::NewCheckpointChallenge { .. } => {}
pallet_proofs_dealer::Event::SlashableProvider { .. } => {}
@@ -323,6 +339,7 @@
&'b self,
conn: &mut DbConnection<'a>,
event: &pallet_storage_providers::Event<storage_hub_runtime::Runtime>,
block_hash: H256,
) -> Result<(), diesel::result::Error> {
match event {
pallet_storage_providers::Event::BspRequestSignUpSuccess { .. } => {}
@@ -332,6 +349,14 @@
multiaddresses,
capacity,
} => {
let stake = self
.client
.runtime_api()
.get_bsp_stake(block_hash, bsp_id)
.expect("to have a stake")
.unwrap_or(Default::default())
.into();

let mut sql_multiaddresses = Vec::new();
for multiaddress in multiaddresses {
let multiaddress_str =
@@ -345,6 +370,7 @@
capacity.into(),
sql_multiaddresses,
bsp_id.to_string(),
stake,
)
.await?;
}
@@ -361,8 +387,19 @@
old_capacity: _old_capacity,
next_block_when_change_allowed: _next_block_when_change_allowed,
} => match provider_id {
StorageProviderId::BackupStorageProvider(_) => {
StorageProviderId::BackupStorageProvider(bsp_id) => {
Bsp::update_capacity(conn, who.to_string(), new_capacity.into()).await?;

// Also update the stake
let stake = self
.client
.runtime_api()
.get_bsp_stake(block_hash, bsp_id)
.expect("to have a stake")
.unwrap_or(Default::default())
.into();

Bsp::update_stake(conn, bsp_id.to_string(), stake).await?;
}
StorageProviderId::MainStorageProvider(_) => {
Bsp::update_capacity(conn, who.to_string(), new_capacity.into()).await?;
@@ -403,7 +440,20 @@
} => {
Msp::delete(conn, who.to_string()).await?;
}
pallet_storage_providers::Event::Slashed { .. } => {}
pallet_storage_providers::Event::Slashed {
provider_id,
amount_slashed: _amount_slashed,
} => {
let stake = self
.client
.runtime_api()
.get_bsp_stake(block_hash, provider_id)
.expect("to have a stake")
.unwrap_or(Default::default())
.into();

Bsp::update_stake(conn, provider_id.to_string(), stake).await?;
}
pallet_storage_providers::Event::__Ignore(_, _) => {}
}
Ok(())
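A design note on the handler changes above: the stake lookup through the StorageProvidersApi appears three times (BSP sign-up, capacity change, slashing) and panics via .expect("to have a stake") if the runtime-API call itself fails. Below is a non-panicking variant, offered only as an illustrative sketch: it reuses the same get_bsp_stake call and default fallback shown in the diff, and it assumes a `log`-style macro behind the crate's LOG_TARGET (swap in whatever logging the crate actually uses).

// Sketch only (not part of the commit): a defensive replacement for the
// `.expect("to have a stake")` pattern, usable in any of the match arms above
// where `block_hash` and a provider id (`bsp_id` / `provider_id`) are in scope.
let stake: BigDecimal = match self.client.runtime_api().get_bsp_stake(block_hash, bsp_id) {
    // Runtime API call succeeded; fall back to a default stake if none is returned.
    Ok(maybe_stake) => maybe_stake.unwrap_or_default().into(),
    // Runtime API call failed; log it and index a zero stake instead of panicking.
    // (`log::warn!` is an assumption about the crate's logging setup.)
    Err(e) => {
        log::warn!(target: LOG_TARGET, "Failed to query BSP stake at {:?}: {:?}", block_hash, e);
        BigDecimal::from(0)
    }
};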
9 changes: 7 additions & 2 deletions pallets/proofs-dealer/src/lib.rs
@@ -386,6 +386,7 @@ pub mod pallet {
ProofAccepted {
provider: ProviderIdFor<T>,
proof: Proof<T>,
last_tick_proven: BlockNumberFor<T>,
},

/// A new challenge seed was generated.
@@ -594,10 +595,14 @@
}
};

Self::do_submit_proof(&provider, &proof)?;
let last_tick_proven = Self::do_submit_proof(&provider, &proof)?;

// Emit event.
Self::deposit_event(Event::ProofAccepted { provider, proof });
Self::deposit_event(Event::ProofAccepted {
provider,
proof,
last_tick_proven,
});

// Return a successful DispatchResultWithPostInfo.
// If the proof is valid, the execution of this extrinsic should be refunded.
12 changes: 12 additions & 0 deletions pallets/proofs-dealer/src/tests.rs
@@ -916,11 +916,15 @@ fn submit_proof_success() {
// Dispatch challenge extrinsic.
assert_ok!(ProofsDealer::submit_proof(user, proof.clone(), None));

let last_tick_proven =
LastTickProviderSubmittedAProofFor::<Test>::get(provider_id).unwrap();

// Check for event submitted.
System::assert_last_event(
Event::ProofAccepted {
provider: provider_id,
proof,
last_tick_proven,
}
.into(),
);
@@ -1061,11 +1065,15 @@ fn submit_proof_adds_provider_to_valid_submitters_set() {
// Dispatch challenge extrinsic.
assert_ok!(ProofsDealer::submit_proof(user, proof.clone(), None));

let last_tick_proven =
LastTickProviderSubmittedAProofFor::<Test>::get(provider_id).unwrap();

// Check for event submitted.
System::assert_last_event(
Event::ProofAccepted {
provider: provider_id,
proof,
last_tick_proven,
}
.into(),
);
@@ -3410,11 +3418,15 @@ fn new_challenges_round_bad_provider_marked_as_slashable_but_good_no() {
None
));

let last_tick_proven =
LastTickProviderSubmittedAProofFor::<Test>::get(alice_provider_id).unwrap();

// Check for event submitted.
System::assert_last_event(
Event::ProofAccepted {
provider: alice_provider_id,
proof,
last_tick_proven,
}
.into(),
);
7 changes: 5 additions & 2 deletions pallets/proofs-dealer/src/utils.rs
@@ -125,7 +125,10 @@
/// has been a Checkpoint Challenge block in between the last tick proven and the current tick.
/// If there has been, the Provider should have included proofs for the challenges in that block.
/// It then verifies the forest proof and each key proof, using the `ForestVerifier` and `KeyVerifier`.
pub fn do_submit_proof(submitter: &ProviderIdFor<T>, proof: &Proof<T>) -> DispatchResult {
pub fn do_submit_proof(
submitter: &ProviderIdFor<T>,
proof: &Proof<T>,
) -> Result<BlockNumberFor<T>, DispatchError> {
let forest_proof = &proof.forest_proof;
let key_proofs = &proof.key_proofs;

@@ -388,7 +391,7 @@
}
}

Ok(())
Ok(challenges_tick)
}

/// Generate a new round of challenges, be it random or checkpoint.