diff --git a/components/clarinet-cli/src/frontend/cli.rs b/components/clarinet-cli/src/frontend/cli.rs index 6de2a289b..a7b8bfdb2 100644 --- a/components/clarinet-cli/src/frontend/cli.rs +++ b/components/clarinet-cli/src/frontend/cli.rs @@ -1072,7 +1072,7 @@ pub fn main() { contract.epoch, contract.clarity_version, ); - let mut analysis_db = AnalysisDatabase::new(&mut session.interpreter.datastore); + let mut analysis_db = AnalysisDatabase::new(&mut session.interpreter.clarity_datastore); let mut analysis_diagnostics = match analysis::run_analysis( &mut contract_analysis, &mut analysis_db, diff --git a/components/clarity-events/src/bin.rs b/components/clarity-events/src/bin.rs index 0cc612133..67521260f 100644 --- a/components/clarity-events/src/bin.rs +++ b/components/clarity-events/src/bin.rs @@ -65,7 +65,7 @@ pub fn main() { }; { - let mut analysis_db = session.interpreter.datastore.as_analysis_db(); + let mut analysis_db = session.interpreter.clarity_datastore.as_analysis_db(); let cost_track = LimitedCostTracker::new_free(); let type_checker = TypeChecker::new(&mut analysis_db, cost_track, true); let settings = Settings::default(); diff --git a/components/clarity-repl/src/repl/clarity_values.rs b/components/clarity-repl/src/repl/clarity_values.rs index 1131556ed..d0dce8ad3 100644 --- a/components/clarity-repl/src/repl/clarity_values.rs +++ b/components/clarity-repl/src/repl/clarity_values.rs @@ -22,47 +22,52 @@ pub fn uint8_to_value(mut value: &[u8]) -> Value { pub fn value_to_string(value: &Value) -> String { match value { - Value::Principal(principal_data) => { - format!("'{principal_data}") - } + Value::Principal(principal_data) => format!("'{}", principal_data), Value::Tuple(tup_data) => { - let mut data = Vec::new(); - for (name, value) in tup_data.data_map.iter() { - data.push(format!("{}: {}", &**name, value_to_string(value))) - } - format!("{{ {} }}", data.join(", ")) + let data = tup_data + .data_map + .iter() + .map(|(name, value)| format!("{}: {}", name, value_to_string(value))) + .collect::>() + .join(", "); + format!("{{ {} }}", data) } - Value::Optional(opt_data) => match opt_data.data { - Some(ref x) => format!("(some {})", value_to_string(x)), + Value::Optional(opt_data) => match &opt_data.data { + Some(x) => format!("(some {})", value_to_string(x)), None => "none".to_string(), }, - Value::Response(res_data) => match res_data.committed { - true => format!("(ok {})", value_to_string(&res_data.data)), - false => format!("(err {})", value_to_string(&res_data.data)), - }, - Value::Sequence(SequenceData::String(CharType::ASCII(data))) => { - format!("\"{}\"", String::from_utf8(data.data.clone()).unwrap()) + Value::Response(res_data) => { + let committed = if res_data.committed { "ok" } else { "err" }; + format!("({} {})", committed, value_to_string(&res_data.data)) + } + Value::Sequence(SequenceData::String(CharType::ASCII(ascii_data))) => { + format!("\"{}\"", String::from_utf8_lossy(&ascii_data.data)) } - Value::Sequence(SequenceData::String(CharType::UTF8(data))) => { - let mut result = String::new(); - for c in data.data.iter() { - if c.len() > 1 { - // escape extended charset - result.push_str(&format!("\\u{{{}}}", hash::to_hex(&c[..]))); - } else { - result.push(c[0] as char) - } - } - format!("u\"{result}\"") + Value::Sequence(SequenceData::String(CharType::UTF8(utf8_data))) => { + let result = utf8_data + .data + .iter() + .map(|c| { + if c.len() > 1 { + format!("\\u{{{}}}", hash::to_hex(&c[..])) + } else { + (c[0] as char).to_string() + } + }) + .collect::>() + 
.join(""); + format!("u\"{}\"", result) } Value::Sequence(SequenceData::List(list_data)) => { - let mut data = Vec::new(); - for value in list_data.data.iter() { - data.push(value_to_string(value)) - } - format!("(list {})", data.join(" ")) + let data = list_data + .data + .iter() + .map(value_to_string) + .collect::>() + .join(" "); + format!("(list {})", data) } - _ => format!("{value}"), + _ => format!("{}", value), } } diff --git a/components/clarity-repl/src/repl/datastore.rs b/components/clarity-repl/src/repl/datastore.rs index ffd5491b4..1f3c08414 100644 --- a/components/clarity-repl/src/repl/datastore.rs +++ b/components/clarity-repl/src/repl/datastore.rs @@ -37,24 +37,28 @@ fn epoch_to_peer_version(epoch: StacksEpochId) -> u8 { } #[derive(Clone, Debug)] -pub struct Datastore { - store: HashMap>, - block_id_lookup: HashMap, - metadata: HashMap<(String, String), String>, +pub struct ClarityDatastore { open_chain_tip: StacksBlockId, current_chain_tip: StacksBlockId, - chain_height: u32, + store: HashMap>, + metadata: HashMap<(String, String), String>, + block_id_lookup: HashMap, height_at_chain_tip: HashMap, } #[derive(Clone, Debug)] -pub struct BlockInfo { +pub struct BurnBlockInfo { + burn_block_time: u64, + burn_block_height: u32, +} + +#[derive(Clone, Debug)] +pub struct StacksBlockInfo { block_header_hash: BlockHeaderHash, burn_block_header_hash: BurnchainHeaderHash, consensus_hash: ConsensusHash, vrf_seed: VRFSeed, - burn_block_time: u64, - burn_block_height: u32, + stacks_block_time: u64, miner: StacksAddress, burnchain_tokens_spent_for_block: u128, get_burnchain_tokens_spent_for_winning_block: u128, @@ -71,21 +75,23 @@ pub struct StacksConstants { } #[derive(Clone, Debug)] -pub struct BurnDatastore { - store: HashMap, +pub struct Datastore { + burn_chain_height: u32, + burn_blocks: HashMap, + stacks_chain_height: u32, + stacks_blocks: HashMap, sortition_lookup: HashMap, consensus_hash_lookup: HashMap, - block_id_lookup: HashMap, - open_chain_tip: StacksBlockId, - current_chain_tip: StacksBlockId, - chain_height: u32, - height_at_chain_tip: HashMap, current_epoch: StacksEpochId, current_epoch_start_height: u32, constants: StacksConstants, genesis_time: u64, } +// fn height_to_hashed_bytes(height: u32) -> [u8; 32] { +// let hash = Sha512_256::digest(height.to_be_bytes()); +// hash.into() +// } fn height_to_hashed_bytes(height: u32) -> [u8; 32] { let input_bytes = height.to_be_bytes(); let mut hasher = Sha512_256::new(); @@ -98,107 +104,117 @@ fn height_to_id(height: u32) -> StacksBlockId { StacksBlockId(height_to_hashed_bytes(height)) } -fn height_to_block(height: u32, genesis_time: Option) -> BlockInfo { - let bytes = height_to_hashed_bytes(height); - let genesis_time = genesis_time.unwrap_or(0); - - let block_header_hash = { - let mut buffer = bytes; - buffer[0] = 1; - BlockHeaderHash(buffer) - }; - let burn_block_header_hash = { - let mut buffer = bytes; - buffer[0] = 2; - BurnchainHeaderHash(buffer) - }; - let consensus_hash = { - let mut buffer = bytes; - buffer[0] = 3; - ConsensusHash::from_bytes(&buffer[0..20]).unwrap() - }; - let vrf_seed = { - let mut buffer = bytes; - buffer[0] = 4; - VRFSeed(buffer) - }; - let time_since_genesis: u64 = (height * 600).into(); - let burn_block_time: u64 = genesis_time + time_since_genesis; - let burn_block_height = height; - let miner = StacksAddress::burn_address(true); - let burnchain_tokens_spent_for_block = 2000; - let get_burnchain_tokens_spent_for_winning_block = 2000; - let tokens_earned_for_block = 5000; - let pox_payout_addrs 
= (vec![], 0_u128); - - BlockInfo { - block_header_hash, - burn_block_header_hash, - consensus_hash, - vrf_seed, - burn_block_time, - burn_block_height, - miner, - burnchain_tokens_spent_for_block, - get_burnchain_tokens_spent_for_winning_block, - tokens_earned_for_block, - pox_payout_addrs, - } +fn height_to_burn_block_header_hash(height: u32) -> BurnchainHeaderHash { + let mut bytes = height_to_hashed_bytes(height); + bytes[0] = 2; + BurnchainHeaderHash(bytes) } -impl Default for Datastore { +impl Default for ClarityDatastore { fn default() -> Self { Self::new() } } -impl Datastore { +impl ClarityDatastore { pub fn new() -> Self { let id = height_to_id(0); - - let mut store = HashMap::new(); - store.insert(id, HashMap::new()); - - let mut block_id_lookup = HashMap::new(); - block_id_lookup.insert(id, id); - - let mut id_height_map = HashMap::new(); - id_height_map.insert(id, 0); - Self { - store, - block_id_lookup, - metadata: HashMap::new(), open_chain_tip: id, current_chain_tip: id, - chain_height: 0, - height_at_chain_tip: id_height_map, + store: HashMap::from([(id, HashMap::new())]), + metadata: HashMap::new(), + block_id_lookup: HashMap::from([(id, id)]), + height_at_chain_tip: HashMap::from([(id, 0)]), } } - pub fn advance_chain_tip(&mut self, count: u32) -> u32 { - let cur_height = self.chain_height; - let current_lookup_id = *self + pub fn open(_path_str: &str, _miner_tip: Option<&StacksBlockId>) -> Result { + Ok(ClarityDatastore::new()) + } + + pub fn as_analysis_db(&mut self) -> AnalysisDatabase<'_> { + AnalysisDatabase::new(self) + } + + /// begin, commit, rollback a save point identified by key + /// this is used to clean up any data from aborted blocks + /// (NOT aborted transactions that is handled by the clarity vm directly). + /// The block header hash is used for identifying savepoints. + /// this _cannot_ be used to rollback to arbitrary prior block hash, because that + /// blockhash would already have committed and no longer exist in the save point stack. + /// this is a "lower-level" rollback than the roll backs performed in + /// ClarityDatabase or AnalysisDatabase -- this is done at the backing store level. + + pub fn begin(&mut self, _current: &StacksBlockId, _next: &StacksBlockId) { + // self.marf.begin(current, next) + // .expect(&format!("ERROR: Failed to begin new MARF block {} - {})", current, next)); + // self.chain_tip = self.marf.get_open_chain_tip() + // .expect("ERROR: Failed to get open MARF") + // .clone(); + // self.side_store.begin(&self.chain_tip); + } + pub fn rollback(&mut self) { + // self.marf.drop_current(); + // self.side_store.rollback(&self.chain_tip); + // self.chain_tip = StacksBlockId::sentinel(); + } + // This is used by miners + // so that the block validation and processing logic doesn't + // reprocess the same data as if it were already loaded + pub fn commit_mined_block(&mut self, _will_move_to: &StacksBlockId) { + // rollback the side_store + // the side_store shouldn't commit data for blocks that won't be + // included in the processed chainstate (like a block constructed during mining) + // _if_ for some reason, we do want to be able to access that mined chain state in the future, + // we should probably commit the data to a different table which does not have uniqueness constraints. 
+ // self.side_store.rollback(&self.chain_tip); + // self.marf.commit_mined(will_move_to) + // .expect("ERROR: Failed to commit MARF block"); + } + + pub fn commit_to(&mut self, _final_bhh: &StacksBlockId) { + // println!("commit_to({})", final_bhh); + // self.side_store.commit_metadata_to(&self.chain_tip, final_bhh); + // self.side_store.commit(&self.chain_tip); + // self.marf.commit_to(final_bhh) + // .expect("ERROR: Failed to commit MARF block"); + } + + pub fn put(&mut self, key: &str, value: &str) { + let lookup_id = self .block_id_lookup .get(&self.open_chain_tip) - .expect("Open chain tip missing in block id lookup table"); + .expect("Could not find current chain tip in block_id_lookup map"); - for i in 1..=count { - let height = cur_height + i; - let id = height_to_id(height); + // if there isn't a store for the open chain_tip, make one and update the + // entry for the block id in the lookup table + if *lookup_id != self.open_chain_tip { + self.store.insert( + self.open_chain_tip, + self.store + .get(lookup_id) + .unwrap_or_else(|| panic!("Block with ID {:?} does not exist", lookup_id)) + .clone(), + ); - self.block_id_lookup.insert(id, current_lookup_id); - self.height_at_chain_tip.insert(id, height); + self.block_id_lookup + .insert(self.open_chain_tip, self.current_chain_tip); } - self.chain_height += count; - self.open_chain_tip = height_to_id(self.chain_height); - self.current_chain_tip = self.open_chain_tip; - self.chain_height + if let Some(map) = self.store.get_mut(&self.open_chain_tip) { + map.insert(key.to_string(), value.to_string()); + } else { + panic!("Block does not exist for current chain tip"); + } + } + + pub fn make_contract_hash_key(contract: &QualifiedContractIdentifier) -> String { + format!("clarity-contract::{}", contract) } } -impl ClarityBackingStore for Datastore { +impl ClarityBackingStore for ClarityDatastore { fn put_all_data(&mut self, items: Vec<(String, String)>) -> Result<()> { for (key, value) in items { self.put(&key, &value); @@ -243,12 +259,15 @@ impl ClarityBackingStore for Datastore { fn get_current_block_height(&mut self) -> u32 { *self .height_at_chain_tip - .get(self.get_chain_tip()) + .get(&self.current_chain_tip) .unwrap_or(&u32::MAX) } fn get_open_chain_tip_height(&mut self) -> u32 { - self.chain_height + self.height_at_chain_tip + .get(&self.open_chain_tip) + .copied() + .unwrap_or(u32::MAX) } fn get_open_chain_tip(&mut self) -> StacksBlockId { @@ -267,8 +286,6 @@ impl ClarityBackingStore for Datastore { key: &str, value: &str, ) -> Result<()> { - // let bhh = self.get_open_chain_tip(); - // self.get_side_store().insert_metadata(&bhh, &contract.to_string(), key, value) self.metadata .insert((contract.to_string(), key.to_string()), value.to_string()); Ok(()) @@ -279,8 +296,6 @@ impl ClarityBackingStore for Datastore { contract: &QualifiedContractIdentifier, key: &str, ) -> Result> { - // let (bhh, _) = self.get_contract_hash(contract)?; - // Ok(self.get_side_store().get_metadata(&bhh, &contract.to_string(), key)) let key = &(contract.to_string(), key.to_string()); match self.metadata.get(key) { @@ -319,20 +334,37 @@ impl ClarityBackingStore for Datastore { } } -impl BurnDatastore { +impl Default for Datastore { + fn default() -> Self { + Self::new(StacksConstants { + burn_start_height: 0, + pox_prepare_length: 50, + pox_reward_cycle_length: 1050, + pox_rejection_fraction: 0, + }) + } +} + +impl Datastore { pub fn new(constants: StacksConstants) -> Self { let bytes = height_to_hashed_bytes(0); let id = StacksBlockId(bytes); let 
sortition_id = SortitionId(bytes); let genesis_time = chrono::Utc::now().timestamp() as u64; - let genesis_block = BlockInfo { + let first_burn_block_header_hash = BurnchainHeaderHash([0x00; 32]); + + let genesis_burn_block = BurnBlockInfo { + burn_block_time: genesis_time, + burn_block_height: 0, + }; + + let genesis_block = StacksBlockInfo { block_header_hash: BlockHeaderHash([0x00; 32]), - burn_block_header_hash: BurnchainHeaderHash([0x00; 32]), + burn_block_header_hash: first_burn_block_header_hash, consensus_hash: ConsensusHash([0x00; 20]), vrf_seed: VRFSeed([0x00; 32]), - burn_block_time: genesis_time, - burn_block_height: 0, + stacks_block_time: genesis_time, miner: StacksAddress::burn_address(false), burnchain_tokens_spent_for_block: 0, get_burnchain_tokens_spent_for_winning_block: 0, @@ -340,33 +372,18 @@ impl BurnDatastore { pox_payout_addrs: (vec![], 0), }; - let mut height_at_chain_tip = HashMap::new(); - height_at_chain_tip.insert(id, 0); - - let mut sortition_lookup = HashMap::new(); - sortition_lookup.insert(sortition_id, id); - - let mut consensus_hash_lookup = HashMap::new(); - consensus_hash_lookup.insert(genesis_block.consensus_hash, sortition_id); + let sortition_lookup = HashMap::from([(sortition_id, id)]); + let consensus_hash_lookup = HashMap::from([(genesis_block.consensus_hash, sortition_id)]); + let burn_blocks = HashMap::from([(first_burn_block_header_hash, genesis_burn_block)]); + let stacks_blocks = HashMap::from([(id, genesis_block)]); - let mut store = HashMap::new(); - store.insert(id, genesis_block); - - let mut block_id_lookup = HashMap::new(); - block_id_lookup.insert(id, id); - - let mut id_height_map = HashMap::new(); - id_height_map.insert(id, 0); - - BurnDatastore { - store, + Datastore { + burn_chain_height: 0, + burn_blocks, + stacks_chain_height: 0, + stacks_blocks, sortition_lookup, consensus_hash_lookup, - block_id_lookup, - open_chain_tip: id, - current_chain_tip: id, - chain_height: 0, - height_at_chain_tip, current_epoch: StacksEpochId::Epoch2_05, current_epoch_start_height: 0, constants, @@ -378,70 +395,139 @@ impl BurnDatastore { self.current_epoch } - pub fn get_current_block_height(&self) -> u32 { - self.chain_height + pub fn get_current_stacks_block_height(&self) -> u32 { + self.stacks_chain_height + } + + pub fn get_current_burn_block_height(&self) -> u32 { + self.burn_chain_height } - pub fn advance_chain_tip(&mut self, count: u32) -> u32 { - let cur_height = self.chain_height; - let current_lookup_id = *self + + fn build_stacks_block(&self) -> StacksBlockInfo { + let burn_chain_height = self.burn_chain_height; + let stacks_block_height = self.stacks_chain_height; + + let bytes = height_to_hashed_bytes(stacks_block_height); + + let block_header_hash = { + let mut buffer = bytes; + buffer[0] = 1; + BlockHeaderHash(buffer) + }; + let burn_block_header_hash = height_to_burn_block_header_hash(burn_chain_height); + let consensus_hash = { + let mut buffer = bytes; + buffer[0] = 3; + ConsensusHash::from_bytes(&buffer[0..20]).unwrap() + }; + let vrf_seed = { + let mut buffer = bytes; + buffer[0] = 4; + VRFSeed(buffer) + }; + let time_since_genesis: u64 = (stacks_block_height * 600).into(); + let stacks_block_time: u64 = self.genesis_time + time_since_genesis; + let miner = StacksAddress::burn_address(true); + let burnchain_tokens_spent_for_block = 2000; + let get_burnchain_tokens_spent_for_winning_block = 2000; + let tokens_earned_for_block = 5000; + let pox_payout_addrs = (vec![], 0_u128); + + StacksBlockInfo { + block_header_hash, + 
burn_block_header_hash, + consensus_hash, + vrf_seed, + stacks_block_time, + miner, + burnchain_tokens_spent_for_block, + get_burnchain_tokens_spent_for_winning_block, + tokens_earned_for_block, + pox_payout_addrs, + } + } + + pub fn advance_burn_chain_tip( + &mut self, + clarity_datastore: &mut ClarityDatastore, + count: u32, + ) -> u32 { + let genesis_time = self.genesis_time; + + for _ in 1..=count { + let height = self.burn_chain_height + 1; + let hash = height_to_burn_block_header_hash(height); + let burn_block_info = BurnBlockInfo { + burn_block_time: genesis_time + ((height * 600) as u64), + burn_block_height: height, + }; + + self.burn_blocks.insert(hash, burn_block_info); + self.burn_chain_height = height; + self.advance_stacks_chain_tip(clarity_datastore, 1); + } + + self.burn_chain_height + } + + pub fn advance_stacks_chain_tip( + &mut self, + clarity_datastore: &mut ClarityDatastore, + count: u32, + ) -> u32 { + let current_lookup_id = *clarity_datastore .block_id_lookup - .get(&self.open_chain_tip) + .get(&clarity_datastore.open_chain_tip) .expect("Open chain tip missing in block id lookup table"); - let genesis_time = self.genesis_time; for i in 1..=count { - let height = cur_height + i; + let height = self.stacks_chain_height + i; let bytes = height_to_hashed_bytes(height); let id = StacksBlockId(bytes); let sortition_id = SortitionId(bytes); - let block_info = height_to_block(height, Some(genesis_time)); - self.block_id_lookup.insert(id, current_lookup_id); - self.height_at_chain_tip.insert(id, height); + let block_info = self.build_stacks_block(); + clarity_datastore + .block_id_lookup + .insert(id, current_lookup_id); + clarity_datastore.height_at_chain_tip.insert(id, height); self.sortition_lookup.insert(sortition_id, id); self.consensus_hash_lookup .insert(block_info.consensus_hash, sortition_id); - self.store.insert(id, block_info); + self.stacks_blocks.insert(id, block_info); } - self.chain_height += count; - self.open_chain_tip = height_to_id(self.chain_height); - self.current_chain_tip = self.open_chain_tip; - self.chain_height + self.stacks_chain_height += count; + clarity_datastore.open_chain_tip = height_to_id(self.stacks_chain_height); + clarity_datastore.current_chain_tip = clarity_datastore.open_chain_tip; + self.stacks_chain_height } pub fn set_current_epoch(&mut self, epoch: StacksEpochId) { self.current_epoch = epoch; - self.current_epoch_start_height = self.chain_height; + self.current_epoch_start_height = self.stacks_chain_height; } } -impl HeadersDB for BurnDatastore { - // fn get(&mut self, key: &str) -> Option { - // let lookup_id = self - // .block_id_lookup - // .get(&self.current_chain_tip) - // .expect("Could not find current chain tip in block_id_lookup map"); - - // if let Some(map) = self.store.get(lookup_id) { - // map.get(key).map(|v| v.clone()) - // } else { - // panic!("Block does not exist for current chain tip"); - // } - // } - +impl HeadersDB for Datastore { fn get_stacks_block_header_hash_for_block( &self, id_bhh: &StacksBlockId, _epoch_id: &StacksEpochId, ) -> Option { - self.store.get(id_bhh).map(|id| id.block_header_hash) + self.stacks_blocks + .get(id_bhh) + .map(|id| id.block_header_hash) } fn get_burn_header_hash_for_block( &self, id_bhh: &StacksBlockId, ) -> Option { - self.store.get(id_bhh).map(|id| id.burn_block_header_hash) + let hash = self + .stacks_blocks + .get(id_bhh) + .map(|block| block.burn_block_header_hash); + hash } fn get_consensus_hash_for_block( @@ -449,63 +535,79 @@ impl HeadersDB for BurnDatastore { id_bhh: 
&StacksBlockId, _epoch_id: &StacksEpochId, ) -> Option { - self.store.get(id_bhh).map(|id| id.consensus_hash) + self.stacks_blocks.get(id_bhh).map(|id| id.consensus_hash) } + fn get_vrf_seed_for_block( &self, id_bhh: &StacksBlockId, _epoch_id: &StacksEpochId, ) -> Option { - self.store.get(id_bhh).map(|id| id.vrf_seed) + self.stacks_blocks.get(id_bhh).map(|id| id.vrf_seed) } + fn get_stacks_block_time_for_block(&self, id_bhh: &StacksBlockId) -> Option { - self.store.get(id_bhh).map(|id| id.burn_block_time) + self.stacks_blocks + .get(id_bhh) + .map(|id| id.stacks_block_time) } + fn get_burn_block_time_for_block( &self, id_bhh: &StacksBlockId, _epoch_id: Option<&StacksEpochId>, ) -> Option { - self.store.get(id_bhh).map(|id| id.burn_block_time) + self.get_burn_header_hash_for_block(id_bhh) + .and_then(|hash| self.burn_blocks.get(&hash)) + .map(|b| b.burn_block_time) } + fn get_burn_block_height_for_block(&self, id_bhh: &StacksBlockId) -> Option { - self.store.get(id_bhh).map(|id| id.burn_block_height) + self.get_burn_header_hash_for_block(id_bhh) + .and_then(|hash| self.burn_blocks.get(&hash)) + .map(|b| b.burn_block_height) } + fn get_miner_address( &self, id_bhh: &StacksBlockId, _epoch_id: &StacksEpochId, ) -> Option { - self.store.get(id_bhh).map(|id| id.miner) + self.stacks_blocks.get(id_bhh).map(|id| id.miner) } + fn get_burnchain_tokens_spent_for_block( &self, id_bhh: &StacksBlockId, _epoch_id: &StacksEpochId, ) -> Option { - self.store + self.stacks_blocks .get(id_bhh) .map(|id| id.burnchain_tokens_spent_for_block) } + fn get_burnchain_tokens_spent_for_winning_block( &self, id_bhh: &StacksBlockId, _epoch_id: &StacksEpochId, ) -> Option { - self.store + self.stacks_blocks .get(id_bhh) .map(|id| id.get_burnchain_tokens_spent_for_winning_block) } + fn get_tokens_earned_for_block( &self, id_bhh: &StacksBlockId, _epoch_id: &StacksEpochId, ) -> Option { - self.store.get(id_bhh).map(|id| id.tokens_earned_for_block) + self.stacks_blocks + .get(id_bhh) + .map(|id| id.tokens_earned_for_block) } } -impl BurnStateDB for BurnDatastore { +impl BurnStateDB for Datastore { fn get_v1_unlock_height(&self) -> u32 { 0 } @@ -527,11 +629,11 @@ impl BurnStateDB for BurnDatastore { } fn get_tip_burn_block_height(&self) -> Option { - Some(self.chain_height) + Some(self.burn_chain_height) } fn get_tip_sortition_id(&self) -> Option { - let bytes = height_to_hashed_bytes(self.chain_height); + let bytes = height_to_hashed_bytes(self.stacks_chain_height); let sortition_id = SortitionId(bytes); Some(sortition_id) } @@ -540,8 +642,10 @@ impl BurnStateDB for BurnDatastore { fn get_burn_block_height(&self, sortition_id: &SortitionId) -> Option { self.sortition_lookup .get(sortition_id) - .and_then(|id| self.store.get(id)) - .map(|block_info| block_info.burn_block_height) + .and_then(|id| self.stacks_blocks.get(id)) + .map(|stacks_block_info| stacks_block_info.burn_block_header_hash) + .and_then(|hash| self.burn_blocks.get(&hash)) + .map(|burn_block_info| burn_block_info.burn_block_height) } /// Returns the height of the burnchain when the Stacks chain started running. 
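Reviewer note on the hunk above: with `BlockInfo` split into `StacksBlockInfo` and `BurnBlockInfo`, the `HeadersDB` burn-block getters no longer read burn fields off the Stacks block record; they first look up the Stacks block's `burn_block_header_hash` and then consult the `burn_blocks` map. A minimal sketch of that two-level lookup, using plain `HashMap`s and stand-in key/hash types instead of the real `StacksBlockId`/`BurnchainHeaderHash` structs:

```rust
use std::collections::HashMap;

// Stand-ins for StacksBlockId / BurnchainHeaderHash, purely for illustration.
type StacksId = [u8; 32];
type BurnHash = [u8; 32];

struct StacksBlock {
    burn_block_header_hash: BurnHash,
}

struct BurnBlock {
    burn_block_height: u32,
}

struct Store {
    stacks_blocks: HashMap<StacksId, StacksBlock>,
    burn_blocks: HashMap<BurnHash, BurnBlock>,
}

impl Store {
    // Mirrors the shape of get_burn_block_height_for_block:
    // Stacks block -> burn block header hash -> burn block record.
    fn burn_block_height_for(&self, id: &StacksId) -> Option<u32> {
        self.stacks_blocks
            .get(id)
            .map(|block| block.burn_block_header_hash)
            .and_then(|hash| self.burn_blocks.get(&hash))
            .map(|burn| burn.burn_block_height)
    }
}

fn main() {
    let stacks_id = [1u8; 32];
    let burn_hash = [2u8; 32];
    let store = Store {
        stacks_blocks: HashMap::from([(stacks_id, StacksBlock { burn_block_header_hash: burn_hash })]),
        burn_blocks: HashMap::from([(burn_hash, BurnBlock { burn_block_height: 7 })]),
    };
    assert_eq!(store.burn_block_height_for(&stacks_id), Some(7));
}
```

This is only a sketch of the lookup chain; the actual implementation above also carries burn block time, sortition, and consensus-hash mappings.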
@@ -571,7 +675,7 @@ impl BurnStateDB for BurnDatastore { ) -> Option { self.sortition_lookup .get(sortition_id) - .and_then(|id| self.store.get(id)) + .and_then(|id| self.stacks_blocks.get(id)) .map(|block_info| block_info.burn_block_header_hash) } @@ -609,7 +713,7 @@ impl BurnStateDB for BurnDatastore { ) -> Option<(Vec, u128)> { self.sortition_lookup .get(sortition_id) - .and_then(|id| self.store.get(id)) + .and_then(|id| self.stacks_blocks.get(id)) .map(|block_info| block_info.pox_payout_addrs.clone()) } @@ -618,124 +722,23 @@ impl BurnStateDB for BurnDatastore { } } -impl Datastore { - pub fn open(_path_str: &str, _miner_tip: Option<&StacksBlockId>) -> Result { - Ok(Datastore::new()) - } - - pub fn as_analysis_db(&mut self) -> AnalysisDatabase<'_> { - AnalysisDatabase::new(self) - } - - /// begin, commit, rollback a save point identified by key - /// this is used to clean up any data from aborted blocks - /// (NOT aborted transactions that is handled by the clarity vm directly). - /// The block header hash is used for identifying savepoints. - /// this _cannot_ be used to rollback to arbitrary prior block hash, because that - /// blockhash would already have committed and no longer exist in the save point stack. - /// this is a "lower-level" rollback than the roll backs performed in - /// ClarityDatabase or AnalysisDatabase -- this is done at the backing store level. - - pub fn begin(&mut self, _current: &StacksBlockId, _next: &StacksBlockId) { - // self.marf.begin(current, next) - // .expect(&format!("ERROR: Failed to begin new MARF block {} - {})", current, next)); - // self.chain_tip = self.marf.get_open_chain_tip() - // .expect("ERROR: Failed to get open MARF") - // .clone(); - // self.side_store.begin(&self.chain_tip); - } - pub fn rollback(&mut self) { - // self.marf.drop_current(); - // self.side_store.rollback(&self.chain_tip); - // self.chain_tip = StacksBlockId::sentinel(); - } - // This is used by miners - // so that the block validation and processing logic doesn't - // reprocess the same data as if it were already loaded - pub fn commit_mined_block(&mut self, _will_move_to: &StacksBlockId) { - // rollback the side_store - // the side_store shouldn't commit data for blocks that won't be - // included in the processed chainstate (like a block constructed during mining) - // _if_ for some reason, we do want to be able to access that mined chain state in the future, - // we should probably commit the data to a different table which does not have uniqueness constraints. 
- // self.side_store.rollback(&self.chain_tip); - // self.marf.commit_mined(will_move_to) - // .expect("ERROR: Failed to commit MARF block"); - } - pub fn commit_to(&mut self, _final_bhh: &StacksBlockId) { - // println!("commit_to({})", final_bhh); - // self.side_store.commit_metadata_to(&self.chain_tip, final_bhh); - // self.side_store.commit(&self.chain_tip); - // self.marf.commit_to(final_bhh) - // .expect("ERROR: Failed to commit MARF block"); - } - pub fn get_chain_tip(&self) -> &StacksBlockId { - &self.current_chain_tip - } - - pub fn set_chain_tip(&mut self, bhh: &StacksBlockId) { - self.current_chain_tip = *bhh; - } - - pub fn put(&mut self, key: &str, value: &str) { - let lookup_id = self - .block_id_lookup - .get(&self.open_chain_tip) - .expect("Could not find current chain tip in block_id_lookup map"); - - // if there isn't a store for the open chain_tip, make one and update the - // entry for the block id in the lookup table - if *lookup_id != self.open_chain_tip { - self.store.insert( - self.open_chain_tip, - self.store - .get(lookup_id) - .unwrap_or_else(|| panic!("Block with ID {:?} does not exist", lookup_id)) - .clone(), - ); - - self.block_id_lookup - .insert(self.open_chain_tip, self.current_chain_tip); - } - - if let Some(map) = self.store.get_mut(&self.open_chain_tip) { - map.insert(key.to_string(), value.to_string()); - } else { - panic!("Block does not exist for current chain tip"); - } - } - - pub fn make_contract_hash_key(contract: &QualifiedContractIdentifier) -> String { - format!("clarity-contract::{}", contract) - } -} - #[cfg(test)] mod tests { use clarity::types::StacksEpoch; use super::*; - fn get_burn_datastore() -> BurnDatastore { - let constants = StacksConstants { - burn_start_height: 0, - pox_prepare_length: 50, - pox_reward_cycle_length: 1050, - pox_rejection_fraction: 0, - }; - BurnDatastore::new(constants) - } - #[test] fn test_advance_chain_tip() { - let mut datastore = get_burn_datastore(); - datastore.advance_chain_tip(5); - assert_eq!(datastore.chain_height, 5); + let mut datastore = Datastore::default(); + let mut clarity_datastore = ClarityDatastore::new(); + datastore.advance_burn_chain_tip(&mut clarity_datastore, 5); + assert_eq!(datastore.stacks_chain_height, 5); } #[test] fn test_set_current_epoch() { - let mut datastore = get_burn_datastore(); + let mut datastore = Datastore::default(); let epoch_id = StacksEpochId::Epoch25; datastore.set_current_epoch(epoch_id); assert_eq!(datastore.current_epoch, epoch_id); @@ -743,70 +746,71 @@ mod tests { #[test] fn test_get_v1_unlock_height() { - let datastore = get_burn_datastore(); + let datastore = Datastore::default(); assert_eq!(datastore.get_v1_unlock_height(), 0); } #[test] fn test_get_v2_unlock_height() { - let datastore = get_burn_datastore(); + let datastore = Datastore::default(); assert_eq!(datastore.get_v2_unlock_height(), 0); } #[test] fn test_get_v3_unlock_height() { - let datastore = get_burn_datastore(); + let datastore = Datastore::default(); assert_eq!(datastore.get_v3_unlock_height(), 0); } #[test] fn test_get_pox_3_activation_height() { - let datastore = get_burn_datastore(); + let datastore = Datastore::default(); assert_eq!(datastore.get_pox_3_activation_height(), 0); } #[test] fn test_get_pox_4_activation_height() { - let datastore = get_burn_datastore(); + let datastore = Datastore::default(); assert_eq!(datastore.get_pox_4_activation_height(), 0); } #[test] fn test_get_tip_burn_block_height() { - let mut datastore = get_burn_datastore(); + let mut datastore = 
Datastore::default(); + let mut clarity_datastore = ClarityDatastore::new(); let chain_height = 10; - datastore.chain_height = chain_height; + datastore.advance_burn_chain_tip(&mut clarity_datastore, 10); let tip_burn_block_height = datastore.get_tip_burn_block_height(); assert_eq!(tip_burn_block_height, Some(chain_height)); } #[test] fn test_get_burn_start_height() { - let datastore = get_burn_datastore(); + let datastore = Datastore::default(); assert_eq!(datastore.get_burn_start_height(), 0); } #[test] fn test_get_pox_prepare_length() { - let datastore = get_burn_datastore(); + let datastore = Datastore::default(); assert_eq!(datastore.get_pox_prepare_length(), 50); } #[test] fn test_get_pox_reward_cycle_length() { - let datastore = get_burn_datastore(); + let datastore = Datastore::default(); assert_eq!(datastore.get_pox_reward_cycle_length(), 1050); } #[test] fn test_get_pox_rejection_fraction() { - let datastore = get_burn_datastore(); + let datastore = Datastore::default(); assert_eq!(datastore.get_pox_rejection_fraction(), 0); } #[test] fn test_get_stacks_epoch() { - let datastore = get_burn_datastore(); + let datastore = Datastore::default(); let height = 10; let epoch = datastore.get_stacks_epoch(height); assert_eq!( @@ -823,7 +827,7 @@ mod tests { #[test] fn test_get_stacks_epoch_by_epoch_id() { - let datastore = get_burn_datastore(); + let datastore = Datastore::default(); let epoch_id = StacksEpochId::Epoch2_05; let epoch = datastore.get_stacks_epoch_by_epoch_id(&epoch_id); assert_eq!( diff --git a/components/clarity-repl/src/repl/interpreter.rs b/components/clarity-repl/src/repl/interpreter.rs index 8b081e9f8..afdafedbc 100644 --- a/components/clarity-repl/src/repl/interpreter.rs +++ b/components/clarity-repl/src/repl/interpreter.rs @@ -3,7 +3,7 @@ use std::collections::{btree_map::Entry, BTreeMap, BTreeSet}; use crate::analysis::annotation::{Annotation, AnnotationKind}; use crate::analysis::ast_dependency_detector::{ASTDependencyDetector, Dependency}; use crate::analysis::{self}; -use crate::repl::datastore::BurnDatastore; +use crate::repl::datastore::ClarityDatastore; use crate::repl::datastore::Datastore; use crate::repl::Settings; use clarity::consts::CHAIN_ID_TESTNET; @@ -28,7 +28,6 @@ use clarity::vm::{events::*, ClarityVersion}; use clarity::vm::{ContractEvaluationResult, EvalHook}; use clarity::vm::{CostSynthesis, ExecutionResult, ParsedContract}; -use super::datastore::StacksConstants; use super::{ClarityContract, DEFAULT_EPOCH}; pub const BLOCK_LIMIT_MAINNET: ExecutionCost = ExecutionCost { @@ -41,8 +40,8 @@ pub const BLOCK_LIMIT_MAINNET: ExecutionCost = ExecutionCost { #[derive(Clone, Debug)] pub struct ClarityInterpreter { + pub clarity_datastore: ClarityDatastore, pub datastore: Datastore, - pub burn_datastore: BurnDatastore, pub repl_settings: Settings, tx_sender: StandardPrincipalData, accounts: BTreeSet, @@ -54,19 +53,13 @@ pub struct Txid(pub [u8; 32]); impl ClarityInterpreter { pub fn new(tx_sender: StandardPrincipalData, repl_settings: Settings) -> Self { - let constants = StacksConstants { - burn_start_height: 0, - pox_prepare_length: 50, - pox_reward_cycle_length: 1050, - pox_rejection_fraction: 0, - }; Self { tx_sender, repl_settings, - datastore: Datastore::new(), + clarity_datastore: ClarityDatastore::new(), accounts: BTreeSet::new(), tokens: BTreeMap::new(), - burn_datastore: BurnDatastore::new(constants), + datastore: Datastore::default(), } } @@ -297,7 +290,7 @@ impl ClarityInterpreter { contract_ast: &ContractAST, annotations: &Vec, ) -> 
Result<(ContractAnalysis, Vec), Diagnostic> { - let mut analysis_db = AnalysisDatabase::new(&mut self.datastore); + let mut analysis_db = AnalysisDatabase::new(&mut self.clarity_datastore); // Run standard clarity analyses let mut contract_analysis = clarity::vm::analysis::run_analysis( @@ -327,9 +320,9 @@ impl ClarityInterpreter { pub fn get_block_time(&mut self) -> u64 { let block_height = self.get_block_height(); let mut conn = ClarityDatabase::new( - &mut self.datastore, - &self.burn_datastore, - &self.burn_datastore, + &mut self.clarity_datastore, + &self.datastore, + &self.datastore, ); conn.get_block_time(block_height) .expect("unable to get block time") @@ -342,7 +335,7 @@ impl ClarityInterpreter { ) -> Option { let key = ClarityDatabase::make_key_for_trip(contract_id, StoreType::Variable, var_name); let value_hex = self - .datastore + .clarity_datastore .get_data(&key) .expect("failed to get key from datastore")?; Some(format!("0x{value_hex}")) @@ -357,7 +350,7 @@ impl ClarityInterpreter { let key = ClarityDatabase::make_key_for_data_map_entry(contract_id, map_name, map_key).unwrap(); let value_hex = self - .datastore + .clarity_datastore .get_data(&key) .expect("failed to get map entry from datastore")?; Some(format!("0x{value_hex}")) @@ -377,9 +370,9 @@ impl ClarityInterpreter { ContractContext::new(contract_id.clone(), contract.clarity_version); let mut conn = ClarityDatabase::new( - &mut self.datastore, - &self.burn_datastore, - &self.burn_datastore, + &mut self.clarity_datastore, + &self.datastore, + &self.datastore, ); let tx_sender: PrincipalData = self.tx_sender.clone().into(); conn.begin(); @@ -581,7 +574,7 @@ impl ClarityInterpreter { } if contract_saved { - let mut analysis_db = AnalysisDatabase::new(&mut self.datastore); + let mut analysis_db = AnalysisDatabase::new(&mut self.clarity_datastore); analysis_db .execute(|db| db.insert_contract(&contract_id, &analysis)) .expect("Unable to save data"); @@ -606,9 +599,9 @@ impl ClarityInterpreter { ContractContext::new(contract_id.clone(), contract.clarity_version); let mut conn = ClarityDatabase::new( - &mut self.datastore, - &self.burn_datastore, - &self.burn_datastore, + &mut self.clarity_datastore, + &self.datastore, + &self.datastore, ); let tx_sender: PrincipalData = self.tx_sender.clone().into(); conn.begin(); @@ -821,7 +814,7 @@ impl ClarityInterpreter { } if contract_saved { - let mut analysis_db = AnalysisDatabase::new(&mut self.datastore); + let mut analysis_db = AnalysisDatabase::new(&mut self.clarity_datastore); analysis_db .execute(|db| db.insert_contract(&contract_id, &analysis)) .expect("Unable to save data"); @@ -842,9 +835,9 @@ impl ClarityInterpreter { eval_hooks: Option>, ) -> Result { let mut conn = ClarityDatabase::new( - &mut self.datastore, - &self.burn_datastore, - &self.burn_datastore, + &mut self.clarity_datastore, + &self.datastore, + &self.datastore, ); let tx_sender: PrincipalData = self.tx_sender.clone().into(); conn.begin(); @@ -1055,9 +1048,9 @@ impl ClarityInterpreter { ) -> Result { let final_balance = { let conn = ClarityDatabase::new( - &mut self.datastore, - &self.burn_datastore, - &self.burn_datastore, + &mut self.clarity_datastore, + &self.datastore, + &self.datastore, ); let mut global_context = GlobalContext::new( @@ -1098,8 +1091,12 @@ impl ClarityInterpreter { self.tx_sender.clone() } + pub fn set_current_epoch(&mut self, epoch: StacksEpochId) { + self.datastore.set_current_epoch(epoch); + } + pub fn advance_chain_tip(&mut self, count: u32) -> u32 { - let current_epoch = 
self.burn_datastore.get_current_epoch(); + let current_epoch = self.datastore.get_current_epoch(); if current_epoch < StacksEpochId::Epoch30 { self.advance_burn_chain_tip(count) } else { @@ -1111,26 +1108,31 @@ impl ClarityInterpreter { } pub fn advance_burn_chain_tip(&mut self, count: u32) -> u32 { - let new_height = self.burn_datastore.advance_chain_tip(count); - let _ = self.datastore.advance_chain_tip(count); + let new_height = self + .datastore + .advance_burn_chain_tip(&mut self.clarity_datastore, count); + // let _ = self.datastore.advance_stacks_chain_tip(count); self.set_tenure_height(); new_height } + pub fn advance_stacks_chain_tip(&mut self, count: u32) -> Result { - let current_epoch = self.burn_datastore.get_current_epoch(); + let current_epoch = self.datastore.get_current_epoch(); if current_epoch < StacksEpochId::Epoch30 { Err("only burn chain height can be advanced in epoch lower than 3.0".to_string()) } else { - Ok(self.datastore.advance_chain_tip(count)) + Ok(self + .datastore + .advance_stacks_chain_tip(&mut self.clarity_datastore, count)) } } pub fn set_tenure_height(&mut self) { let burn_block_height = self.get_burn_block_height(); let mut conn = ClarityDatabase::new( - &mut self.datastore, - &self.burn_datastore, - &self.burn_datastore, + &mut self.clarity_datastore, + &self.datastore, + &self.datastore, ); conn.begin(); conn.put_data("_stx-data::tenure_height", &burn_block_height) @@ -1139,11 +1141,11 @@ impl ClarityInterpreter { } pub fn get_block_height(&mut self) -> u32 { - self.datastore.get_current_block_height() + self.datastore.get_current_stacks_block_height() } pub fn get_burn_block_height(&mut self) -> u32 { - self.burn_datastore.get_current_block_height() + self.datastore.get_current_burn_block_height() } fn credit_token(&mut self, account: String, token: String, value: u128) { @@ -1289,7 +1291,7 @@ mod tests { let mut interpreter = ClarityInterpreter::new(StandardPrincipalData::transient(), Settings::default()); interpreter - .burn_datastore + .datastore .set_current_epoch(StacksEpochId::Epoch2_05); let count = 5; let initial_block_height = interpreter.get_burn_block_height(); @@ -1297,6 +1299,7 @@ mod tests { assert_eq!(interpreter.get_burn_block_height(), initial_block_height); assert_eq!(interpreter.get_block_height(), initial_block_height); } + #[test] fn test_advance_stacks_chain_tip() { let wasm_settings = Settings { @@ -1307,20 +1310,24 @@ mod tests { let mut interpreter = ClarityInterpreter::new(StandardPrincipalData::transient(), wasm_settings); interpreter - .burn_datastore + .datastore .set_current_epoch(StacksEpochId::Epoch30); let count = 5; let initial_block_height = interpreter.get_burn_block_height(); - assert_eq!(interpreter.advance_stacks_chain_tip(count), Ok(count)); + + let result = interpreter.advance_stacks_chain_tip(count); + assert_eq!(result, Ok(count)); + assert_eq!(interpreter.get_burn_block_height(), initial_block_height); assert_eq!(interpreter.get_block_height(), initial_block_height + count); } + #[test] fn test_advance_chain_tip_pre_epoch3() { let mut interpreter = ClarityInterpreter::new(StandardPrincipalData::transient(), Settings::default()); interpreter - .burn_datastore + .datastore .set_current_epoch(StacksEpochId::Epoch2_05); let count = 5; let initial_block_height = interpreter.get_block_height(); @@ -1331,12 +1338,13 @@ mod tests { initial_block_height + count ); } + #[test] fn test_advance_chain_tip() { let mut interpreter = ClarityInterpreter::new(StandardPrincipalData::transient(), Settings::default()); 
interpreter - .burn_datastore + .datastore .set_current_epoch(StacksEpochId::Epoch30); let count = 5; let initial_block_height = interpreter.get_block_height(); diff --git a/components/clarity-repl/src/repl/session.rs b/components/clarity-repl/src/repl/session.rs index affbafd8e..5a1fb3952 100644 --- a/components/clarity-repl/src/repl/session.rs +++ b/components/clarity-repl/src/repl/session.rs @@ -392,7 +392,7 @@ impl Session { } } EvaluationResult::Snippet(snippet_result) => { - output.push(format!("{}", snippet_result.result).green().to_string()) + output.push(value_to_string(&snippet_result.result).green().to_string()) } } Ok((output, result)) @@ -826,11 +826,11 @@ impl Session { output.join("\n") } - fn parse_and_advance_stacks_chain_tip(&mut self, command: &str) -> String { + fn parse_and_advance_chain_tip(&mut self, command: &str) -> String { let args: Vec<_> = command.split(' ').collect(); if args.len() != 2 { - return format!("{}", "Usage: ::advance_stacks_chain_tip ".red()); + return format!("{}", "Usage: ::advance_chain_tip ".red()); } let count = match args[1].parse::() { @@ -840,22 +840,17 @@ impl Session { } }; - match self.advance_stacks_chain_tip(count) { - Ok(new_height) => format!("{} blocks simulated, new height: {}", count, new_height) - .green() - .to_string(), - Err(_) => format!( - "{}", - "advance_stacks_chain_tip can't be called in epoch lower than 3.0".red() - ), - } + let new_height = self.advance_chain_tip(count); + format!("{} blocks simulated, new height: {}", count, new_height) + .green() + .to_string() } - fn parse_and_advance_chain_tip(&mut self, command: &str) -> String { + fn parse_and_advance_burn_chain_tip(&mut self, command: &str) -> String { let args: Vec<_> = command.split(' ').collect(); if args.len() != 2 { - return format!("{}", "Usage: ::advance_chain_tip ".red()); + return format!("{}", "Usage: ::advance_burn_chain_tip ".red()); } let count = match args[1].parse::() { @@ -865,16 +860,17 @@ impl Session { } }; - let new_height = self.advance_chain_tip(count); + let new_height = self.advance_burn_chain_tip(count); format!("{} blocks simulated, new height: {}", count, new_height) .green() .to_string() } - fn parse_and_advance_burn_chain_tip(&mut self, command: &str) -> String { + + fn parse_and_advance_stacks_chain_tip(&mut self, command: &str) -> String { let args: Vec<_> = command.split(' ').collect(); if args.len() != 2 { - return format!("{}", "Usage: ::advance_burn_chain_tip ".red()); + return format!("{}", "Usage: ::advance_stacks_chain_tip ".red()); } let count = match args[1].parse::() { @@ -884,20 +880,27 @@ impl Session { } }; - let new_height = self.advance_burn_chain_tip(count); - format!("{} blocks simulated, new height: {}", count, new_height) - .green() - .to_string() + match self.advance_stacks_chain_tip(count) { + Ok(new_height) => format!("{} blocks simulated, new height: {}", count, new_height) + .green() + .to_string(), + Err(_) => format!( + "{}", + "advance_stacks_chain_tip can't be called in epoch lower than 3.0".red() + ), + } } - pub fn advance_stacks_chain_tip(&mut self, count: u32) -> Result { - self.interpreter.advance_stacks_chain_tip(count) + pub fn advance_chain_tip(&mut self, count: u32) -> u32 { + self.interpreter.advance_chain_tip(count) } + pub fn advance_burn_chain_tip(&mut self, count: u32) -> u32 { self.interpreter.advance_burn_chain_tip(count) } - pub fn advance_chain_tip(&mut self, count: u32) -> u32 { - self.interpreter.advance_chain_tip(count) + + pub fn advance_stacks_chain_tip(&mut self, count: u32) -> 
Result { + self.interpreter.advance_stacks_chain_tip(count) } fn parse_and_set_tx_sender(&mut self, command: &str) -> String { @@ -992,7 +995,7 @@ impl Session { pub fn update_epoch(&mut self, epoch: StacksEpochId) { self.current_epoch = epoch; - self.interpreter.burn_datastore.set_current_epoch(epoch); + self.interpreter.set_current_epoch(epoch); if epoch >= StacksEpochId::Epoch30 { self.interpreter.set_tenure_height(); } @@ -1438,6 +1441,7 @@ mod tests { let new_height = session.handle_command("::get_burn_block_height"); assert_eq!(new_height, "Current height: 1"); } + #[test] fn set_epoch_command() { let mut session = Session::new(SessionSettings::default()); @@ -1759,7 +1763,6 @@ mod tests { _ => panic!("Unexpected result"), }; - println!("{}", time_block_2 - time_block_1); assert!(time_block_2 - time_block_1 == 600); }
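For orientation, here is how the renamed pair is expected to be driven after this change, modeled on the new tests in datastore.rs. The `use` paths and exact visibility are assumptions based on the crate layout shown in the diff, not verified against the published API:

```rust
// Assumed paths, mirroring `crate::repl::datastore::{ClarityDatastore, Datastore}` in the diff.
use clarity_repl::repl::datastore::{ClarityDatastore, Datastore};
// Assumed re-export location for StacksEpochId.
use clarity::types::StacksEpochId;

fn main() {
    // `Datastore` now owns the headers/burn-state view (HeadersDB + BurnStateDB);
    // `ClarityDatastore` is the Clarity key-value backing store (ClarityBackingStore).
    let mut datastore = Datastore::default();
    let mut clarity_datastore = ClarityDatastore::new();

    datastore.set_current_epoch(StacksEpochId::Epoch2_05);

    // At the datastore level, each simulated burn block also produces one Stacks block;
    // the per-epoch gating (e.g. Stacks-only advancement in epoch 3.0) lives in ClarityInterpreter.
    let new_burn_height = datastore.advance_burn_chain_tip(&mut clarity_datastore, 5);
    assert_eq!(new_burn_height, 5);
    assert_eq!(datastore.get_current_burn_block_height(), 5);
    assert_eq!(datastore.get_current_stacks_block_height(), 5);
}
```

The split keeps block metadata (Stacks and burn block records) separate from the Clarity KV store, which is what allows `advance_burn_chain_tip` and `advance_stacks_chain_tip` to move the two heights independently once epoch 3.0 is active.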