Commit 5dca0d4

chore: upgrade nightly rust to 1.72

ShiKaiWi committed Aug 29, 2023
1 parent ee54c5c commit 5dca0d4
Showing 45 changed files with 93 additions and 118 deletions.
4 changes: 2 additions & 2 deletions analytic_engine/src/compaction/picker.rs
@@ -543,7 +543,7 @@ impl TimeWindowPicker {

let (left, _) = Self::get_window_bounds_in_millis(window, ts);

- let bucket_files = buckets.entry(left).or_insert_with(Vec::new);
+ let bucket_files = buckets.entry(left).or_default();

bucket_files.push(f.clone());

@@ -813,7 +813,7 @@ mod tests {

#[test]
fn test_time_window_picker() {
- let picker_manager = PickerManager::default();
+ let picker_manager = PickerManager;
let twp = picker_manager.get_picker(CompactionStrategy::Default);
let mut ctx = PickerContext {
segment_duration: Duration::from_millis(1000),
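
The `or_insert_with(Vec::new)` to `or_default()` change follows a clippy suggestion for the standard `Entry` API: when the fallback is just `Default::default()`, `or_default()` says the same thing more directly. A minimal sketch of the pattern (the map and key here are illustrative, not from the codebase):

    use std::collections::HashMap;

    fn main() {
        let mut buckets: HashMap<i64, Vec<u64>> = HashMap::new();

        // Before: spell out the default value explicitly.
        buckets.entry(10).or_insert_with(Vec::new).push(1);

        // After: `or_default()` is equivalent for any value type implementing `Default`.
        buckets.entry(10).or_default().push(2);

        assert_eq!(buckets[&10], vec![1, 2]);
    }
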
2 changes: 1 addition & 1 deletion analytic_engine/src/compaction/scheduler.rs
@@ -311,7 +311,7 @@ impl SchedulerImpl {
space_store,
runtime: runtime.clone(),
schedule_interval: config.schedule_interval.0,
- picker_manager: PickerManager::default(),
+ picker_manager: PickerManager,
max_ongoing_tasks: config.max_ongoing_tasks,
max_unflushed_duration: config.max_unflushed_duration.0,
write_sst_max_buffer_size,
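
Several files in this commit replace `SomeUnitStruct::default()` with the struct name itself (`PickerManager`, `WalDecoder`, `FactoryImpl`, `RocksDBWalsOpener`, `MemTableMocker`, `OsRng`). This is most likely driven by clippy's `default_constructed_unit_structs` lint, which flags calling `default()` on a unit struct when the literal is shorter and equivalent; the commit does not name the lint, so treat this as an inferred explanation. A sketch:

    // A field-less (unit) struct; deriving `Default` is harmless, but the literal
    // already constructs the value.
    #[derive(Default)]
    struct PickerManager;

    fn main() {
        // The verbose form that the lint flags:
        let _verbose = PickerManager::default();

        // Preferred: the unit-struct literal is the same value.
        let _concise = PickerManager;
    }
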
6 changes: 0 additions & 6 deletions analytic_engine/src/instance/engine.rs
@@ -407,12 +407,6 @@ impl Instance {
let shard_id = request.shard_id;
let mut table_ctxs = Vec::with_capacity(request.table_defs.len());

- // Open tables.
- struct TableInfo {
- name: String,
- id: TableId,
- }
-
let mut spaces_of_tables = Vec::with_capacity(request.table_defs.len());
for table_def in request.table_defs {
let context = SpaceContext {
4 changes: 2 additions & 2 deletions analytic_engine/src/instance/wal_replayer.rs
@@ -217,7 +217,7 @@ impl TableBasedReplay {
loop {
// fetch entries to log_entry_buf
let _timer = PULL_LOGS_DURATION_HISTOGRAM.start_timer();
- let decoder = WalDecoder::default();
+ let decoder = WalDecoder;
log_entry_buf = log_iter
.next_log_entries(decoder, log_entry_buf)
.await
@@ -309,7 +309,7 @@ impl RegionBasedReplay {
// Split and replay logs.
loop {
let _timer = PULL_LOGS_DURATION_HISTOGRAM.start_timer();
- let decoder = WalDecoder::default();
+ let decoder = WalDecoder;
log_entry_buf = log_iter
.next_log_entries(decoder, log_entry_buf)
.await
4 changes: 2 additions & 2 deletions analytic_engine/src/manifest/details.rs
@@ -595,8 +595,8 @@ struct ObjectStoreBasedSnapshotStore {
}

impl ObjectStoreBasedSnapshotStore {
- const CURRENT_SNAPSHOT_NAME: &str = "current";
- const SNAPSHOT_PATH_PREFIX: &str = "manifest/snapshot";
+ const CURRENT_SNAPSHOT_NAME: &'static str = "current";
+ const SNAPSHOT_PATH_PREFIX: &'static str = "manifest/snapshot";

pub fn new(space_id: SpaceId, table_id: TableId, store: ObjectStoreRef) -> Self {
let snapshot_path = Self::snapshot_path(space_id, table_id);
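
The `&str` to `&'static str` change on associated constants (also in disk_cache.rs below) addresses a newer rustc warning: elided lifetimes in associated constants are being deprecated, so the `'static` lifetime that was previously implied now has to be written out. The exact lint name is not stated in the commit, so this is an inferred rationale. A small sketch of the pattern:

    struct SnapshotStore;

    impl SnapshotStore {
        // Previously written as `const CURRENT_SNAPSHOT_NAME: &str = "current";`,
        // relying on lifetime elision. Newer toolchains warn, so the lifetime is explicit.
        const CURRENT_SNAPSHOT_NAME: &'static str = "current";
    }

    fn main() {
        println!("{}", SnapshotStore::CURRENT_SNAPSHOT_NAME);
    }
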
2 changes: 1 addition & 1 deletion analytic_engine/src/setup.rs
@@ -429,7 +429,7 @@ async fn open_instance(
manifest_storages,
wal_manager,
store_picker,
- Arc::new(FactoryImpl::default()),
+ Arc::new(FactoryImpl),
)
.await
.context(OpenInstance)?;
2 changes: 1 addition & 1 deletion analytic_engine/src/sst/file.rs
@@ -369,7 +369,7 @@ struct FileHandleSet {

impl FileHandleSet {
fn latest(&self) -> Option<FileHandle> {
- if let Some(file) = self.file_map.values().rev().next() {
+ if let Some(file) = self.file_map.values().next_back() {
return Some(file.clone());
}
None
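
`values().rev().next()` to `values().next_back()` is the idiomatic way to take the last element of a double-ended iterator; clippy (likely the `manual_next_back` lint, though the commit does not say) suggests it because it avoids the intermediate `Rev` adapter. An illustrative sketch with a `BTreeMap`, mirroring the ordered `file_map` in the diff:

    use std::collections::BTreeMap;

    fn main() {
        let mut file_map = BTreeMap::new();
        file_map.insert(1, "a.sst");
        file_map.insert(2, "b.sst");
        file_map.insert(3, "c.sst");

        // Before: reverse the iterator, then take its first element.
        let latest_old = file_map.values().rev().next().copied();

        // After: `next_back()` reads directly from the back of the iterator.
        let latest_new = file_map.values().next_back().copied();

        assert_eq!(latest_old, latest_new);
        assert_eq!(latest_new, Some("c.sst"));
    }
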
3 changes: 1 addition & 2 deletions analytic_engine/src/sst/parquet/async_reader.rs
@@ -778,8 +778,7 @@ mod tests {

impl MockRandomSenders {
fn start_to_send(&mut self) {
- while !self.tx_group.is_empty() {
- let tx = self.tx_group.pop().unwrap();
+ while let Some(tx) = self.tx_group.pop() {
let test_data = self.test_datas.pop().unwrap();
tokio::spawn(async move {
for datum in test_data {
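
Rewriting `while !v.is_empty() { let x = v.pop().unwrap(); ... }` as `while let Some(x) = v.pop()` drops the `unwrap` and the separate emptiness check; clippy's `manual_while_let_some` lint (assumed here from the shape of the change) suggests exactly this. Sketch:

    fn main() {
        let mut tx_group = vec!["tx-1", "tx-2", "tx-3"];

        // Before:
        // while !tx_group.is_empty() {
        //     let tx = tx_group.pop().unwrap();
        //     ...
        // }

        // After: the loop condition and the unwrap collapse into one pattern match.
        while let Some(tx) = tx_group.pop() {
            println!("draining {tx}");
        }

        assert!(tx_group.is_empty());
    }
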
16 changes: 8 additions & 8 deletions analytic_engine/src/table/version.rs
@@ -931,9 +931,9 @@ mod tests {

#[test]
fn test_table_version_sampling() {
- let memtable = MemTableMocker::default().build();
+ let memtable = MemTableMocker.build();
test_table_version_sampling_with_memtable(memtable);
- let memtable = MemTableMocker::default().build_columnar();
+ let memtable = MemTableMocker.build_columnar();
test_table_version_sampling_with_memtable(memtable);
}

@@ -975,9 +975,9 @@ mod tests {

#[test]
fn test_table_version_sampling_switch() {
- let memtable = MemTableMocker::default().build();
+ let memtable = MemTableMocker.build();
test_table_version_sampling_switch_with_memtable(memtable);
- let memtable = MemTableMocker::default().build_columnar();
+ let memtable = MemTableMocker.build_columnar();
test_table_version_sampling_switch_with_memtable(memtable);
}

@@ -1027,7 +1027,7 @@ mod tests {
fn test_table_version_sampling_freeze() {
let version = new_table_version();

- let memtable = MemTableMocker::default().build();
+ let memtable = MemTableMocker.build();
let schema = memtable.schema().clone();

let memtable_id1 = 1;
@@ -1063,7 +1063,7 @@ mod tests {
assert_eq!(memtable_id1, read_view.sampling_mem.as_ref().unwrap().id);
assert!(read_view.sampling_mem.unwrap().freezed);

- let memtable = MemTableMocker::default().build();
+ let memtable = MemTableMocker.build();
let memtable_id2 = 2;
let mem_state = MemTableState {
mem: memtable,
@@ -1110,7 +1110,7 @@ mod tests {
fn test_table_version_sampling_apply_edit() {
let version = new_table_version();

- let memtable = MemTableMocker::default().build();
+ let memtable = MemTableMocker.build();

let memtable_id1 = 1;
let sampling_mem = SamplingMemTable::new(memtable, memtable_id1);
@@ -1124,7 +1124,7 @@ mod tests {
TimeRange::bucket_of(now, table_options::DEFAULT_SEGMENT_DURATION).unwrap();

// Prepare mutable memtable.
- let memtable = MemTableMocker::default().build();
+ let memtable = MemTableMocker.build();
let memtable_id2 = 2;
let mem_state = MemTableState {
mem: memtable,
2 changes: 1 addition & 1 deletion analytic_engine/src/table_options.rs
@@ -427,7 +427,7 @@ impl TableOptions {
SEGMENT_DURATION.to_string(),
self.segment_duration
.map(|v| v.to_string())
- .unwrap_or_else(String::new),
+ .unwrap_or_default(),
),
(UPDATE_MODE.to_string(), self.update_mode.to_string()),
(ENABLE_TTL.to_string(), self.enable_ttl.to_string()),
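
`unwrap_or_else(String::new)` to `unwrap_or_default()` is the same simplification as the `or_default()` change above, applied to `Option`: when the fallback is just the type's `Default` value, there is no need to pass a constructor closure. Sketch (the field and formatting are illustrative):

    use std::time::Duration;

    fn main() {
        let segment_duration: Option<Duration> = None;

        // Before: explicitly construct the empty string as the fallback.
        let rendered_old = segment_duration
            .map(|v| format!("{v:?}"))
            .unwrap_or_else(String::new);

        // After: `unwrap_or_default()` produces the same empty string.
        let rendered_new = segment_duration
            .map(|v| format!("{v:?}"))
            .unwrap_or_default();

        assert_eq!(rendered_old, rendered_new);
        assert_eq!(rendered_new, "");
    }
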
2 changes: 1 addition & 1 deletion analytic_engine/src/tests/util.rs
@@ -617,7 +617,7 @@ impl EngineBuildContext for RocksDBEngineBuildContext {
type WalsOpener = RocksDBWalsOpener;

fn wals_opener(&self) -> Self::WalsOpener {
- RocksDBWalsOpener::default()
+ RocksDBWalsOpener
}

fn config(&self) -> Config {
2 changes: 1 addition & 1 deletion benchmarks/src/merge_memtable_bench.rs
@@ -146,7 +146,7 @@ impl MergeMemTableBench {
let table_id = self.table_id;
let sequence = u64::MAX;
let projected_schema = self.projected_schema.clone();
- let sst_factory: SstFactoryRef = Arc::new(FactoryImpl::default());
+ let sst_factory: SstFactoryRef = Arc::new(FactoryImpl);
let iter_options = IterOptions {
batch_size: self.sst_read_options.num_rows_per_row_group,
};
4 changes: 2 additions & 2 deletions benchmarks/src/merge_sst_bench.rs
@@ -133,7 +133,7 @@ impl MergeSstBench {
let table_id = self.table_id;
let sequence = u64::MAX;
let projected_schema = self.sst_read_options.projected_schema.clone();
- let sst_factory: SstFactoryRef = Arc::new(FactoryImpl::default());
+ let sst_factory: SstFactoryRef = Arc::new(FactoryImpl);
let iter_options = IterOptions {
batch_size: self.sst_read_options.num_rows_per_row_group,
};
@@ -189,7 +189,7 @@ impl MergeSstBench {
let space_id = self.space_id;
let table_id = self.table_id;
let projected_schema = self.sst_read_options.projected_schema.clone();
- let sst_factory: SstFactoryRef = Arc::new(FactoryImpl::default());
+ let sst_factory: SstFactoryRef = Arc::new(FactoryImpl);

let request_id = RequestId::next_id();
let store_picker: ObjectStorePickerRef = Arc::new(self.store.clone());
2 changes: 1 addition & 1 deletion benchmarks/src/sst_tools.rs
@@ -228,7 +228,7 @@ pub async fn merge_sst(config: MergeSstConfig, runtime: Arc<Runtime>) {
};

let request_id = RequestId::next_id();
- let sst_factory: SstFactoryRef = Arc::new(FactoryImpl::default());
+ let sst_factory: SstFactoryRef = Arc::new(FactoryImpl);
let store_picker: ObjectStorePickerRef = Arc::new(store);
let projected_schema = ProjectedSchema::no_projection(schema.clone());
let sst_read_options = SstReadOptions {
2 changes: 1 addition & 1 deletion cluster/src/shard_lock_manager.rs
@@ -524,7 +524,7 @@ impl ShardLock {
lease_id: i64,
expired_at: Instant,
on_lock_expired: OnExpired,
- etcd_client: &mut Client,
+ etcd_client: &Client,
runtime: &RuntimeRef,
) -> Result<()>
where
2 changes: 1 addition & 1 deletion cluster/src/topology.rs
@@ -118,7 +118,7 @@ impl SchemaTopologies {

self.topologies
.entry(schema_name.to_string())
- .or_insert_with(Default::default)
+ .or_default()
.update_tables(tables);

true
8 changes: 6 additions & 2 deletions common_types/src/bitset.rs
@@ -298,7 +298,11 @@ mod tests {
}

fn iter_set_bools(bools: &[bool]) -> impl Iterator<Item = usize> + '_ {
- bools.iter().enumerate().filter_map(|(x, y)| y.then(|| x))
+ bools
+ .iter()
+ .enumerate()
+ .filter(|&(_, y)| *y)
+ .map(|(x, _)| x)
}

#[test]
@@ -347,7 +351,7 @@ mod tests {
}

fn make_rng() -> StdRng {
- let seed = OsRng::default().next_u64();
+ let seed = OsRng.next_u64();
println!("Seed: {seed}");
StdRng::seed_from_u64(seed)
}
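
The `iter_set_bools` rewrite replaces `filter_map(|(x, y)| y.then(|| x))` with an explicit `filter` + `map`. The closure passed to `bool::then` did no lazy work, which clippy flags (`unnecessary_lazy_evaluations`); `y.then_some(x)` would be another accepted fix, and the commit simply chose the filter/map spelling. A sketch of the equivalent forms:

    fn main() {
        let bools = [true, false, true, true, false];

        // Original: `then(|| x)` builds a closure just to wrap an already-computed value.
        let a: Vec<usize> = bools
            .iter()
            .enumerate()
            .filter_map(|(x, y)| y.then(|| x))
            .collect();

        // Commit's version: filter on the flag, then keep the index.
        let b: Vec<usize> = bools
            .iter()
            .enumerate()
            .filter(|&(_, y)| *y)
            .map(|(x, _)| x)
            .collect();

        // Equivalent alternative: `then_some` avoids the closure without splitting the chain.
        let c: Vec<usize> = bools
            .iter()
            .enumerate()
            .filter_map(|(x, y)| y.then_some(x))
            .collect();

        assert_eq!(a, b);
        assert_eq!(b, c);
        assert_eq!(c, vec![0, 2, 3]);
    }
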
2 changes: 1 addition & 1 deletion common_types/src/column_block.rs
@@ -1033,7 +1033,7 @@ macro_rules! define_column_block_builder {

/// Append the [DatumView] into the builder, the datum view should have same the data
/// type of builder
- pub fn append_view<'a>(&mut self, datum: DatumView<'a>) -> Result<()> {
+ pub fn append_view(&mut self, datum: DatumView<'_>) -> Result<()> {
let given = datum.kind();
match self {
Self::Null { rows } => match datum {
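
Dropping the explicit `<'a>` in favor of `DatumView<'_>` is clippy's `needless_lifetimes` suggestion: when a lifetime parameter appears in only one argument and is never referred to elsewhere in the signature, the anonymous lifetime expresses the same thing. A minimal sketch with a stand-in borrowed type (the names here are placeholders, not the real `DatumView`):

    /// Stand-in for a borrowed view type such as `DatumView<'a>`.
    struct View<'a> {
        text: &'a str,
    }

    struct Builder {
        rows: Vec<String>,
    }

    impl Builder {
        // Before: `fn append_view<'a>(&mut self, datum: View<'a>)` named a lifetime
        // that nothing else in the signature used.
        // After: the anonymous lifetime carries the same meaning.
        fn append_view(&mut self, datum: View<'_>) {
            self.rows.push(datum.text.to_owned());
        }
    }

    fn main() {
        let mut builder = Builder { rows: Vec::new() };
        builder.append_view(View { text: "hello" });
        assert_eq!(builder.rows, vec!["hello".to_owned()]);
    }
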
4 changes: 2 additions & 2 deletions common_types/src/column_schema.rs
@@ -602,7 +602,7 @@ mod tests {

#[test]
fn test_valid_tag_type() {
- let invalid_tag_types = vec![DatumKind::Null, DatumKind::Float, DatumKind::Double];
+ let invalid_tag_types = [DatumKind::Null, DatumKind::Float, DatumKind::Double];

for v in &DatumKind::VALUES {
assert_eq!(
@@ -614,7 +614,7 @@

#[test]
fn test_valid_dictionary_type() {
- let valid_dictionary_types = vec![DatumKind::String];
+ let valid_dictionary_types = [DatumKind::String];

for v in &DatumKind::VALUES {
assert_eq!(
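
The test-only `vec![...]` to `[...]` changes (here, and in the codec and object-store tests below) come from clippy's `useless_vec` lint: when a collection is only iterated or searched and never grows, a fixed-size array avoids the heap allocation. Sketch:

    fn main() {
        // Before: `vec![1, 2, 3]` allocates a Vec that is only ever read.
        // After: a plain array supports the same `contains` and iteration.
        let invalid_values = [1, 2, 3];

        for v in 0..5 {
            let is_invalid = invalid_values.contains(&v);
            println!("{v} invalid: {is_invalid}");
        }
    }
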
2 changes: 1 addition & 1 deletion components/codec/src/columnar/bool.rs
@@ -99,7 +99,7 @@ impl Encoding {
}
}

- fn decode<B, F>(&self, buf: &mut B, f: F) -> Result<()>
+ fn decode<B, F>(&self, buf: &B, f: F) -> Result<()>
where
B: Buf,
F: FnMut(bool) -> Result<()>,
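
Several decoder helpers in components/codec change `buf: &mut B` to `buf: &B` (also in bytes.rs and mod.rs below). The bodies evidently only need shared access to the buffer, and newer toolchains warn about `&mut` parameters that are never actually mutated; clippy's `needless_pass_by_ref_mut` is the likely lint, though the commit does not name it. A generic sketch of the idea, not the real `Encoding::decode`:

    /// Hypothetical decode helper: it only inspects the buffer, so `&B` is enough.
    fn decode_len<B: AsRef<[u8]>>(buf: &B) -> usize {
        // Shared access: reading the length neither consumes nor mutates the buffer.
        buf.as_ref().len()
    }

    fn main() {
        let buf = vec![1u8, 2, 3, 4];
        // No `&mut` borrow is needed at the call site either.
        assert_eq!(decode_len(&buf), 4);
    }
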
2 changes: 1 addition & 1 deletion components/codec/src/columnar/bytes.rs
@@ -125,7 +125,7 @@ impl Encoding {
}

/// The layout can be referred to the docs of [`Encoding`].
- fn decode<B, F>(&self, ctx: DecodeContext<'_>, buf: &mut B, f: F) -> Result<()>
+ fn decode<B, F>(&self, ctx: DecodeContext<'_>, buf: &B, f: F) -> Result<()>
where
B: Buf,
F: FnMut(Bytes) -> Result<()>,
4 changes: 2 additions & 2 deletions components/codec/src/columnar/mod.rs
@@ -392,7 +392,7 @@ impl ColumnarDecoder {
impl ColumnarDecoder {
fn decode_with_nulls<B: Buf>(
ctx: DecodeContext<'_>,
- buf: &mut B,
+ buf: &B,
num_datums: usize,
datum_kind: DatumKind,
) -> Result<Vec<Datum>> {
@@ -570,7 +570,7 @@ mod tests {

#[test]
fn test_small_int() {
- let datums = vec![10u32, 1u32, 2u32, 81u32, 82u32];
+ let datums = [10u32, 1u32, 2u32, 81u32, 82u32];

check_encode_end_decode(
10,
2 changes: 1 addition & 1 deletion components/future_ext/src/cancel.rs
@@ -62,7 +62,7 @@ where
fn drop(&mut self) {
if !self.done {
let inner = self.inner.take().unwrap();
- let handle = self.runtime.spawn(async move { inner.await });
+ let handle = self.runtime.spawn(inner);
drop(handle);
}
}
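
`runtime.spawn(async move { inner.await })` to `runtime.spawn(inner)` removes an async block whose only job is to await the future it wraps; clippy's `redundant_async_block` lint flags exactly this shape. A dependency-free sketch of why the two forms are interchangeable:

    use std::future::Future;

    async fn work() -> u32 {
        42
    }

    // Before: wrap the future in an async block that immediately awaits it.
    fn wrapped() -> impl Future<Output = u32> {
        let inner = work();
        async move { inner.await }
    }

    // After: the inner future already has the right output type, so pass it through.
    fn direct() -> impl Future<Output = u32> {
        work()
    }

    fn main() {
        // Either future could be handed to an executor (e.g. `runtime.spawn(...)`);
        // here we only name them to show both are `impl Future<Output = u32>`.
        let _a = wrapped();
        let _b = direct();
    }
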
8 changes: 4 additions & 4 deletions components/object_store/src/disk_cache.rs
@@ -132,7 +132,7 @@ struct Manifest {

impl Manifest {
const CURRENT_VERSION: usize = 2;
- const FILE_NAME: &str = "manifest.json";
+ const FILE_NAME: &'static str = "manifest.json";

#[inline]
fn is_valid(&self, version: usize, page_size: usize) -> bool {
@@ -876,7 +876,7 @@ mod test {

// remove cached values, then get again
{
- for range in vec![0..16, 16..32, 32..48, 48..64, 64..80, 80..96, 96..112] {
+ for range in [0..16, 16..32, 32..48, 48..64, 64..80, 80..96, 96..112] {
let data_cache = store
.inner
.cache
@@ -887,7 +887,7 @@ mod test {
assert!(test_file_exists(&store.cache_dir, &location, &range));
}

- for range in vec![16..32, 48..64, 80..96] {
+ for range in [16..32, 48..64, 80..96] {
let mut data_cache = store
.inner
.cache
@@ -1105,7 +1105,7 @@ mod test {
.await
.unwrap()
};
- for range in vec![16..32, 32..48, 48..64, 64..80, 80..96, 96..112] {
+ for range in [16..32, 32..48, 48..64, 64..80, 80..96, 96..112] {
let filename = DiskCacheStore::page_cache_name(&location, &range);
let cache = store.cache.meta_cache.lock(&filename);
assert!(cache.contains(&filename));
4 changes: 2 additions & 2 deletions components/object_store/src/obkv/mod.rs
@@ -639,7 +639,7 @@ impl<T: TableKv> ObjectStore for ObkvObjectStore<T> {
}));
}

- let iter = futures::stream::iter(meta_list.into_iter());
+ let iter = futures::stream::iter(meta_list);
debug!(
"ObkvObjectStore list operation, prefix:{path}, cost:{:?}",
instant.elapsed()
@@ -682,7 +682,7 @@ impl<T: TableKv> ObjectStore for ObkvObjectStore<T> {
}
}

- let common_prefixes = Vec::from_iter(common_prefixes.into_iter());
+ let common_prefixes = Vec::from_iter(common_prefixes);
debug!(
"ObkvObjectStore list_with_delimiter operation, prefix:{path}, cost:{:?}",
instant.elapsed()
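
Both obkv changes drop a redundant `.into_iter()`: `futures::stream::iter` and `Vec::from_iter` accept any `IntoIterator`, so passing the collection directly is equivalent. This matches clippy's `useless_conversion` lint, which in recent releases also covers `.into_iter()` on values that are about to be converted anyway. A sketch using only the standard library:

    use std::collections::HashSet;

    fn main() {
        let common_prefixes: HashSet<&str> = ["a", "b", "a"].into_iter().collect();

        // Before: an explicit `.into_iter()` before handing the set to `from_iter`.
        let explicit: Vec<&str> = Vec::from_iter(common_prefixes.clone().into_iter());

        // After: `from_iter` takes `impl IntoIterator`, so the extra call is redundant.
        let implicit: Vec<&str> = Vec::from_iter(common_prefixes.clone());

        assert_eq!(explicit.len(), implicit.len());
    }
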
2 changes: 1 addition & 1 deletion components/object_store/src/prefix.rs
@@ -348,7 +348,7 @@ mod tests {

let _ = prefix_store.get(&test_filepath).await;
let _ = prefix_store.get_range(&test_filepath, 0..1).await;
- let _ = prefix_store.get_ranges(&test_filepath, &[0..2]).await;
+ let _ = prefix_store.get_ranges(&test_filepath, &[0..2; 1]).await;

let meta = prefix_store.head(&test_filepath).await.unwrap();
assert!(!meta.location.as_ref().starts_with(test_prefix));
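
The `&[0..2]` to `&[0..2; 1]` tweak silences clippy's `single_range_in_vec_init` lint, which appeared around this toolchain: an initializer containing a single range literal is easy to misread as a value/length pair, so the repeat form spells out that one `Range` element is intended. A sketch of the ambiguity the lint guards against (using `vec!`, which accepts the same repeat syntax):

    fn main() {
        // One `Range<i32>` element; without the `; 1` this is easy to misread
        // as "two elements" or as a range of indices.
        let one_range = vec![0..2; 1];

        // Two `i32` elements with value zero, the thing `[0..2]` can be mistaken for.
        let two_zeros = vec![0; 2];

        assert_eq!(one_range.len(), 1);
        assert_eq!(one_range[0], 0..2);
        assert_eq!(two_zeros, vec![0, 0]);
    }
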