QCon SF Final Workshop Code #2

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Open: wants to merge 1 commit into main.
27 changes: 24 additions & 3 deletions Cargo.lock

Some generated files are not rendered by default.

8 changes: 8 additions & 0 deletions benchmarking/benches/benchmarks/mod.rs
@@ -4,6 +4,7 @@ use kcache::{KCache, SharableKCache};
use lru_cache::LruCache;
use multi_thread_cache_test::benchmark_cache_multi_threaded;
use pprof::criterion::{Output, PProfProfiler};
use sieve_cache::ConcurrentSieveCache;
use single_thread_cache_test::benchmark_cache_single_threaded;

mod kcache;
@@ -70,6 +71,13 @@ fn multi_threaded_comparison(c: &mut Criterion) {
            thread_count,
            LruCache::new(),
        );

        benchmark_cache_multi_threaded(
            BenchmarkId::new("concurrent_sieve", thread_count),
            &mut multi_thread_benchmark_group,
            thread_count,
            ConcurrentSieveCache::new(),
        );
    }
}

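This registers the new cache under the "concurrent_sieve" benchmark id alongside the existing entries. The benchmark_cache_multi_threaded helper itself is not part of this diff, so the workload below is only a rough, illustrative sketch (the hammer function, its keys, and its loop shape are assumptions, not the repo's code). The point it shows is that ShareableCache::get and ::set take &self, so one cache value can be shared across threads behind an Arc with no external lock:

    use std::sync::Arc;
    use std::thread;

    use cache::ShareableCache;
    use sieve_cache::ConcurrentSieveCache;

    // Hypothetical workload: each thread writes and then reads its own keys.
    fn hammer(thread_count: usize, ops_per_thread: usize) {
        let cache: Arc<ConcurrentSieveCache<usize, usize>> =
            Arc::new(ConcurrentSieveCache::new());

        let handles: Vec<_> = (0..thread_count)
            .map(|t| {
                let cache = Arc::clone(&cache);
                thread::spawn(move || {
                    for i in 0..ops_per_thread {
                        let key = t * ops_per_thread + i;
                        cache.set(key, i);
                        let _ = cache.get(&key);
                    }
                })
            })
            .collect();

        for handle in handles {
            handle.join().unwrap();
        }
    }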
30 changes: 30 additions & 0 deletions intro/src/main.rs
@@ -1,10 +1,40 @@
use clap::Parser;

#[derive(Debug, Parser)]
struct Args {
    /// This is the input.
    input: String,

    /// Shout out the input.
    #[arg(long)]
    loud: bool,
}

fn main() {
    // This is a convenient way to set up the normal Rust ecosystem logging.
    env_logger::init();

    // Parse your arguments.
    let args = Args::parse();

    // Handle your argument - numbers versus strings.
    let input = args.input;
    let result = input.parse::<i64>();

    match result {
        Ok(value) => {
            println!("{}", value * 2);
        }
        Err(_) => {
            let input = if args.loud {
                input.to_uppercase()
            } else {
                input
            };

            println!("You said \"{}\"", input);
        }
    }

    // Print the result.
}
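For a quick check of the new CLI behavior (illustrative invocations, not part of the diff; this assumes the binary is run from the intro crate):

    cargo run -- 21            # parses as i64 and prints 42
    cargo run -- hello         # falls through to the string branch: You said "hello"
    cargo run -- hello --loud  # uppercases first: You said "HELLO"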
1 change: 1 addition & 0 deletions sieve_cache/Cargo.toml
@@ -8,3 +8,4 @@ bench = false

[dependencies]
cache = { workspace = true }
dashmap = "6.1.0"
113 changes: 113 additions & 0 deletions sieve_cache/src/concurrent_sieve_cache.rs
@@ -0,0 +1,113 @@
use std::hash::Hash;
use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::Mutex;

use cache::ShareableCache;
use dashmap::DashMap;

struct Entry<K, V> {
    data: Mutex<Option<(K, V)>>,
    visited: AtomicBool,
}

pub struct ConcurrentSieveCache<K, V> {
    data: Vec<Entry<K, V>>,
    map: dashmap::DashMap<K, usize>,
    hand: Mutex<usize>,
}

impl<K, V> ConcurrentSieveCache<K, V>
where
    K: Send + Sync + Eq + Hash + Clone,
    V: Send + Sync + Clone,
{
    pub fn new() -> Self {
        let mut data = Vec::with_capacity(cache::MAX_SIZE);
        for _ in 0..cache::MAX_SIZE {
            data.push(Entry {
                data: Mutex::new(None),
                visited: AtomicBool::new(false),
            });
        }

        Self {
            data,
            map: DashMap::new(),
            hand: Mutex::new(0),
        }
    }
}

impl<K, V> ConcurrentSieveCache<K, V>
where
    K: Send + Sync + Eq + Hash + Clone,
    V: Send + Sync + Clone,
{
    // Advance the hand until it reaches a slot whose visited bit is clear,
    // clearing visited bits along the way, and return that slot's index.
    fn evict(&self) -> usize {
        let mut hand = self.hand.lock().unwrap();

        loop {
            let current = *hand;
            *hand += 1;
            if *hand >= self.data.len() {
                *hand = 0;
            }

            let entry = &self.data[current];
            let visited = entry.visited.swap(false, Ordering::Relaxed);

            if visited {
                continue;
            }

            return current;
        }
    }
}

impl<K, V> ShareableCache<K, V> for ConcurrentSieveCache<K, V>
where
    K: Send + Sync + Eq + Hash + Clone,
    V: Send + Sync + Clone,
{
    fn get(&self, key: &K) -> Option<V> {
        let index = *self.map.get(key)?;
        let entry = &self.data[index];

        // Mark the slot as recently used so the next eviction pass skips it.
        entry.visited.store(true, Ordering::Relaxed);
        let lock = entry.data.lock().expect("mutex was poisoned");
        let (_, value) = lock.as_ref()?;

        Some(value.clone())
    }

    fn set(&self, key: K, value: V) {
        // Claim a victim slot, drop whatever key currently lives there,
        // then point the map at the new slot.
        let index = self.evict();
        let entry = &self.data[index];

        let mut data = entry.data.lock().unwrap();

        if let Some((key, _)) = data.take() {
            self.map.remove(&key);
        }

        let prev = self.map.insert(key.clone(), index);
        *data = Some((key, value));

        drop(data);

        // If the key was already cached in another slot, clear that stale slot.
        if let Some(prev) = prev {
            *self.data[prev].data.lock().unwrap() = None;
        }
    }
}

impl<K, V> Default for ConcurrentSieveCache<K, V>
where
    K: Send + Sync + Eq + Hash + Clone,
    V: Send + Sync + Clone,
{
    fn default() -> Self {
        Self::new()
    }
}
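A minimal usage sketch of the new type from a consuming crate (the keys and string values are arbitrary example data and the demo function is hypothetical; ShareableCache comes from the cache crate, as in the impl above). Note that get and set take &self, so callers need no external locking:

    use cache::ShareableCache;
    use sieve_cache::ConcurrentSieveCache;

    fn demo() {
        let cache = ConcurrentSieveCache::new();
        cache.set(1, "one".to_string());
        cache.set(2, "two".to_string());

        // A hit sets the slot's visited bit, so the SIEVE hand will skip it
        // on the next eviction pass.
        assert_eq!(cache.get(&1), Some("one".to_string()));

        // Re-inserting an existing key claims a slot via evict() and clears
        // the key's old slot afterwards.
        cache.set(1, "uno".to_string());
        assert_eq!(cache.get(&1), Some("uno".to_string()));
    }

Reads only touch one entry's AtomicBool and Mutex, while evictions serialize on the hand lock, which is what lets the benchmark share a single instance across threads.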
2 changes: 2 additions & 0 deletions sieve_cache/src/lib.rs
@@ -1,3 +1,5 @@
mod concurrent_sieve_cache;
mod sieve_cache;

pub use concurrent_sieve_cache::ConcurrentSieveCache;
pub use sieve_cache::SieveCache;
78 changes: 63 additions & 15 deletions sieve_cache/src/sieve_cache.rs
@@ -1,16 +1,59 @@
use std::collections::HashMap;
use std::hash::Hash;

use cache::SizeLimitedCache;

struct Entry<Key, Value> {
    data: Option<(Key, Value)>,
    visited: bool,
}

pub struct SieveCache<Key, Value> {
-    /// This is a placeholder to allow the code to compile in a work-in-progress state.
-    /// You'll remove this field when you choose a data structure to hold the raw cache
-    /// values.
-    _phantom: std::marker::PhantomData<(Key, Value)>,
    map: HashMap<Key, usize>,
    data: Vec<Entry<Key, Value>>,
    hand: usize,
}

impl<Key, Value> SieveCache<Key, Value> {
    pub fn new() -> Self {
        let mut data = Vec::with_capacity(cache::MAX_SIZE);
        for _ in 0..cache::MAX_SIZE {
            data.push(Entry {
                data: None,
                visited: false,
            });
        }

        Self {
-            _phantom: std::marker::PhantomData,
            data,
            map: HashMap::default(),
            hand: 0,
        }
    }
}

impl<Key, Value> SieveCache<Key, Value>
where
    Key: Eq + Hash,
{
    fn evict(&mut self) -> usize {
        loop {
            let current = self.hand;
            self.hand += 1;
            if self.hand >= self.data.len() {
                self.hand = 0;
            }

            let entry = &mut self.data[current];
            if std::mem::replace(&mut entry.visited, false) {
                continue;
            }

            if let Some((key, _)) = entry.data.take() {
                self.map.remove(&key);
            }

            return current;
        }
    }
}
@@ -22,21 +65,26 @@ where
    Value: Clone,
{
    fn get(&mut self, key: &Key) -> Option<Value> {
-        // These silence unused variable warnings. Delete them before you
-        // implement this method.
-        let _ = key;
        let index = *self.map.get(key)?;
        let entry = &mut self.data[index];

-        // todo!()
-        None
        entry.visited = true;
        let (_, value) = entry.data.as_ref()?;

        Some(value.clone())
    }

    fn set(&mut self, key: Key, value: Value) {
-        // These silence unused variable warnings. Delete them before you
-        // implement this method.
-        let _ = key;
-        let _ = value;
        if let Some(index) = self.map.get(&key) {
            let entry = &mut self.data[*index];
            entry.data = Some((key, value));
            return;
        }

-        // todo!()
        let index = self.evict();
        let entry = &mut self.data[index];
        entry.data = Some((key.clone(), value));
        self.map.insert(key, index);
    }
}

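The single-threaded version goes through SizeLimitedCache, whose methods take &mut self. A small sketch of the visited-bit behavior, similar in spirit to the existing evict_skips_read_values test (the demo function and the numeric keys and values are illustrative only):

    use cache::SizeLimitedCache;
    use sieve_cache::SieveCache;

    fn demo() {
        let mut cache = SieveCache::new();

        // Fill every slot.
        for i in 0..cache::MAX_SIZE {
            cache.set(i, i);
        }

        // Reading key 0 sets its visited flag...
        assert_eq!(cache.get(&0), Some(0));

        // ...so the next insertion evicts an unvisited entry instead of key 0.
        cache.set(cache::MAX_SIZE, cache::MAX_SIZE);
        assert_eq!(cache.get(&0), Some(0));
    }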
2 changes: 1 addition & 1 deletion sieve_cache/tests/tests.rs
@@ -107,7 +107,7 @@ fn evict_skips_read_values() {
#[test]
fn evict_only_evicts_necessary_entries() {
    let mut cache = SieveCache::new();

    // This should fill up the cache to the maximum size.
    for i in 0..cache::MAX_SIZE {
        cache.set(i, i);