Skip to content
This repository was archived by the owner on Nov 6, 2020. It is now read-only.

Commit 0eee5da

Browse files
debris5chdn
authored and committed
ethcore-io retries failed work steal (#9651)
* ethcore-io uses newer version of crossbeam && retries failed work steal
* ethcore-io non-mio service uses newer crossbeam
1 parent 723cb33 commit 0eee5da

File tree

7 files changed

+29
-28
lines changed

7 files changed

+29
-28
lines changed

Cargo.lock

Lines changed: 1 addition & 1 deletion
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

ethcore/src/engines/tendermint/mod.rs

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -693,7 +693,6 @@ impl Engine<EthereumMachine> for Tendermint {
693693
}
694694

695695
fn stop(&self) {
696-
self.step_service.stop()
697696
}
698697

699698
fn is_proposal(&self, header: &Header) -> bool {

util/io/Cargo.toml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -9,7 +9,7 @@ authors = ["Parity Technologies <admin@parity.io>"]
99
[dependencies]
1010
fnv = "1.0"
1111
mio = { version = "0.6.8", optional = true }
12-
crossbeam = "0.3"
12+
crossbeam-deque = "0.6"
1313
parking_lot = "0.6"
1414
log = "0.4"
1515
slab = "0.4"

util/io/src/lib.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -74,7 +74,7 @@ extern crate mio;
7474
#[macro_use]
7575
extern crate log as rlog;
7676
extern crate slab;
77-
extern crate crossbeam;
77+
extern crate crossbeam_deque as deque;
7878
extern crate parking_lot;
7979
extern crate num_cpus;
8080
extern crate timer;

util/io/src/service_mio.rs

Lines changed: 7 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -20,7 +20,7 @@ use std::collections::HashMap;
2020
use mio::*;
2121
use mio::timer::{Timeout};
2222
use mio::deprecated::{EventLoop, Handler, Sender, EventLoopBuilder};
23-
use crossbeam::sync::chase_lev;
23+
use deque;
2424
use slab::Slab;
2525
use {IoError, IoHandler};
2626
use worker::{Worker, Work, WorkType};
@@ -184,7 +184,7 @@ pub struct IoManager<Message> where Message: Send + Sync {
184184
timers: Arc<RwLock<HashMap<HandlerId, UserTimer>>>,
185185
handlers: Arc<RwLock<Slab<Arc<IoHandler<Message>>>>>,
186186
workers: Vec<Worker>,
187-
worker_channel: chase_lev::Worker<Work<Message>>,
187+
worker_channel: deque::Worker<Work<Message>>,
188188
work_ready: Arc<Condvar>,
189189
}
190190

@@ -194,7 +194,7 @@ impl<Message> IoManager<Message> where Message: Send + Sync + 'static {
194194
event_loop: &mut EventLoop<IoManager<Message>>,
195195
handlers: Arc<RwLock<Slab<Arc<IoHandler<Message>>>>>
196196
) -> Result<(), IoError> {
197-
let (worker, stealer) = chase_lev::deque();
197+
let (worker, stealer) = deque::fifo();
198198
let num_workers = 4;
199199
let work_ready_mutex = Arc::new(Mutex::new(()));
200200
let work_ready = Arc::new(Condvar::new());
@@ -430,7 +430,7 @@ impl<Message> IoChannel<Message> where Message: Send + Sync + 'static {
430430
/// General IO Service. Starts an event loop and dispatches IO requests.
431431
/// 'Message' is a notification message type
432432
pub struct IoService<Message> where Message: Send + Sync + 'static {
433-
thread: Mutex<Option<JoinHandle<()>>>,
433+
thread: Option<JoinHandle<()>>,
434434
host_channel: Mutex<Sender<IoMessage<Message>>>,
435435
handlers: Arc<RwLock<Slab<Arc<IoHandler<Message>>>>>,
436436
}
@@ -448,19 +448,19 @@ impl<Message> IoService<Message> where Message: Send + Sync + 'static {
448448
IoManager::<Message>::start(&mut event_loop, h).expect("Error starting IO service");
449449
});
450450
Ok(IoService {
451-
thread: Mutex::new(Some(thread)),
451+
thread: Some(thread),
452452
host_channel: Mutex::new(channel),
453453
handlers: handlers,
454454
})
455455
}
456456

457-
pub fn stop(&self) {
457+
pub fn stop(&mut self) {
458458
trace!(target: "shutdown", "[IoService] Closing...");
459459
// Clear handlers so that shared pointers are not stuck on stack
460460
// in Channel::send_sync
461461
self.handlers.write().clear();
462462
self.host_channel.lock().send(IoMessage::Shutdown).unwrap_or_else(|e| warn!("Error on IO service shutdown: {:?}", e));
463-
if let Some(thread) = self.thread.lock().take() {
463+
if let Some(thread) = self.thread.take() {
464464
thread.join().unwrap_or_else(|e| {
465465
debug!(target: "shutdown", "Error joining IO service event loop thread: {:?}", e);
466466
});

util/io/src/service_non_mio.rs

Lines changed: 11 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -16,7 +16,7 @@
1616

1717
use std::sync::{Arc, Weak};
1818
use std::thread;
19-
use crossbeam::sync::chase_lev;
19+
use deque;
2020
use slab::Slab;
2121
use fnv::FnvHashMap;
2222
use {IoError, IoHandler};
@@ -198,7 +198,7 @@ struct Shared<Message> where Message: Send + Sync + 'static {
198198
// necessary.
199199
timers: Mutex<FnvHashMap<TimerToken, TimerGuard>>,
200200
// Channel used to send work to the worker threads.
201-
channel: Mutex<Option<chase_lev::Worker<WorkTask<Message>>>>,
201+
channel: Mutex<Option<deque::Worker<WorkTask<Message>>>>,
202202
}
203203

204204
// Messages used to communicate with the event loop from other threads.
@@ -224,7 +224,7 @@ impl<Message> Clone for WorkTask<Message> where Message: Send + Sized {
224224
impl<Message> IoService<Message> where Message: Send + Sync + 'static {
225225
/// Starts IO event loop
226226
pub fn start() -> Result<IoService<Message>, IoError> {
227-
let (tx, rx) = chase_lev::deque();
227+
let (tx, rx) = deque::fifo();
228228

229229
let shared = Arc::new(Shared {
230230
handlers: RwLock::new(Slab::with_capacity(MAX_HANDLERS)),
@@ -251,7 +251,7 @@ impl<Message> IoService<Message> where Message: Send + Sync + 'static {
251251
}
252252

253253
/// Stops the IO service.
254-
pub fn stop(&self) {
254+
pub fn stop(&mut self) {
255255
trace!(target: "shutdown", "[IoService] Closing...");
256256
// Clear handlers so that shared pointers are not stuck on stack
257257
// in Channel::send_sync
@@ -307,23 +307,23 @@ impl<Message> Drop for IoService<Message> where Message: Send + Sync {
307307
}
308308
}
309309

310-
fn do_work<Message>(shared: &Arc<Shared<Message>>, rx: chase_lev::Stealer<WorkTask<Message>>)
311-
where Message: Send + Sync + 'static
310+
fn do_work<Message>(shared: &Arc<Shared<Message>>, rx: deque::Stealer<WorkTask<Message>>)
311+
where Message: Send + Sync + 'static
312312
{
313313
loop {
314314
match rx.steal() {
315-
chase_lev::Steal::Abort => continue,
316-
chase_lev::Steal::Empty => thread::park(),
317-
chase_lev::Steal::Data(WorkTask::Shutdown) => break,
318-
chase_lev::Steal::Data(WorkTask::UserMessage(message)) => {
315+
deque::Steal::Retry => continue,
316+
deque::Steal::Empty => thread::park(),
317+
deque::Steal::Data(WorkTask::Shutdown) => break,
318+
deque::Steal::Data(WorkTask::UserMessage(message)) => {
319319
for id in 0 .. MAX_HANDLERS {
320320
if let Some(handler) = shared.handlers.read().get(id) {
321321
let ctxt = IoContext { handler: id, shared: shared.clone() };
322322
handler.message(&ctxt, &message);
323323
}
324324
}
325325
},
326-
chase_lev::Steal::Data(WorkTask::TimerTrigger { handler_id, token }) => {
326+
deque::Steal::Data(WorkTask::TimerTrigger { handler_id, token }) => {
327327
if let Some(handler) = shared.handlers.read().get(handler_id) {
328328
let ctxt = IoContext { handler: handler_id, shared: shared.clone() };
329329
handler.timeout(&ctxt, token);

util/io/src/worker.rs

Lines changed: 8 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -17,7 +17,7 @@
1717
use std::sync::Arc;
1818
use std::thread::{JoinHandle, self};
1919
use std::sync::atomic::{AtomicBool, Ordering as AtomicOrdering};
20-
use crossbeam::sync::chase_lev;
20+
use deque;
2121
use service_mio::{HandlerId, IoChannel, IoContext};
2222
use IoHandler;
2323
use LOCAL_STACK_SIZE;
@@ -53,7 +53,7 @@ pub struct Worker {
5353
impl Worker {
5454
/// Creates a new worker instance.
5555
pub fn new<Message>(index: usize,
56-
stealer: chase_lev::Stealer<Work<Message>>,
56+
stealer: deque::Stealer<Work<Message>>,
5757
channel: IoChannel<Message>,
5858
wait: Arc<Condvar>,
5959
wait_mutex: Arc<Mutex<()>>,
@@ -75,8 +75,9 @@ impl Worker {
7575
worker
7676
}
7777

78-
fn work_loop<Message>(stealer: chase_lev::Stealer<Work<Message>>,
79-
channel: IoChannel<Message>, wait: Arc<Condvar>,
78+
fn work_loop<Message>(stealer: deque::Stealer<Work<Message>>,
79+
channel: IoChannel<Message>,
80+
wait: Arc<Condvar>,
8081
wait_mutex: Arc<Mutex<()>>,
8182
deleting: Arc<AtomicBool>)
8283
where Message: Send + Sync + 'static {
@@ -91,8 +92,9 @@ impl Worker {
9192

9293
while !deleting.load(AtomicOrdering::Acquire) {
9394
match stealer.steal() {
94-
chase_lev::Steal::Data(work) => Worker::do_work(work, channel.clone()),
95-
_ => break,
95+
deque::Steal::Data(work) => Worker::do_work(work, channel.clone()),
96+
deque::Steal::Retry => {},
97+
deque::Steal::Empty => break,
9698
}
9799
}
98100
}

0 commit comments

Comments
 (0)