feat: Improve performance of thetasketch distinct #1102

Merged

Changes from 6 commits
4 changes: 2 additions & 2 deletions Cargo.lock

Some generated files are not rendered by default.

100 changes: 100 additions & 0 deletions common_types/src/datum.rs
@@ -90,6 +90,23 @@ pub enum Error {

pub type Result<T> = std::result::Result<T, Error>;

// Float wrapper over f32/f64. We cannot implement std::hash::Hash for floats
// directly, so we go through this wrapper type. Forked from datafusion.
struct Fl<T>(T);

macro_rules! hash_float_value {
($(($t:ty, $i:ty)),+) => {
$(impl std::hash::Hash for Fl<$t> {
#[inline]
fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
state.write(&<$i>::from_ne_bytes(self.0.to_ne_bytes()).to_ne_bytes())
}
})+
};
}

hash_float_value!((f64, u64), (f32, u32));
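
For reference, the invocation above generates one `Hash` impl per `(float, int)` pair; the `f32` case expands to roughly the following (an illustrative expansion, not an extra change in this diff):

impl std::hash::Hash for Fl<f32> {
    #[inline]
    fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
        // Reinterpret the float's native-endian bytes as a u32 and feed those
        // bytes to the hasher, so equal bit patterns hash equally.
        state.write(&u32::from_ne_bytes(self.0.to_ne_bytes()).to_ne_bytes())
    }
}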

// FIXME(yingwen): How to handle timezone?

/// Data type of datum
@@ -1138,6 +1155,37 @@ impl<'a> DatumView<'a> {
}
}
}

pub fn as_str(&self) -> Option<&str> {
match self {
DatumView::String(v) => Some(v),
_ => None,
}
}
}

impl<'a> std::hash::Hash for DatumView<'a> {
fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
match self {
DatumView::Null => 1.hash(state),
DatumView::Timestamp(v) => v.hash(state),
DatumView::Double(v) => Fl(*v).hash(state),
DatumView::Float(v) => Fl(*v).hash(state),
DatumView::Varbinary(v) => v.hash(state),
DatumView::String(v) => v.hash(state),
DatumView::UInt64(v) => v.hash(state),
DatumView::UInt32(v) => v.hash(state),
DatumView::UInt16(v) => v.hash(state),
DatumView::UInt8(v) => v.hash(state),
DatumView::Int64(v) => v.hash(state),
DatumView::Int32(v) => v.hash(state),
DatumView::Int16(v) => v.hash(state),
DatumView::Int8(v) => v.hash(state),
DatumView::Boolean(v) => v.hash(state),
DatumView::Date(v) => v.hash(state),
DatumView::Time(v) => v.hash(state),
}
}
}

impl DatumKind {
@@ -1359,6 +1407,11 @@ impl From<DatumKind> for DataType {

#[cfg(test)]
mod tests {
use std::{
collections::hash_map::DefaultHasher,
hash::{Hash, Hasher},
};

use super::*;

#[test]
@@ -1581,4 +1634,51 @@ mod tests {
}
}
}

fn get_hash<V: Hash>(v: &V) -> u64 {
let mut hasher = DefaultHasher::new();
v.hash(&mut hasher);
hasher.finish()
}

macro_rules! assert_datum_view_hash {
($v:expr, $Kind: ident) => {
let expected = get_hash(&DatumView::$Kind($v));
let actual = get_hash(&$v);
assert_eq!(expected, actual);
};
}

#[test]
fn test_hash() {
assert_datum_view_hash!(Timestamp::new(42), Timestamp);
assert_datum_view_hash!(42_i32, Date);
assert_datum_view_hash!(424_i64, Time);
assert_datum_view_hash!(b"abcde", Varbinary);
assert_datum_view_hash!("12345", String);
assert_datum_view_hash!(42424242_u64, UInt64);
assert_datum_view_hash!(424242_u32, UInt32);
assert_datum_view_hash!(4242_u16, UInt16);
assert_datum_view_hash!(42_u8, UInt8);
assert_datum_view_hash!(-42424242_i64, Int64);
assert_datum_view_hash!(-42424242_i32, Int32);
assert_datum_view_hash!(-4242_i16, Int16);
assert_datum_view_hash!(-42_i8, Int8);
assert_datum_view_hash!(true, Boolean);

// Null case.
let null_expected = get_hash(&1);
let null_actual = get_hash(&DatumView::Null);
assert_eq!(null_expected, null_actual);

// Float case.
let float_expected = get_hash(&Fl(42.0_f32));
let float_actual = get_hash(&DatumView::Float(42.0));
assert_eq!(float_expected, float_actual);

// Double case.
let double_expected = get_hash(&Fl(-42.0_f64));
let double_actual = get_hash(&DatumView::Double(-42.0));
assert_eq!(double_expected, double_actual);
}
}
2 changes: 1 addition & 1 deletion df_operator/Cargo.toml
@@ -18,7 +18,7 @@ chrono = { workspace = true }
common_types = { workspace = true }
datafusion = { workspace = true }
generic_error = { workspace = true }
hyperloglog = { git = "https://github.com/jedisct1/rust-hyperloglog.git", rev = "ed1b9b915072ba90c6b93fbfbba30c03215ba682", features = ["with_serde"] }
hyperloglog = { git = "https://github.com/jedisct1/rust-hyperloglog.git", rev = "425487ce910f26636fbde8c4d640b538431aad50", features = ["with_serde"] }
macros = { workspace = true }
smallvec = { workspace = true }
snafu = { workspace = true }
72 changes: 54 additions & 18 deletions df_operator/src/aggregate.rs
@@ -5,6 +5,7 @@
use std::{fmt, ops::Deref};

use arrow::array::ArrayRef as DfArrayRef;
use common_types::{column::ColumnBlock, datum::DatumView};
use datafusion::{
error::{DataFusionError, Result as DfResult},
physical_plan::Accumulator as DfAccumulator,
@@ -14,7 +15,7 @@ use generic_error::GenericError;
use macros::define_result;
use snafu::Snafu;

use crate::functions::{ScalarValue, ScalarValueRef};
use crate::functions::ScalarValue;

#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]
@@ -28,6 +29,7 @@ pub enum Error {

define_result!(Error);

// TODO: Use `Datum` rather than `ScalarValue`.
pub struct State(Vec<DfScalarValue>);

impl State {
@@ -43,11 +45,11 @@ impl From<ScalarValue> for State {
}
}

pub struct Input<'a>(&'a [DfScalarValue]);
pub struct Input<'a>(&'a [DatumView<'a>]);

impl<'a> Input<'a> {
pub fn iter(&self) -> impl Iterator<Item = ScalarValueRef> {
self.0.iter().map(ScalarValueRef::from)
pub fn iter(&self) -> impl Iterator<Item = &DatumView<'a>> {
self.0.iter()
}

pub fn len(&self) -> usize {
@@ -58,8 +60,8 @@ impl<'a> Input<'a> {
self.0.is_empty()
}

pub fn value(&self, index: usize) -> ScalarValueRef {
ScalarValueRef::from(&self.0[index])
pub fn value(&self, index: usize) -> &DatumView<'a> {
self.0.get(index).unwrap()
}
}

@@ -85,6 +87,7 @@ pub trait Accumulator: Send + Sync + fmt::Debug {
/// Returns the state of the accumulator at the end of the accumulation.
// In the case of an average, where we track `sum` and `n`, this function
// should return a vector of two values: sum and n.
// TODO: Use `Datum` rather than `ScalarValue`.
fn state(&self) -> Result<State>;

/// updates the accumulator's state from a vector of scalars.
@@ -94,6 +97,7 @@ pub trait Accumulator: Send + Sync + fmt::Debug {
fn merge(&mut self, states: StateRef) -> Result<()>;

/// returns its value based on its current state.
// TODO: Use `Datum` rather than `ScalarValue`.
fn evaluate(&self) -> Result<ScalarValue>;
}

@@ -120,12 +124,28 @@ impl<T: Accumulator> DfAccumulator for ToDfAccumulator<T> {
if values.is_empty() {
return Ok(());
};
(0..values[0].len()).try_for_each(|index| {
let v = values
.iter()
.map(|array| DfScalarValue::try_from_array(array, index))
.collect::<DfResult<Vec<DfScalarValue>>>()?;
let input = Input(&v);

let column_blocks = values
.iter()
.map(|array| {
ColumnBlock::try_cast_arrow_array_ref(array).map_err(|e| {
DataFusionError::Execution(format!(
"Accumulator failed to cast arrow array to column block, column, err:{e}"
))
})
})
.collect::<DfResult<Vec<_>>>()?;

let mut row = Vec::with_capacity(column_blocks.len());
let num_rows = column_blocks[0].num_rows();
(0..num_rows).try_for_each(|index| {
row.clear();

for column_block in &column_blocks {
let datum_view = column_block.datum_view(index);
row.push(datum_view);
}
let input = Input(&row);

self.accumulator.update(input).map_err(|e| {
DataFusionError::Execution(format!("Accumulator failed to update, err:{e}"))
@@ -137,12 +157,28 @@ impl<T: Accumulator> DfAccumulator for ToDfAccumulator<T> {
if states.is_empty() {
return Ok(());
};
(0..states[0].len()).try_for_each(|index| {
let v = states
.iter()
.map(|array| DfScalarValue::try_from_array(array, index))
.collect::<DfResult<Vec<DfScalarValue>>>()?;
let state_ref = StateRef(Input(&v));

let column_blocks = states
.iter()
.map(|array| {
ColumnBlock::try_cast_arrow_array_ref(array).map_err(|e| {
DataFusionError::Execution(format!(
"Accumulator failed to cast arrow array to column block, column, err:{e}"
))
})
})
.collect::<DfResult<Vec<_>>>()?;

let mut row = Vec::with_capacity(column_blocks.len());
let num_rows = column_blocks[0].num_rows();
(0..num_rows).try_for_each(|index| {
row.clear();

for column_block in &column_blocks {
let datum_view = column_block.datum_view(index);
row.push(datum_view);
}
let state_ref = StateRef(Input(&row));

self.accumulator.merge(state_ref).map_err(|e| {
DataFusionError::Execution(format!("Accumulator failed to merge, err:{e}"))
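
The two rewritten loops above share one idea: each arrow array is cast to a `ColumnBlock` once, and every cell is then read as a borrowed `DatumView` instead of being materialized into a `DfScalarValue`. A minimal sketch of how an accumulator body can consume those borrowed views (the `DistinctStrings` type and its `update_row` helper are hypothetical, not part of this PR):

use std::collections::HashSet;

use common_types::datum::DatumView;

// Hypothetical consumer: exact distinct count over string cells, reading each
// cell as a borrowed DatumView.
#[derive(Default)]
struct DistinctStrings {
    seen: HashSet<String>,
}

impl DistinctStrings {
    fn update_row(&mut self, row: &[DatumView<'_>]) {
        for datum_view in row {
            // as_str() borrows the string from the column block; only values
            // that are actually kept get copied into the set.
            if let Some(s) = datum_view.as_str() {
                self.seen.insert(s.to_string());
            }
        }
    }
}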
6 changes: 4 additions & 2 deletions df_operator/src/udfs/thetasketch_distinct.rs
@@ -103,13 +103,14 @@ struct HllDistinct {
hll: HyperLogLog,
}

// TODO(yingwen): Avoid base64 encode/decode if datafusion supports converting
// binary datatype to scalarvalue.
// TODO: maybe we can remove base64 encoding?
impl HllDistinct {
fn merge_impl(&mut self, states: StateRef) -> Result<()> {
// The states are serialized from the hll.
ensure!(states.len() == 1, InvalidStateLen);
let value_ref = states.value(0);
// Try to decode the hll from its base64 string form.
let hll_string = value_ref.as_str().context(StateNotString)?;
let hll_bytes = base64::decode(hll_string).context(DecodeBase64)?;
// Try to deserialize the hll.
@@ -132,6 +133,7 @@ impl fmt::Debug for HllDistinct {
}

impl Accumulator for HllDistinct {
// TODO: maybe we can remove base64 encoding?
fn state(&self) -> aggregate::Result<State> {
// Serialize `self.hll` to bytes.
let buf = bincode::serialize(&self.hll).box_err().context(GetState)?;
@@ -145,7 +147,7 @@ impl Accumulator for HllDistinct {
fn update(&mut self, values: Input) -> aggregate::Result<()> {
for value_ref in values.iter() {
// Insert value into hll.
self.hll.insert(&value_ref);
self.hll.insert(value_ref);
}

Ok(())
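
The `Hash` impl added to `DatumView` in datum.rs is what lets `self.hll.insert(value_ref)` above take the borrowed view directly. A minimal sketch of the idea, assuming the upstream hyperloglog crate's `HyperLogLog::new(error_rate)` and `len()` API (only `insert` appears in this diff; the `approx_distinct` helper is hypothetical):

use common_types::datum::DatumView;
use hyperloglog::HyperLogLog;

fn approx_distinct(values: &[&str]) -> f64 {
    // 1% target error rate, purely illustrative.
    let mut hll = HyperLogLog::new(0.01);
    for &v in values {
        // DatumView::String borrows the &str, so inserting costs a hash
        // computation but no extra allocation.
        hll.insert(&DatumView::String(v));
    }
    // Estimated number of distinct values inserted so far.
    hll.len()
}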
@@ -423,7 +423,7 @@ affected_rows: 400
SELECT thetasketch_distinct(`value`) FROM `02_function_thetasketch_distinct_table`;

thetasketch_distinct(02_function_thetasketch_distinct_table.value),
UInt64(147),
UInt64(148),


SELECT
@@ -439,7 +439,7 @@ ORDER BY
`arch` DESC;

arch,thetasketch_distinct(02_function_thetasketch_distinct_table.value),
String("x86"),UInt64(115),
String("x86"),UInt64(113),
String("arm"),UInt64(117),

