feat: Support DataTypes of Date and Time #657

Merged 41 commits on Feb 24, 2023
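The feature adds two datum kinds, Date and Time, to the engine. As orientation only, here is a minimal self-contained Rust sketch of one plausible encoding consistent with the literals used in the updated tests (a date as an i32 day count since the Unix epoch, a time as an i64 millisecond count); the `DateDays` and `TimeMillis` types are hypothetical and are not the crate's actual `Datum` variants.

```rust
// Standalone illustration; `DateDays` / `TimeMillis` are invented names,
// NOT types from the CeresDB codebase.

/// Hypothetical date value: days since 1970-01-01 (the unit the test
/// literals such as `1000` appear to use).
#[derive(Debug, Clone, Copy, PartialEq)]
struct DateDays(i32);

/// Hypothetical time-of-day value: milliseconds, possibly negative
/// (one of the commits is "support negative time").
#[derive(Debug, Clone, Copy, PartialEq)]
struct TimeMillis(i64);

impl TimeMillis {
    /// Render as HH:MM:SS.mmm, normalizing negative values into a day.
    fn to_display(self) -> String {
        let total_ms = self.0.rem_euclid(24 * 3_600 * 1_000);
        let ms = total_ms % 1_000;
        let s = (total_ms / 1_000) % 60;
        let m = (total_ms / 60_000) % 60;
        let h = total_ms / 3_600_000;
        format!("{h:02}:{m:02}:{s:02}.{ms:03}")
    }
}

fn main() {
    // 1_000_000 ms == 00:16:40.000; this literal appears throughout the new tests.
    assert_eq!(TimeMillis(1_000_000).to_display(), "00:16:40.000");
    println!("{:?} / {}", DateDays(1_000), TimeMillis(1_000_000).to_display());
}
```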
Commits (41)
3de2708
add date type
MichaelLeeHZ Feb 8, 2023
8c22612
support date
MichaelLeeHZ Feb 16, 2023
103a151
suppport date
MichaelLeeHZ Feb 16, 2023
08fa6e6
dev finish, ready to test
MichaelLeeHZ Feb 16, 2023
2970f21
impl time
MichaelLeeHZ Feb 20, 2023
c7269a4
Merge remote-tracking branch 'origin' into feature_support_date
MichaelLeeHZ Feb 20, 2023
4f9fe5b
proto dependency
MichaelLeeHZ Feb 20, 2023
a3b4022
import specify proto
MichaelLeeHZ Feb 20, 2023
8c73cbc
add some test
MichaelLeeHZ Feb 20, 2023
864cb72
remove blank lines
MichaelLeeHZ Feb 20, 2023
6e1e2f1
remove blank lines
MichaelLeeHZ Feb 20, 2023
dd24b8a
remove blank lines
MichaelLeeHZ Feb 20, 2023
34b1558
fmt
MichaelLeeHZ Feb 20, 2023
b3b8c69
add test case
MichaelLeeHZ Feb 20, 2023
f200583
fmt
MichaelLeeHZ Feb 20, 2023
147f321
addd test case
MichaelLeeHZ Feb 20, 2023
fd6cb82
fmt
MichaelLeeHZ Feb 20, 2023
36a1747
udpate ut
MichaelLeeHZ Feb 20, 2023
604996d
fix
MichaelLeeHZ Feb 20, 2023
886154f
fix
MichaelLeeHZ Feb 21, 2023
35553f8
Update analytic_engine/src/table/data.rs
MichaelLeeHZ Feb 21, 2023
cd9777d
fix
MichaelLeeHZ Feb 21, 2023
68fef30
Merge branch 'feature_support_date' of github.com:MachaelLee/ceresdb …
MichaelLeeHZ Feb 21, 2023
97355b4
add some comment
MichaelLeeHZ Feb 21, 2023
78a63a3
Update common_types/src/datum.rs
MichaelLeeHZ Feb 21, 2023
b5a1588
Update common_types/src/datum.rs
MichaelLeeHZ Feb 21, 2023
2ad1821
support negative time
MichaelLeeHZ Feb 21, 2023
5d517fd
Update common_types/src/datum.rs
MichaelLeeHZ Feb 23, 2023
e0582c2
add some test
MichaelLeeHZ Feb 23, 2023
a9b8cc7
Merge branch 'main' into feature_support_date
MichaelLeeHZ Feb 23, 2023
5a792f2
support time
MichaelLeeHZ Feb 23, 2023
b4fd78a
test
MichaelLeeHZ Feb 23, 2023
52029bc
Merge branch 'feature_support_date' of github.com:MachaelLee/ceresdb …
MichaelLeeHZ Feb 23, 2023
e7e9874
Merge remote-tracking branch 'origin/feature_support_date' into featu…
MichaelLeeHZ Feb 23, 2023
1d4f292
support mysql time
MichaelLeeHZ Feb 23, 2023
87120cd
format date time
MichaelLeeHZ Feb 23, 2023
b81e7a4
add some test
MichaelLeeHZ Feb 23, 2023
75515f6
Update common_types/src/datum.rs
MichaelLeeHZ Feb 23, 2023
264a199
Update common_types/src/datum.rs
MichaelLeeHZ Feb 23, 2023
b35f31a
add some test
MichaelLeeHZ Feb 23, 2023
265d2c1
Update common_types/src/datum.rs
MichaelLeeHZ Feb 23, 2023
28 changes: 14 additions & 14 deletions Cargo.lock

Some generated files are not rendered by default.

2 changes: 1 addition & 1 deletion Cargo.toml
@@ -117,7 +117,7 @@ zstd = { version = "0.12", default-features = false }

[workspace.dependencies.ceresdbproto]
git = "https://github.com/CeresDB/ceresdbproto.git"
rev = "4dbd2b36262affd83f8a2d4a99273e43aebcb067"
rev = "9244c64da637bc1cb30feacb9165a9414d9a53c5"

[dependencies]
analytic_engine = { workspace = true }
29 changes: 25 additions & 4 deletions analytic_engine/src/instance/flush_compaction.rs
@@ -1152,11 +1152,32 @@ mod tests {

#[test]
fn test_split_record_batch_with_time_ranges() {
let rows0 = vec![build_row(b"binary key", 20, 10.0, "string value")];
let rows1 = vec![build_row(b"binary key1", 120, 11.0, "string value 1")];
let rows0 = vec![build_row(
b"binary key",
20,
10.0,
"string value",
1000,
1_000_000,
)];
let rows1 = vec![build_row(
b"binary key1",
120,
11.0,
"string value 1",
1000,
1_000_000,
)];
let rows2 = vec![
build_row_opt(b"binary key2", 220, None, Some("string value 2")),
build_row_opt(b"binary key3", 250, Some(13.0), None),
build_row_opt(
b"binary key2",
220,
None,
Some("string value 2"),
Some(1000),
None,
),
build_row_opt(b"binary key3", 250, Some(13.0), None, None, Some(1_000_000)),
];

let rows = vec![rows0.clone(), rows1.clone(), rows2.clone()]
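The call sites in this hunk show the test helpers `build_row` and `build_row_opt` growing two trailing arguments for the new date and time columns. Their real definitions are not part of the rendered diff, so the sketch below is a hypothetical stand-in: the `Datum` enum, the `Row` alias, and both helper signatures are assumptions made for illustration only.

```rust
// Hypothetical sketch only; the real helpers and Datum type live in the
// repository's test utilities and may differ in shape and naming.
#[derive(Debug, Clone, PartialEq)]
enum Datum {
    Varbinary(Vec<u8>),
    Timestamp(i64),
    Double(f64),
    String(String),
    Date(i32), // new: day count
    Time(i64), // new: millisecond count
    Null,
}

type Row = Vec<Datum>;

// All columns present.
fn build_row(key: &[u8], ts: i64, field1: f64, field2: &str, date: i32, time: i64) -> Row {
    vec![
        Datum::Varbinary(key.to_vec()),
        Datum::Timestamp(ts),
        Datum::Double(field1),
        Datum::String(field2.to_string()),
        Datum::Date(date),
        Datum::Time(time),
    ]
}

// Nullable variant: `None` becomes a null datum, mirroring the
// `build_row_opt(..., Some(1000), None)` calls above.
fn build_row_opt(
    key: &[u8],
    ts: i64,
    field1: Option<f64>,
    field2: Option<&str>,
    date: Option<i32>,
    time: Option<i64>,
) -> Row {
    vec![
        Datum::Varbinary(key.to_vec()),
        Datum::Timestamp(ts),
        field1.map_or(Datum::Null, Datum::Double),
        field2.map_or(Datum::Null, |s| Datum::String(s.to_string())),
        date.map_or(Datum::Null, Datum::Date),
        time.map_or(Datum::Null, Datum::Time),
    ]
}

fn main() {
    let row = build_row(b"binary key", 20, 10.0, "string value", 1000, 1_000_000);
    assert_eq!(row.len(), 6);
    let row_opt = build_row_opt(b"binary key3", 250, Some(13.0), None, None, Some(1_000_000));
    assert_eq!(row_opt[3], Datum::Null);
}
```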
2 changes: 1 addition & 1 deletion analytic_engine/src/manifest/details.rs
@@ -750,7 +750,7 @@ mod tests {
}
builder
.add_normal_column(
column_schema::Builder::new("field3".to_string(), DatumKind::String)
column_schema::Builder::new("field5".to_string(), DatumKind::String)
.build()
.expect("should succeed build column schema"),
)
70 changes: 49 additions & 21 deletions analytic_engine/src/memtable/skiplist/mod.rs
@@ -206,12 +206,12 @@ mod tests {
reverse: false,
},
vec![
build_row(b"a", 1, 10.0, "v1"),
build_row(b"b", 2, 10.0, "v2"),
build_row(b"c", 3, 10.0, "v3"),
build_row(b"d", 4, 10.0, "v4"),
build_row(b"e", 5, 10.0, "v5"),
build_row(b"f", 6, 10.0, "v6"),
build_row(b"a", 1, 10.0, "v1", 1000, 1_000_000),
build_row(b"b", 2, 10.0, "v2", 2000, 2_000_000),
build_row(b"c", 3, 10.0, "v3", 3000, 3_000_000),
build_row(b"d", 4, 10.0, "v4", 4000, 4_000_000),
build_row(b"e", 5, 10.0, "v5", 5000, 5_000_000),
build_row(b"f", 6, 10.0, "v6", 6000, 6_000_000),
],
),
(
@@ -225,10 +225,10 @@
reverse: false,
},
vec![
build_row(b"a", 1, 10.0, "v1"),
build_row(b"b", 2, 10.0, "v2"),
build_row(b"c", 3, 10.0, "v3"),
build_row(b"d", 4, 10.0, "v4"),
build_row(b"a", 1, 10.0, "v1", 1000, 1_000_000),
build_row(b"b", 2, 10.0, "v2", 2000, 2_000_000),
build_row(b"c", 3, 10.0, "v3", 3000, 3_000_000),
build_row(b"d", 4, 10.0, "v4", 4000, 4_000_000),
],
),
(
@@ -243,9 +243,9 @@
reverse: false,
},
vec![
build_row(b"a", 1, 10.0, "v1"),
build_row(b"b", 2, 10.0, "v2"),
build_row(b"c", 3, 10.0, "v3"),
build_row(b"a", 1, 10.0, "v1", 1000, 1_000_000),
build_row(b"b", 2, 10.0, "v2", 2000, 2_000_000),
build_row(b"c", 3, 10.0, "v3", 3000, 3_000_000),
],
),
];
@@ -303,17 +303,45 @@

let mut ctx = PutContext::new(IndexInWriterSchema::for_same_schema(schema.num_columns()));
let input = vec![
(KeySequence::new(1, 1), build_row(b"a", 1, 10.0, "v1")),
(KeySequence::new(1, 2), build_row(b"b", 2, 10.0, "v2")),
(
KeySequence::new(1, 1),
build_row(b"a", 1, 10.0, "v1", 1000, 1_000_000),
),
(
KeySequence::new(1, 2),
build_row(b"b", 2, 10.0, "v2", 2000, 2_000_000),
),
(
KeySequence::new(1, 3),
build_row(b"c", 3, 10.0, "primary_key same with next row"),
build_row(
b"c",
3,
10.0,
"primary_key same with next row",
3000,
3_000_000,
),
),
(
KeySequence::new(1, 4),
build_row(b"c", 3, 10.0, "v3", 3000, 3_000_000),
),
(
KeySequence::new(2, 1),
build_row(b"d", 4, 10.0, "v4", 4000, 4_000_000),
),
(
KeySequence::new(2, 1),
build_row(b"e", 5, 10.0, "v5", 5000, 5_000_000),
),
(
KeySequence::new(2, 3),
build_row(b"f", 6, 10.0, "v6", 6000, 6_000_000),
),
(
KeySequence::new(3, 4),
build_row(b"g", 7, 10.0, "v7", 7000, 7_000_000),
),
(KeySequence::new(1, 4), build_row(b"c", 3, 10.0, "v3")),
(KeySequence::new(2, 1), build_row(b"d", 4, 10.0, "v4")),
(KeySequence::new(2, 1), build_row(b"e", 5, 10.0, "v5")),
(KeySequence::new(2, 3), build_row(b"f", 6, 10.0, "v6")),
(KeySequence::new(3, 4), build_row(b"g", 7, 10.0, "v7")),
];

for (seq, row) in input {
30 changes: 24 additions & 6 deletions analytic_engine/src/row_iter/chain.rs
@@ -334,10 +334,22 @@
async fn test_chain_multiple_streams() {
let testcases = vec![
// (sequence, rows)
(10, vec![build_row(b"key4", 1000000, 10.0, "v4")]),
(20, vec![build_row(b"key2", 1000000, 10.0, "v2")]),
(100, vec![build_row(b"key3", 1000000, 10.0, "v3")]),
(1, vec![build_row(b"key1", 1000000, 10.0, "v1")]),
(
10,
vec![build_row(b"key4", 1000000, 10.0, "v4", 1000, 1_000_000)],
),
(
20,
vec![build_row(b"key2", 1000000, 10.0, "v2", 2000, 2_000_000)],
),
(
100,
vec![build_row(b"key3", 1000000, 10.0, "v3", 3000, 3_000_000)],
),
(
1,
vec![build_row(b"key1", 1000000, 10.0, "v1", 4000, 4_000_000)],
),
];
run_and_check(testcases).await;
}
@@ -364,10 +376,16 @@
async fn test_chain_half_empty_streams() {
let testcases = vec![
// (sequence, rows)
(10, vec![build_row(b"key4", 1000000, 10.0, "v4")]),
(
10,
vec![build_row(b"key4", 1000000, 10.0, "v4", 1000, 1_000_000)],
),
(20, vec![]),
(100, vec![]),
(1, vec![build_row(b"key1", 1000000, 10.0, "v1")]),
(
1,
vec![build_row(b"key1", 1000000, 10.0, "v1", 1000, 1_000_000)],
),
];
run_and_check(testcases).await;
}
22 changes: 11 additions & 11 deletions analytic_engine/src/row_iter/dedup.rs
@@ -212,18 +212,18 @@ mod tests {
build_record_batch_with_key(
schema.clone(),
vec![
build_row(b"a", 1, 10.0, "v1"),
build_row(b"a", 1, 10.0, "v"),
build_row(b"a", 2, 10.0, "v2"),
build_row(b"a", 1, 10.0, "v1", 1000, 1_000_000),
build_row(b"a", 1, 10.0, "v", 1000, 1_000_000),
build_row(b"a", 2, 10.0, "v2", 2000, 2_000_000),
],
),
build_record_batch_with_key(
schema,
vec![
build_row(b"a", 2, 10.0, "v"),
build_row(b"a", 3, 10.0, "v3"),
build_row(b"a", 3, 10.0, "v"),
build_row(b"a", 4, 10.0, "v4"),
build_row(b"a", 2, 10.0, "v", 2000, 2_000_000),
build_row(b"a", 3, 10.0, "v3", 3000, 3_000_000),
build_row(b"a", 3, 10.0, "v", 3000, 3_000_000),
build_row(b"a", 4, 10.0, "v4", 4000, 4_000_000),
],
),
],
@@ -233,10 +233,10 @@
check_iterator(
&mut iter,
vec![
build_row(b"a", 1, 10.0, "v1"),
build_row(b"a", 2, 10.0, "v2"),
build_row(b"a", 3, 10.0, "v3"),
build_row(b"a", 4, 10.0, "v4"),
build_row(b"a", 1, 10.0, "v1", 1000, 1_000_000),
build_row(b"a", 2, 10.0, "v2", 2000, 2_000_000),
build_row(b"a", 3, 10.0, "v3", 3000, 3_000_000),
build_row(b"a", 4, 10.0, "v4", 4000, 4_000_000),
],
)
.await;
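What the assertions in this test encode: when consecutive rows repeat the primary-key columns (here the varbinary key plus the timestamp), the dedup iterator yields only the first occurrence, so the duplicate `"v"` rows disappear. The snippet below is a standalone sketch of that rule under those assumptions, not the crate's actual dedup iterator implementation.

```rust
// Standalone sketch of "keep the first row per primary key"; not the
// engine's dedup iterator, just the behaviour the expectations above imply.
fn dedup_first_by_key<T: Clone, K: PartialEq>(rows: &[T], key_of: impl Fn(&T) -> K) -> Vec<T> {
    let mut out = Vec::new();
    let mut last_key: Option<K> = None;
    for row in rows {
        let key = key_of(row);
        // Input arrives sorted by primary key, so comparing against the
        // previous key is enough to drop duplicates.
        if last_key.as_ref() != Some(&key) {
            out.push(row.clone());
            last_key = Some(key);
        }
    }
    out
}

fn main() {
    // (key, timestamp, string field) triples mirroring the rows fed in above.
    let rows = [
        ("a", 1, "v1"),
        ("a", 1, "v"),
        ("a", 2, "v2"),
        ("a", 2, "v"),
        ("a", 3, "v3"),
        ("a", 3, "v"),
        ("a", 4, "v4"),
    ];
    let kept = dedup_first_by_key(&rows, |r| (r.0, r.1));
    let values: Vec<_> = kept.iter().map(|r| r.2).collect();
    assert_eq!(values, vec!["v1", "v2", "v3", "v4"]);
}
```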