fix(offchain): fixed timestamp in graphql
The timestamp was interpreted in milliseconds, but it should be
interpreted in seconds.
Also removes the chrono dependency because it is not necessary.
gligneul committed Sep 4, 2023
1 parent 9436c7a commit 0e25296
Showing 14 changed files with 31 additions and 54 deletions.
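
For reference, a minimal standard-library sketch (not part of the commit) of the difference between the two interpretations:

```rust
use std::time::{Duration, UNIX_EPOCH};

fn main() {
    // Block timestamp as delivered by the base layer: seconds since the Unix epoch.
    let raw: u64 = 1676489717;

    // New behavior: interpret the raw value as seconds.
    let ts = UNIX_EPOCH + Duration::from_secs(raw);
    let secs = ts.duration_since(UNIX_EPOCH).unwrap().as_secs();
    assert_eq!(secs, raw); // the GraphQL API now returns 1676489717

    // The old code interpreted the same value as milliseconds, so the stored
    // time landed a few weeks after the epoch and the API returned raw / 1000
    // (1676489), which is what the updated test fixtures below used to expect.
    println!("seconds since epoch: {}", secs);
}
```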
4 changes: 4 additions & 0 deletions CHANGELOG.md
@@ -7,6 +7,10 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0

## [Unreleased]

### Fixed

- Fixed timestamp in GraphQL API

## [1.0.0] 2023-08-22

### Added
4 changes: 0 additions & 4 deletions offchain/Cargo.lock

Some generated files are not rendered by default.

1 change: 0 additions & 1 deletion offchain/Cargo.toml
@@ -35,7 +35,6 @@ axum = "0.6"
backoff = "0.4"
base64 = "0.21"
byteorder = "1.4"
chrono = "0.4"
clap = "4.3"
diesel = "2.1"
diesel_migrations = "2.1"
3 changes: 1 addition & 2 deletions offchain/data/Cargo.toml
@@ -9,10 +9,9 @@ redacted = { path = "../redacted" }

backoff.workspace = true
base64.workspace = true
chrono.workspace = true
clap = { workspace = true, features = ["derive", "env"] }
diesel_migrations.workspace = true
diesel = { workspace = true, features = ["postgres", "chrono", "r2d2"]}
diesel = { workspace = true, features = ["postgres", "r2d2"]}
snafu.workspace = true
tracing.workspace = true
urlencoding.workspace = true
2 changes: 1 addition & 1 deletion offchain/data/src/types.rs
@@ -19,7 +19,7 @@ pub struct Input {
pub msg_sender: Vec<u8>,
pub tx_hash: Vec<u8>,
pub block_number: i64,
pub timestamp: chrono::NaiveDateTime,
pub timestamp: std::time::SystemTime,
pub payload: Vec<u8>,
}

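
The `chrono` feature can be dropped here under the assumption that Diesel's Postgres backend already maps SQL `timestamp` columns to `std::time::SystemTime`; a sketch with an illustrative table definition (not the project's actual schema):

```rust
use diesel::prelude::*;
use std::time::SystemTime;

// Hypothetical table definition; the real schema lives in rollups-data.
diesel::table! {
    inputs (index) {
        index -> Int4,
        timestamp -> Timestamp,
    }
}

// With the plain "postgres" feature, Diesel can (de)serialize SystemTime
// for Timestamp columns, so no chrono types are needed.
#[derive(Queryable)]
struct InputRow {
    index: i32,
    timestamp: SystemTime,
}

fn main() {}
```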
9 changes: 4 additions & 5 deletions offchain/data/tests/repository.rs
@@ -2,7 +2,6 @@
// SPDX-License-Identifier: Apache-2.0 (see LICENSE)

use backoff::ExponentialBackoffBuilder;
use chrono::NaiveDateTime;
use diesel::pg::Pg;
use diesel::{
sql_query, Connection, PgConnection, QueryableByName, RunQueryDsl,
@@ -14,7 +13,7 @@ use rollups_data::{
Report, Repository, RepositoryConfig, Voucher,
};
use serial_test::serial;
use std::time::Duration;
use std::time::{Duration, UNIX_EPOCH};
use test_fixtures::DataFixture;
use testcontainers::clients::Cli;

@@ -69,7 +68,7 @@ pub fn insert_test_input(repo: &Repository) {
msg_sender: "msg-sender".as_bytes().to_vec(),
tx_hash: "tx-hash".as_bytes().to_vec(),
block_number: 0,
timestamp: NaiveDateTime::from_timestamp_millis(1676489717).unwrap(),
timestamp: UNIX_EPOCH + Duration::from_secs(1676489717),
payload: "input-0".as_bytes().to_vec(),
};

Expand All @@ -83,7 +82,7 @@ pub fn create_input() -> Input {
msg_sender: "msg-sender".as_bytes().to_vec(),
tx_hash: "tx-hash".as_bytes().to_vec(),
block_number: 0,
timestamp: NaiveDateTime::from_timestamp_millis(1676489717).unwrap(),
timestamp: UNIX_EPOCH + Duration::from_secs(1676489717),
payload: "input-0".as_bytes().to_vec(),
}
}
@@ -582,7 +581,7 @@ fn test_pagination_macro() {
msg_sender: "msg-sender".as_bytes().to_vec(),
tx_hash: "tx-hash".as_bytes().to_vec(),
block_number: 0,
timestamp: NaiveDateTime::from_timestamp_millis(1676489717).unwrap(),
timestamp: UNIX_EPOCH + Duration::from_secs(1676489717),
payload: "input-1".as_bytes().to_vec(),
};

1 change: 0 additions & 1 deletion offchain/graphql-server/Cargo.toml
@@ -32,6 +32,5 @@ tracing.workspace = true
test-fixtures = { path = "../test-fixtures" }

awc.workspace = true
chrono.workspace = true
serial_test.workspace = true
testcontainers.workspace = true
9 changes: 8 additions & 1 deletion offchain/graphql-server/src/schema/resolvers.rs
@@ -5,6 +5,7 @@ use juniper::{
graphql_object, DefaultScalarValue, FieldError, FieldResult,
GraphQLInputObject, GraphQLObject,
};
use std::time::UNIX_EPOCH;

use rollups_data::Repository;
use rollups_data::{
@@ -212,7 +213,13 @@ impl Input {
description = "Timestamp associated with the input submission, as defined by the base layer's block in which it was recorded"
)]
fn timestamp(&self) -> i64 {
self.timestamp.timestamp()
match self.timestamp.duration_since(UNIX_EPOCH) {
Ok(duration) => duration.as_secs() as i64,
Err(e) => {
tracing::warn!("failed to parse timestamp ({})", e);
0
}
}
}

#[graphql(
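
A small sketch (plain std, not the project's code) of the two paths the new resolver handles:

```rust
use std::time::{Duration, UNIX_EPOCH};

fn main() {
    // Normal path: a time at or after the epoch converts to whole seconds.
    let valid = UNIX_EPOCH + Duration::from_secs(1676489717);
    assert_eq!(valid.duration_since(UNIX_EPOCH).unwrap().as_secs(), 1676489717);

    // Error path: duration_since returns Err for a pre-epoch SystemTime,
    // which is the case the resolver logs and maps to 0.
    let pre_epoch = UNIX_EPOCH - Duration::from_secs(1);
    assert!(pre_epoch.duration_since(UNIX_EPOCH).is_err());
}
```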
9 changes: 3 additions & 6 deletions offchain/graphql-server/tests/integration.rs
@@ -4,12 +4,11 @@
use actix_web::dev::ServerHandle;
use actix_web::rt::spawn;
use awc::{Client, ClientRequest};
use chrono::naive::NaiveDateTime;
use graphql_server::{http, schema::Context};
use rollups_data::{Input, Notice, Proof, Report, Repository, Voucher};
use std::fs::read_to_string;
use std::str::from_utf8;
use std::time::Duration;
use std::time::{Duration, UNIX_EPOCH};
use test_fixtures::RepositoryFixture;
use testcontainers::clients::Cli;
use tokio::sync::oneshot;
@@ -40,8 +39,7 @@ impl TestState<'_> {
msg_sender: "msg-sender".as_bytes().to_vec(),
tx_hash: "tx-hash".as_bytes().to_vec(),
block_number: 0,
timestamp: NaiveDateTime::from_timestamp_millis(1676489717)
.unwrap(),
timestamp: UNIX_EPOCH + Duration::from_secs(1676489717),
payload: "input-0".as_bytes().to_vec(),
};

@@ -131,8 +129,7 @@ impl TestState<'_> {
msg_sender: "msg-sender".as_bytes().to_vec(),
tx_hash: "tx-hash".as_bytes().to_vec(),
block_number: 0,
timestamp: NaiveDateTime::from_timestamp_millis(1676489717)
.unwrap(),
timestamp: UNIX_EPOCH + Duration::from_secs(1676489717),
payload: "input-0".as_bytes().to_vec(),
};

2 changes: 1 addition & 1 deletion offchain/graphql-server/tests/responses/input.json
@@ -1 +1 @@
{"data":{"input":{"index":0,"msgSender":"0x6d73672d73656e646572","timestamp":1676489,"blockNumber":0,"payload":"0x696e7075742d30"}}}
{"data":{"input":{"index":0,"msgSender":"0x6d73672d73656e646572","timestamp":1676489717,"blockNumber":0,"payload":"0x696e7075742d30"}}}
2 changes: 1 addition & 1 deletion offchain/graphql-server/tests/responses/inputs.json
@@ -1 +1 @@
{"data":{"inputs":{"totalCount":1,"edges":[{"node":{"index":0,"msgSender":"0x6d73672d73656e646572","timestamp":1676489,"blockNumber":0,"payload":"0x696e7075742d30"}}]}}}
{"data":{"inputs":{"totalCount":1,"edges":[{"node":{"index":0,"msgSender":"0x6d73672d73656e646572","timestamp":1676489717,"blockNumber":0,"payload":"0x696e7075742d30"}}]}}}
1 change: 0 additions & 1 deletion offchain/indexer/Cargo.toml
@@ -14,7 +14,6 @@ http-health-check = { path = "../http-health-check" }
rollups-data = { path = "../data" }
rollups-events = { path = "../rollups-events" }

chrono.workspace = true
clap = { workspace = true, features = ["derive", "env"] }
snafu.workspace = true
tokio = { workspace = true, features = ["macros", "time", "rt-multi-thread"] }
15 changes: 2 additions & 13 deletions offchain/indexer/src/conversions.rs
@@ -4,7 +4,7 @@
///! Convert from rollups-events types to rollups-data types.
///! This code cannot use the From trait because both types are defined in
///! external crates.
use chrono::naive::NaiveDateTime;
use std::time::{Duration, UNIX_EPOCH};

use rollups_events::{
RollupsAdvanceStateInput, RollupsNotice, RollupsOutputEnum, RollupsProof,
@@ -14,18 +14,7 @@ use rollups_events::{
use rollups_data::{Input, Notice, OutputEnum, Proof, Report, Voucher};

pub fn convert_input(input: RollupsAdvanceStateInput) -> Input {
let timestamp = match NaiveDateTime::from_timestamp_millis(
input.metadata.timestamp as i64,
) {
Some(timestamp) => timestamp,
None => {
tracing::warn!(
input.metadata.timestamp,
"failed to parse timestamp"
);
Default::default()
}
};
let timestamp = UNIX_EPOCH + Duration::from_secs(input.metadata.timestamp);
Input {
index: input.metadata.input_index as i32,
msg_sender: input.metadata.msg_sender.into_inner().into(),
23 changes: 6 additions & 17 deletions offchain/indexer/tests/integration.rs
@@ -13,6 +13,7 @@ use rollups_events::{
RollupsVoucher,
};
use serial_test::serial;
use std::time::UNIX_EPOCH;
use test_fixtures::{BrokerFixture, RepositoryFixture};
use testcontainers::clients::Cli;
use tokio::task::JoinHandle;
@@ -171,22 +172,6 @@ async fn indexer_does_not_override_existing_input() {
assert_input_eq(&original_input, &input_read);
}

#[test_log::test(tokio::test)]
#[serial]
async fn indexer_ignores_invalid_timestamp() {
let docker = Cli::default();
let state = TestState::setup(&docker).await;

let invalid_timestamp = i64::MAX as u64;
let mut input_sent = state
.produce_input_in_broker_with_timestamp(0, invalid_timestamp)
.await;
// Indexer's behavior for invalid timestamps is to set them to 0.
input_sent.metadata.timestamp = 0;
let input_read = state.get_input_from_database(&input_sent).await;
assert_input_eq(&input_sent, &input_read);
}

#[test_log::test(tokio::test)]
#[serial]
async fn indexer_inserts_input_after_broker_timeout() {
@@ -476,7 +461,11 @@ fn assert_input_eq(input_sent: &RollupsAdvanceStateInput, input_read: &Input) {
input_sent.metadata.block_number
);
assert_eq!(
input_read.timestamp.timestamp_millis() as u64,
input_read
.timestamp
.duration_since(UNIX_EPOCH)
.expect("failed to get time")
.as_secs(),
input_sent.metadata.timestamp
);
assert_eq!(&input_read.payload, input_sent.payload.inner());
