Skip to content

Handle new self-profiling information for incr-comp hash fingerprints #166

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 2 commits into from
Jul 22, 2021
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions measureme/src/rustc.rs
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,8 @@ pub const GENERIC_ACTIVITY_EVENT_KIND: &str = "GenericActivity";

// Event-kind label strings written by rustc's self-profiler. The summarize
// tool's analysis code matches on these exact strings (see the match arms in
// summarize/src/analysis.rs), so they must stay byte-for-byte in sync with
// the values rustc emits — do not rename or re-case them.

/// Time spent loading a cached query result from the incr-comp cache.
pub const INCREMENTAL_LOAD_RESULT_EVENT_KIND: &str = "IncrementalLoadResult";

/// Time spent hashing a query result into an incr-comp fingerprint.
/// NOTE(review): new in this PR — presumably matches the event kind added to
/// rustc's self-profiling output; confirm against the rustc side of the change.
pub const INCREMENTAL_RESULT_HASHING_EVENT_KIND: &str = "IncrementalResultHashing";

/// Time a query spent blocked (per the label; semantics defined by rustc).
pub const QUERY_BLOCKED_EVENT_KIND: &str = "QueryBlocked";

/// A query cache hit event (per the label; semantics defined by rustc).
pub const QUERY_CACHE_HIT_EVENT_KIND: &str = "QueryCacheHit";
17 changes: 17 additions & 0 deletions summarize/src/analysis.rs
Original file line number Diff line number Diff line change
Expand Up @@ -174,6 +174,13 @@ pub fn perform_analysis(data: ProfilingData) -> Results {
QUERY_EVENT_KIND | GENERIC_ACTIVITY_EVENT_KIND => {
data.self_time -= current_event_duration;
}
INCREMENTAL_RESULT_HASHING_EVENT_KIND => {
// We are within hashing something. If we now encounter something
// within that event (like the nested "intern-the-dep-node" event)
// then we don't want to attribute that to the hashing time.
data.self_time -= current_event_duration;
data.incremental_hashing_time -= current_event_duration;
}
INCREMENTAL_LOAD_RESULT_EVENT_KIND => {
data.self_time -= current_event_duration;
data.incremental_load_time -= current_event_duration;
Expand Down Expand Up @@ -216,6 +223,16 @@ pub fn perform_analysis(data: ProfilingData) -> Results {
});
}

INCREMENTAL_RESULT_HASHING_EVENT_KIND => {
record_event_data(&current_event.label, &|data| {
// Don't add to data.time since this event happens
// within the query itself which is already contributing
// to data.time
data.self_time += current_event_duration;
data.incremental_hashing_time += current_event_duration;
});
}

_ => {
// Data sources other than rustc will use their own event kinds so just
// treat this like a GENERIC_ACTIVITY except that we don't track cache
Expand Down
17 changes: 16 additions & 1 deletion summarize/src/main.rs
Original file line number Diff line number Diff line change
Expand Up @@ -120,7 +120,8 @@ fn diff(opt: DiffOpt) -> Result<(), Box<dyn Error + Send + Sync>> {
"Item count",
"Cache hits",
"Blocked time",
"Incremental load time"
"Incremental load time",
"Incremental hashing time",
));

for query_data in results.query_data {
Expand All @@ -139,6 +140,7 @@ fn diff(opt: DiffOpt) -> Result<(), Box<dyn Error + Send + Sync>> {
format!("{:+}", query_data.number_of_cache_hits),
format!("{:.2?}", query_data.blocked_time),
format!("{:.2?}", query_data.incremental_load_time),
format!("{:.2?}", query_data.incremental_hashing_time),
]);
}

Expand Down Expand Up @@ -180,6 +182,7 @@ fn summarize(opt: SummarizeOpt) -> Result<(), Box<dyn Error + Send + Sync>> {
let mut has_cache_hits = false;
let mut has_blocked_time = false;
let mut has_incremental_load_time = false;
let mut has_incremental_hashing_time = false;

let duration_zero = Duration::from_secs(0);
for r in &results.query_data {
Expand All @@ -193,6 +196,10 @@ fn summarize(opt: SummarizeOpt) -> Result<(), Box<dyn Error + Send + Sync>> {
has_incremental_load_time = true;
}

if r.incremental_hashing_time > duration_zero {
has_incremental_hashing_time = true;
}

if has_cache_hits && has_blocked_time && has_incremental_load_time {
break;
}
Expand All @@ -209,6 +216,10 @@ fn summarize(opt: SummarizeOpt) -> Result<(), Box<dyn Error + Send + Sync>> {
("Cache hits", has_cache_hits),
("Blocked time", has_blocked_time),
("Incremental load time", has_incremental_load_time),
(
"Incremental result hashing time",
has_incremental_hashing_time,
),
];

fn filter_cells(cells: &[(&str, bool)]) -> Vec<Cell> {
Expand Down Expand Up @@ -252,6 +263,10 @@ fn summarize(opt: SummarizeOpt) -> Result<(), Box<dyn Error + Send + Sync>> {
&format!("{:.2?}", query_data.incremental_load_time),
has_incremental_load_time,
),
(
&format!("{:.2?}", query_data.incremental_hashing_time),
has_incremental_hashing_time,
),
])));
}

Expand Down
7 changes: 7 additions & 0 deletions summarize/src/query_data.rs
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,7 @@ pub struct QueryData {
pub invocation_count: usize,
pub blocked_time: Duration,
pub incremental_load_time: Duration,
pub incremental_hashing_time: Duration,
}

impl QueryData {
Expand All @@ -26,6 +27,7 @@ impl QueryData {
invocation_count: 0,
blocked_time: Duration::from_nanos(0),
incremental_load_time: Duration::from_nanos(0),
incremental_hashing_time: Duration::from_nanos(0),
}
}

Expand All @@ -48,6 +50,7 @@ impl QueryData {
invocation_count: -(self.invocation_count as i64),
blocked_time: invert(self.blocked_time),
incremental_load_time: invert(self.incremental_load_time),
incremental_hashing_time: invert(self.incremental_hashing_time),
}
}

Expand All @@ -63,6 +66,7 @@ impl QueryData {
invocation_count: self.invocation_count as i64,
blocked_time: self.blocked_time.into(),
incremental_load_time: self.incremental_load_time.into(),
incremental_hashing_time: self.incremental_hashing_time.into(),
}
}
}
Expand All @@ -79,6 +83,7 @@ pub struct QueryDataDiff {
pub invocation_count: i64,
pub blocked_time: SignedDuration,
pub incremental_load_time: SignedDuration,
pub incremental_hashing_time: SignedDuration,
}

impl Sub for QueryData {
Expand Down Expand Up @@ -106,6 +111,8 @@ impl Sub for QueryData {
invocation_count: i(self.invocation_count) - i(rhs.invocation_count),
blocked_time: sd(self.blocked_time) - sd(rhs.blocked_time),
incremental_load_time: sd(self.incremental_load_time) - sd(rhs.incremental_load_time),
incremental_hashing_time: sd(self.incremental_hashing_time)
- sd(rhs.incremental_hashing_time),
}
}
}
Expand Down