Performance optimizations for project-wide eqwalization
Summary:
* Introduce an `eqwalizer_pool` for handling project-wide eqwalization
* Use the same strategy as for warming up the `def_map` cache, introducing `ScheduleEqwalizeAll` and `UpdateEqwalizeAll` events
* Add an optional step to `ScheduleCache` which pre-warms the `module_ast` cache if project-wide eqwalization is enabled
* Parallelize eqwalization, similarly to how it is done in `eqwalizer_cli`

The currently hard-coded number of tasks (4) will be made configurable in a separate diff.

This also fixes an issue in the way project-wide eqwalizer diagnostics were presented, avoiding duplicates.
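
As a minimal, self-contained sketch of the Schedule/Update strategy described above (the `Task` enum, `FileId` alias, 250-file list and plain `mpsc` channel are illustrative stand-ins, not ELP's actual API; only the chunking shape mirrors `update_eqwalize_all` in `server.rs` below):

use std::sync::mpsc;
use std::thread;

type FileId = u32;

enum Task {
    ScheduleEqwalizeAll,
    UpdateEqwalizeAll(Vec<FileId>),
}

fn main() {
    let (sender, receiver) = mpsc::channel::<Task>();
    sender.send(Task::ScheduleEqwalizeAll).unwrap();

    let chunk_size = 100;
    while let Ok(task) = receiver.recv() {
        match task {
            Task::ScheduleEqwalizeAll => {
                // Stand-in for module_index() + should_eqwalize() filtering.
                let files: Vec<FileId> = (0..250).collect();
                sender.send(Task::UpdateEqwalizeAll(files)).unwrap();
            }
            Task::UpdateEqwalizeAll(mut files) => {
                if files.is_empty() {
                    break; // all chunks processed
                }
                let sender = sender.clone();
                thread::spawn(move || {
                    let len = files.len();
                    let chunk: Vec<FileId> = if chunk_size < len {
                        files.split_off(len - chunk_size)
                    } else {
                        files.drain(..).collect()
                    };
                    // Stand-in for eqwalizing one chunk of files.
                    println!("eqwalizing {} files", chunk.len());
                    // Re-queue the remainder; the main loop picks up the next
                    // chunk, staying responsive to other events in between.
                    sender.send(Task::UpdateEqwalizeAll(files)).unwrap();
                });
            }
        }
    }
}

The real implementation additionally re-queues the remaining files when a chunk's eqwalization is cancelled, so an interrupted run resumes instead of starting over.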

Reviewed By: VLanvin

Differential Revision: D55416690

fbshipit-source-id: 8170baa766ffb37fef8fb8d97b4639333674c4a0
robertoaloi authored and facebook-github-bot committed Apr 2, 2024
1 parent 5fd19ff commit 8895a71
Showing 9 changed files with 239 additions and 47 deletions.
1 change: 1 addition & 0 deletions Cargo.lock

Some generated files are not rendered by default.

19 changes: 15 additions & 4 deletions crates/elp/src/bin/eqwalizer_cli.rs
@@ -101,7 +101,10 @@ pub fn do_eqwalize_all(args: &EqwalizeAll, loaded: &LoadResult, cli: &mut dyn Cl
.par_bridge()
.progress_with(pb.clone())
.map_with(analysis.clone(), |analysis, (_name, _source, file_id)| {
if analysis.should_eqwalize(file_id, include_generated) {
if analysis
.should_eqwalize(file_id, include_generated)
.unwrap()
{
Some(file_id)
} else {
None
@@ -148,7 +151,9 @@ pub fn do_eqwalize_app(args: &EqwalizeApp, loaded: &LoadResult, cli: &mut dyn Cl
.iter_own()
.filter_map(|(_name, _source, file_id)| {
if analysis.file_app_name(file_id).ok()? == Some(AppName(args.app.clone()))
&& analysis.should_eqwalize(file_id, include_generated)
&& analysis
.should_eqwalize(file_id, include_generated)
.unwrap()
{
Some(file_id)
} else {
@@ -194,7 +199,10 @@ pub fn eqwalize_target(args: &EqwalizeTarget, cli: &mut dyn Cli) -> Result<()> {
let vfs_path = VfsPath::from(src.clone());
if let Some(file_id) = loaded.vfs.file_id(&vfs_path) {
at_least_one_found = true;
if analysis.should_eqwalize(file_id, include_generated) {
if analysis
.should_eqwalize(file_id, include_generated)
.unwrap()
{
file_ids.push(file_id);
}
}
@@ -240,7 +248,10 @@ pub fn eqwalize_stats(args: &EqwalizeStats, cli: &mut dyn Cli) -> Result<()> {
.par_bridge()
.progress_with(pb.clone())
.map_with(analysis.clone(), |analysis, (name, _source, file_id)| {
if analysis.should_eqwalize(file_id, include_generated) {
if analysis
.should_eqwalize(file_id, include_generated)
.expect("cancelled")
{
analysis
.eqwalizer_stats(project_id, file_id)
.expect("cancelled")
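
Note the `.unwrap()` / `.expect("cancelled")` calls above: they suggest `should_eqwalize` now returns a cancellable result instead of a plain `bool`, so callers must handle (or deliberately panic on) cancellation. A rough sketch of that pattern, with `Cancelled` and `Cancellable` as assumed names rather than ELP's exact types:

// Illustrative only: a query result that can fail when the analysis is
// invalidated before the query finishes.
#[derive(Debug)]
pub struct Cancelled;

pub type Cancellable<T> = Result<T, Cancelled>;

// Hypothetical stand-in for Analysis::should_eqwalize.
pub fn should_eqwalize(file_id: u32, include_generated: bool) -> Cancellable<bool> {
    let _ = include_generated;
    Ok(file_id % 2 == 0)
}

fn main() {
    // Batch tools such as eqwalizer_cli can afford to panic on cancellation.
    let eqwalize = should_eqwalize(42, false).expect("cancelled");
    println!("should eqwalize: {}", eqwalize);
}
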
160 changes: 150 additions & 10 deletions crates/elp/src/server.rs
@@ -45,6 +45,7 @@ use elp_ide::elp_ide_db::elp_base_db::SourceRoot
use elp_ide::elp_ide_db::elp_base_db::SourceRootId;
use elp_ide::elp_ide_db::elp_base_db::Vfs;
use elp_ide::elp_ide_db::elp_base_db::VfsPath;
use elp_ide::erlang_service;
use elp_ide::erlang_service::CompileOption;
use elp_ide::Analysis;
use elp_ide::AnalysisHost;
@@ -137,6 +138,8 @@ pub enum Task {
Progress(ProgressTask),
ScheduleCache,
UpdateCache(ProgressBar, Vec<FileId>),
ScheduleEqwalizeAll(ProjectId),
UpdateEqwalizeAll(ProgressBar, ProjectId, String, Vec<FileId>),
}

impl fmt::Debug for Event {
@@ -203,6 +206,7 @@ pub struct Server {
task_pool: TaskHandle,
project_pool: TaskHandle,
cache_pool: TaskHandle,
eqwalizer_pool: TaskHandle,
diagnostics: Arc<DiagnosticCollection>,
req_queue: ReqQueue,
progress: ProgressManager,
@@ -222,6 +226,7 @@ pub struct Server {
edoc_diagnostics_requested: bool,
ct_diagnostics_requested: bool,
cache_scheduled: bool,
eqwalize_all_scheduled: FxHashSet<ProjectId>,
logger: Logger,
ai_completion: Arc<Mutex<AiCompletion>>,
include_generated: bool,
@@ -239,6 +244,7 @@ impl Server {
task_pool: TaskHandle,
project_pool: TaskHandle,
cache_pool: TaskHandle,
eqwalizer_pool: TaskHandle,
logger: Logger,
config: Config,
ai_completion: AiCompletion,
@@ -250,6 +256,7 @@ impl Server {
task_pool,
project_pool,
cache_pool,
eqwalizer_pool,
diagnostics: Arc::new(DiagnosticCollection::default()),
req_queue: ReqQueue::default(),
open_document_versions: SharedMap::default(),
@@ -268,6 +275,7 @@ impl Server {
edoc_diagnostics_requested: false,
ct_diagnostics_requested: false,
cache_scheduled: false,
eqwalize_all_scheduled: FxHashSet::default(),
logger,
ai_completion: Arc::new(Mutex::new(ai_completion)),
vfs_config_version: 0,
@@ -386,6 +394,10 @@ impl Server {
Some(Event::Task(msg.unwrap()))
}

recv (self.eqwalizer_pool.receiver) -> msg => {
Some(Event::Task(msg.unwrap()))
}

}
}

@@ -444,13 +456,14 @@ impl Server {
.update_erlang_service_paths();
spinner.end();
self.eqwalizer_diagnostics_requested = true;
if self.config.eqwalizer().all {
self.eqwalizer_project_diagnostics_requested = true;
}
}
Task::Progress(progress) => self.report_progress(progress),
Task::UpdateCache(spinner, files) => self.update_cache(spinner, files),
Task::ScheduleCache => self.schedule_cache(),
Task::UpdateEqwalizeAll(spinner, project_id, project_name, files) => {
self.update_eqwalize_all(spinner, project_id, project_name, files)
}
Task::ScheduleEqwalizeAll(project_id) => self.schedule_eqwalize_all(project_id),
Task::ShowMessage(params) => self.show_message(params),
}

@@ -602,6 +615,9 @@ impl Server {
})?
.on::<notification::DidOpenTextDocument>(|this, params| {
this.eqwalizer_diagnostics_requested = true;
if this.config.eqwalizer().all {
this.eqwalizer_project_diagnostics_requested = true;
}
this.edoc_diagnostics_requested = true;
this.ct_diagnostics_requested = true;
if let Ok(path) = convert::abs_path(&params.text_document.uri) {
@@ -907,7 +923,7 @@ impl Server {
.progress
.begin_spinner("EqWAlizing All (project-wide)".to_string());

self.task_pool.handle.spawn(move || {
self.eqwalizer_pool.handle.spawn(move || {
let diagnostics = snapshot
.projects
.iter()
@@ -992,12 +1008,7 @@ impl Server {
diags: Vec<(ProjectId, Vec<(FileId, Vec<diagnostics::Diagnostic>)>)>,
) {
for (_project_id, diagnostics) in diags {
let mut keep = FxHashSet::default();
for (file_id, diagnostics) in diagnostics {
keep.insert(file_id);
Arc::make_mut(&mut self.diagnostics).set_eqwalizer_project(file_id, diagnostics);
}
Arc::make_mut(&mut self.diagnostics).reset_eqwalizer_project(&keep);
Arc::make_mut(&mut self.diagnostics).set_eqwalizer_project(diagnostics);
}
}

@@ -1405,9 +1416,16 @@ impl Server {
if files.is_empty() {
bar.end();
self.cache_scheduled = true;
if self.config.eqwalizer().all {
for (i, _) in self.snapshot().projects.iter().enumerate() {
let project_id = ProjectId(i as u32);
self.schedule_eqwalize_all(project_id);
}
}
return;
}
let snapshot = self.snapshot();
let eqwalize_all = self.config.eqwalizer().all;
self.cache_pool.handle.spawn_with_sender(move |sender| {
let total = files.len();
let mut done = 0;
@@ -1418,6 +1436,32 @@ impl Server {
files.push(file_id);
break;
} else {
if eqwalize_all {
match snapshot.analysis.should_eqwalize(file_id, false) {
Ok(should_eqwalize) => {
if should_eqwalize {
if snapshot
.analysis
.module_ast(
file_id,
erlang_service::Format::OffsetEtf,
vec![],
)
.is_err()
{
//got canceled
files.push(file_id);
break;
}
}
}
Err(_) => {
//got canceled
files.push(file_id);
break;
}
}
}
done += 1;
bar.report(done, total);
}
Expand All @@ -1426,6 +1470,102 @@ impl Server {
});
}

fn schedule_eqwalize_all(&mut self, project_id: ProjectId) {
if self.eqwalize_all_scheduled.contains(&project_id) {
return;
}
let snapshot = self.snapshot();
let project_name = match snapshot.get_project(project_id) {
Some(project) => project.name(),
None => "undefined".to_string(),
};
let message = format!("Eqwalize All ({})", project_name);
let bar = self.progress.begin_bar(message, None);

self.eqwalizer_pool.handle.spawn_with_sender(move |sender| {
let mut files = vec![];
let module_index = match snapshot.analysis.module_index(project_id) {
Ok(module_index) => module_index,
//rescheduling canceled
Err(_) => {
sender.send(Task::ScheduleEqwalizeAll(project_id)).unwrap();
return;
}
};

for (_, _, file_id) in module_index.iter_own() {
match snapshot.analysis.should_eqwalize(file_id, false) {
Ok(true) => {
files.push(file_id);
}
Ok(false) => {}
Err(_) => {
sender.send(Task::ScheduleEqwalizeAll(project_id)).unwrap();
return;
}
}
}
sender
.send(Task::UpdateEqwalizeAll(
bar,
project_id,
project_name,
files,
))
.unwrap();
});
}

fn update_eqwalize_all(
&mut self,
bar: ProgressBar,
project_id: ProjectId,
project_name: String,
mut files: Vec<FileId>,
) {
if files.is_empty() {
bar.end();
self.eqwalize_all_scheduled.insert(project_id);
return;
}
let snapshot = self.snapshot();
let chunk_size = 100;
self.eqwalizer_pool.handle.spawn_with_sender(move |sender| {
let total = files.len();
let mut done = 0;
while !files.is_empty() {
let len = files.len();
let file_ids = if chunk_size < len {
files.split_off(len - chunk_size)
} else {
files.drain(..).collect()
};
if snapshot
.analysis
.eqwalizer_diagnostics_by_project(project_id, file_ids.clone(), 4)
.is_err()
{
//got canceled
for file_id in file_ids {
files.push(file_id);
}
break;
} else {
done += file_ids.len();
bar.report(done, total);
}
}
sender
.send(Task::UpdateEqwalizeAll(
bar,
project_id,
project_name,
files,
))
.unwrap();
});
}

fn report_progress(&mut self, task: ProgressTask) {
let params = match task {
ProgressTask::BeginNotify(params) => {
8 changes: 8 additions & 0 deletions crates/elp/src/server/setup.rs
@@ -117,6 +117,7 @@ pub fn setup_server(config: Config, connection: Connection, logger: Logger) -> R
let vfs_loader = set_up_vfs_loader();
let task_pool = set_up_task_pool();
let cache_pool = set_up_single_thread_pool();
let eqwalizer_pool = set_up_eqwalizer_pool();
let project_pool = set_up_single_thread_pool();
let ai_completion = set_up_ai_completion(&config);

@@ -128,6 +129,7 @@ pub fn setup_server(config: Config, connection: Connection, logger: Logger) -> R
task_pool,
project_pool,
cache_pool,
eqwalizer_pool,
logger,
config,
ai_completion,
@@ -148,6 +150,12 @@ fn set_up_task_pool() -> TaskHandle {
Handle { handle, receiver }
}

fn set_up_eqwalizer_pool() -> TaskHandle {
let (sender, receiver) = crossbeam_channel::unbounded();
let handle = TaskPool::new(sender);
Handle { handle, receiver }
}

fn set_up_single_thread_pool() -> TaskHandle {
let (sender, receiver) = crossbeam_channel::unbounded();
let pool = ThreadPool::new(1);
18 changes: 14 additions & 4 deletions crates/elp/src/snapshot.rs
@@ -193,26 +193,36 @@ impl Snapshot {
) -> Option<Vec<(FileId, Vec<diagnostics::Diagnostic>)>> {
let module_index = self.analysis.module_index(project_id).ok()?;

let file_ids = module_index
let file_ids: Vec<FileId> = module_index
.iter_own()
.filter_map(|(_, _, file_id)| {
if !exclude.contains(&file_id) && self.analysis.should_eqwalize(file_id, false) {
Some(file_id)
if !exclude.contains(&file_id) {
if let Ok(true) = self.analysis.should_eqwalize(file_id, false) {
Some(file_id)
} else {
None
}
} else {
None
}
})
.collect();

log::info!(
"Calculating eqwalizer diagnostics for {} files",
file_ids.len()
);

let project_name = match self.get_project(project_id) {
Some(project) => project.name(),
None => "undefined".to_string(),
};

let _timer =
timeit_with_telemetry!(TelemetryData::EqwalizerProjectDiagnostics { project_name });

self.analysis
.eqwalizer_diagnostics_by_project(project_id, file_ids)
.eqwalizer_diagnostics_by_project(project_id, file_ids, 4)
.ok()?
}

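
The new third argument to `eqwalizer_diagnostics_by_project` (here the hard-coded `4`) is the number of parallel eqwalizer tasks mentioned in the summary. As a rough sketch of bounding that parallelism for one chunk of files (a hypothetical helper, not ELP's implementation; it only assumes rayon, which `eqwalizer_cli` already uses via `par_bridge`):

use rayon::prelude::*;

// Hypothetical helper: eqwalize a chunk of files on at most `max_tasks`
// worker threads and collect (file_id, diagnostic_count) pairs.
fn eqwalize_chunk(file_ids: Vec<u32>, max_tasks: usize) -> Vec<(u32, usize)> {
    let pool = rayon::ThreadPoolBuilder::new()
        .num_threads(max_tasks)
        .build()
        .expect("failed to build eqwalizer thread pool");

    pool.install(|| {
        file_ids
            .into_par_iter()
            .map(|file_id| {
                // Stand-in for running the eqwalizer on a single module.
                let diagnostic_count = (file_id % 3) as usize;
                (file_id, diagnostic_count)
            })
            .collect()
    })
}

fn main() {
    let results = eqwalize_chunk((0..10).collect(), 4);
    println!("{:?}", results);
}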