 use goblin::{self, Object};
+use ignore::WalkBuilder;
 use std::collections::HashMap;
 use std::fs;
 use std::path::{Path, PathBuf};
 
-pub fn run(paths: &[&str]) {
-    let blob_paths = find_blobs_in_paths(&paths);
-    let blobs_to_dependencies = get_dependencies(&blob_paths);
-    let missing_blobs = identify_missing(&blobs_to_dependencies);
-    display_missing_blobs(&missing_blobs);
+pub struct MissingBlobs {
+    recursive: bool,
 }
 
-fn find_blobs_in_paths(paths: &[&str]) -> Vec<PathBuf> {
+impl MissingBlobs {
+    pub fn new(recursive: bool) -> Self {
+        Self { recursive }
+    }
+
+    pub fn run(&self, paths: &[&str]) {
+        let file_paths: Vec<PathBuf> = if self.recursive {
+            find_files_recursively(&paths)
+        } else {
+            find_files(&paths)
+        };
+
+        let blob_paths: Vec<&PathBuf> = file_paths
+            .iter()
+            .filter(|path| match path.extension() {
+                // Assume that valid blobs have ".so" extension.
+                Some(ext) => ext == "so",
+                None => false,
+            })
+            .collect();
+
+        let blobs_to_dependencies = get_dependencies(&blob_paths);
+        let missing_blobs = identify_missing(&blobs_to_dependencies);
+        display_missing_blobs(&missing_blobs);
+    }
+}
+
+fn find_files(paths: &[&str]) -> Vec<PathBuf> {
     let dirs = paths
         .iter()
         .map(Path::new)
         .filter(|path| path.is_dir())
         .collect::<Vec<_>>();
 
-    let blob_paths: Vec<PathBuf> = dirs
+    let file_paths: Vec<PathBuf> = dirs
         .iter()
         .map(|dir| fs::read_dir(dir).expect("Could not read directory."))
         .flat_map(|read_dir| {
             read_dir.map(|dir_entry| dir_entry.expect("Could not read directory entry.").path())
         })
-        .filter(|path| match path.extension() {
-            // Assume that valid blobs have ".so" extension.
-            Some(ext) => ext == "so",
-            None => false,
-        })
         .collect();
 
-    blob_paths
+    file_paths
+}
+
+fn find_files_recursively(paths: &[&str]) -> Vec<PathBuf> {
+    let mut walker = WalkBuilder::new(paths[0]);
+    for path in &paths[1..] {
+        walker.add(path);
+    }
+
+    // Don't read from ignore configs.
+    walker
+        .ignore(false)
+        .git_ignore(false)
+        .git_exclude(false)
+        .git_global(false);
+
+    walker
+        .build()
+        .map(|dir_entry| {
+            dir_entry
+                .expect("Could not read directory entry.")
+                .into_path()
+        })
+        .collect()
 }
 
-fn get_dependencies(blob_paths: &Vec<PathBuf>) -> HashMap<String, Vec<String>> {
+fn get_dependencies(blob_paths: &[&PathBuf]) -> HashMap<String, Vec<String>> {
     let mut dependencies: HashMap<String, Vec<String>> = HashMap::new();
 
     blob_paths.iter().for_each(|path| {
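The commit replaces the free `run(paths)` entry point with a `MissingBlobs` struct that carries a `recursive` flag and chooses between `find_files` and `find_files_recursively`. A minimal sketch of how a caller might drive the new API; the `main` function, the assumed module path, and the hard-coded directories are illustrative and not part of this commit:

use missing_blobs::MissingBlobs; // assumed module path, for illustration only

fn main() {
    // Hypothetical call site: scan two directories, walking them recursively.
    let paths = ["/vendor/lib64", "/system/lib64"];
    MissingBlobs::new(true).run(&paths);
}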
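The body of `get_dependencies` is cut off in this view. Purely for context, a minimal sketch of how the DT_NEEDED entries of an ELF blob can be read with goblin; this illustrates the general technique under stated assumptions and is not the code from this commit:

use goblin::Object;
use std::fs;
use std::path::Path;

// Illustrative helper (not from this commit): list the shared libraries an
// ELF file declares as DT_NEEDED dependencies.
fn needed_libraries(path: &Path) -> Vec<String> {
    let buffer = fs::read(path).expect("Could not read file.");
    match Object::parse(&buffer) {
        // goblin surfaces DT_NEEDED entries through `Elf::libraries`.
        Ok(Object::Elf(elf)) => elf.libraries.iter().map(|lib| lib.to_string()).collect(),
        // Anything that is not parseable ELF is treated as having no dependencies.
        _ => Vec::new(),
    }
}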