Commit 1409d01

Add json support to gui

qarmin committed Oct 11, 2023
1 parent 9b57382 commit 1409d01
Showing 15 changed files with 284 additions and 79 deletions.
7 changes: 6 additions & 1 deletion czkawka_core/src/bad_extensions.rs
@@ -11,6 +11,7 @@ use futures::channel::mpsc::UnboundedSender;
 use log::debug;
 use mime_guess::get_mime_extensions;
 use rayon::prelude::*;
+use serde::Serialize;
 
 use crate::common::{prepare_thread_handler_common, send_info_and_wait_for_ending_all_threads};
 use crate::common_dir_traversal::{CheckingMethod, DirTraversalBuilder, DirTraversalResult, FileEntry, ProgressData, ToolType};
@@ -158,7 +159,7 @@ const WORKAROUNDS: &[(&str, &str)] = &[
     ("exe", "xls"), // Not sure why xls is not recognized
 ];
 
-#[derive(Clone)]
+#[derive(Clone, Serialize)]
 pub struct BadFileEntry {
     pub path: PathBuf,
     pub modified_date: u64,
@@ -426,6 +427,10 @@ impl PrintResults for BadExtensions {
 
         Ok(())
     }
+
+    fn save_results_to_file_as_json(&self, file_name: &str, pretty_print: bool) -> std::io::Result<()> {
+        self.save_results_to_file_as_json_internal(file_name, &self.bad_extensions_files, pretty_print)
+    }
 }
 
 impl BadExtensions {
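For context: a minimal, self-contained sketch (not part of the commit) of what this derive buys. serde already implements `Serialize` for `PathBuf` (written as a JSON string; serializing a non-UTF-8 path fails) and for integers, so adding `Serialize` to the derive list is all the struct needs. The struct below is trimmed to the two fields visible in this hunk.

```rust
use std::path::PathBuf;

use serde::Serialize;

// Trimmed stand-in for the real BadFileEntry; only the fields visible
// in the hunk above are included.
#[derive(Clone, Serialize)]
pub struct BadFileEntry {
    pub path: PathBuf,
    pub modified_date: u64,
}

fn main() {
    let entry = BadFileEntry {
        path: PathBuf::from("/tmp/photo.jpg"),
        modified_date: 1_697_000_000,
    };
    // Prints: {"path":"/tmp/photo.jpg","modified_date":1697000000}
    println!("{}", serde_json::to_string(&entry).unwrap());
}
```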
21 changes: 13 additions & 8 deletions czkawka_core/src/big_file.rs
@@ -12,13 +12,14 @@ use futures::channel::mpsc::UnboundedSender;
 use humansize::{format_size, BINARY};
 use log::debug;
 use rayon::prelude::*;
+use serde::{Deserialize, Serialize};
 
 use crate::common::{check_folder_children, prepare_thread_handler_common, send_info_and_wait_for_ending_all_threads, split_path};
 use crate::common_dir_traversal::{common_get_entry_data_metadata, common_read_dir, get_lowercase_name, get_modified_time, CheckingMethod, ProgressData, ToolType};
 use crate::common_tool::{CommonData, CommonToolData, DeleteMethod};
 use crate::common_traits::{DebugPrint, PrintResults};
 
-#[derive(Clone, Debug)]
+#[derive(Clone, Debug, Serialize, Deserialize)]
 pub struct FileEntry {
     pub path: PathBuf,
     pub size: u64,
@@ -39,7 +40,7 @@ pub struct Info {
 pub struct BigFile {
     common_data: CommonToolData,
     information: Info,
-    big_files: Vec<(u64, FileEntry)>,
+    big_files: Vec<FileEntry>,
     number_of_files_to_check: usize,
     search_mode: SearchMode,
 }
@@ -189,7 +190,7 @@ impl BigFile {
             iter = Box::new(old_map.into_iter().rev());
         }
 
-        for (size, mut vector) in iter {
+        for (_size, mut vector) in iter {
             if self.information.number_of_real_files < self.number_of_files_to_check {
                 if vector.len() > 1 {
                     vector.sort_unstable_by_key(|e| {
@@ -199,7 +200,7 @@ impl BigFile {
                 }
                 for file in vector {
                     if self.information.number_of_real_files < self.number_of_files_to_check {
-                        self.big_files.push((size, file));
+                        self.big_files.push(file);
                         self.information.number_of_real_files += 1;
                     } else {
                         break;
@@ -214,7 +215,7 @@ impl BigFile {
     fn delete_files(&mut self) {
         match self.common_data.delete_method {
             DeleteMethod::Delete => {
-                for (_, file_entry) in &self.big_files {
+                for file_entry in &self.big_files {
                     if fs::remove_file(&file_entry.path).is_err() {
                         self.common_data.text_messages.warnings.push(file_entry.path.display().to_string());
                     }
@@ -262,15 +263,19 @@ impl PrintResults for BigFile {
             } else {
                 writeln!(writer, "{} the smallest files.\n\n", self.information.number_of_real_files)?;
             }
-            for (size, file_entry) in &self.big_files {
-                writeln!(writer, "{} ({}) - {}", format_size(*size, BINARY), size, file_entry.path.display())?;
+            for file_entry in &self.big_files {
+                writeln!(writer, "{} ({}) - {}", format_size(file_entry.size, BINARY), file_entry.size, file_entry.path.display())?;
             }
         } else {
             write!(writer, "Not found any files.").unwrap();
         }
 
         Ok(())
     }
+
+    fn save_results_to_file_as_json(&self, file_name: &str, pretty_print: bool) -> std::io::Result<()> {
+        self.save_results_to_file_as_json_internal(file_name, &self.big_files, pretty_print)
+    }
 }
 
 impl CommonData for BigFile {
@@ -287,7 +292,7 @@ impl BigFile {
         self.search_mode = search_mode;
     }
 
-    pub const fn get_big_files(&self) -> &Vec<(u64, FileEntry)> {
+    pub const fn get_big_files(&self) -> &Vec<FileEntry> {
         &self.big_files
     }
 
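Why the tuple disappears throughout this file: `size` is already a field on `FileEntry`, so a `(u64, FileEntry)` pair would serialize the size twice (once as an opaque array element). A rough sketch of the simplified loop, assuming the `humansize` 2.x API that this file already imports:

```rust
use std::path::PathBuf;

use humansize::{format_size, BINARY};

// Minimal stand-in for the FileEntry struct above.
struct FileEntry {
    path: PathBuf,
    size: u64,
}

fn main() {
    let big_files = vec![FileEntry {
        path: PathBuf::from("/tmp/video.mkv"),
        size: 3_221_225_472,
    }];
    // No tuple destructuring needed anymore; the size lives on the entry.
    for file_entry in &big_files {
        println!(
            "{} ({}) - {}",
            format_size(file_entry.size, BINARY),
            file_entry.size,
            file_entry.path.display()
        );
        // Prints: 3 GiB (3221225472) - /tmp/video.mkv
    }
}
```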
4 changes: 4 additions & 0 deletions czkawka_core/src/broken_files.rs
@@ -481,6 +481,10 @@ impl PrintResults for BrokenFiles {
 
         Ok(())
     }
+
+    fn save_results_to_file_as_json(&self, file_name: &str, pretty_print: bool) -> std::io::Result<()> {
+        self.save_results_to_file_as_json_internal(file_name, &self.broken_files, pretty_print)
+    }
 }
 
 fn check_extension_availability(file_name_lowercase: &str) -> TypeOfFile {
34 changes: 34 additions & 0 deletions czkawka_core/src/common_traits.rs
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
use fun_time::fun_time;
use serde::Serialize;
use std::fs::File;
use std::io::{BufWriter, Write};
use std::path::Path;
@@ -31,6 +32,39 @@ pub trait PrintResults {
         writer.flush()?;
         Ok(())
     }
+
+    fn save_results_to_file_as_json(&self, file_name: &str, pretty_print: bool) -> std::io::Result<()>;
+
+    fn save_results_to_file_as_json_internal<T: Serialize>(&self, file_name: &str, item_to_serialize: &T, pretty_print: bool) -> std::io::Result<()> {
+        if pretty_print {
+            self.save_results_to_file_as_json_pretty(file_name, item_to_serialize)
+        } else {
+            self.save_results_to_file_as_json_compact(file_name, item_to_serialize)
+        }
+    }
+
+    #[fun_time(message = "save_results_to_file_as_json_pretty")]
+    fn save_results_to_file_as_json_pretty<T: Serialize>(&self, file_name: &str, item_to_serialize: &T) -> std::io::Result<()> {
+        let file_handler = File::create(file_name)?;
+        let mut writer = BufWriter::new(file_handler);
+        serde_json::to_writer_pretty(&mut writer, item_to_serialize)?;
+        Ok(())
+    }
+
+    #[fun_time(message = "save_results_to_file_as_json_compact")]
+    fn save_results_to_file_as_json_compact<T: Serialize>(&self, file_name: &str, item_to_serialize: &T) -> std::io::Result<()> {
+        let file_handler = File::create(file_name)?;
+        let mut writer = BufWriter::new(file_handler);
+        serde_json::to_writer(&mut writer, item_to_serialize)?;
+        Ok(())
+    }
+
+    fn save_all_in_one(&self, file_name: &str) -> std::io::Result<()> {
+        self.save_results_to_file_as_json(&format!("{file_name}_pretty.json"), true)?;
+        self.save_results_to_file_as_json(&format!("{file_name}_compact.json"), false)?;
+        self.print_results_to_file(&format!("{file_name}.txt"))?;
+        Ok(())
+    }
 }
 
 pub trait ResultEntry {
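This hunk is the heart of the commit: one required method per tool plus provided default methods that hold all the serde_json plumbing. A self-contained sketch of the pattern (with a hypothetical stand-in tool; the real trait also carries the text-report methods, and `?` works here because serde_json's error converts into `std::io::Error`):

```rust
use std::fs::File;
use std::io::BufWriter;

use serde::Serialize;

pub trait PrintResults {
    // Each tool points this at its own result collection...
    fn save_results_to_file_as_json(&self, file_name: &str, pretty_print: bool) -> std::io::Result<()>;

    // ...and inherits the shared serde_json plumbing from this default method.
    fn save_results_to_file_as_json_internal<T: Serialize>(&self, file_name: &str, item: &T, pretty_print: bool) -> std::io::Result<()> {
        let mut writer = BufWriter::new(File::create(file_name)?);
        if pretty_print {
            serde_json::to_writer_pretty(&mut writer, item)?; // serde_json::Error converts into io::Error
        } else {
            serde_json::to_writer(&mut writer, item)?;
        }
        Ok(())
    }
}

// Hypothetical tool holding a Vec of results, standing in for the real tools.
struct EmptyFiles {
    empty_files: Vec<String>,
}

impl PrintResults for EmptyFiles {
    fn save_results_to_file_as_json(&self, file_name: &str, pretty_print: bool) -> std::io::Result<()> {
        self.save_results_to_file_as_json_internal(file_name, &self.empty_files, pretty_print)
    }
}

fn main() -> std::io::Result<()> {
    let tool = EmptyFiles { empty_files: vec!["/tmp/a.txt".into(), "/tmp/b.txt".into()] };
    tool.save_results_to_file_as_json("results_pretty.json", true)?;
    tool.save_results_to_file_as_json("results_compact.json", false)?;
    Ok(())
}
```

The payoff shows in the per-tool hunks above and below: each implementation is a one-liner that names the right collection.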
96 changes: 96 additions & 0 deletions czkawka_core/src/duplicate.rs
@@ -989,6 +989,24 @@ impl PrintResults for DuplicateFinder {
                 }
                 writeln!(writer)?;
             }
+        } else if !self.files_with_identical_names_referenced.is_empty() {
+            writeln!(
+                writer,
+                "-------------------------------------------------Files with same names in referenced folders-------------------------------------------------"
+            )?;
+            writeln!(
+                writer,
+                "Found {} files in {} groups with same name(may have different content)",
+                self.information.number_of_duplicated_files_by_name, self.information.number_of_groups_by_name,
+            )?;
+            for (name, (file_entry, vector)) in self.files_with_identical_names_referenced.iter().rev() {
+                writeln!(writer, "Name - {} - {} files ", name, vector.len())?;
+                writeln!(writer, "Reference file - {}", file_entry.path.display())?;
+                for j in vector {
+                    writeln!(writer, "{}", j.path.display())?;
+                }
+                writeln!(writer)?;
+            }
         } else {
             write!(writer, "Not found any files with same names.")?;
         }
@@ -1011,6 +1029,24 @@ impl PrintResults for DuplicateFinder {
                 }
                 writeln!(writer)?;
             }
+        } else if !self.files_with_identical_size_names_referenced.is_empty() {
+            writeln!(
+                writer,
+                "-------------------------------------------------Files with same size and names in referenced folders-------------------------------------------------"
+            )?;
+            writeln!(
+                writer,
+                "Found {} files in {} groups with same size and name(may have different content)",
+                self.information.number_of_duplicated_files_by_size_name, self.information.number_of_groups_by_size_name,
+            )?;
+            for ((size, name), (file_entry, vector)) in self.files_with_identical_size_names_referenced.iter().rev() {
+                writeln!(writer, "Name - {}, {} - {} files ", name, format_size(*size, BINARY), vector.len())?;
+                writeln!(writer, "Reference file - {}", file_entry.path.display())?;
+                for j in vector {
+                    writeln!(writer, "{}", j.path.display())?;
+                }
+                writeln!(writer)?;
+            }
         } else {
             write!(writer, "Not found any files with same size and names.")?;
         }
@@ -1034,6 +1070,25 @@ impl PrintResults for DuplicateFinder {
                     writeln!(writer, "{}", file_entry.path.display())?;
                 }
             }
+        } else if !self.files_with_identical_size_referenced.is_empty() {
+            writeln!(
+                writer,
+                "-------------------------------------------------Files with same size in referenced folders-------------------------------------------------"
+            )?;
+            writeln!(
+                writer,
+                "Found {} duplicated files which in {} groups which takes {}.",
+                self.information.number_of_duplicated_files_by_size,
+                self.information.number_of_groups_by_size,
+                format_size(self.information.lost_space_by_size, BINARY)
+            )?;
+            for (size, (file_entry, vector)) in self.files_with_identical_size_referenced.iter().rev() {
+                writeln!(writer, "\n---- Size {} ({}) - {} files", format_size(*size, BINARY), size, vector.len())?;
+                writeln!(writer, "Reference file - {}", file_entry.path.display())?;
+                for file_entry in vector {
+                    writeln!(writer, "{}", file_entry.path.display())?;
+                }
+            }
         } else {
             write!(writer, "Not found any duplicates.")?;
         }
@@ -1059,6 +1114,27 @@ impl PrintResults for DuplicateFinder {
                     }
                 }
             }
+        } else if !self.files_with_identical_hashes_referenced.is_empty() {
+            writeln!(
+                writer,
+                "-------------------------------------------------Files with same hashes in referenced folders-------------------------------------------------"
+            )?;
+            writeln!(
+                writer,
+                "Found {} duplicated files which in {} groups which takes {}.",
+                self.information.number_of_duplicated_files_by_hash,
+                self.information.number_of_groups_by_hash,
+                format_size(self.information.lost_space_by_hash, BINARY)
+            )?;
+            for (size, vectors_vector) in self.files_with_identical_hashes_referenced.iter().rev() {
+                for (file_entry, vector) in vectors_vector {
+                    writeln!(writer, "\n---- Size {} ({}) - {} files", format_size(*size, BINARY), size, vector.len())?;
+                    writeln!(writer, "Reference file - {}", file_entry.path.display())?;
+                    for file_entry in vector {
+                        writeln!(writer, "{}", file_entry.path.display())?;
+                    }
+                }
+            }
         } else {
             write!(writer, "Not found any duplicates.")?;
         }
@@ -1068,6 +1144,26 @@ impl PrintResults for DuplicateFinder {
 
         Ok(())
     }
+
+    fn save_results_to_file_as_json(&self, file_name: &str, pretty_print: bool) -> io::Result<()> {
+        if self.get_use_reference() {
+            match self.check_method {
+                CheckingMethod::Name => self.save_results_to_file_as_json_internal(file_name, &self.files_with_identical_names_referenced, pretty_print),
+                CheckingMethod::SizeName => self.save_results_to_file_as_json_internal(file_name, &self.files_with_identical_size_names_referenced, pretty_print),
+                CheckingMethod::Size => self.save_results_to_file_as_json_internal(file_name, &self.files_with_identical_size_referenced, pretty_print),
+                CheckingMethod::Hash => self.save_results_to_file_as_json_internal(file_name, &self.files_with_identical_hashes_referenced, pretty_print),
+                _ => panic!(),
+            }
+        } else {
+            match self.check_method {
+                CheckingMethod::Name => self.save_results_to_file_as_json_internal(file_name, &self.files_with_identical_names, pretty_print),
+                CheckingMethod::SizeName => self.save_results_to_file_as_json_internal(file_name, &self.files_with_identical_size_names, pretty_print),
+                CheckingMethod::Size => self.save_results_to_file_as_json_internal(file_name, &self.files_with_identical_size, pretty_print),
+                CheckingMethod::Hash => self.save_results_to_file_as_json_internal(file_name, &self.files_with_identical_hashes, pretty_print),
+                _ => panic!(),
+            }
+        }
+    }
 }
 
 fn delete_files(vector: &[FileEntry], delete_method: &DeleteMethod, text_messages: &mut Messages, dryrun: bool) -> (u64, usize, usize) {
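What the referenced variants serialize to — a hedged sketch, since the exact container types are not visible in this hunk; a map from group key to a (reference entry, duplicates) pair is assumed for illustration. serde writes maps as JSON objects and tuples as arrays, so each group becomes a two-element array of reference-then-duplicates:

```rust
use std::collections::BTreeMap;

use serde::Serialize;

// Hypothetical slimmed-down entry, standing in for the real FileEntry.
#[derive(Serialize)]
struct FileEntry {
    path: String,
    size: u64,
}

fn main() {
    // Assumed shape: group key -> (reference file, duplicates in normal folders).
    let mut groups: BTreeMap<String, (FileEntry, Vec<FileEntry>)> = BTreeMap::new();
    groups.insert(
        "invoice.pdf".to_string(),
        (
            FileEntry { path: "/reference/invoice.pdf".into(), size: 4096 },
            vec![FileEntry { path: "/downloads/invoice.pdf".into(), size: 4096 }],
        ),
    );
    // Maps become JSON objects, tuples become arrays:
    // {"invoice.pdf":[{"path":"/reference/invoice.pdf","size":4096},[{"path":"/downloads/invoice.pdf","size":4096}]]}
    println!("{}", serde_json::to_string(&groups).unwrap());
}
```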
4 changes: 4 additions & 0 deletions czkawka_core/src/empty_files.rs
@@ -141,6 +141,10 @@ impl PrintResults for EmptyFiles {
 
         Ok(())
     }
+
+    fn save_results_to_file_as_json(&self, file_name: &str, pretty_print: bool) -> std::io::Result<()> {
+        self.save_results_to_file_as_json_internal(file_name, &self.empty_files, pretty_print)
+    }
 }
 
 impl EmptyFiles {
4 changes: 4 additions & 0 deletions czkawka_core/src/empty_folder.rs
@@ -158,6 +158,10 @@ impl PrintResults for EmptyFolder {
 
         Ok(())
     }
+
+    fn save_results_to_file_as_json(&self, file_name: &str, pretty_print: bool) -> std::io::Result<()> {
+        self.save_results_to_file_as_json_internal(file_name, &self.empty_folder_list.keys().collect::<Vec<_>>(), pretty_print)
+    }
 }
 
 impl CommonData for EmptyFolder {
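The `.keys().collect::<Vec<_>>()` above is worth a note: the empty-folder list is a map, but only the paths matter in the report, so serializing just the collected keys yields a flat JSON array instead of an object full of unneeded values. A sketch under that assumption (`()` stands in for the real per-folder value type, which this hunk does not show):

```rust
use std::collections::BTreeMap;
use std::path::PathBuf;

fn main() {
    // `()` is a stand-in for the real per-folder metadata value.
    let mut empty_folder_list: BTreeMap<PathBuf, ()> = BTreeMap::new();
    empty_folder_list.insert(PathBuf::from("/tmp/empty_a"), ());
    empty_folder_list.insert(PathBuf::from("/tmp/empty_b"), ());

    // Serializing only the keys prints: ["/tmp/empty_a","/tmp/empty_b"]
    let keys = empty_folder_list.keys().collect::<Vec<_>>();
    println!("{}", serde_json::to_string(&keys).unwrap());
}
```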
4 changes: 4 additions & 0 deletions czkawka_core/src/invalid_symlinks.rs
@@ -127,6 +127,10 @@ impl PrintResults for InvalidSymlinks {
 
         Ok(())
     }
+
+    fn save_results_to_file_as_json(&self, file_name: &str, pretty_print: bool) -> std::io::Result<()> {
+        self.save_results_to_file_as_json_internal(file_name, &self.invalid_symlinks, pretty_print)
+    }
 }
 
 impl CommonData for InvalidSymlinks {
44 changes: 41 additions & 3 deletions czkawka_core/src/same_music.rs
@@ -924,17 +924,55 @@ impl PrintResults for SameMusic {
                         file_entry.genre,
                         file_entry.bitrate,
                         file_entry.path.display()
-                    )
-                    .unwrap();
+                    )?;
                 }
-                writeln!(writer).unwrap();
+                writeln!(writer)?;
             }
+        } else if !self.duplicated_music_entries_referenced.is_empty() {
+            writeln!(writer, "{} music files which have similar friends\n\n.", self.duplicated_music_entries_referenced.len())?;
+            for (file_entry, vec_file_entry) in &self.duplicated_music_entries_referenced {
+                writeln!(writer, "Found {} music files which have similar friends", vec_file_entry.len())?;
+                writeln!(writer)?;
+                writeln!(
+                    writer,
+                    "TT: {} - TA: {} - Y: {} - L: {} - G: {} - B: {} - P: {}",
+                    file_entry.track_title,
+                    file_entry.track_artist,
+                    file_entry.year,
+                    file_entry.length,
+                    file_entry.genre,
+                    file_entry.bitrate,
+                    file_entry.path.display()
+                )?;
+                for file_entry in vec_file_entry {
+                    writeln!(
+                        writer,
+                        "TT: {} - TA: {} - Y: {} - L: {} - G: {} - B: {} - P: {}",
+                        file_entry.track_title,
+                        file_entry.track_artist,
+                        file_entry.year,
+                        file_entry.length,
+                        file_entry.genre,
+                        file_entry.bitrate,
+                        file_entry.path.display()
+                    )?;
+                }
+                writeln!(writer)?;
+            }
         } else {
             write!(writer, "Not found any similar music files.")?;
         }
 
         Ok(())
     }
+
+    fn save_results_to_file_as_json(&self, file_name: &str, pretty_print: bool) -> std::io::Result<()> {
+        if self.get_use_reference() {
+            self.save_results_to_file_as_json_internal(file_name, &self.duplicated_music_entries_referenced, pretty_print)
+        } else {
+            self.save_results_to_file_as_json_internal(file_name, &self.duplicated_music_entries, pretty_print)
+        }
+    }
 }
 
 fn get_approximate_conversion(what: &mut String) {
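Beyond the new JSON method, this hunk also swaps `.unwrap()` for `?` on the `writeln!` calls, so a failed write (full disk, closed pipe) surfaces as an `io::Error` to the caller instead of panicking. A minimal illustration of the difference:

```rust
use std::io::{self, Write};

// Before: writeln!(writer, ...).unwrap() panicked on any write failure.
// After: the io::Error propagates to whoever invoked the print method.
fn write_entry(writer: &mut impl Write, title: &str) -> io::Result<()> {
    writeln!(writer, "TT: {title}")?;
    Ok(())
}

fn main() -> io::Result<()> {
    let mut out: Vec<u8> = Vec::new();
    write_entry(&mut out, "Example Track")?;
    print!("{}", String::from_utf8_lossy(&out));
    Ok(())
}
```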