| skipped 25 lines |
26 | 26 | | .flatten() |
27 | 27 | | .collect::<Vec<File>>(); |
28 | 28 | | |
29 | | - | if sizewize_duplicate_files.len() > 1 { |
| 29 | + | if sizewize_duplicate_files.is_empty() { |
| 30 | + | Ok(DashMap::new()) |
| 31 | + | } else { |
30 | 32 | | let hash_index_store = index_files(sizewize_duplicate_files, IndexCritera::Hash)?; |
31 | 33 | | let duplicate_files = hash_index_store |
32 | 34 | | .into_par_iter() |
| skipped 1 lines |
34 | 36 | | .collect(); |
35 | 37 | | |
36 | 38 | | Ok(duplicate_files) |
37 | | - | } else { |
38 | | - | Ok(DashMap::new()) |
39 | 39 | | } |
40 | 40 | | } |
41 | 41 | | |
| skipped 99 lines |
141 | 141 | | } |
142 | 142 | | } |
143 | 143 | | |
#[cfg(test)]
mod tests {
    use super::*;

    /// Size-based indexing should bucket files sharing an exact byte size
    /// under a stringified-size key ("100123"), while a file with a unique
    /// size ends up in its own bucket — hence two keys total below.
    #[test]
    fn test_index_by_size() {
        // Two files deliberately share size 100_123; the third is unique.
        let files_to_index: Vec<File> = vec![
            File {
                path: "tf1.jpg".to_string(),
                size: Some(100_123),
                hash: None,
            },
            File {
                path: "tf2.png".to_string(),
                size: Some(100_123),
                hash: None,
            },
            File {
                path: "tf3.mp4".to_string(),
                size: Some(100_000_000),
                hash: None,
            },
        ];

        let duplicates_by_size = index_files(files_to_index, IndexCritera::Size).unwrap();

        // Collect just the paths stored under the shared-size key.
        // NOTE: clone only the `path` String, not the whole `File`
        // (`f.clone().path` cloned path + size + hash and threw two away).
        let duplicate_paths = duplicates_by_size
            .view("100123", |_, value| {
                value
                    .iter()
                    .map(|f| f.path.clone())
                    .collect::<Vec<String>>()
            })
            .unwrap();

        // One bucket per distinct size: {100123, 100000000}.
        assert_eq!(duplicates_by_size.len(), 2);
        assert!(duplicates_by_size.contains_key("100123"));
        assert!(duplicate_paths.contains(&"tf1.jpg".to_string()));
        assert!(duplicate_paths.contains(&"tf2.png".to_string()));
    }
}
| 181 | + | |