Commit | Line | Data |
---|---|---|
cce97c27 SK |
1 | open Printf |
2 | ||
948ee900 SK |
3 | module Array = ArrayLabels |
4 | module List = ListLabels | |
5c0100d2 | 5 | module StrSet = Set.Make(String) |
e09dff7f | 6 | module Unix = UnixLabels |
cce97c27 | 7 | |
(* Run-time counters for the duplicate-finder pipeline, plus a final
 * human-readable report printed to stderr. The type is abstract, so the
 * internal representation (plain mutable fields) is free to differ from
 * other possible encodings (e.g. int refs). Single-threaded use assumed. *)
module Metrics : sig
  type t

  val init
    : unit -> t
  val report
    : t
    -> time_all:float
    -> time_group_by_size:float
    -> time_group_by_head:float
    -> time_group_by_digest:float
    -> unit

  val file_considered
    : t -> size:int -> unit
  val file_ignored
    : t -> size:int -> unit
  val file_empty
    : t -> unit
  val file_sampled
    : t -> unit
  val chunk_read
    : t -> size:int -> unit
  val file_unique_size
    : t -> size:int -> unit
  val file_unique_sample
    : t -> size:int -> unit
  val file_hashed
    : t -> size:int -> unit
  val digest
    : t -> unit
  val redundant_data
    : t -> size:int -> unit
end = struct
  type t =
    { mutable considered_files   : int
    ; mutable considered_bytes   : int
    ; mutable empty              : int
    ; mutable ignored_files      : int
    ; mutable ignored_bytes      : int
    ; mutable unique_size_files  : int
    ; mutable unique_size_bytes  : int
    ; mutable unique_sample_files : int
    ; mutable unique_sample_bytes : int
    ; mutable sampled_files      : int
    ; mutable sampled_bytes      : int
    ; mutable hashed_files       : int
    ; mutable hashed_bytes       : int
    ; mutable digests            : int
    ; mutable redundant_data     : int
    }

  (* All counters start at zero. *)
  let init () =
    { considered_files    = 0
    ; considered_bytes    = 0
    ; empty               = 0
    ; ignored_files       = 0
    ; ignored_bytes       = 0
    ; unique_size_files   = 0
    ; unique_size_bytes   = 0
    ; unique_sample_files = 0
    ; unique_sample_bytes = 0
    ; sampled_files       = 0
    ; sampled_bytes       = 0
    ; hashed_files        = 0
    ; hashed_bytes        = 0
    ; digests             = 0
    ; redundant_data      = 0
    }

  let file_considered t ~size =
    t.considered_files <- t.considered_files + 1;
    t.considered_bytes <- t.considered_bytes + size

  let file_ignored t ~size =
    t.ignored_files <- t.ignored_files + 1;
    t.ignored_bytes <- t.ignored_bytes + size

  let file_empty t =
    t.empty <- t.empty + 1

  let chunk_read t ~size =
    t.sampled_bytes <- t.sampled_bytes + size

  let file_sampled t =
    t.sampled_files <- t.sampled_files + 1

  let file_unique_size t ~size =
    t.unique_size_files <- t.unique_size_files + 1;
    t.unique_size_bytes <- t.unique_size_bytes + size

  let file_unique_sample t ~size =
    t.unique_sample_files <- t.unique_sample_files + 1;
    t.unique_sample_bytes <- t.unique_sample_bytes + size

  let file_hashed t ~size =
    t.hashed_files <- t.hashed_files + 1;
    t.hashed_bytes <- t.hashed_bytes + size

  let digest t =
    t.digests <- t.digests + 1

  let redundant_data t ~size =
    t.redundant_data <- t.redundant_data + size

  (* Print the final summary to stderr. Byte counts are shown in Gb
   * (binary: /1024^3); every line flushes ("%!") so output is visible
   * even if the program is interrupted afterwards. *)
  let report
      t
      ~time_all
      ~time_group_by_size
      ~time_group_by_head
      ~time_group_by_digest
    =
    let b_to_mb b = (float_of_int b) /. 1024. /. 1024. in
    let b_to_gb b = (b_to_mb b) /. 1024. in
    eprintf "Time : %8.2f seconds\n%!"
      time_all;
    eprintf "Considered : %8d files %6.2f Gb\n%!"
      t.considered_files
      (b_to_gb t.considered_bytes);
    eprintf "Sampled : %8d files %6.2f Gb\n%!"
      t.sampled_files
      (b_to_gb t.sampled_bytes);
    eprintf "Hashed : %8d files %6.2f Gb %6.2f seconds\n%!"
      t.hashed_files
      (b_to_gb t.hashed_bytes)
      time_group_by_digest;
    eprintf "Digests : %8d\n%!"
      t.digests;
    eprintf "Duplicates (Hashed - Digests): %8d files %6.2f Gb\n%!"
      (t.hashed_files - t.digests)
      (b_to_gb t.redundant_data);
    eprintf "Skipped due to 0 size : %8d files\n%!" t.empty;
    eprintf "Skipped due to unique size : %8d files %6.2f Gb %6.2f seconds\n%!"
      t.unique_size_files
      (b_to_gb t.unique_size_bytes)
      time_group_by_size;
    eprintf "Skipped due to unique sample : %8d files %6.2f Gb %6.2f seconds\n%!"
      t.unique_sample_files
      (b_to_gb t.unique_sample_bytes)
      time_group_by_head;
    eprintf "Ignored due to regex match : %8d files %6.2f Gb\n%!"
      t.ignored_files
      (b_to_gb t.ignored_bytes)
end
155 | ||
(* Short alias used throughout the rest of the file. *)
module M = Metrics
157 | ||
(* A pull-based stream abstraction layered over the legacy stdlib [Stream]
 * module. A value is a list of underlying streams consumed in order, which
 * makes [concat] a cheap list concatenation. *)
module Stream : sig
  type 'a t

  val create : (unit -> 'a option) -> 'a t

  val of_queue : 'a Queue.t -> 'a t

  val iter : 'a t -> f:('a -> unit) -> unit

  val map : 'a t -> f:('a -> 'b) -> 'b t

  val filter : 'a t -> f:('a -> bool) -> 'a t

  val concat : ('a t) list -> 'a t

  (* Eagerly drains the stream; yields (key, member-count, members). *)
  val group_by : 'a t -> f:('a -> 'b) -> ('b * int * 'a list) t
end = struct
  (* Refers to the stdlib Stream, not the module being defined. *)
  module S = Stream

  type 'a t =
    {mutable streams : ('a S.t) list}

  (* [f] returning [None] ends the stream: [S.from] treats it as exhaustion. *)
  let create f =
    {streams = [S.from (fun _ -> f ())]}

  let of_queue q =
    create (fun () ->
      try Some (Queue.take q) with
      | Queue.Empty -> None
    )

  (* Pop the next element, dropping exhausted sub-streams as we go. *)
  let rec next t =
    match t.streams with
    | [] ->
        None
    | current :: remaining ->
        (match S.next current with
        | element ->
            Some element
        | exception S.Failure ->
            t.streams <- remaining;
            next t
        )

  let map t ~f =
    create (fun () ->
      match next t with
      | Some x -> Some (f x)
      | None -> None
    )

  let filter t ~f =
    let rec pull () =
      match next t with
      | None ->
          None
      | Some x ->
          if f x then Some x else pull ()
    in
    create pull

  let iter t ~f =
    List.iter t.streams ~f:(S.iter f)

  let concat ts =
    let all = List.concat (List.map ts ~f:(fun {streams} -> streams)) in
    {streams = all}

  let group_by t ~f =
    (* Sized generously up front: expected workloads are large file sets. *)
    let tbl = Hashtbl.create 1_000_000 in
    iter t ~f:(fun x ->
      let key = f x in
      let entry =
        match Hashtbl.find_opt tbl key with
        | None -> (1, [x])
        | Some (count, members) -> (count + 1, x :: members)
      in
      Hashtbl.replace tbl key entry
    );
    let out = Queue.create () in
    Hashtbl.iter
      (fun key (count, members) -> Queue.add (key, count, members) out)
      tbl;
    of_queue out
end
250 | ||
(* Expose an input channel as a stream of its lines (without newlines). *)
module In_channel : sig
  val lines : in_channel -> string Stream.t
end = struct
  let lines ic =
    Stream.create (fun () ->
      (* End_of_file terminates the stream. *)
      try Some (input_line ic) with
      | End_of_file -> None
    )
end
263 | ||
(* File discovery and the two cheap duplicate pre-filters (by size, then by
 * a fixed-length head sample) that run before full-content hashing. *)
module File : sig
  type t =
    { path : string
    ; size : int
    }

  val find : string -> t Stream.t
  (** Find all files in the directory tree, starting from the given root path *)

  val lookup : string Stream.t -> t Stream.t
  (** Lookup file info for given paths *)

  val filter_out_unique_sizes : t Stream.t -> metrics:M.t -> t Stream.t
  val filter_out_unique_heads : t Stream.t -> len:int -> metrics:M.t -> t Stream.t
end = struct
  type t =
    { path : string
    ; size : int
    }

  (* lstat (not stat): symlinks are reported as links, never followed. *)
  let lookup paths =
    Stream.map paths ~f:(fun path ->
      let {Unix.st_size = size; _} = Unix.lstat path in
      {path; size}
    )

  (* Breadth-ish traversal: regular files are buffered in [files],
   * directories queued in [dirs] and expanded lazily as the stream is
   * consumed. Special files (devices, links, FIFOs, sockets) are skipped.
   * NOTE(review): Sys.readdir raises on unreadable directories, which
   * aborts the whole stream — confirm this crash-on-EACCES is intended. *)
  let find root =
    let dirs  = Queue.create () in
    let files = Queue.create () in
    let explore parent =
      Array.iter (Sys.readdir parent) ~f:(fun child ->
        let path = Filename.concat parent child in
        let {Unix.st_kind = file_kind; st_size; _} = Unix.lstat path in
        match file_kind with
        | Unix.S_REG ->
            let file = {path; size = st_size} in
            Queue.add file files
        | Unix.S_DIR ->
            Queue.add path dirs
        | Unix.S_CHR
        | Unix.S_BLK
        | Unix.S_LNK
        | Unix.S_FIFO
        | Unix.S_SOCK ->
            ()
      )
    in
    explore root;
    let rec next () =
      match Queue.is_empty files, Queue.is_empty dirs with
      | false, _     -> Some (Queue.take files)
      | true , true  -> None
      | true , false ->
          explore (Queue.take dirs);
          next ()
    in
    Stream.create next

  (* Drop members of 1-element groups; [handle_singleton] lets the caller
   * record metrics for what was filtered out. Eager: drains [files]. *)
  let filter_out_singletons files ~group ~handle_singleton =
    let q = Queue.create () in
    Stream.iter (Stream.group_by files ~f:group) ~f:(fun group ->
      let (_, n, members) = group in
      if n > 1 then
        List.iter members ~f:(fun m -> Queue.add m q)
      else
        handle_singleton group
    );
    Stream.of_queue q

  let filter_out_unique_sizes files ~metrics =
    filter_out_singletons
      files
      ~group:(fun {size; _} -> size)
      ~handle_singleton:(fun (size, _, _) -> M.file_unique_size metrics ~size)

  (* Read up to [len] bytes from the start of [path]. Files shorter than
   * [len] yield a space-padded sample; that is safe because files are
   * compared within same-size groups only.
   * BUG FIX: the channel previously leaked when [input] raised (e.g. on an
   * I/O error); it is now closed on the exception path before re-raising. *)
  let head path ~len ~metrics =
    let buf = Bytes.make len ' ' in
    let ic = open_in_bin path in
    let rec read pos len =
      assert (len >= 0);
      if len = 0 then
        ()
      else begin
        let chunk_size = input ic buf pos len in
        M.chunk_read metrics ~size:chunk_size;
        if chunk_size = 0 then (* EOF *)
          ()
        else
          read (pos + chunk_size) (len - chunk_size)
      end
    in
    begin match read 0 len with
    | () ->
        close_in ic
    | exception e ->
        close_in_noerr ic;
        raise e
    end;
    Bytes.to_string buf

  let filter_out_unique_heads files ~len ~metrics =
    filter_out_singletons
      files
      ~group:(fun {path; _} ->
        M.file_sampled metrics;
        head path ~len ~metrics
      )
      ~handle_singleton:(fun (_, _, files) ->
        let {size; _} = List.hd files in (* Guaranteed non-empty *)
        M.file_unique_sample metrics ~size
      )
end
371 | ||
(* Where the candidate file paths come from. *)
type input =
  | Stdin                       (* newline-separated paths on stdin *)
  | Directories of string list  (* roots to walk recursively *)

(* Where the duplicate groups are written. *)
type output =
  | Stdout
  | Directory of string  (* one file per digest, bucketed by hex prefix *)

(* Parsed command-line options. *)
type opt =
  { input  : input
  ; output : output
  ; ignore : string -> bool  (* true = skip this path *)
  ; sample : int             (* head-sample size in bytes *)
  }
386 | ||
(* Build the initial file stream from the chosen input source, recording
 * every candidate in [metrics] and dropping empty or ignored files. *)
let make_input_stream input ignore ~metrics =
  let candidates =
    match input with
    | Stdin ->
        File.lookup (In_channel.lines stdin)
    | Directories paths ->
        (* De-duplicate roots so the same tree is never walked twice. *)
        let paths = StrSet.elements (StrSet.of_list paths) in
        Stream.concat (List.map paths ~f:File.find)
  in
  Stream.filter candidates ~f:(fun {File.path; size} ->
    M.file_considered metrics ~size;
    let is_empty = size = 0 in
    let is_ignored = ignore path in
    if is_empty then M.file_empty metrics;
    if is_ignored then M.file_ignored metrics ~size;
    not is_empty && not is_ignored
  )
e09dff7f SK |
404 | |
(* Build the reporting function for one duplicate group:
 * [digest n_files files -> unit]. Stdout mode prints the digest, the group
 * size and the member paths; Directory mode writes the member paths to
 * [dir]/<2-hex-char prefix>/<digest>. *)
let make_output_fun = function
  | Stdout ->
      fun digest n_files files ->
        printf "%s %d\n%!" (Digest.to_hex digest) n_files;
        List.iter files ~f:(fun {File.path; _} ->
          printf " %S\n%!" path
        )
  | Directory dir ->
      fun digest _ files ->
        let digest = Digest.to_hex digest in
        let dir = Filename.concat dir (String.sub digest 0 2) in
        (* BUG FIX: the prefix directory is shared by every digest starting
         * with the same two hex chars, so mkdir must tolerate EEXIST
         * instead of raising on the second such group. *)
        (try Unix.mkdir dir ~perm:0o700 with
         | Unix.Unix_error (Unix.EEXIST, _, _) -> ());
        let oc = open_out (Filename.concat dir digest) in
        List.iter files ~f:(fun {File.path; _} ->
          output_string oc (sprintf "%S\n%!" path)
        );
        close_out oc
422 | ||
(* Run the pipeline: enumerate files, filter by size, then by head sample,
 * then group by full-content digest, reporting each group and timing each
 * stage (Sys.time = CPU time; the filters are eager, so the deltas are
 * meaningful). *)
let main {input; output; ignore; sample = sample_len} =
  let t0_all = Sys.time () in
  let metrics = M.init () in
  let output = make_output_fun output in
  let input = make_input_stream input ignore ~metrics in
  (* TODO: Make a nice(r) abstraction to re-assemble pieces in the pipeline:
   *
   * from input to files_by_size
   * from files_by_size to files_by_sample
   * from files_by_sample to files_by_digest
   * from files_by_digest to output
   *
   * input |> files_by_size |> files_by_sample |> files_by_digest |> output
   *)

  let files = input in

  let t0_group_by_size = Sys.time () in
  let files = File.filter_out_unique_sizes files ~metrics in
  let t1_group_by_size = Sys.time () in

  let t0_group_by_sample = t1_group_by_size in
  let files = File.filter_out_unique_heads files ~len:sample_len ~metrics in
  let t1_group_by_sample = Sys.time () in

  let t0_group_by_digest = t1_group_by_sample in
  let groups =
    Stream.group_by files ~f:(fun {File.path; size} ->
      M.file_hashed metrics ~size;
      Digest.file path
    )
  in
  let t1_group_by_digest = Sys.time () in

  Stream.iter groups ~f:(fun (d, n, files) ->
    M.digest metrics;
    if n > 1 then
      (* BUG FIX: only the copies beyond the first are redundant, matching
       * the "Hashed - Digests" file count reported next to this byte
       * count; previously the first copy's bytes were counted too. *)
      M.redundant_data metrics ~size:((n - 1) * (List.hd files).File.size);
    output d n files
  );

  let t1_all = Sys.time () in

  M.report metrics
    ~time_all:            (t1_all -. t0_all)
    ~time_group_by_size:  (t1_group_by_size -. t0_group_by_size)
    ~time_group_by_head:  (t1_group_by_sample -. t0_group_by_sample)
    ~time_group_by_digest:(t1_group_by_digest -. t0_group_by_digest)
cce97c27 | 471 | |
(* Parse and validate command-line options.
 * Anonymous arguments are input directories (validated to exist and be
 * directories); with none given, input defaults to Stdin.
 * Exits with status 1 on any validation failure. *)
let get_opt () : opt =
  let assert_ test x msg =
    if not (test x) then begin
      eprintf "%s\n%!" msg;
      exit 1
    end
  in
  let assert_file_exists path =
    assert_ Sys.file_exists path (sprintf "File does not exist: %S" path)
  in
  let assert_file_is_dir path =
    assert_ Sys.is_directory path (sprintf "File is not a directory: %S" path)
  in
  let input  = ref Stdin in
  let output = ref Stdout in
  let ignore = ref (fun _ -> false) in
  let sample = ref 256 in
  let spec =
    [ ( "-out"
      , Arg.String (fun path ->
          assert_file_exists path;
          assert_file_is_dir path;
          output := Directory path
        )
      , " Output to this directory instead of stdout."
      )
    ; ( "-ignore"
      , Arg.String (fun regexp ->
          let regexp = Str.regexp regexp in
          ignore := fun string -> Str.string_match regexp string 0)
      , " Ignore file paths which match this regexp pattern (see Str module)."
      )
    ; ( "-sample"
      , Arg.Set_int sample
      , (sprintf " Byte size of file samples to use. Default: %d" !sample)
      )
    ]
  in
  Arg.parse
    (Arg.align spec)
    (fun path ->
      assert_file_exists path;
      assert_file_is_dir path;
      match !input with
      | Stdin ->
          input := Directories [path]
      | Directories paths ->
          input := Directories (path :: paths)
    )
    "";
  (* BUG FIX: the check rejects zero as well as negatives, but the old
   * message only mentioned negatives ("cannot be negative"). *)
  assert_
    (fun x -> x > 0)
    !sample
    (sprintf "Sample size must be positive: %d" !sample);
  { input  = !input
  ; output = !output
  ; ignore = !ignore
  ; sample = !sample
  }
531 | ||
(* Program entry point: parse options, then run the pipeline. *)
let () =
  main (get_opt ())