open Printf

module Array = ArrayLabels
module List = ListLabels
module StrSet = Set.Make(String)
module Unix = UnixLabels

module Metrics : sig
  type t

  val init
    : unit -> t
  val report
    : t
    -> wall_time_all:float
    -> wall_time_group_by_size:float
    -> wall_time_group_by_head:float
    -> wall_time_group_by_digest:float
    -> proc_time_all:float
    -> proc_time_group_by_size:float
    -> proc_time_group_by_head:float
    -> proc_time_group_by_digest:float
    -> unit

  val file_considered
    : t -> size:int -> unit
  val file_ignored
    : t -> size:int -> unit
  val file_empty
    : t -> unit
  val file_sampled
    : t -> unit
  val chunk_read
    : t -> size:int -> unit
  val file_unique_size
    : t -> size:int -> unit
  val file_unique_sample
    : t -> size:int -> unit
  val file_hashed
    : t -> size:int -> unit
  val digest
    : t -> unit
  val redundant_data
    : t -> size:int -> unit
end = struct
  type t =
    { considered_files    : int ref
    ; considered_bytes    : int ref
    ; empty               : int ref
    ; ignored_files       : int ref
    ; ignored_bytes       : int ref
    ; unique_size_files   : int ref
    ; unique_size_bytes   : int ref
    ; unique_sample_files : int ref
    ; unique_sample_bytes : int ref
    ; sampled_files       : int ref
    ; sampled_bytes       : int ref
    ; hashed_files        : int ref
    ; hashed_bytes        : int ref
    ; digests             : int ref
    ; redundant_data      : int ref
    }

  let init () =
    { considered_files    = ref 0
    ; considered_bytes    = ref 0
    ; empty               = ref 0
    ; ignored_files       = ref 0
    ; ignored_bytes       = ref 0
    ; unique_size_files   = ref 0
    ; unique_size_bytes   = ref 0
    ; sampled_files       = ref 0
    ; sampled_bytes       = ref 0
    ; hashed_files        = ref 0
    ; hashed_bytes        = ref 0
    ; unique_sample_files = ref 0
    ; unique_sample_bytes = ref 0
    ; digests             = ref 0
    ; redundant_data      = ref 0
    }

  let add sum addend =
    sum := !sum + addend

  let file_considered t ~size =
    incr t.considered_files;
    add t.considered_bytes size

  let file_ignored {ignored_files; ignored_bytes; _} ~size =
    incr ignored_files;
    add ignored_bytes size

  let file_empty t =
    incr t.empty

  let chunk_read t ~size =
    add t.sampled_bytes size

  let file_sampled t =
    incr t.sampled_files

  let file_unique_size t ~size =
    incr t.unique_size_files;
    add t.unique_size_bytes size

  let file_unique_sample t ~size =
    incr t.unique_sample_files;
    add t.unique_sample_bytes size

  let file_hashed t ~size =
    incr t.hashed_files;
    add t.hashed_bytes size

  let digest t =
    incr t.digests

  let redundant_data t ~size =
    add t.redundant_data size

  let report
      t
      ~wall_time_all
      ~wall_time_group_by_size
      ~wall_time_group_by_head
      ~wall_time_group_by_digest
      ~proc_time_all
      ~proc_time_group_by_size
      ~proc_time_group_by_head
      ~proc_time_group_by_digest
  =
    let b_to_mb b = (float_of_int b) /. 1024. /. 1024. in
    let b_to_gb b = (b_to_mb b) /. 1024. in
    eprintf "Total time                   : %.2f wall sec %.2f proc sec\n%!"
      wall_time_all
      proc_time_all;
    eprintf "Considered                   : %8d files %6.2f GB\n%!"
      !(t.considered_files)
      (b_to_gb !(t.considered_bytes));
    eprintf "Sampled                      : %8d files %6.2f GB\n%!"
      !(t.sampled_files)
      (b_to_gb !(t.sampled_bytes));
    eprintf "Hashed                       : %8d files %6.2f GB %6.2f wall sec %6.2f proc sec\n%!"
      !(t.hashed_files)
      (b_to_gb !(t.hashed_bytes))
      wall_time_group_by_digest
      proc_time_group_by_digest;
    eprintf "Digests                      : %8d\n%!"
      !(t.digests);
    eprintf "Duplicates (Hashed - Digests): %8d files %6.2f GB\n%!"
      (!(t.hashed_files) - !(t.digests))
      (b_to_gb !(t.redundant_data));
    eprintf "Skipped due to 0 size        : %8d files\n%!" !(t.empty);
    eprintf "Skipped due to unique size   : %8d files %6.2f GB %6.2f wall sec %6.2f proc sec\n%!"
      !(t.unique_size_files)
      (b_to_gb !(t.unique_size_bytes))
      wall_time_group_by_size
      proc_time_group_by_size;
    eprintf "Skipped due to unique sample : %8d files %6.2f GB %6.2f wall sec %6.2f proc sec\n%!"
      !(t.unique_sample_files)
      (b_to_gb !(t.unique_sample_bytes))
      wall_time_group_by_head
      proc_time_group_by_head;
    eprintf "Ignored due to regex match   : %8d files %6.2f GB\n%!"
      !(t.ignored_files)
      (b_to_gb !(t.ignored_bytes))
end

module M = Metrics

module Stream : sig
  type 'a t

  val create : (unit -> 'a option) -> 'a t

  val of_queue : 'a Queue.t -> 'a t

  val iter : 'a t -> f:('a -> unit) -> unit

  val bag_map : 'a t -> njobs:int -> f:('a -> 'b) -> ('a * 'b) t
  (** Parallel map with arbitrarily-reordered elements. *)
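
  (* A hypothetical usage sketch (the stream of paths, the [njobs] value and
     the use of [Digest.file] are assumptions, not part of this interface):
     {[
       let digests = Stream.bag_map paths ~njobs:4 ~f:Digest.file in
       (* Elements are (input, output) pairs, in completion order: *)
       Stream.iter digests ~f:(fun (path, digest) ->
         Printf.printf "%s  %s\n" (Digest.to_hex digest) path)
     ]}
  *)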

  val map : 'a t -> f:('a -> 'b) -> 'b t

  val filter : 'a t -> f:('a -> bool) -> 'a t

  val concat : ('a t) list -> 'a t

  val group_by : 'a t -> f:('a -> 'b) -> ('b * int * 'a list) t
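  (** Group elements by the key computed by [f]. Each group is a triple of
      the key, the member count, and the members. For example (hypothetical
      stream of strings, grouped by length):
      {[
        group_by (of_queue words) ~f:String.length
        (* e.g. yields (3, 2, ["foo"; "bar"]) for two 3-letter words *)
      ]}
  *)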
end = struct
  module S = Stream (* the stdlib Stream; this module shadows the name below *)

  type 'a t =
    {mutable streams : ('a S.t) list}

  type ('input, 'output) msg_from_vassal =
    | Ready   of int
    | Result  of (int * ('input * 'output))
    | Exiting of int

  type 'input msg_from_lord =
    | Job of 'input option
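
  (* Lord/vassal wire protocol, as used by [lord] and [vassal] below:
     each vassal announces [Ready i]; the lord replies with [Job (Some x)]
     for each stream element, which comes back as [Result (i, (x, f x))];
     when the stream is exhausted the lord sends [Job None] and the vassal
     answers [Exiting i]. *)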

  let create f =
    {streams = [S.from (fun _ -> f ())]}

  let of_queue q =
    create (fun () ->
      match Queue.take q with
      | exception Queue.Empty ->
          None
      | x ->
          Some x
    )

  let rec next t =
    match t.streams with
    | [] ->
        None
    | s :: streams ->
        (match S.next s with
        | exception Stream.Failure ->
            t.streams <- streams;
            next t
        | x ->
            Some x
        )

  let map t ~f =
    create (fun () ->
      match next t with
      | None   -> None
      | Some x -> Some (f x)
    )

  let filter t ~f =
    create (fun () ->
      let rec filter () =
        match next t with
        | None ->
            None
        | Some x when f x ->
            Some x
        | Some _ ->
            filter ()
      in
      filter ()
    )

  let iter t ~f =
    List.iter t.streams ~f:(S.iter f)

  let concat ts =
    {streams = List.concat (List.map ts ~f:(fun {streams} -> streams))}

  let group_by t ~f =
    let groups_tbl = Hashtbl.create 1_000_000 in
    let group_update x =
      let group = f x in
      let members =
        match Hashtbl.find_opt groups_tbl group with
        | None ->
            (1, [x])
        | Some (n, xs) ->
            (n + 1, x :: xs)
      in
      Hashtbl.replace groups_tbl group members
    in
    iter t ~f:group_update;
    let groups = Queue.create () in
    Hashtbl.iter
      (fun name (length, members) -> Queue.add (name, length, members) groups)
      groups_tbl;
    of_queue groups

  module Ipc : sig
    val send : out_channel -> 'a -> unit
    val recv : in_channel -> 'a
  end = struct
    let send oc msg =
      Marshal.to_channel oc msg [];
      flush oc

    let recv ic =
      Marshal.from_channel ic
  end
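
  (* Note: [Marshal.from_channel] is untyped, so [recv] can return any ['a];
     the receive sites below pin the intended type with an annotation, e.g.
     [(Ipc.recv ic : 'input msg_from_lord)]. Both ends must agree on the
     message type, or deserialization misbehaves at runtime. *)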

  let lord t ~njobs ~vassals ~ic ~ocs =
    let active_vassals = ref njobs in
    let results = Queue.create () in
    let rec loop () =
      match ((Ipc.recv ic) : ('input, 'output) msg_from_vassal) with
      | Exiting i ->
          close_out ocs.(i);
          decr active_vassals;
          if !active_vassals = 0 then () else loop ()
      | Ready i ->
          Ipc.send ocs.(i) (Job (next t));
          loop ()
      | Result (i, result) ->
          Queue.add result results;
          Ipc.send ocs.(i) (Job (next t));
          loop ()
    in
    let rec wait = function
      | [] ->
          ()
      | vassals ->
          let pid, _process_status = Unix.wait () in
          (* TODO: handle process_status *)
          wait (List.filter vassals ~f:(fun p -> p <> pid))
    in
    loop ();
    wait vassals;
    of_queue results

  let vassal i ~f ~vassal_pipe_r ~lord_pipe_w =
    let ic = Unix.in_channel_of_descr vassal_pipe_r in
    let oc = Unix.out_channel_of_descr lord_pipe_w in
    let rec loop () =
      match (Ipc.recv ic : 'input msg_from_lord) with
      | Job (Some x) ->
          Ipc.send oc (Result (i, (x, f x)));
          loop ()
      | Job None ->
          Ipc.send oc (Exiting i)
    in
    Ipc.send oc (Ready i);
    loop ();
    exit 0 (* The child process must not fall through into the caller's loop. *)

  let bag_map t ~njobs ~f =
    let lord_pipe_r, lord_pipe_w = Unix.pipe () in
    let vassal_pipes = Array.init njobs ~f:(fun _ -> Unix.pipe ()) in
    let vassal_pipes_r = Array.map vassal_pipes ~f:(fun (r, _) -> r) in
    let vassal_pipes_w = Array.map vassal_pipes ~f:(fun (_, w) -> w) in
    let vassals = ref [] in
    for i = 0 to njobs - 1 do
      begin match Unix.fork () with
      | 0 ->
          Unix.close lord_pipe_r;
          vassal i ~f ~lord_pipe_w ~vassal_pipe_r:vassal_pipes_r.(i)
      | pid ->
          vassals := pid :: !vassals
      end
    done;
    Unix.close lord_pipe_w;
    lord
      t
      ~njobs
      ~vassals:!vassals
      ~ic:(Unix.in_channel_of_descr lord_pipe_r)
      ~ocs:(Array.map vassal_pipes_w ~f:Unix.out_channel_of_descr)
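
  (* Design note: [bag_map] trades element order for throughput; results
     stream back in completion order. That is why each element carries its
     input alongside its output, so callers can re-associate the two, as
     [main] does with [Stream.group_by ~f:snd] over (file, digest) pairs. *)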
end

module In_channel : sig
  val lines : in_channel -> string Stream.t
end = struct
  let lines ic =
    Stream.create (fun () ->
      match input_line ic with
      | exception End_of_file ->
          None
      | line ->
          Some line
    )
end

module File : sig
  type t =
    { path : string
    ; size : int
    }

  val find : string -> t Stream.t
  (** Find all files in the directory tree, starting from the given root path. *)

  val lookup : string Stream.t -> t Stream.t
  (** Look up file info for the given paths. *)
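
  (* A hypothetical sketch of the two entry points ("/some/root" is a made-up
     path; the stdin variant is how [main] uses [lookup]):
     {[
       let from_tree  = File.find "/some/root" in
       let from_stdin = File.lookup (In_channel.lines stdin) in
       ...
     ]}
  *)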

  val head : t -> len:int -> metrics:M.t -> string

  val filter_out_unique_sizes : t Stream.t -> metrics:M.t -> t Stream.t
  val filter_out_unique_heads : t Stream.t -> len:int -> metrics:M.t -> t Stream.t
end = struct
  type t =
    { path : string
    ; size : int
    }

  let lookup paths =
    Stream.map paths ~f:(fun path ->
      let {Unix.st_size = size; _} = Unix.lstat path in
      {path; size}
    )

  let find root =
    let dirs  = Queue.create () in
    let files = Queue.create () in
    let explore parent =
      Array.iter (Sys.readdir parent) ~f:(fun child ->
        let path = Filename.concat parent child in
        let {Unix.st_kind = file_kind; st_size; _} = Unix.lstat path in
        match file_kind with
        | Unix.S_REG ->
            let file = {path; size = st_size} in
            Queue.add file files
        | Unix.S_DIR ->
            Queue.add path dirs
        | Unix.S_CHR | Unix.S_BLK | Unix.S_LNK | Unix.S_FIFO | Unix.S_SOCK ->
            (* Only regular files are duplicate candidates. *)
            ()
      )
    in
    explore root;
    Stream.create (fun () ->
      let rec next () =
        match Queue.is_empty files, Queue.is_empty dirs with
        | false, _ ->
            Some (Queue.take files)
        | true , true ->
            None
        | true , false ->
            explore (Queue.take dirs);
            next ()
      in
      next ()
    )

  let filter_out_singletons files ~group ~handle_singleton =
    let q = Queue.create () in
    Stream.iter (Stream.group_by files ~f:group) ~f:(fun group ->
      let (_, n, members) = group in
      if n > 1 then
        List.iter members ~f:(fun m -> Queue.add m q)
      else
        handle_singleton group
    );
    Stream.of_queue q

  let filter_out_unique_sizes files ~metrics =
    filter_out_singletons
      files
      ~group:(fun {size; _} -> size)
      ~handle_singleton:(fun (size, _, _) -> M.file_unique_size metrics ~size)

  let head {path; _} ~len ~metrics =
    M.file_sampled metrics;
    let buf = Bytes.make len ' ' in
    let ic = open_in_bin path in
    let rec read pos len =
      if len = 0 then
        ()
      else begin
        let chunk_size = input ic buf pos len in
        M.chunk_read metrics ~size:chunk_size;
        if chunk_size = 0 then (* EOF *)
          ()
        else
          read (pos + chunk_size) (len - chunk_size)
      end
    in
    read 0 len;
    close_in ic;
    Bytes.to_string buf

  let filter_out_unique_heads files ~len ~metrics =
    filter_out_singletons
      files
      ~group:(head ~len ~metrics)
      ~handle_singleton:(fun (_, _, files) ->
        let {size; _} = List.hd files in (* Guaranteed non-empty *)
        M.file_unique_sample metrics ~size
      )
end

type input =
  | Stdin
  | Directories of string list

type output =
  | Stdout
  | Directory of string

type opt =
  { input  : input
  ; output : output
  ; ignore : string -> bool
  ; sample : int
  ; njobs  : int
  }

let make_input_stream input ignore ~metrics =
  let input =
    match input with
    | Stdin ->
        File.lookup (In_channel.lines stdin)
    | Directories paths ->
        let paths = StrSet.elements (StrSet.of_list paths) in
        Stream.concat (List.map paths ~f:File.find)
  in
  Stream.filter input ~f:(fun {File.path; size} ->
    M.file_considered metrics ~size;
    let empty = size = 0 in
    let ignored = ignore path in
    if empty then M.file_empty metrics;
    if ignored then M.file_ignored metrics ~size;
    (not empty) && (not ignored)
  )

let make_output_fun = function
  | Stdout ->
      fun digest n_files files ->
        printf "%s %d\n%!" (Digest.to_hex digest) n_files;
        List.iter files ~f:(fun {File.path; _} ->
          printf "    %S\n%!" path
        )
  | Directory dir ->
      fun digest _ files ->
        let digest = Digest.to_hex digest in
        let dir = Filename.concat dir (String.sub digest 0 2) in
        (* Tolerate a 2-character prefix directory created by an earlier digest. *)
        (try Unix.mkdir dir ~perm:0o700 with
        | Unix.Unix_error (Unix.EEXIST, _, _) -> ()
        );
        let oc = open_out (Filename.concat dir digest) in
        List.iter files ~f:(fun {File.path; _} ->
          output_string oc (sprintf "%S\n%!" path)
        );
        close_out oc

let time_wall () =
  Unix.gettimeofday ()

let time_proc () =
  Sys.time ()

let main {input; output; ignore; sample = sample_len; njobs} =
  let wt0_all = time_wall () in
  let pt0_all = time_proc () in
  let metrics = M.init () in
  let output = make_output_fun output in
  let input = make_input_stream input ignore ~metrics in
  (* TODO: Make a nice(r) abstraction to re-assemble pieces in the pipeline:
   *
   * from input           to files_by_size
   * from files_by_size   to files_by_sample
   * from files_by_sample to files_by_digest
   * from files_by_digest to output
   *
   * input |> files_by_size |> files_by_sample |> files_by_digest |> output
   *)
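  (* A hypothetical shape for that abstraction: make each stage a function
     [File.t Stream.t -> File.t Stream.t] (timing bookkeeping elided here),
     so stages compose with [|>]. The names mirror the TODO; the digest
     stage would be separate since it changes the element type:
     {[
       let files_by_size   fs = File.filter_out_unique_sizes fs ~metrics in
       let files_by_sample fs =
         File.filter_out_unique_heads fs ~len:sample_len ~metrics
       in
       input |> files_by_size |> files_by_sample
     ]}
  *)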

  let wt0_group_by_size = time_wall () in
  let pt0_group_by_size = time_proc () in
  eprintf "[debug] filtering out files with unique size\n%!";
  let files = File.filter_out_unique_sizes files ~metrics in
  let pt1_group_by_size = time_proc () in
  let wt1_group_by_size = time_wall () in

  let wt0_group_by_sample = wt1_group_by_size in
  let pt0_group_by_sample = pt1_group_by_size in
  eprintf "[debug] filtering out files with unique heads\n%!";
  let files =
    if njobs > 1 then begin
      let q = Queue.create () in
      files
      |> Stream.bag_map ~njobs ~f:(File.head ~len:sample_len ~metrics)
      |> Stream.group_by ~f:snd
      |> Stream.map ~f:(fun (d, n, pairs) -> (d, n, List.map pairs ~f:fst))
      |> Stream.filter ~f:(fun (_, n, _) -> n > 1)
      |> Stream.iter ~f:(fun (_, _, fs) -> List.iter fs ~f:(fun f -> Queue.add f q));
      Stream.of_queue q
    end else
      File.filter_out_unique_heads files ~len:sample_len ~metrics
  in
  let pt1_group_by_sample = time_proc () in
  let wt1_group_by_sample = time_wall () in

  let wt0_group_by_digest = wt1_group_by_sample in
  let pt0_group_by_digest = pt1_group_by_sample in
  eprintf "[debug] hashing\n%!";
  let groups =
    if njobs > 1 then begin
      let with_digests =
        Stream.bag_map files ~njobs ~f:(fun {File.path; _} -> Digest.file path)
      in
      Stream.map (Stream.group_by with_digests ~f:snd) ~f:(
        fun (digest, n, file_digest_pairs) ->
          let files =
            List.map file_digest_pairs ~f:(fun (file, _) ->
              M.file_hashed metrics ~size:file.File.size;
              file
            )
          in
          (digest, n, files)
      )
    end else
      Stream.group_by files ~f:(fun {File.path; size} ->
        M.file_hashed metrics ~size;
        Digest.file path
      )
  in
  let pt1_group_by_digest = time_proc () in
  let wt1_group_by_digest = time_wall () in

  eprintf "[debug] reporting\n%!";
  Stream.iter groups ~f:(fun (d, n, files) ->
    M.digest metrics;
    if n > 1 then begin
      (* n - 1 copies per group are redundant: one copy must be kept. *)
      M.redundant_data metrics ~size:((n - 1) * (List.hd files).File.size);
      output d n files
    end
  );

  let pt1_all = time_proc () in
  let wt1_all = time_wall () in

  M.report metrics
    ~wall_time_all:            (wt1_all             -. wt0_all)
    ~wall_time_group_by_size:  (wt1_group_by_size   -. wt0_group_by_size)
    ~wall_time_group_by_head:  (wt1_group_by_sample -. wt0_group_by_sample)
    ~wall_time_group_by_digest:(wt1_group_by_digest -. wt0_group_by_digest)
    ~proc_time_all:            (pt1_all             -. pt0_all)
    ~proc_time_group_by_size:  (pt1_group_by_size   -. pt0_group_by_size)
    ~proc_time_group_by_head:  (pt1_group_by_sample -. pt0_group_by_sample)
    ~proc_time_group_by_digest:(pt1_group_by_digest -. pt0_group_by_digest)

let get_opt () : opt =
  let assert_ test x msg =
    if not (test x) then begin
      eprintf "%s\n%!" msg;
      exit 1
    end
  in
  let assert_file_exists path =
    assert_ Sys.file_exists path (sprintf "File does not exist: %S" path)
  in
  let assert_file_is_dir path =
    assert_ Sys.is_directory path (sprintf "File is not a directory: %S" path)
  in
  let input = ref Stdin in
  let output = ref Stdout in
  let ignore = ref (fun _ -> false) in
  let sample = ref 512 in
  let njobs = ref 6 in (* assumed default; the original value is elided here *)
  let spec =
    [ ( (* flag name elided *)
      , Arg.String (fun path ->
          assert_file_exists path;
          assert_file_is_dir path;
          output := Directory path
        )
      , " Output to this directory instead of stdout."
      )
    ; ( (* flag name elided *)
      , Arg.String (fun regexp ->
          let regexp = Str.regexp regexp in
          ignore := (fun string -> Str.string_match regexp string 0)
        )
      , " Ignore file paths which match this regexp pattern (see Str module)."
      )
    ; ( (* flag name elided *)
      , Arg.Set_int sample
      , (sprintf " Byte size of file samples to use. Default: %d" !sample)
      )
    ; ( (* flag name elided *)
      , Arg.Set_int njobs
      , (sprintf " Number of parallel jobs. Default: %d" !njobs)
      )
    ]
  in
  Arg.parse
    spec
    (fun path ->
      assert_file_exists path;
      assert_file_is_dir path;
      (match !input with
      | Stdin ->
          input := Directories [path]
      | Directories paths ->
          input := Directories (path :: paths)
      )
    )
    "";
  assert_
    (fun s -> s >= 0)
    !sample
    (sprintf "Sample size cannot be negative: %d" !sample);