3 module Array = ArrayLabels
4 module List = ListLabels
5 module StrSet = Set.Make(String)
6 module Unix = UnixLabels
15 -> wall_time_all:float
16 -> wall_time_group_by_size:float
17 -> wall_time_group_by_head:float
18 -> wall_time_group_by_digest:float
19 -> proc_time_all:float
20 -> proc_time_group_by_size:float
21 -> proc_time_group_by_head:float
22 -> proc_time_group_by_digest:float
26 : t -> size:int -> unit
28 : t -> size:int -> unit
34 : t -> size:int -> unit
36 : t -> size:int -> unit
37 val file_unique_sample
38 : t -> size:int -> unit
40 : t -> size:int -> unit
44 : t -> size:int -> unit
47 { considered_files : int ref
48 ; considered_bytes : int ref
50 ; ignored_files : int ref
51 ; ignored_bytes : int ref
52 ; unique_size_files : int ref
53 ; unique_size_bytes : int ref
54 ; unique_sample_files : int ref
55 ; unique_sample_bytes : int ref
56 ; sampled_files : int ref
57 ; sampled_bytes : int ref
58 ; hashed_files : int ref
59 ; hashed_bytes : int ref
61 ; redundant_data : int ref
65 { considered_files = ref 0
66 ; considered_bytes = ref 0
68 ; ignored_files = ref 0
69 ; ignored_bytes = ref 0
70 ; unique_size_files = ref 0
71 ; unique_size_bytes = ref 0
72 ; sampled_files = ref 0
73 ; sampled_bytes = ref 0
74 ; hashed_files = ref 0
75 ; hashed_bytes = ref 0
76 ; unique_sample_files = ref 0
77 ; unique_sample_bytes = ref 0
79 ; redundant_data = ref 0
(* Account for a file encountered during traversal: one more file
 * considered, [size] more bytes considered. *)
let file_considered {considered_files; considered_bytes; _} ~size =
  incr considered_files;
  add considered_bytes size
89 let file_ignored {ignored_files; ignored_bytes; _} ~size =
91 add ignored_bytes size
(* Account for [size] bytes actually read while sampling a file's head. *)
let chunk_read {sampled_bytes; _} ~size =
  add sampled_bytes size
(* Account for a file skipped because its size was unique in its group:
 * one more such file, [size] more bytes not needing further inspection. *)
let file_unique_size {unique_size_files; unique_size_bytes; _} ~size =
  incr unique_size_files;
  add unique_size_bytes size
(* Account for a file skipped because its head sample was unique:
 * one more such file, [size] more bytes not needing full hashing. *)
let file_unique_sample {unique_sample_files; unique_sample_bytes; _} ~size =
  incr unique_sample_files;
  add unique_sample_bytes size
110 let file_hashed t ~size =
112 add t.hashed_bytes size
(* Account for [size] bytes of data found to be duplicated
 * (i.e. recoverable space). *)
let redundant_data {redundant_data = redundant; _} ~size =
  add redundant size
123 ~wall_time_group_by_size
124 ~wall_time_group_by_head
125 ~wall_time_group_by_digest
127 ~proc_time_group_by_size
128 ~proc_time_group_by_head
129 ~proc_time_group_by_digest
131 let b_to_mb b = (float_of_int b) /. 1024. /. 1024. in
132 let b_to_gb b = (b_to_mb b) /. 1024. in
133 eprintf "Total time : %.2f wall sec %.2f proc sec\n%!"
136 eprintf "Considered : %8d files %6.2f Gb\n%!"
137 !(t.considered_files)
138 (b_to_gb !(t.considered_bytes));
139 eprintf "Sampled : %8d files %6.2f Gb\n%!"
141 (b_to_gb !(t.sampled_bytes));
142 eprintf "Hashed : %8d files %6.2f Gb %6.2f wall sec %6.2f proc sec\n%!"
144 (b_to_gb !(t.hashed_bytes))
145 wall_time_group_by_digest
146 proc_time_group_by_digest;
147 eprintf "Digests : %8d\n%!"
149 eprintf "Duplicates (Hashed - Digests): %8d files %6.2f Gb\n%!"
150 (!(t.hashed_files) - !(t.digests))
151 (b_to_gb !(t.redundant_data));
152 eprintf "Skipped due to 0 size : %8d files\n%!" !(t.empty);
153 eprintf "Skipped due to unique size : %8d files %6.2f Gb %6.2f wall sec %6.2f proc sec\n%!"
154 !(t.unique_size_files)
155 (b_to_gb !(t.unique_size_bytes))
156 wall_time_group_by_size
157 proc_time_group_by_size;
158 eprintf "Skipped due to unique sample : %8d files %6.2f Gb %6.2f wall sec %6.2f proc sec\n%!"
159 !(t.unique_sample_files)
160 (b_to_gb !(t.unique_sample_bytes))
161 wall_time_group_by_head
162 proc_time_group_by_head;
163 eprintf "Ignored due to regex match : %8d files %6.2f Gb\n%!"
165 (b_to_gb !(t.ignored_bytes))
173 val create : (unit -> 'a option) -> 'a t
175 val of_queue : 'a Queue.t -> 'a t
177 val iter : 'a t -> f:('a -> unit) -> unit
179 val bag_map : 'a t -> njobs:int -> f:('a -> 'b) -> ('a * 'b) t
180 (** Parallel map with arbitrarily-reordered elements. *)
182 val map : 'a t -> f:('a -> 'b) -> 'b t
184 val filter : 'a t -> f:('a -> bool) -> 'a t
186 val concat : ('a t) list -> 'a t
188 val group_by : 'a t -> f:('a -> 'b) -> ('b * int * 'a list) t
193 {mutable streams : ('a S.t) list}
195 type ('input, 'output) msg_from_vassal =
197 | Result of (int * ('input * 'output))
200 type 'input msg_from_lord =
201 | Job of 'input option
204 {streams = [S.from (fun _ -> f ())]}
208 match Queue.take q with
209 | exception Queue.Empty ->
221 | exception Stream.Failure ->
222 t.streams <- streams;
232 | Some x -> Some (f x)
248 List.iter t.streams ~f:(S.iter f)
251 {streams = List.concat (List.map ts ~f:(fun {streams} -> streams))}
254 let groups_tbl = Hashtbl.create 1_000_000 in
258 match Hashtbl.find_opt groups_tbl group with
264 Hashtbl.replace groups_tbl group members
266 iter t ~f:group_update;
267 let groups = Queue.create () in
269 (fun name (length, members) -> Queue.add (name, length, members) groups)
274 val send : out_channel -> 'a -> unit
275 val recv : in_channel -> 'a
278 Marshal.to_channel oc msg [];
282 Marshal.from_channel ic
285 let lord t ~njobs ~vassals ~ic ~ocs =
286 eprintf "[debug] [lord] started\n%!";
287 let active_vassals = ref njobs in
288 let results = Queue.create () in
289 let rec dispatch () =
290 match Ipc.recv ic with
291 | ((Exiting i) : ('input, 'output) msg_from_vassal) ->
294 if !active_vassals = 0 then
298 | ((Ready i) : ('input, 'output) msg_from_vassal) ->
299 Ipc.send ocs.(i) (Job (next t));
301 | ((Result (i, result)) : ('input, 'output) msg_from_vassal) ->
302 Queue.add result results;
303 Ipc.send ocs.(i) (Job (next t));
306 let rec wait = function
309 let pid, _process_status = Unix.wait () in
310 (* TODO: handle process_status *)
311 wait (List.filter vassals ~f:(fun p -> p <> pid))
318 let vassal i ~f ~vassal_pipe_r ~lord_pipe_w =
319 eprintf "[debug] [vassal %d] started\n%!" i;
320 let ic = Unix.in_channel_of_descr vassal_pipe_r in
321 let oc = Unix.out_channel_of_descr lord_pipe_w in
324 match Ipc.recv ic with
325 | (Job (Some x) : 'input msg_from_lord) ->
326 work (Result (i, (x, f x)))
327 | (Job None : 'input msg_from_lord) ->
328 Ipc.send oc (Exiting i)
335 let bag_map t ~njobs ~f =
336 let lord_pipe_r, lord_pipe_w = Unix.pipe () in
337 let vassal_pipes = Array.init njobs ~f:(fun _ -> Unix.pipe ()) in
338 let vassal_pipes_r = Array.map vassal_pipes ~f:(fun (r, _) -> r) in
339 let vassal_pipes_w = Array.map vassal_pipes ~f:(fun (_, w) -> w) in
340 let vassals = ref [] in
341 for i=0 to (njobs - 1) do
342 begin match Unix.fork () with
344 Unix.close lord_pipe_r;
345 vassal i ~f ~lord_pipe_w ~vassal_pipe_r:vassal_pipes_r.(i)
347 vassals := pid :: !vassals
350 Unix.close lord_pipe_w;
355 ~ic:(Unix.in_channel_of_descr lord_pipe_r)
356 ~ocs:(Array.map vassal_pipes_w ~f:Unix.out_channel_of_descr)
359 module In_channel : sig
360 val lines : in_channel -> string Stream.t
363 Stream.create (fun () ->
364 match input_line ic with
365 | exception End_of_file ->
378 val find : string -> t Stream.t
379 (** Find all files in the directory tree, starting from the given root path *)
381 val lookup : string Stream.t -> t Stream.t
382 (** Lookup file info for given paths *)
384 val head : t -> len:int -> metrics:M.t -> string
386 val filter_out_unique_sizes : t Stream.t -> metrics:M.t -> t Stream.t
387 val filter_out_unique_heads : t Stream.t -> len:int -> metrics:M.t -> t Stream.t
395 Stream.map paths ~f:(fun path ->
396 let {Unix.st_size = size; _} = Unix.lstat path in
401 let dirs = Queue.create () in
402 let files = Queue.create () in
404 Array.iter (Sys.readdir parent) ~f:(fun child ->
405 let path = Filename.concat parent child in
406 let {Unix.st_kind = file_kind; st_size; _} = Unix.lstat path in
409 let file = {path; size = st_size} in
423 match Queue.is_empty files, Queue.is_empty dirs with
424 | false, _ -> Some (Queue.take files)
425 | true , true -> None
427 explore (Queue.take dirs);
432 let filter_out_singletons files ~group ~handle_singleton =
433 let q = Queue.create () in
434 Stream.iter (Stream.group_by files ~f:group) ~f:(fun group ->
435 let (_, n, members) = group in
437 List.iter members ~f:(fun m -> Queue.add m q)
439 handle_singleton group
443 let filter_out_unique_sizes files ~metrics =
444 filter_out_singletons
446 ~group:(fun {size; _} -> size)
447 ~handle_singleton:(fun (size, _, _) -> M.file_unique_size metrics ~size)
449 let head {path; _} ~len ~metrics =
450 M.file_sampled metrics;
451 let buf = Bytes.make len ' ' in
452 let ic = open_in_bin path in
453 let rec read pos len =
458 let chunk_size = input ic buf pos len in
459 M.chunk_read metrics ~size:chunk_size;
460 if chunk_size = 0 then (* EOF *)
463 read (pos + chunk_size) (len - chunk_size)
470 let filter_out_unique_heads files ~len ~metrics =
471 filter_out_singletons
473 ~group:(head ~len ~metrics)
474 ~handle_singleton:(fun (_, _, files) ->
475 let {size; _} = List.hd files in (* Guaranteed non-empty *)
476 M.file_unique_sample metrics ~size
482 | Directories of string list
486 | Directory of string
491 ; ignore : string -> bool
496 let make_input_stream input ignore ~metrics =
500 File.lookup (In_channel.lines stdin)
501 | Directories paths ->
502 let paths = StrSet.elements (StrSet.of_list paths) in
503 Stream.concat (List.map paths ~f:File.find)
505 Stream.filter input ~f:(fun {File.path; size} ->
506 M.file_considered metrics ~size;
507 let empty = size = 0 in
508 let ignored = ignore path in
509 if empty then M.file_empty metrics;
510 if ignored then M.file_ignored metrics ~size;
511 (not empty) && (not ignored)
514 let make_output_fun = function
516 fun digest n_files files ->
517 printf "%s %d\n%!" (Digest.to_hex digest) n_files;
518 List.iter files ~f:(fun {File.path; _} ->
519 printf " %S\n%!" path
522 fun digest _ files ->
523 let digest = Digest.to_hex digest in
524 let dir = Filename.concat dir (String.sub digest 0 2) in
525 Unix.mkdir dir ~perm:0o700;
526 let oc = open_out (Filename.concat dir digest) in
527 List.iter files ~f:(fun {File.path; _} ->
528 output_string oc (sprintf "%S\n%!" path)
538 let main {input; output; ignore; sample = sample_len; njobs} =
539 let wt0_all = time_wall () in
540 let pt0_all = time_proc () in
541 let metrics = M.init () in
542 let output = make_output_fun output in
543 let input = make_input_stream input ignore ~metrics in
544 (* TODO: Make a nice(r) abstraction to re-assemble pieces in the pipeline:
546 * from input to files_by_size
547 * from files_by_size to files_by_sample
548 * from files_by_sample to files_by_digest
549 * from files_by_digest to output
551 * input |> files_by_size |> files_by_sample |> files_by_digest |> output
556 let wt0_group_by_size = time_wall () in
557 let pt0_group_by_size = time_proc () in
558 eprintf "[debug] filtering-out files with unique size\n%!";
559 let files = File.filter_out_unique_sizes files ~metrics in
560 let pt1_group_by_size = time_proc () in
561 let wt1_group_by_size = time_wall () in
563 let wt0_group_by_sample = wt1_group_by_size in
564 let pt0_group_by_sample = pt1_group_by_size in
565 eprintf "[debug] filtering-out files with unique heads\n%!";
567 if njobs > 1 then begin
568 let q = Queue.create () in
570 |> Stream.bag_map ~njobs ~f:(File.head ~len:sample_len ~metrics)
571 |> Stream.group_by ~f:snd
572 |> Stream.map ~f:(fun (d, n, pairs) -> (d, n, List.map pairs ~f:fst))
573 |> Stream.filter ~f:(fun (_, n, _) -> n > 1)
574 |> Stream.iter ~f:(fun (_, _, fs) -> List.iter fs ~f:(fun f -> Queue.add f q))
578 File.filter_out_unique_heads files ~len:sample_len ~metrics
580 let pt1_group_by_sample = time_proc () in
581 let wt1_group_by_sample = time_wall () in
583 let wt0_group_by_digest = wt1_group_by_sample in
584 let pt0_group_by_digest = pt1_group_by_sample in
585 eprintf "[debug] hashing\n%!";
589 Stream.bag_map files ~njobs ~f:(fun {File.path; _} -> Digest.file path)
591 Stream.map (Stream.group_by with_digests ~f:snd) ~f:(
592 fun (digest, n, file_digest_pairs) ->
594 List.map file_digest_pairs ~f:(fun (file, _) ->
595 M.file_hashed metrics ~size:file.File.size;
602 Stream.group_by files ~f:(fun {File.path; size} ->
603 M.file_hashed metrics ~size;
607 let pt1_group_by_digest = time_proc () in
608 let wt1_group_by_digest = time_wall () in
610 eprintf "[debug] reporting\n%!";
611 Stream.iter groups ~f:(fun (d, n, files) ->
614 M.redundant_data metrics ~size:(n * (List.hd files).File.size);
618 let pt1_all = time_proc () in
619 let wt1_all = time_wall () in
622 ~wall_time_all: (wt1_all -. wt0_all)
623 ~wall_time_group_by_size: (wt1_group_by_size -. wt0_group_by_size)
624 ~wall_time_group_by_head: (wt1_group_by_sample -. wt0_group_by_sample)
625 ~wall_time_group_by_digest:(wt1_group_by_digest -. wt0_group_by_digest)
626 ~proc_time_all: (pt1_all -. pt0_all)
627 ~proc_time_group_by_size: (pt1_group_by_size -. pt0_group_by_size)
628 ~proc_time_group_by_head: (pt1_group_by_sample -. pt0_group_by_sample)
629 ~proc_time_group_by_digest:(pt1_group_by_digest -. pt0_group_by_digest)
631 let get_opt () : opt =
632 let assert_ test x msg =
633 if not (test x) then begin
634 eprintf "%s\n%!" msg;
638 let assert_file_exists path =
639 assert_ Sys.file_exists path (sprintf "File does not exist: %S" path)
641 let assert_file_is_dir path =
642 assert_ Sys.is_directory path (sprintf "File is not a directory: %S" path)
644 let input = ref Stdin in
645 let output = ref Stdout in
646 let ignore = ref (fun _ -> false) in
647 let sample = ref 512 in
651 , Arg.String (fun path ->
652 assert_file_exists path;
653 assert_file_is_dir path;
654 output := Directory path
656 , " Output to this directory instead of stdout."
659 , Arg.String (fun regexp ->
660 let regexp = Str.regexp regexp in
661 ignore := fun string -> Str.string_match regexp string 0)
662 , " Ignore file paths which match this regexp pattern (see Str module)."
666 , (sprintf " Byte size of file samples to use. Default: %d" !sample)
670 , (sprintf " Number of parallel jobs. Default: %d" !njobs)
677 assert_file_exists path;
678 assert_file_is_dir path;
681 input := Directories [path]
682 | Directories paths ->
683 input := Directories (path :: paths)
689 (sprintf "Sample size cannot be negative: %d" !sample);