Improve pipeline and metrics abstractions
[dups.git] / dups.ml
1 open Printf
2
3 module Array = ArrayLabels
4 module List = ListLabels
5 module StrSet = Set.Make(String)
6 module Unix = UnixLabels
7
module Metrics : sig
  type t

  val init
    : unit -> t
  val report
    : t
    -> time_all:float
    -> time_group_by_size:float
    -> time_group_by_head:float
    -> time_group_by_digest:float
    -> unit

  val file_considered
    : t -> size:int -> unit
  val file_ignored
    : t -> size:int -> unit
  val file_empty
    : t -> unit
  val file_sampled
    : t -> unit
  val chunk_read
    : t -> size:int -> unit
  val file_unique_size
    : t -> size:int -> unit
  val file_unique_sample
    : t -> size:int -> unit
  val file_hashed
    : t -> size:int -> unit
  val digest
    : t -> unit
end = struct
  (* Pipeline statistics.  Counters are mutable record fields, which play
     exactly the role the [int ref] fields did (the type is abstract, so
     callers cannot tell the difference), with one less indirection. *)
  type t =
    { mutable considered_files    : int
    ; mutable considered_bytes    : int
    ; mutable empty               : int
    ; mutable ignored_files       : int
    ; mutable ignored_bytes       : int
    ; mutable unique_size_files   : int
    ; mutable unique_size_bytes   : int
    ; mutable unique_sample_files : int
    ; mutable unique_sample_bytes : int
    ; mutable sampled_files       : int
    ; mutable sampled_bytes       : int
    ; mutable hashed_files        : int
    ; mutable hashed_bytes        : int
    ; mutable digests             : int
    }

  (* All counters start at zero. *)
  let init () =
    { considered_files    = 0
    ; considered_bytes    = 0
    ; empty               = 0
    ; ignored_files       = 0
    ; ignored_bytes       = 0
    ; unique_size_files   = 0
    ; unique_size_bytes   = 0
    ; unique_sample_files = 0
    ; unique_sample_bytes = 0
    ; sampled_files       = 0
    ; sampled_bytes       = 0
    ; hashed_files        = 0
    ; hashed_bytes        = 0
    ; digests             = 0
    }

  let file_considered t ~size =
    t.considered_files <- t.considered_files + 1;
    t.considered_bytes <- t.considered_bytes + size

  let file_ignored t ~size =
    t.ignored_files <- t.ignored_files + 1;
    t.ignored_bytes <- t.ignored_bytes + size

  let file_empty t =
    t.empty <- t.empty + 1

  (* Bytes are attributed here; the matching file count is bumped
     separately in [file_sampled]. *)
  let chunk_read t ~size =
    t.sampled_bytes <- t.sampled_bytes + size

  let file_sampled t =
    t.sampled_files <- t.sampled_files + 1

  let file_unique_size t ~size =
    t.unique_size_files <- t.unique_size_files + 1;
    t.unique_size_bytes <- t.unique_size_bytes + size

  let file_unique_sample t ~size =
    t.unique_sample_files <- t.unique_sample_files + 1;
    t.unique_sample_bytes <- t.unique_sample_bytes + size

  let file_hashed t ~size =
    t.hashed_files <- t.hashed_files + 1;
    t.hashed_bytes <- t.hashed_bytes + size

  let digest t =
    t.digests <- t.digests + 1

  (* Print a human-readable summary of all counters to stderr. *)
  let report
      t
      ~time_all
      ~time_group_by_size
      ~time_group_by_head
      ~time_group_by_digest
    =
    let b_to_mb b = (float_of_int b) /. 1024. /. 1024. in
    let b_to_gb b = (b_to_mb b) /. 1024. in
    eprintf "Time : %8.2f seconds\n%!"
      time_all;
    eprintf "Considered : %8d files %6.2f Gb\n%!"
      t.considered_files
      (b_to_gb t.considered_bytes);
    eprintf "Sampled : %8d files %6.2f Gb\n%!"
      t.sampled_files
      (b_to_gb t.sampled_bytes);
    eprintf "Hashed : %8d files %6.2f Gb %6.2f seconds\n%!"
      t.hashed_files
      (b_to_gb t.hashed_bytes)
      time_group_by_digest;
    eprintf "Digests : %8d\n%!"
      t.digests;
    eprintf "Duplicates (Hashed - Digests): %8d\n%!"
      (t.hashed_files - t.digests);
    eprintf "Skipped due to 0 size : %8d files\n%!" t.empty;
    eprintf "Skipped due to unique size : %8d files %6.2f Gb %6.2f seconds\n%!"
      t.unique_size_files
      (b_to_gb t.unique_size_bytes)
      time_group_by_size;
    eprintf "Skipped due to unique sample : %8d files %6.2f Gb %6.2f seconds\n%!"
      t.unique_sample_files
      (b_to_gb t.unique_sample_bytes)
      time_group_by_head;
    eprintf "Ignored due to regex match : %8d files %6.2f Gb\n%!"
      t.ignored_files
      (b_to_gb t.ignored_bytes)
end
147
148 module M = Metrics
149
module Stream : sig
  type 'a t

  val create : (unit -> 'a option) -> 'a t

  val of_queue : 'a Queue.t -> 'a t

  val iter : 'a t -> f:('a -> unit) -> unit

  val map : 'a t -> f:('a -> 'b) -> 'b t

  val filter : 'a t -> f:('a -> bool) -> 'a t

  val concat : ('a t) list -> 'a t

  val group_by : 'a t -> f:('a -> 'b) -> ('b * int * 'a list) t
end = struct
  (* The stdlib lazy stream, not this module (no [module rec] here). *)
  module S = Stream

  (* A stream is a list of underlying stdlib streams, drained in order. *)
  type 'a t =
    {mutable streams : ('a S.t) list}

  (* Wrap a pull function: [None] marks the end of the stream. *)
  let create pull =
    {streams = [S.from (fun _ -> pull ())]}

  (* Drain a queue destructively, one element per pull. *)
  let of_queue q =
    create (fun () ->
      if Queue.is_empty q then
        None
      else
        Some (Queue.take q)
    )

  (* Next element, advancing past exhausted underlying streams. *)
  let rec next t =
    match t.streams with
    | [] ->
        None
    | s :: rest ->
        (match S.next s with
        | x ->
            Some x
        | exception S.Failure ->
            t.streams <- rest;
            next t
        )

  let map t ~f =
    create (fun () ->
      match next t with
      | None ->
          None
      | Some x ->
          Some (f x)
    )

  (* Keep pulling until an element satisfies [f] or the stream ends. *)
  let filter t ~f =
    let rec pull () =
      match next t with
      | Some x when not (f x) ->
          pull ()
      | result ->
          result
    in
    create pull

  let iter t ~f =
    List.iter (S.iter f) t.streams

  (* Concatenation is just appending the underlying stream lists. *)
  let concat ts =
    {streams = List.concat (List.map (fun {streams} -> streams) ts)}

  (* Eagerly drain [t], bucketing elements by [f]; the result streams
     [(key, count, members)] triples.  Member lists come out in reverse
     arrival order, and group order follows [Hashtbl.iter]. *)
  let group_by t ~f =
    let groups_tbl = Hashtbl.create 1_000_000 in
    iter t ~f:(fun x ->
      let key = f x in
      let entry =
        match Hashtbl.find_opt groups_tbl key with
        | None ->
            (1, [x])
        | Some (n, xs) ->
            (succ n, x :: xs)
      in
      Hashtbl.replace groups_tbl key entry
    );
    let groups = Queue.create () in
    Hashtbl.iter
      (fun key (n, xs) -> Queue.add (key, n, xs) groups)
      groups_tbl;
    of_queue groups
end
242
module In_channel : sig
  val lines : in_channel -> string Stream.t
end = struct
  (* Stream of lines read lazily from [ic]; the stream ends at EOF.
     The channel itself is not closed here — that is the caller's job. *)
  let lines ic =
    Stream.create (fun () ->
      try Some (input_line ic) with
      | End_of_file -> None
    )
end
255
module File : sig
  type t =
    { path : string
    ; size : int
    }

  val find : string -> t Stream.t
  (** Find all files in the directory tree, starting from the given root path *)

  val lookup : string Stream.t -> t Stream.t
  (** Lookup file info for given paths *)

  val filter_out_unique_sizes : t Stream.t -> metrics:M.t -> t Stream.t
  val filter_out_unique_heads : t Stream.t -> len:int -> metrics:M.t -> t Stream.t
end = struct
  type t =
    { path : string
    ; size : int
    }

  (* [lstat] each incoming path to obtain its size.  Raises if a path
     does not exist (matches the original behavior). *)
  let lookup paths =
    Stream.map paths ~f:(fun path ->
      let {Unix.st_size = size; _} = Unix.lstat path in
      {path; size}
    )

  (* Breadth-first walk of the tree rooted at [root].  Only regular files
     are streamed; directories are queued for later exploration, and
     devices, symlinks, FIFOs and sockets are skipped. *)
  let find root =
    let dirs = Queue.create () in
    let files = Queue.create () in
    let explore parent =
      Array.iter (Sys.readdir parent) ~f:(fun child ->
        let path = Filename.concat parent child in
        let {Unix.st_kind = file_kind; st_size; _} = Unix.lstat path in
        match file_kind with
        | Unix.S_REG ->
            let file = {path; size = st_size} in
            Queue.add file files
        | Unix.S_DIR ->
            Queue.add path dirs
        | Unix.S_CHR
        | Unix.S_BLK
        | Unix.S_LNK
        | Unix.S_FIFO
        | Unix.S_SOCK ->
            ()
      )
    in
    explore root;
    (* Drain discovered files first; when none are pending, explore the
       next queued directory.  The stream ends when both are empty. *)
    let rec next () =
      match Queue.is_empty files, Queue.is_empty dirs with
      | false, _ -> Some (Queue.take files)
      | true , true -> None
      | true , false ->
          explore (Queue.take dirs);
          next ()
    in
    Stream.create next

  (* Keep only members of groups with more than one member; singleton
     groups are handed to [handle_singleton] (used for metrics). *)
  let filter_out_singletons files ~group ~handle_singleton =
    let q = Queue.create () in
    Stream.iter (Stream.group_by files ~f:group) ~f:(fun group ->
      let (_, n, members) = group in
      if n > 1 then
        List.iter members ~f:(fun m -> Queue.add m q)
      else
        handle_singleton group
    );
    Stream.of_queue q

  let filter_out_unique_sizes files ~metrics =
    filter_out_singletons
      files
      ~group:(fun {size; _} -> size)
      ~handle_singleton:(fun (size, _, _) -> M.file_unique_size metrics ~size)

  (* Read up to [len] bytes from the head of the file at [path].  If the
     file is shorter than [len], the tail of the returned string is the
     space padding the buffer was initialized with. *)
  let head path ~len ~metrics =
    let buf = Bytes.make len ' ' in
    let ic = open_in_bin path in
    let rec read pos len =
      assert (len >= 0);
      if len = 0 then
        ()
      else begin
        let chunk_size = input ic buf pos len in
        M.chunk_read metrics ~size:chunk_size;
        if chunk_size = 0 then (* EOF *)
          ()
        else
          read (pos + chunk_size) (len - chunk_size)
      end
    in
    (* BUG FIX: previously the channel leaked if [input] raised (e.g. an
       I/O error mid-read); close it on both the normal and the
       exceptional path. *)
    (match read 0 len with
    | () ->
        close_in ic
    | exception e ->
        close_in_noerr ic;
        raise e
    );
    Bytes.to_string buf

  (* Group by the sampled head bytes; each file is stat-counted as
     sampled when its group key is computed. *)
  let filter_out_unique_heads files ~len ~metrics =
    filter_out_singletons
      files
      ~group:(fun {path; _} ->
        M.file_sampled metrics;
        head path ~len ~metrics
      )
      ~handle_singleton:(fun (_, _, files) ->
        let {size; _} = List.hd files in (* Guaranteed non-empty *)
        M.file_unique_sample metrics ~size
      )
end
363
(* Where to read candidate file paths from: stdin (one path per line) or
   one-or-more directory trees to walk. *)
type input =
  | Stdin
  | Directories of string list

(* Where to write duplicate groups: stdout, or one file per digest under
   the given directory. *)
type output =
  | Stdout
  | Directory of string

(* Parsed command-line options. *)
type opt =
  { input : input
  ; output : output
  ; ignore : Str.regexp option  (* paths matching this regexp are skipped *)
  ; sample : int                (* number of head bytes to sample per file *)
  }
378
(* Build the stream of candidate files from the configured input source,
   dropping empty files and files whose path matches the ignore regexp.
   Every file seen is recorded in [metrics]. *)
let make_input_stream input ignore ~metrics =
  let files =
    match input with
    | Stdin ->
        File.lookup (In_channel.lines stdin)
    | Directories paths ->
        (* De-duplicate the given roots before walking them. *)
        let paths = StrSet.elements (StrSet.of_list paths) in
        Stream.concat (List.map paths ~f:File.find)
  in
  Stream.filter files ~f:(fun {File.path; size} ->
    M.file_considered metrics ~size;
    let is_empty = size = 0 in
    if is_empty then M.file_empty metrics;
    let is_ignored =
      match ignore with
      | Some regexp when Str.string_match regexp path 0 ->
          M.file_ignored metrics ~size;
          true
      | Some _ | None ->
          false
    in
    not is_empty && not is_ignored
  )
402
(* Build the function that emits one duplicate group.  For [Stdout] the
   group is printed as a digest/count header followed by the member
   paths.  For [Directory dir] each group is written to
   [dir/<first two hex chars>/<full hex digest>], one path per line. *)
let make_output_fun = function
  | Stdout ->
      fun digest n_files files ->
        printf "%s %d\n%!" (Digest.to_hex digest) n_files;
        List.iter files ~f:(fun {File.path; _} ->
          printf " %S\n%!" path
        )
  | Directory dir ->
      fun digest _ files ->
        let digest = Digest.to_hex digest in
        let dir = Filename.concat dir (String.sub digest 0 2) in
        (* BUG FIX: two digests sharing a 2-char hex prefix map to the
           same sub-directory, so the second [mkdir] used to crash with
           EEXIST.  An already-existing directory is fine here. *)
        (match Unix.mkdir dir ~perm:0o700 with
        | () ->
            ()
        | exception Unix.Unix_error (Unix.EEXIST, _, _) ->
            ()
        );
        let oc = open_out (Filename.concat dir digest) in
        List.iter files ~f:(fun {File.path; _} ->
          output_string oc (sprintf "%S\n%!" path)
        );
        close_out oc
420
(* Run the whole pipeline: enumerate candidate files, discard files with
   a unique size, then files with a unique head sample, then group the
   remainder by full-content digest and emit every group with more than
   one member.  Timing of each stage is reported via [M.report]. *)
let main {input; output; ignore; sample = sample_len} =
  let time_started = Sys.time () in
  let metrics = M.init () in
  let emit = make_output_fun output in
  let files = make_input_stream input ignore ~metrics in
  (* TODO: Make a nice(r) abstraction to re-assemble pieces in the pipeline:
   *
   * from input to files_by_size
   * from files_by_size to files_by_sample
   * from files_by_sample to files_by_digest
   * from files_by_digest to output
   *
   * input |> files_by_size |> files_by_sample |> files_by_digest |> output
   *)

  let time_size_start = Sys.time () in
  let files = File.filter_out_unique_sizes files ~metrics in
  let time_size_done = Sys.time () in

  (* The sampling stage starts exactly where the size stage ended. *)
  let files = File.filter_out_unique_heads files ~len:sample_len ~metrics in
  let time_sample_done = Sys.time () in

  let groups =
    Stream.group_by files ~f:(fun {File.path; size} ->
      M.file_hashed metrics ~size;
      Digest.file path
    )
  in
  let time_digest_done = Sys.time () in

  Stream.iter groups ~f:(fun (d, n, files) ->
    M.digest metrics;
    if n > 1 then emit d n files
  );

  let time_finished = Sys.time () in

  M.report metrics
    ~time_all:            (time_finished -. time_started)
    ~time_group_by_size:  (time_size_done -. time_size_start)
    ~time_group_by_head:  (time_sample_done -. time_size_done)
    ~time_group_by_digest:(time_digest_done -. time_sample_done)
467
(* Parse command-line options.  Exits with status 1 and a message on
   stderr when a given path does not exist / is not a directory, or when
   the sample size is not positive.  Anonymous arguments are directories
   to scan; with none given, paths are read from stdin. *)
let get_opt () : opt =
  let assert_ test x msg =
    if not (test x) then begin
      eprintf "%s\n%!" msg;
      exit 1
    end
  in
  let assert_file_exists path =
    assert_ Sys.file_exists path (sprintf "File does not exist: %S" path)
  in
  let assert_file_is_dir path =
    assert_ Sys.is_directory path (sprintf "File is not a directory: %S" path)
  in
  let input = ref Stdin in
  let output = ref Stdout in
  let ignore = ref None in
  let sample = ref 256 in
  let spec =
    [ ( "-out"
      , Arg.String (fun path ->
          assert_file_exists path;
          assert_file_is_dir path;
          output := Directory path
        )
      , " Output to this directory instead of stdout."
      )
    ; ( "-ignore"
      , Arg.String (fun regexp -> ignore := Some (Str.regexp regexp))
      , " Ignore file paths which match this regexp pattern (see Str module)."
      )
    ; ( "-sample"
      , Arg.Set_int sample
      , (sprintf " Byte size of file samples to use. Default: %d" !sample)
      )
    ]
  in
  Arg.parse
    (Arg.align spec)
    (fun path ->
      assert_file_exists path;
      assert_file_is_dir path;
      match !input with
      | Stdin ->
          input := Directories [path]
      | Directories paths ->
          input := Directories (path :: paths)
    )
    "";
  (* BUG FIX: the predicate [x > 0] also rejects zero, but the old
     message only mentioned negative values; say "must be positive". *)
  assert_
    (fun x -> x > 0)
    !sample
    (sprintf "Sample size must be positive: %d" !sample);
  { input = !input
  ; output = !output
  ; ignore = !ignore
  ; sample = !sample
  }
525
(* Entry point: parse CLI options, then run the duplicate-finding pipeline. *)
let () =
  main (get_opt ())
This page took 0.060997 seconds and 4 git commands to generate.