Make ignore-pattern a closure
[dups.git] / dups.ml
... / ...
CommitLineData
1open Printf
2
3module Array = ArrayLabels
4module List = ListLabels
5module StrSet = Set.Make(String)
6module Unix = UnixLabels
7
(* Run-time counters for the dedup pipeline, plus a final report printed
   to stderr.  The type is abstract: callers can only bump counters and
   ask for the report. *)
module Metrics : sig
  type t

  val init
    : unit -> t
  val report
    : t
    -> time_all:float
    -> time_group_by_size:float
    -> time_group_by_head:float
    -> time_group_by_digest:float
    -> unit

  val file_considered
    : t -> size:int -> unit
  val file_ignored
    : t -> size:int -> unit
  val file_empty
    : t -> unit
  val file_sampled
    : t -> unit
  val chunk_read
    : t -> size:int -> unit
  val file_unique_size
    : t -> size:int -> unit
  val file_unique_sample
    : t -> size:int -> unit
  val file_hashed
    : t -> size:int -> unit
  val digest
    : t -> unit
  val redundant_data
    : t -> size:int -> unit
end = struct
  (* Counters are mutable record fields (equivalent to, but lighter than,
     a record of [int ref]s). *)
  type t =
    { mutable considered_files : int
    ; mutable considered_bytes : int
    ; mutable empty : int
    ; mutable ignored_files : int
    ; mutable ignored_bytes : int
    ; mutable unique_size_files : int
    ; mutable unique_size_bytes : int
    ; mutable unique_sample_files : int
    ; mutable unique_sample_bytes : int
    ; mutable sampled_files : int
    ; mutable sampled_bytes : int
    ; mutable hashed_files : int
    ; mutable hashed_bytes : int
    ; mutable digests : int
    ; mutable redundant_data : int
    }

  (* Fresh metrics with every counter at zero. *)
  let init () =
    { considered_files = 0
    ; considered_bytes = 0
    ; empty = 0
    ; ignored_files = 0
    ; ignored_bytes = 0
    ; unique_size_files = 0
    ; unique_size_bytes = 0
    ; unique_sample_files = 0
    ; unique_sample_bytes = 0
    ; sampled_files = 0
    ; sampled_bytes = 0
    ; hashed_files = 0
    ; hashed_bytes = 0
    ; digests = 0
    ; redundant_data = 0
    }

  let file_considered t ~size =
    t.considered_files <- t.considered_files + 1;
    t.considered_bytes <- t.considered_bytes + size

  let file_ignored t ~size =
    t.ignored_files <- t.ignored_files + 1;
    t.ignored_bytes <- t.ignored_bytes + size

  let file_empty t =
    t.empty <- t.empty + 1

  let chunk_read t ~size =
    t.sampled_bytes <- t.sampled_bytes + size

  let file_sampled t =
    t.sampled_files <- t.sampled_files + 1

  let file_unique_size t ~size =
    t.unique_size_files <- t.unique_size_files + 1;
    t.unique_size_bytes <- t.unique_size_bytes + size

  let file_unique_sample t ~size =
    t.unique_sample_files <- t.unique_sample_files + 1;
    t.unique_sample_bytes <- t.unique_sample_bytes + size

  let file_hashed t ~size =
    t.hashed_files <- t.hashed_files + 1;
    t.hashed_bytes <- t.hashed_bytes + size

  let digest t =
    t.digests <- t.digests + 1

  let redundant_data t ~size =
    t.redundant_data <- t.redundant_data + size

  (* Print the summary to stderr.  Timings are supplied by the caller. *)
  let report
      t
      ~time_all
      ~time_group_by_size
      ~time_group_by_head
      ~time_group_by_digest
    =
    let b_to_mb b = (float_of_int b) /. 1024. /. 1024. in
    let b_to_gb b = (b_to_mb b) /. 1024. in
    eprintf "Time : %8.2f seconds\n%!"
      time_all;
    eprintf "Considered : %8d files %6.2f Gb\n%!"
      t.considered_files
      (b_to_gb t.considered_bytes);
    eprintf "Sampled : %8d files %6.2f Gb\n%!"
      t.sampled_files
      (b_to_gb t.sampled_bytes);
    eprintf "Hashed : %8d files %6.2f Gb %6.2f seconds\n%!"
      t.hashed_files
      (b_to_gb t.hashed_bytes)
      time_group_by_digest;
    eprintf "Digests : %8d\n%!"
      t.digests;
    eprintf "Duplicates (Hashed - Digests): %8d files %6.2f Gb\n%!"
      (t.hashed_files - t.digests)
      (b_to_gb t.redundant_data);
    eprintf "Skipped due to 0 size : %8d files\n%!" t.empty;
    eprintf "Skipped due to unique size : %8d files %6.2f Gb %6.2f seconds\n%!"
      t.unique_size_files
      (b_to_gb t.unique_size_bytes)
      time_group_by_size;
    eprintf "Skipped due to unique sample : %8d files %6.2f Gb %6.2f seconds\n%!"
      t.unique_sample_files
      (b_to_gb t.unique_sample_bytes)
      time_group_by_head;
    eprintf "Ignored due to regex match : %8d files %6.2f Gb\n%!"
      t.ignored_files
      (b_to_gb t.ignored_bytes)
end
155
156module M = Metrics
157
(* A pull-based stream abstraction layered on the standard library's
   [Stream] module, which this definition shadows for the rest of the
   file. *)
module Stream : sig
  type 'a t

  (** [create f] is the stream of values produced by repeated calls to
      [f]; [f] signals exhaustion by returning [None]. *)
  val create : (unit -> 'a option) -> 'a t

  (** [of_queue q] drains [q], yielding its elements in FIFO order. *)
  val of_queue : 'a Queue.t -> 'a t

  val iter : 'a t -> f:('a -> unit) -> unit

  val map : 'a t -> f:('a -> 'b) -> 'b t

  val filter : 'a t -> f:('a -> bool) -> 'a t

  (** [concat ts] yields all elements of [ts], stream by stream, in order. *)
  val concat : ('a t) list -> 'a t

  (** [group_by t ~f] eagerly consumes [t] and groups elements by key
      [f x], yielding [(key, count, members)] triples (hash-table order;
      [members] is in reverse encounter order). *)
  val group_by : 'a t -> f:('a -> 'b) -> ('b * int * 'a list) t
end = struct
  (* [S] is the standard library's Stream module; the shadowing by the
     module being defined only takes effect after this definition ends. *)
  module S = Stream

  (* A stream is a mutable list of underlying streams consumed front to
     back; this representation makes [concat] cheap. *)
  type 'a t =
    {mutable streams : ('a S.t) list}

  let create f =
    (* [S.from] passes an element index; it is ignored here. *)
    {streams = [S.from (fun _ -> f ())]}

  let of_queue q =
    create (fun () ->
      match Queue.take q with
      | exception Queue.Empty ->
          None
      | x ->
          Some x
    )

  (* Pull the next element, discarding exhausted underlying streams as we
     go. *)
  let rec next t =
    match t.streams with
    | [] ->
        None
    | s :: streams ->
        (match S.next s with
        | exception Stream.Failure ->
            t.streams <- streams;
            next t
        | x ->
            Some x
        )

  let map t ~f =
    create (fun () ->
      match next t with
      | None -> None
      | Some x -> Some (f x)
    )

  let filter t ~f =
    (* Skip elements until [f] accepts one or the input is exhausted. *)
    let rec filter () =
      match next t with
      | None ->
          None
      | Some x when f x ->
          Some x
      | Some _ ->
          filter ()
    in
    create filter

  let iter t ~f =
    List.iter t.streams ~f:(S.iter f)

  let concat ts =
    {streams = List.concat (List.map ts ~f:(fun {streams} -> streams))}

  let group_by t ~f =
    (* Large initial bucket count: sized for one entry per distinct key
       over a potentially very large file set. *)
    let groups_tbl = Hashtbl.create 1_000_000 in
    let group_update x =
      let group = f x in
      let members =
        match Hashtbl.find_opt groups_tbl group with
        | None ->
            (1, [x])
        | Some (n, xs) ->
            (succ n, x :: xs)
      in
      Hashtbl.replace groups_tbl group members
    in
    (* Eager: the whole input stream is consumed here, before any group is
       yielded. *)
    iter t ~f:group_update;
    let groups = Queue.create () in
    Hashtbl.iter
      (fun name (length, members) -> Queue.add (name, length, members) groups)
      groups_tbl;
    of_queue groups
end
250
(* Stream the lines of an input channel, one [input_line] at a time. *)
module In_channel : sig
  val lines : in_channel -> string Stream.t
end = struct
  let lines ic =
    (* Each pull reads one line; [End_of_file] terminates the stream. *)
    let next_line () =
      try Some (input_line ic) with
      | End_of_file -> None
    in
    Stream.create next_line
end
263
module File : sig
  type t =
    { path : string
    ; size : int
    }

  val find : string -> t Stream.t
  (** Find all files in the directory tree, starting from the given root path *)

  val lookup : string Stream.t -> t Stream.t
  (** Lookup file info for given paths *)

  val filter_out_unique_sizes : t Stream.t -> metrics:M.t -> t Stream.t
  (** Drop files whose size appears only once in the stream. *)

  val filter_out_unique_heads : t Stream.t -> len:int -> metrics:M.t -> t Stream.t
  (** Drop files whose first [len] bytes appear only once in the stream. *)
end = struct
  type t =
    { path : string
    ; size : int
    }

  (* [lstat] (not [stat]) so that symbolic links are not followed. *)
  let lookup paths =
    Stream.map paths ~f:(fun path ->
      let {Unix.st_size = size; _} = Unix.lstat path in
      {path; size}
    )

  (* Traversal: regular files stream out as soon as they are found;
     directories are queued and explored lazily, on demand. *)
  let find root =
    let dirs = Queue.create () in
    let files = Queue.create () in
    let explore parent =
      Array.iter (Sys.readdir parent) ~f:(fun child ->
        let path = Filename.concat parent child in
        let {Unix.st_kind = file_kind; st_size; _} = Unix.lstat path in
        match file_kind with
        | Unix.S_REG ->
            let file = {path; size = st_size} in
            Queue.add file files
        | Unix.S_DIR ->
            Queue.add path dirs
        | Unix.S_CHR
        | Unix.S_BLK
        | Unix.S_LNK
        | Unix.S_FIFO
        | Unix.S_SOCK ->
            (* Non-regular files are not candidates for deduplication. *)
            ()
      )
    in
    explore root;
    let rec next () =
      match Queue.is_empty files, Queue.is_empty dirs with
      | false, _ -> Some (Queue.take files)
      | true , true -> None
      | true , false ->
          explore (Queue.take dirs);
          next ()
    in
    Stream.create next

  (* Generic pass: group the stream by [group], re-emit members of groups
     with more than one element, and hand singleton groups to
     [handle_singleton] (used for metrics accounting). *)
  let filter_out_singletons files ~group ~handle_singleton =
    let q = Queue.create () in
    Stream.iter (Stream.group_by files ~f:group) ~f:(fun group ->
      let (_, n, members) = group in
      if n > 1 then
        List.iter members ~f:(fun m -> Queue.add m q)
      else
        handle_singleton group
    );
    Stream.of_queue q

  let filter_out_unique_sizes files ~metrics =
    filter_out_singletons
      files
      ~group:(fun {size; _} -> size)
      ~handle_singleton:(fun (size, _, _) -> M.file_unique_size metrics ~size)

  (* Read (up to) the first [len] bytes of the file at [path].  The buffer
     is space-padded, so files shorter than [len] are compared on their
     content plus padding -- safe here because callers only compare heads
     of files that already have equal size. *)
  let head path ~len ~metrics =
    let buf = Bytes.make len ' ' in
    let ic = open_in_bin path in
    let rec read pos len =
      assert (len >= 0);
      if len = 0 then
        ()
      else begin
        let chunk_size = input ic buf pos len in
        M.chunk_read metrics ~size:chunk_size;
        if chunk_size = 0 then (* EOF *)
          ()
        else
          read (pos + chunk_size) (len - chunk_size)
      end
    in
    (* BUG FIX: previously the channel leaked if [read] raised (e.g. on an
       I/O error); now it is closed on both the normal and the error path. *)
    (match read 0 len with
    | () ->
        close_in ic
    | exception e ->
        close_in_noerr ic;
        raise e
    );
    Bytes.to_string buf

  let filter_out_unique_heads files ~len ~metrics =
    filter_out_singletons
      files
      ~group:(fun {path; _} ->
        M.file_sampled metrics;
        head path ~len ~metrics
      )
      ~handle_singleton:(fun (_, _, files) ->
        let {size; _} = List.hd files in (* Guaranteed non-empty *)
        M.file_unique_sample metrics ~size
      )
end
371
(* Where file paths come from: stdin (one path per line) or one or more
   directory trees to walk. *)
type input =
  | Stdin
  | Directories of string list

(* Where results go: stdout, or one file per digest under a directory. *)
type output =
  | Stdout
  | Directory of string

(* Fully parsed command-line options. *)
type opt =
  { input : input
  ; output : output
  ; ignore : string -> bool (* Paths for which this returns true are skipped. *)
  ; sample : int            (* Number of bytes sampled from each file's head. *)
  }
386
(* Build the stream of candidate files from the configured input source,
   dropping empty files and paths matched by [ignore], while recording
   per-file metrics. *)
let make_input_stream input ignore ~metrics =
  let files =
    match input with
    | Stdin ->
        File.lookup (In_channel.lines stdin)
    | Directories paths ->
        (* De-duplicate the given roots before walking them. *)
        paths
        |> StrSet.of_list
        |> StrSet.elements
        |> List.map ~f:File.find
        |> Stream.concat
  in
  let keep {File.path; size} =
    M.file_considered metrics ~size;
    let is_empty = size = 0 in
    let is_ignored = ignore path in
    if is_empty then M.file_empty metrics;
    if is_ignored then M.file_ignored metrics ~size;
    not (is_empty || is_ignored)
  in
  Stream.filter files ~f:keep
404
(* Build the function that emits one duplicate group:
   [output digest n_files files]. *)
let make_output_fun = function
  | Stdout ->
      fun digest n_files files ->
        printf "%s %d\n%!" (Digest.to_hex digest) n_files;
        List.iter files ~f:(fun {File.path; _} ->
          printf " %S\n%!" path
        )
  | Directory dir ->
      fun digest _ files ->
        let digest = Digest.to_hex digest in
        (* Group files are bucketed by the first 2 hex characters of their
           digest. *)
        let dir = Filename.concat dir (String.sub digest 0 2) in
        (* BUG FIX: several digests can share the same 2-character prefix,
           so the bucket directory may already exist; previously that
           raised [Unix_error (EEXIST, _, _)] and aborted the run. *)
        (try Unix.mkdir dir ~perm:0o700 with
        | Unix.Unix_error (Unix.EEXIST, _, _) -> ());
        let oc = open_out (Filename.concat dir digest) in
        List.iter files ~f:(fun {File.path; _} ->
          (* NOTE(review): "%!" is a no-op inside [sprintf]; kept for
             byte-compatibility of the written file content. *)
          output_string oc (sprintf "%S\n%!" path)
        );
        close_out oc
422
(* Run the dedup pipeline: size filter -> head-sample filter -> full
   digest grouping -> output, reporting metrics at the end.
   NOTE(review): [Sys.time] measures processor time, not wall-clock time
   -- confirm that is the intended meaning of the reported timings. *)
let main {input; output; ignore; sample = sample_len} =
  let t0_all = Sys.time () in
  let metrics = M.init () in
  let output = make_output_fun output in
  let input = make_input_stream input ignore ~metrics in
  (* TODO: Make a nice(r) abstraction to re-assemble pieces in the pipeline:
   *
   * from input to files_by_size
   * from files_by_size to files_by_sample
   * from files_by_sample to files_by_digest
   * from files_by_digest to output
   *
   * input |> files_by_size |> files_by_sample |> files_by_digest |> output
   *)

  let files = input in

  (* Pass 1: drop files whose size is unique -- they cannot have
     duplicates.  The grouping is eager, so these timestamps bracket real
     work. *)
  let t0_group_by_size = Sys.time () in
  let files = File.filter_out_unique_sizes files ~metrics in
  let t1_group_by_size = Sys.time () in

  (* Pass 2: drop files whose first [sample_len] bytes are unique. *)
  let t0_group_by_sample = t1_group_by_size in
  let files = File.filter_out_unique_heads files ~len:sample_len ~metrics in
  let t1_group_by_sample = Sys.time () in

  (* Pass 3: group the remaining candidates by full-content digest. *)
  let t0_group_by_digest = t1_group_by_sample in
  let groups =
    Stream.group_by files ~f:(fun {File.path; size} ->
      M.file_hashed metrics ~size;
      Digest.file path
    )
  in
  let t1_group_by_digest = Sys.time () in

  Stream.iter groups ~f:(fun (d, n, files) ->
    M.digest metrics;
    if n > 1 then
      (* NOTE(review): this counts all [n] copies as redundant; if
         "redundant" means copies beyond the first, the size should be
         ((n - 1) * size) -- confirm the intended semantics, since the
         report pairs this byte count with the (hashed - digests) file
         count, which excludes the first copy. *)
      M.redundant_data metrics ~size:(n * (List.hd files).File.size);
    output d n files
  );

  let t1_all = Sys.time () in

  M.report metrics
    ~time_all: (t1_all -. t0_all)
    ~time_group_by_size: (t1_group_by_size -. t0_group_by_size)
    ~time_group_by_head: (t1_group_by_sample -. t0_group_by_sample)
    ~time_group_by_digest:(t1_group_by_digest -. t0_group_by_digest)
471
(* Parse and validate command-line options; prints an error to stderr and
   exits with status 1 on invalid input. *)
let get_opt () : opt =
  let assert_ test x msg =
    if not (test x) then begin
      eprintf "%s\n%!" msg;
      exit 1
    end
  in
  let assert_file_exists path =
    assert_ Sys.file_exists path (sprintf "File does not exist: %S" path)
  in
  (* Only called after [assert_file_exists]: [Sys.is_directory] raises on
     a missing path. *)
  let assert_file_is_dir path =
    assert_ Sys.is_directory path (sprintf "File is not a directory: %S" path)
  in
  let input = ref Stdin in
  let output = ref Stdout in
  let ignore = ref (fun _ -> false) in
  let sample = ref 256 in
  let spec =
    [ ( "-out"
      , Arg.String (fun path ->
          assert_file_exists path;
          assert_file_is_dir path;
          output := Directory path
        )
      , " Output to this directory instead of stdout."
      )
    ; ( "-ignore"
      , Arg.String (fun regexp ->
          (* The regexp is compiled once here; the closure stored in
             [ignore] reuses it for every path. *)
          let regexp = Str.regexp regexp in
          ignore := fun string -> Str.string_match regexp string 0)
      , " Ignore file paths which match this regexp pattern (see Str module)."
      )
    ; ( "-sample"
      , Arg.Set_int sample
      , (sprintf " Byte size of file samples to use. Default: %d" !sample)
      )
    ]
  in
  (* Anonymous arguments are directory roots to scan; if any is given we
     switch from Stdin to Directories mode. *)
  Arg.parse
    (Arg.align spec)
    (fun path ->
      assert_file_exists path;
      assert_file_is_dir path;
      match !input with
      | Stdin ->
          input := Directories [path]
      | Directories paths ->
          input := Directories (path :: paths)
    )
    "";
  (* BUG FIX: the check rejects zero as well as negative values, but the
     old message claimed only that the size "cannot be negative". *)
  assert_
    (fun x -> x > 0)
    !sample
    (sprintf "Sample size must be positive (got: %d)" !sample);
  { input = !input
  ; output = !output
  ; ignore = !ignore
  ; sample = !sample
  }
531
(* Entry point: parse options, then run the dedup pipeline. *)
let () =
  main (get_opt ())
This page took 0.019033 seconds and 4 git commands to generate.