#! /bin/sh
# home/bin/wget-clone-web (khome.git)
#
# Clone a web page with wget for offline viewing.  All command-line
# arguments are passed straight through to wget: the first is normally
# the URI to clone, and anything after it is extra wget options.
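
# Usage sketch (the URL is only a placeholder):
#   wget-clone-web https://example.com/post.html
#   wget-clone-web https://example.com/post.html --no-check-certificate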

# Continue partially downloaded files
OPT_CONTINUE='-c'

# Wait 0.75 seconds between retrievals
OPT_WAIT='--wait 0.75'

# Randomly vary the wait (above) in the range from wait*0.5 to wait*1.5
OPT_WAIT_RANDOM='--random-wait'
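# With --wait 0.75 each pause therefore falls between 0.375 and 1.125 s.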

# --page-requisites: "download all the files that are necessary to
# properly display a given HTML page."
OPT_PAGE_REQUISITES='-p'

# --user-agent: identify as a modest text browser instead of wget
#OPT_USER_AGENT='--user-agent=Mozilla/5.0 (X11; Linux i586; rv:31.0) Gecko/20100101 Firefox/31.0'
OPT_USER_AGENT='--user-agent=ELinks (0.12pre6; NetBSD; 800x600)'
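# The resulting request header is then roughly:
#   User-Agent: ELinks (0.12pre6; NetBSD; 800x600)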

# --convert-links
# After the download is complete, convert the links in the document to
# make them suitable for local viewing.  This affects not only the
# visible hyperlinks, but any part of the document that links to
# external content, such as embedded images, links to style sheets,
# hyperlinks to non-HTML content, etc.
OPT_LINKS_LOCAL='-k'

# --adjust-extension
# Ensure the file extension matches the file's MIME type
OPT_EXT_ADJUST='-E'

# Disrespect robots.txt
OPT_ROBOTS_OFF='-e robots=off'
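# (-e executes a .wgetrc-style command, so this is equivalent to
# putting "robots = off" in ~/.wgetrc.)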

# -a appends wget's output to wget.log.  The unquoted OPT_* expansions
# rely on word splitting: some (e.g. OPT_WAIT) are two arguments.
wget \
    -a wget.log \
    $OPT_CONTINUE \
    $OPT_WAIT \
    $OPT_WAIT_RANDOM \
    $OPT_PAGE_REQUISITES \
    $OPT_LINKS_LOCAL \
    $OPT_EXT_ADJUST \
    "$OPT_USER_AGENT" \
    $OPT_ROBOTS_OFF \
    "$@"
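
# For reference, the assembled invocation is roughly:
#   wget -a wget.log -c --wait 0.75 --random-wait -p -k -E \
#       '--user-agent=ELinks (0.12pre6; NetBSD; 800x600)' -e robots=off URI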