# Interactively pick one word from the system dictionary via fzf.
# NOTE(review): SC2155 -- 'local' masks fzf's exit status (e.g. user
# cancellation); split declaration and assignment if that matters here.
local -r word=$(fzf < /usr/share/dict/words)
# NOTE(review): fragment of an awk program (scrape artifact: original
# line numbers are embedded in the text and interior lines 25-29 and 31
# are missing).  It appears to tally numeric counts per record and print
# a bar scaled to the maximum -- reconstruct from upstream before use.
24 c = count[NR] = $1 + 0 # + 0 to coerce number from string
30 for (i = 1; i <= NR; i++) {
32 printf "%s %d ", cmd[i], c
33 scaled = (c * 100) / max
34 for (j = 1; j <= scaled; j++)
# NOTE(review): header comments (orig. 42-43) plus an interior awk
# fragment (orig. 52-54); lines 44-51, including the enclosing function
# and awk invocation, are missing from this view.  The awk part strips
# the size column from the path and converts KiB to GiB (/1024/1024)
# -- presumably du output; TODO confirm against upstream.
42 # Top Disk-Using directories
43 # TODO: Consider using numfmt instead of awk
52 sub("^" $1 "\t+", "", path)
53 gb = size / 1024 / 1024
54 printf("%f\t%s\n", gb, path)
# Top Disk-Using Files
# List files under $1 with their sizes, NUL-delimited, largest first.
# NOTE(review): original line 60 (likely the function opener) is missing
# from this fragment, and the pipeline continues past the trailing '\'.
find "$1" -type f \
    -printf '%s\t%p\0' \
    | sort -z -n -k 1 -r \
68 sub("^" $1 "\t+", "", path)
69 gb = size / 1024 / 1024 / 1024
70 printf("%f\t%s\n", gb, path)
# Most-recently modified file system objects
# - intentionally not quoting the parameters, so that some can be ignored
#   if not passed, rather than be passed to find as an empty string;
# - %T+ is a GNU extension;
# - gawk is able to split records on \0, while awk cannot.
# The tee branch reports the total count; head limits output to the
# terminal height minus 5 rows.
# shellcheck disable=SC2068 -- deliberate, see comment above
find $@ \
    -printf '%T@ %T+ %p\0' \
    | tee >(gawk -v RS='\0' 'END { printf("[INFO] Total found: %d\n", NR); }') \
    | sort -z -k 1 -n -r \
    | head -n "$(stty size | awk 'NR == 1 {print $1 - 5}')" -z \
87 sub("^" $1 " +", "") # Remove epoch time
88 sub("+", " ") # Blank-out the default separator
89 sub("\\.[0-9]+", "") # Remove fractional seconds
# Print the name of the current default PulseAudio sink.
pactl info | awk '/^Default Sink:/ {print $3}'
# Query the Void Linux package API for x86_64 packages matching $1
# and print the .data payload.
curl "https://xq-api.voidlinux.org/v1/query/x86_64?q=$1" | jq '.data'
# Colorized man/less output: bold red headings, yellow-on-blue standout,
# green underline.  Environment-prefix form -- the command it applies to
# (on the continuation line) is not visible in this fragment.
LESS_TERMCAP_md=$'\e[01;31m' \
LESS_TERMCAP_me=$'\e[0m' \
LESS_TERMCAP_se=$'\e[0m' \
LESS_TERMCAP_so=$'\e[01;44;33m' \
LESS_TERMCAP_ue=$'\e[0m' \
LESS_TERMCAP_us=$'\e[01;32m' \
122 cd "$(~/bin/experiment $@)" ||
exit 1
# Run the OCaml toplevel under ledit, with line-edit width set to the
# current terminal column count.  SC2068 fix: "$@" quoted.
ledit -l "$(stty size | awk '{print $2}')" ocaml "$@"
130 cat "$(find ~/Archives/Documents/HOWTOs -mindepth 1 -maxdepth 1 | sort | fzf)"
# Resolve the video id and title for $_yt_uri, then build a per-video
# download directory name under $DIR_YOUTUBE.
_yt_id=$(youtube-dl --get-id "$_yt_uri")
_yt_title=$(youtube-dl --get-title "$_yt_uri")
_yt_dir="${DIR_YOUTUBE}/individual-videos/${_yt_title}--${_yt_id}"
145 cd "$_yt_dir" ||
exit 1
146 echo "$_yt_uri" > 'uri'
147 youtube-dl
-c --write-description --write-info-json "$_yt_uri"
# List repositories for a GitHub account ($1: "users" or "orgs",
# $2: account name); per_page=10000 grabs everything in one request.
curl "https://api.github.com/$1/$2/repos?page=1&per_page=10000"
# Per-user clone directory under $DIR_GITHUB.
gh_dir="${DIR_GITHUB}/${gh_user_name}"
159 cd "$gh_dir" ||
exit 1
# List the account's repositories and keep git URLs of non-forks.
# NOTE(review): the pipeline continues on a line (orig. 162) missing from
# this fragment -- presumably an xargs git clone stage.
gh_fetch_repos "$gh_user_type" "$gh_user_name" \
    | jq --raw-output '.[] | select(.fork | not) | .git_url' \
# Clone all repositories of the given user account.
gh_clone 'users' "$1"
# Extract the username from an https://github.com/<user>/... URL:
# with -F / the fields are "https:", "", "github.com", "<user>".
# NOTE(review): the awk program's quoting was garbled in this scrape;
# the condition below is reconstructed -- verify against upstream.
gh_username=$(echo "$1" | awk -F / '$1 == "https:" && $3 == "github.com" {print $4}')
# Per-user clone directory for the parsed username.
gh_dir="${DIR_GITHUB}/${gh_username}"
178 cd "$gh_dir" ||
exit 1
# NOTE(review): function header only -- the body (orig. 183 onward) is
# missing from this fragment.  Presumably emits the markdown skeleton
# used to seed a new daily work log; confirm against upstream.
182 work_log_template
() {
# Ensure the work-log directory exists and seed today's log from the
# template on first use.
# NOTE(review): the 'then' (orig. 205) and 'fi' (orig. 207) were missing
# from this fragment and are reconstructed -- verify against upstream.
mkdir -p "$DIR_WORK_LOG"
file_work_log_today="${DIR_WORK_LOG}/$(date +%F).md"
if [ ! -f "$file_work_log_today" ]
then
    work_log_template > "$file_work_log_today"
fi
# Edit today's work log with spell checking enabled.
vim -c 'set spell' "$file_work_log_today"
# Create the notes directory if needed and open a new timestamped note
# titled $1, with spell checking enabled.
mkdir -p "$DIR_NOTES"
vim -c 'set spell' "$DIR_NOTES/$(date +'%Y_%m_%d--%H_%M_%S%z')--$1.md"
# Fetch the text weather report for $WEATHER_LOCATION from wttr.in.
curl "http://wttr.in/$WEATHER_LOCATION"
# Show details for every paired Bluetooth device.
# NOTE(review): an intermediate stage (orig. 223, likely extracting the
# MAC address column) is missing from this fragment -- verify upstream.
bluetoothctl -- paired-devices \
    | xargs bluetoothctl -- info
# Show details for every known Bluetooth device.
# NOTE(review): an intermediate stage (orig. 229, likely extracting the
# MAC address column) is missing from this fragment -- verify upstream.
bluetoothctl -- devices \
    | xargs bluetoothctl -- info
# Run the given command, duplicating its stderr into "$stderr" while
# still showing it.  SC2068 fix: "$@" quoted so arguments with spaces
# reach the command intact.
"$@" 2> >(tee "$stderr")
# Map the command's exit code to a notification urgency level.
# NOTE(review): the enclosing case/esac (and the second arm's ';;',
# orig. 241) are outside this fragment.
0) urgency='normal';;
*) urgency='critical'
# Desktop notification with the exit code and captured stderr.
# SC2086 fix: "$stderr" quoted inside the command substitution.
notify-send -u "$urgency" "Job done: $code" "$(cat "$stderr")"
# (cgit scrape artifact, not part of the script) This page took 0.097133 seconds and 5 git commands to generate.