Support optional label in bar gauge
[khome.git] / home / lib / login_functions.sh
CommitLineData
e0dbdb36
SK
1#
2
# Interactively pick a word from the system dictionary and show its definition.
d() {
    local -r word=$(fzf < /usr/share/dict/words)
    # BUG FIX: if the fzf selection is cancelled, do not call dict with an
    # empty argument — just bail out.
    [ -n "$word" ] || return 1
    dict "$word"
}
7
# Histogram of shell activity per hour, grouped by day-of-week (default)
# or by month.
#
# Usage: shell_activity_report [mon|dow]
#
# TODO: optional concrete number output
# TODO: optional combinations of granularities: hour, weekday, month, year
shell_activity_report() {
    local group_by="$1"
    case "$group_by" in
        'mon') ;;
        'dow') ;;
        '') group_by='dow';;
        *)
            echo "Usage: $0 [mon|dow]" >&2
            kill -INT $$
    esac
    history \
    | awk -v group_by="$group_by" '
        function date2dow(y, m, d,    _t, _i) {
            # Contract:
            #   y > 1752, 1 <= m <= 12.
            # Source:
            #   Sakamoto`s methods
            #   https://en.wikipedia.org/wiki/Determination_of_the_day_of_the_week#Sakamoto%27s_methods
            _t[ 0] = 0
            _t[ 1] = 3
            _t[ 2] = 2
            _t[ 3] = 5
            _t[ 4] = 0
            _t[ 5] = 3
            _t[ 6] = 5
            _t[ 7] = 1
            _t[ 8] = 4
            _t[ 9] = 6
            _t[10] = 2
            _t[11] = 4
            y -= m < 3
            _i = int(y + y/4 - y/100 + y/400 + _t[m - 1] + d) % 7
            _i = _i == 0 ? 7 : _i  # Make Sunday last
            return _i
        }

        {
            # NOTE: $2 & $3 are specific to oh-my-zsh history output
            date = $2
            time = $3
            d_fields = split(date, d, "-")
            t_fields = split(time, t, ":")
            if (t_fields && d_fields) {
                # +0 to coerce number from string
                year  = d[1] + 0
                month = d[2] + 0
                day   = d[3] + 0
                hour  = t[1] + 0

                dow = date2dow(year, month, day)
                g = group_by == "mon" ? month : dow  # dow is default
                # BUG FIX: use the post-increment value and track the maximum
                # inside the successfully-parsed branch only. Previously the
                # pre-increment value leaked out of the if-block, so max
                # lagged by one and stale values were compared on
                # unparsable lines.
                c = ++count[g, hour]
                if (c > max)
                    max = c
            }
        }

        END {
            w[1] = "Monday"
            w[2] = "Tuesday"
            w[3] = "Wednesday"
            w[4] = "Thursday"
            w[5] = "Friday"
            w[6] = "Saturday"
            w[7] = "Sunday"

            m[ 1] = "January"
            m[ 2] = "February"
            m[ 3] = "March"
            m[ 4] = "April"
            m[ 5] = "May"
            m[ 6] = "June"
            m[ 7] = "July"
            m[ 8] = "August"
            m[ 9] = "September"
            m[10] = "October"
            m[11] = "November"
            m[12] = "December"

            # BUG FIX: guard the bar scaling against division by zero when
            # the history produced no parsable entries.
            if (max == 0)
                max = 1

            n = group_by == "mon" ? 12 : 7  # dow is default

            for (gid = 1; gid <= n; gid++) {
                group = group_by == "mon" ? m[gid] : w[gid]
                printf "%s\n", group
                for (hour = 0; hour < 24; hour++) {
                    c = count[gid, hour]
                    printf " %2d ", hour
                    for (i = 1; i <= (c * 100) / max; i++)
                        printf "|"
                    printf "\n"
                }
            }
        }'
}
104
d265cd11
SK
# Show the 50 most frequently used commands as a horizontal bar chart.
# NOTE: $4 is specific to oh-my-zsh history output.
top_commands() {
    history \
    | awk '
        { seen[$4]++ }

        END {
            for (name in seen)
                print seen[name], name
        }' \
    | sort -n -r -k 1 \
    | head -50 \
    | awk '
        {
            name[NR] = $2
            freq = tally[NR] = $1 + 0  # + 0 to coerce number from string
            if (freq > top)
                top = freq
        }

        END {
            for (row = 1; row <= NR; row++) {
                freq = tally[row]
                printf "%s %d ", name[row], freq
                len = (freq * 100) / top
                for (j = 1; j <= len; j++)
                    printf "|"
                printf "\n"
            }
        }' \
    | column -t
}
138
# Top Disk-Using directories
# TODO: Consider using numfmt instead of awk
tdu() {
    du "$1" \
    | sort -n -k 1 \
    | tail -50 \
    | awk '
        {
            kb = $1
            path = $0
            sub("^" $1 "\t+", "", path)
            # du default block size is 1 KiB, so this yields GiB.
            printf("%f\t%s\n", kb / 1024 / 1024, path)
        }' \
    | cut -c 1-115
}
155
27456eb6
SK
# Top Disk-Using Files
tduf() {
    find "$1" -type f -printf '%s\t%p\0' \
    | sort -z -n -k 1 \
    | tail -z -n 50 \
    | gawk -v RS='\0' '
        {
            bytes = $1
            path = $0
            sub("^" $1 "\t+", "", path)
            printf("%f\t%s\n", bytes / 1024 / 1024 / 1024, path)
        }'
}
170
909ece30
SK
# Most-recently modified file system objects
recent() {
    # NOTES:
    # - intentionally not quoting the parameters, so that some can be ignored
    #   if not passed, rather than be passed to find as an empty string;
    # - %T+ is a GNU extension;
    # - gawk is able to split records on \0, while awk cannot.
    find $@ -printf '%T@ %T+ %p\0' \
    | tee >(gawk -v RS='\0' 'END { printf("[INFO] Total found: %d\n", NR); }') \
    | sort -z -k 1 -n -r \
    | head -n "$(stty size | awk 'NR == 1 {print $1 - 5}')" -z \
    | gawk -v RS='\0' '
        {
            sub("^" $1 " +", "")  # Remove epoch time
            sub("+", " ")         # Blank-out the default separator
            sub("\\.[0-9]+", "")  # Remove fractional seconds
            print
        }'
}
190
# Most-recently modified directories under the given root.
recent_dirs() {
    recent "$1" -type d
}
194
# Most-recently modified regular files under the given root.
recent_files() {
    recent "$1" -type f
}
198
c7de24d9
SK
# Print the name of the default PulseAudio sink.
pa_def_sink() {
    pactl info | awk '/^Default Sink:/ {print $3}'
}
202
# Query the Void Linux package index for packages matching $1.
void_pkgs() {
    local -r query="$1"
    curl "https://xq-api.voidlinux.org/v1/query/x86_64?q=$query" | jq '.data'
}
206
# Colorful man: colorize pager output via LESS_TERMCAP overrides.
man() {
    # termcap keys:
    #   mb: begin blink           md: begin bold
    #   me: end bold, blink and underline
    #   so: begin standout (reverse video)
    #   se: end standout
    #   us: begin underline       ue: end underline
    LESS_TERMCAP_md=$'\e[01;30m' \
    LESS_TERMCAP_me=$'\e[0m' \
    LESS_TERMCAP_so=$'\e[01;44;33m' \
    LESS_TERMCAP_se=$'\e[0m' \
    LESS_TERMCAP_us=$'\e[01;33m' \
    LESS_TERMCAP_ue=$'\e[0m' \
    command man "$@"
}
64ec9f23
SK
227
# cd into the directory printed by ~/bin/experiment, aborting the shell
# function chain on failure.
experiment() {
    # BUG FIX: quote "$@" so arguments containing whitespace are passed
    # through intact instead of being word-split.
    cd "$(~/bin/experiment "$@")" || kill -INT $$
}
801dd7bd
SK
231
# Run the OCaml toplevel under ledit with line length matching the terminal.
hump() {
    # BUG FIX: quote "$@" so arguments containing whitespace survive.
    ledit -l "$(stty size | awk '{print $2}')" ocaml "$@"
}
632b7c4a
SK
235
# Interactively pick a HOWTO document and print it.
howto() {
    local -r doc=$(find ~/Archives/Documents/HOWTOs -mindepth 1 -maxdepth 1 | sort | fzf)
    cat "$doc"
}
f4e0bb58 239
86b74662
SK
# Download a YouTube URI into its own "<title>--<id>" directory.
# $1 - base directory
# $2 - extra youtube-dlc options (word-split on purpose; may be empty)
# $3 - the URI to fetch
_yt() {
    local -r base_dir="$1"
    local -r opts="$2"
    local -r uri="$3"

    local -r id=$(youtube-dlc --get-id "$uri")
    local -r title=$(youtube-dlc --get-title "$uri" | sed 's/[^A-Za-z0-9._-]/_/g')
    local -r dir="${base_dir}/${title}--${id}"

    mkdir -p "$dir"
    cd "$dir" || kill -INT $$
    echo "$uri" > 'uri'
    # $opts intentionally unquoted: it can carry several options.
    youtube-dlc $opts -c --write-description --write-info-json "$uri"
}
254
# Download only the audio stream (format 140, m4a) of a YouTube URI.
yt_audio() {
    local -r uri="$1"
    _yt "${DIR_YOUTUBE_AUDIO}/individual" '-f 140' "$uri"
}
259
# Download a YouTube video (default formats) into DIR_YOUTUBE_VIDEO.
yt_video() {
    local -r uri="$1"
    # BUG FIX: _yt expects (base_dir, opts, uri); pass an explicit empty
    # opts argument so the URI is not consumed as the options parameter
    # (previously $3 was empty and nothing could be downloaded).
    _yt "${DIR_YOUTUBE_VIDEO}/individual" '' "$uri"
}
264
f4e0bb58
SK
# Fetch the JSON repo list of a GitHub account.
# $1 - "users" or "orgs"; $2 - account name.
gh_fetch_repos() {
    local -r kind="$1"
    local -r account="$2"
    curl "https://api.github.com/${kind}/${account}/repos?page=1&per_page=10000"
}
268
# Clone all non-fork repos of a GitHub account in parallel.
# $1 - "users" or "orgs"; $2 - account name.
gh_clone() {
    local -r gh_user_type="$1"
    local -r gh_user_name="$2"

    local -r gh_dir="${DIR_GITHUB}/${gh_user_name}"
    mkdir -p "$gh_dir"
    cd "$gh_dir" || kill -INT $$
    gh_fetch_repos "$gh_user_type" "$gh_user_name" \
    | jq --raw-output '.[] | select(.fork | not) | .git_url' \
    | parallel -j 25 \
        git clone {}
}
281
# Clone all repos of a GitHub user.
gh_clone_user() {
    gh_clone 'users' "$1"
}
285
# Clone all repos of a GitHub organization.
gh_clone_org() {
    gh_clone 'orgs' "$1"
}
e09a8d5a 289
610785ef
SK
# Clone a single GitHub repo URL ("https://github.com/<user>/<repo>")
# into DIR_GITHUB/<user>/.
gh_clone_repo() {
    # BUG FIX: the original awk program had mismatched quotes
    # ('"$1 == "https" && $3 == github.com"...') and could never match;
    # with -F / the fields of an https URL are:
    #   $1 = "https:", $2 = "", $3 = "github.com", $4 = user.
    local -r gh_username=$(echo "$1" | awk -F / '$1 == "https:" && $3 == "github.com" {print $4}')
    local -r gh_dir="${DIR_GITHUB}/${gh_username}"
    mkdir -p "$gh_dir"
    cd "$gh_dir" || kill -INT $$
    git clone "$1"
}
297
c45bdb58
SK
# Emit the markdown skeleton for today's work log to stdout.
work_log_template() {
# Heredoc body stays unindented on purpose: << does not strip leading spaces.
cat << EOF
$(date '+%F %A')
==========

Morning report
--------------

### Prev

### Curr

### Next

### Blockers

Day's notes
-----------
EOF
}
318
# Open today's work log in vim (spell checking on), creating it from the
# template on first use.
work_log() {
    mkdir -p "$DIR_WORK_LOG"
    local -r file_work_log_today="${DIR_WORK_LOG}/$(date +%F).md"
    if [ ! -f "$file_work_log_today" ]; then
        work_log_template > "$file_work_log_today"
    fi
    vim -c 'set spell' "$file_work_log_today"
}
329
065977fd
SK
# Create and edit a timestamped note titled $1 in DIR_NOTES.
note() {
    mkdir -p "$DIR_NOTES"
    local -r stamp=$(date +'%Y_%m_%d--%H_%M_%S%z')
    vim -c 'set spell' "$DIR_NOTES/${stamp}--$1.md"
}
334
# Show wttr.in weather for $1, defaulting to $WEATHER_LOCATION.
weather() {
    # ${1:-...} covers both the unset and the empty-string case, matching
    # the original case statement.
    local -r _weather_location="${1:-$WEATHER_LOCATION}"
    curl "http://wttr.in/$_weather_location?format=v2"
}
2c0865d1
SK
343
# Show detailed info for every paired Bluetooth device.
bt_devs_paired() {
    bluetoothctl -- paired-devices \
    | awk '{print $2}' \
    | xargs bluetoothctl -- info
}
349
# Show detailed info for every known Bluetooth device.
bt_devs() {
    bluetoothctl -- devices \
    | awk '{print $2}' \
    | xargs bluetoothctl -- info
}
dfbaafa4
SK
355
# Run a command, mirroring its stderr to the terminal, then raise a desktop
# notification with the exit code and the captured stderr.
run() {
    local -r stderr="$(mktemp)"

    local code urgency

    # BUG FIX: quote "$@" so command arguments with whitespace are
    # preserved exactly as given.
    "$@" 2> >(tee "$stderr")
    code="$?"
    case "$code" in
        0) urgency='normal';;
        *) urgency='critical'
    esac
    # BUG FIX: quote "$stderr" inside the command substitution.
    notify-send -u "$urgency" "Job done: $code" "$(cat "$stderr")"
    rm "$stderr"
}
3f673776
SK
370
# Draw a bar gauge for each stdin line of the form:
#   <current> <max> [label]
# $1 - width of the gauge in cells.
bar_gauge() {
    local -r width="$1"

    awk -v width="$width" '
        {
            cur = $1
            max = $2
            lab = $3

            u = num_scale(cur, max, 1, width)

            # Prefix the optional label, separated by a single space.
            printf "%s%s[", lab, lab ? " " : ""
            for (i = 1; i <= width; i++) {
                c = i <= u ? "|" : "-"
                printf "%s", c
            }
            printf "]\n"
        }

        # Linearly rescale src_cur from [0, src_max] to [dst_min, dst_max].
        function num_scale(src_cur, src_max, dst_min, dst_max) {
            # BUG FIX: avoid awk division-by-zero when max ($2) is 0 or
            # missing; render an empty gauge instead.
            if (src_max <= 0)
                return dst_min
            return dst_min + ((src_cur * (dst_max - dst_min)) / src_max)
        }
    '
}
395
# Print "<percentage> 100 batt" for the aggregated UPower DisplayDevice,
# in the "cur max label" format consumed by bar_gauge. Linux-only.
motd_batt() {
    case "$(uname)" in
        'Linux')
            upower --dump \
            | awk '
                /^Device:[ \t]+/ {
                    device["path"] = $2
                    next
                }

                / battery/ && device["path"] {
                    device["is_battery"] = 1
                    next
                }

                / percentage:/ && device["is_battery"] {
                    device["battery_percentage"] = $2
                    sub("%$", "", device["battery_percentage"])
                    next
                }

                # A blank line terminates one device record.
                /^$/ {
                    if (device["is_battery"] && device["path"] == "/org/freedesktop/UPower/devices/DisplayDevice")
                        print device["battery_percentage"], 100, "batt"
                    delete device
                }
            '
        ;;
    esac
}
426
# Prefix every stdin line with the given indent unit.
indent() {
    awk -v unit="$1" '{print unit $0}'
}
430
# Message of the day: kernel, hostname banner, uptime, tmux sessions,
# resource gauges (memory, disk, battery), and a network summary.
motd() {
    local -r bar_width='60'
    local -r indent_unit=' '

    uname -srvmo
    hostname | figlet
    uptime

    echo

    printf 'tmux sessions: %d\n' "$(tmux ls 2> /dev/null | wc -l)"

    echo

    echo 'Resources'
    (
        free | awk '$1 == "Mem:" {print $3, $2, "mem"}'
        df ~ | awk 'NR == 2 {print $3, $3 + $4, "disk"}'
        motd_batt
    ) \
    | bar_gauge "$bar_width" \
    | column -t \
    | indent "$indent_unit"

    echo

    echo 'Network'
    echo "${indent_unit}interfaces:"
    (ifconfig; iwconfig) 2> /dev/null \
    | awk '
        # Unindented lines open a new interface record.
        /^[^ ]/ {
            device = $1
            sub(":$", "", device)
            if ($4 ~ "ESSID:") {
                _essid = $4
                sub("^ESSID:\"", "", _essid)
                sub("\"$", "", _essid)
                essid[device] = _essid
            }
            next
        }

        /^ / && $1 == "inet" {
            address[device] = $2
            next
        }

        # iwconfig link quality, e.g. "Link Quality=57/70".
        /^ +Link Quality=[0-9]+\/[0-9]+ +Signal level=/ {
            split($2, lq_parts_eq, "=")
            split(lq_parts_eq[2], lq_parts_slash, "/")
            cur = lq_parts_slash[1]
            max = lq_parts_slash[2]
            link[device] = cur / max * 100
            next
        }

        END {
            for (device in address)
                if (device != "lo") {
                    l = link[device]
                    e = essid[device]
                    l = l ? sprintf("%.0f%%", l) : "--"
                    e = e ? e : "--"
                    print device, address[device], e, l
                }
        }
    ' \
    | column -t \
    | indent "${indent_unit}${indent_unit}"

    # WARN: ensure: $USER ALL=(ALL) NOPASSWD:/bin/netstat
    echo "${indent_unit}TCP servers"
    sudo -n netstat -tlnp \
    | awk 'NR > 2 {print $7}' \
    | awk -F/ '{print $2}' \
    | sort -u \
    | xargs \
    | column -t \
    | indent "${indent_unit}${indent_unit}"
}
This page took 0.112493 seconds and 4 git commands to generate.