# home/lib/login_functions.sh

d() {
    local -r word=$(fzf < /usr/share/dict/words)
    dict "$word"
}

shell_activity_report() {
    history \
    | awk '
        {
            time = $3
            ok = split(time, t, ":")
            if (ok) {
                hour = t[1] + 0  # Coerce number from string
                cnt = ++count[hour]
                if (cnt > max)
                    max = cnt
            }
        }

        END {
            if (max < 1)
                max = 1  # Avoid division by zero when history is empty
            for (hour = 0; hour < 24; hour++) {
                c = count[hour]
                printf "%2d ", hour
                for (i = 1; i <= ((c * 100) / max); i++)
                    printf "|"
                printf "\n"
            }
        }'
}
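
# Rough usage sketch (assumes HISTTIMEFORMAT is set so that `history` prints a
# HH:MM:SS timestamp in field 3, e.g. HISTTIMEFORMAT='%F %T '):
#
#   shell_activity_report
#
# One row per hour of the day; the busiest hour gets a bar of 100 '|'
# characters and the other bars are scaled relative to it.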

top_commands() {
    history \
    | awk '
        {
            count[$4]++
        }

        END {
            for (cmd in count)
                print count[cmd], cmd
        }' \
    | sort -n -r -k 1 \
    | head -50 \
    | awk '
        {
            cmd[NR] = $2
            c = count[NR] = $1 + 0  # + 0 to coerce number from string
            if (c > max)
                max = c
        }

        END {
            for (i = 1; i <= NR; i++) {
                c = count[i]
                printf "%s %d ", cmd[i], c
                scaled = (c * 100) / max
                for (j = 1; j <= scaled; j++)
                    printf "|"
                printf "\n"
            }
        }' \
    | column -t
}
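
# Like shell_activity_report, this assumes HISTTIMEFORMAT='%F %T ' (or similar),
# so the command name lands in field 4 of `history` output. A quick sanity
# check for that assumption is the simpler pipeline:
#
#   history | awk '{print $4}' | sort | uniq -c | sort -rn | head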

# Top Disk-Using directories
# TODO: Consider using numfmt instead of awk
tdu() {
    du "$1" \
    | sort -n -k 1 -r \
    | head -50 \
    | awk '
        {
            size = $1
            path = $0
            sub("^" $1 "\t+", "", path)
            gb = size / 1024 / 1024
            printf("%f\t%s\n", gb, path)
        }' \
    | cut -c 1-115
}
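
# Untested sketch of the numfmt variant mentioned in the TODO above (assumes
# GNU numfmt, and that `du` reports 1 KiB blocks, which is its GNU default):
#
#   du "$1" | sort -n -k 1 -r | head -50 \
#       | numfmt --field=1 --from-unit=1024 --to=iec | cut -c 1-115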

# Top Disk-Using Files
tduf() {
    find "$1" -type f -printf '%s\t%p\0' \
    | sort -z -n -k 1 -r \
    | head -z -n 50 \
    | gawk -v RS='\0' '
        {
            size = $1
            path = $0
            sub("^" $1 "\t+", "", path)
            gb = size / 1024 / 1024 / 1024
            printf("%f\t%s\n", gb, path)
        }'
}
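
# Example: the 50 largest files under the current directory, sizes in GiB:
#
#   tduf .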

# Most-recently modified file system objects
recent() {
    # NOTES:
    # - intentionally not quoting the parameters, so that some can be ignored
    #   if not passed, rather than be passed to find as an empty string;
    # - %T+ is a GNU extension;
    # - gawk is able to split records on \0, while awk cannot.
    find $@ -printf '%T@ %T+ %p\0' \
    | tee >(gawk -v RS='\0' 'END { printf("[INFO] Total found: %d\n", NR); }') \
    | sort -z -k 1 -n -r \
    | head -n "$(stty size | awk 'NR == 1 {print $1 - 5}')" -z \
    | gawk -v RS='\0' '
        {
            sub("^" $1 " +", "")   # Remove epoch time
            sub("\\+", " ")        # Blank-out the date/time separator from %T+
            sub("\\.[0-9]+", "")   # Remove fractional seconds
            print
        }'
}
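
# Because the parameters are passed to find unquoted, extra predicates go
# straight through, e.g. (hypothetical paths):
#
#   recent ~/Downloads ~/Documents -mtime -7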

recent_dirs() {
    recent "$1" -type d
}

recent_files() {
    recent "$1" -type f
}

pa_def_sink() {
    pactl info | awk '/^Default Sink:/ {print $3}'
}

void_pkgs() {
    curl "https://xq-api.voidlinux.org/v1/query/x86_64?q=$1" | jq '.data'
}

# Colorful man
man() {
    LESS_TERMCAP_md=$'\e[01;31m' \
    LESS_TERMCAP_me=$'\e[0m' \
    LESS_TERMCAP_se=$'\e[0m' \
    LESS_TERMCAP_so=$'\e[01;44;33m' \
    LESS_TERMCAP_ue=$'\e[0m' \
    LESS_TERMCAP_us=$'\e[01;32m' \
    command man "$@"
}

experiment() {
    cd "$(~/bin/experiment $@)" || exit 1
}

hump() {
    ledit -l "$(stty size | awk '{print $2}')" ocaml $@
}

howto() {
    cat "$(find ~/Archives/Documents/HOWTOs -mindepth 1 -maxdepth 1 | sort | fzf)"
}

yt() {
    local _yt_uri
    local _yt_id
    local _yt_title
    local _yt_dir

    _yt_uri="$1"
    _yt_id=$(youtube-dl --get-id "$_yt_uri")
    _yt_title=$(youtube-dl --get-title "$_yt_uri")
    _yt_dir="${DIR_YOUTUBE}/individual-videos/${_yt_title}--${_yt_id}"

    mkdir -p "$_yt_dir"
    cd "$_yt_dir" || exit 1
    echo "$_yt_uri" > 'uri'
    youtube-dl -c --write-description --write-info-json "$_yt_uri"
}
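
# Hypothetical example (assumes DIR_YOUTUBE is exported elsewhere):
#
#   yt 'https://www.youtube.com/watch?v=<video-id>'
#
# downloads the video plus its description and metadata into its own directory
# under ${DIR_YOUTUBE}/individual-videos/.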

gh_fetch_repos() {
    curl "https://api.github.com/$1/$2/repos?page=1&per_page=10000"
}

gh_clone() {
    gh_user_type="$1"
    gh_user_name="$2"
    gh_dir="${DIR_GITHUB}/${gh_user_name}"
    mkdir -p "$gh_dir"
    cd "$gh_dir" || exit 1
    gh_fetch_repos "$gh_user_type" "$gh_user_name" \
    | jq --raw-output '.[] | select(.fork | not) | .git_url' \
    | parallel -j 25 \
        git clone {}
}

gh_clone_user() {
    gh_clone 'users' "$1"
}

gh_clone_org() {
    gh_clone 'orgs' "$1"
}
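
# Examples (assume DIR_GITHub is exported elsewhere; names are placeholders):
#
#   gh_clone_user <user>
#   gh_clone_org <org>
#
# Note that the GitHub API caps per_page at 100, so accounts with more
# repositories than that would need real pagination in gh_fetch_repos.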

gh_clone_repo() {
    gh_username=$(echo "$1" | awk -F / '$1 == "https:" && $3 == "github.com" {print $4}')
    gh_dir="${DIR_GITHUB}/${gh_username}"
    mkdir -p "$gh_dir"
    cd "$gh_dir" || exit 1
    git clone "$1"
}

work_log_template() {
cat << EOF
$(date '+%F %A')
==========

Morning report
--------------

### Previous

### Current

### Blockers

Day's notes
-----------
EOF
}

work_log() {
    mkdir -p "$DIR_WORK_LOG"
    file_work_log_today="${DIR_WORK_LOG}/$(date +%F).md"
    if [ ! -f "$file_work_log_today" ]
    then
        work_log_template > "$file_work_log_today"
    fi
    vim -c 'set spell' "$file_work_log_today"
}

note() {
    mkdir -p "$DIR_NOTES"
    vim -c 'set spell' "$DIR_NOTES/$(date +'%Y_%m_%d--%H_%M_%S%z')--$1.md"
}

weather() {
    curl "http://wttr.in/$WEATHER_LOCATION"
}

bt_devs_paired() {
    bluetoothctl -- paired-devices \
    | awk '{print $2}' \
    | xargs bluetoothctl -- info
}

bt_devs() {
    bluetoothctl -- devices \
    | awk '{print $2}' \
    | xargs bluetoothctl -- info
}

run() {
    stderr="$(mktemp)"
    "$@" 2> >(tee "$stderr")
    code="$?"
    urgency=''
    case "$code" in
        0) urgency='normal';;
        *) urgency='critical';;
    esac
    notify-send -u "$urgency" "Job done: $code" "$(cat "$stderr")"
    rm "$stderr"
}
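
# Example: run a long job and get a desktop notification (with any stderr
# output in the notification body) when it finishes. Paths are hypothetical:
#
#   run rsync -a ~/photos/ /mnt/backup/photos/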