#!/bin/sh
# $Id: cgi_memo,v 1.74 2021/11/25 11:25:13 gilles Exp gilles $
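#
# Memo of shell helper functions for digging into the imapsync CGI logs.
# Typically sourced with ". cgi_memo" (see various_usefull below); when "$1"
# is non-empty the function names are echoed as a reminder, otherwise quiet.
#
# Assumption (not defined in this file): a few helpers are expected in PATH
# or sourced elsewhere: sortmtimef, bytestohuman, count (presumably akin to
# seq), date_ymdhms, and c, apparently a small calculator, perhaps along the
# lines of: c() { echo "scale=2; $*" | bc -l ; }
# External tools used: datamash, dstat or vmstat, pgrep, inotifywait, vnstati.
#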
if test -n "$1"; then
echoq() { echo "$@" ; } # not quiet mode
else
echoq() { : ; } # quiet mode: nop
fi
run_test() {
tests_count=`expr 1 + $tests_count`
# do not run anything between the two following instructions
"$@"; run_test_status=$?
# now you can run something since $? is saved
if test x"$run_test_status" = x"0"; then
echo "ok $tests_count $@"
else
echo "not ok $tests_count $@"
tests_failed_count=`expr 1 + $tests_failed_count`
tests_failed_list="$tests_failed_list $tests_count"
fi
return $run_test_status
}
run_tests() {
tests_count=0
tests_failed_count=0
tests_failed_list=
for t in "$@"; do
echo "### running $t"
"$t"
done
echo
echo "#### ALL tests done"
if test 0 -eq $tests_failed_count; then
echo "ALL $tests_count TESTS SUCCESSFUL"
return 0
else
# At least one failed
echo "FAILED $tests_failed_count/$tests_count TESTS: $tests_failed_list"
return 1
fi
}
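# Illustrative run_tests output (TAP-like), for a hypothetical pair of tests
# where the second one fails:
#   ### running tests_something
#   ok 1 test abcd = abcd
#   not ok 2 test ab = cd
#
#   #### ALL tests done
#   FAILED 1/2 TESTS:  2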
tests()
{
:
# All tests
run_tests \
tests_pattern_filename
}
tests_all_verbose_if_failure()
{
# Run the tests silently; if any fail, rerun them verbosely.
# return 0 if all tests passed
# return 1 if some tests failed
if ! tests > /dev/null 2>&1 ; then
tests
return 1
fi
return 0
}
#### Variable definitions
tests_count=0
tests_failed_count=0
here_is_freebsd()
{
test FreeBSD = `uname -s`
}
here_is_linux()
{
test Linux = `uname -s`
}
echoq list_all_logs
list_all_logs()
{
cat list_all_logs.txt list_all_logs_auto.txt | grep -v 385d7a4d8d428d7aa2b57c8982629e2bd67698ed/ | grep "$1"
}
echoq list_all_logs_generate
list_all_logs_generate()
{
echo Result in list_all_logs.txt
sortmtimef . | grep -v perl.core.txt | grep -v 385d7a4d8d428d7aa2b57c8982629e2bd67698ed/ | grep \./......................................../ | grep \.txt > list_all_logs.txt.tmp
mv list_all_logs.txt.tmp list_all_logs.txt
}
list_log_matching()
{
pattern="$1"
# Ignore no-args runs, which all go to 385d7a4d8d428d7aa2b57c8982629e2bd67698ed
cat list_all_logs_auto.txt | grep -v 385d7a4d8d428d7aa2b57c8982629e2bd67698ed | egrep -- "$pattern"
}
statsfile()
{
base=`pattern_filename G $1 $2`
statsfile=$base.txt
echo $statsfile
}
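# For instance (hypothetical arguments):
#   statsfile Total_bytes_transferred          -> G_Total_bytes_transferred.txt
#   statsfile Total_bytes_transferred 2021_11  -> G_Total_bytes_transferred_2021_11.txt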
echoq biggest_transfer
biggest_transfer()
{
statsfile=`statsfile Total_bytes_transferred $1`
bytestohuman `datamash_file_op_index "$statsfile" max 5`
}
echoq total_bytes_transferred
total_bytes_transferred()
{
statsfile=`statsfile Total_bytes_transferred $1`
datamash_file_op_index "$statsfile" sum 5
}
# Total volume transferred
echoq total_volume_transferred
total_volume_transferred()
{
#echo -n 'numfmt --to=iec-i '
bytestohuman `total_bytes_transferred $1`
}
echoq total_messages_transferred
total_messages_transferred()
{
statsfile=`statsfile Messages_transferred $1`
datamash_file_op_index "$statsfile" sum 4 %16.0f | tr -d ' '
}
longest_transfer()
{
statsfile=`statsfile Transfer_time $1`
LC_ALL=C printf "%.0f\n" `datamash_file_op_index "$statsfile" max 4`
}
echoq number_and_pids_of_imapsync_running
number_and_pids_of_imapsync_running()
{
echo "`number_of_imapsync_running` : `pids_of_imapsync_running`"
: # always return true
}
echoq number_of_imapsync_running
number_of_imapsync_running()
{
pids_of_imapsync_running | wc -w
: # always return true
}
echoq pids_of_imapsync_running
pids_of_imapsync_running() {
pgrep -d ' ' -f cgi-bin/imapsync
: # always return true
}
echoq oom_immune_imapsync_running
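# Lower the OOM-killer score of every running imapsync so the kernel is less
# likely to kill them under memory pressure. /proc/$pid/oom_adj is the legacy
# Linux interface (range -17..15, lower means more protected); the test -f
# guards simply skip systems without it.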
oom_immune_imapsync_running() {
for pid in `pids_of_imapsync_running`
do
test -f /proc/$pid/oom_adj || continue
echo -n "$pid "
cat /proc/$pid/oom_* | tr '\n' ' '
{ test -f /proc/$pid/oom_adj && echo ${1:-"-12"} > /proc/$pid/oom_adj && echo -n ">>> " && cat /proc/$pid/oom_adj ; }
done
}
echoq nb_migrations_launched
nb_migrations_launched() {
list_all_logs | egrep "$1" | egrep -o '[a-f0-9]{40}' | sort | uniq -c | wc -l
}
nb_migrations_launched_old() {
/bin/ls . | egrep '[a-f0-9]{40}' | sort | uniq | wc -l
}
echoq current_stats
current_stats() {
echo -n "Nb accounts: "; nb_migrations_launched
echo -n "Nb imapsync running: "; number_and_pids_of_imapsync_running
# dstat, Linux
dstat --version > /dev/null 2>&1 && dstat -l -n -cdgyms 60 1 && return
# no dstat, FreeBSD
dstat --version > /dev/null 2>&1 || vmstat 2 15 && return
#clear
}
echoq watch_current_stats
watch_current_stats() {
export -f current_stats
# watch -n 120 current_stats
while : ; do
clear
oom_immune_imapsync_running
current_stats
done
}
echoq 'grep_in_all_logs str1 str2 ... # up to str5. Results in mtime order of logfiles'
grep_in_all_logs() {
grep_file=grep_`echo "$1 $2 $3 $4 $5" | tr ' ' '_' | tr -cd '0-9a-zA-Z_.\n'`.txt
echo results in "${grep_file}"
list_all_logs | tr '\n' '\000'| xargs -0 egrep -E -i "$1" | egrep -i "$2" | egrep -i "$3" | egrep -i "$4" | egrep -i "$5" | tee "${grep_file}.tmp"
mv "${grep_file}.tmp" "${grep_file}"
}
echoq grep_in_logs_manual
grep_in_logs_manual()
{
cat << EOF
list_all_logs /2021_11 | tail -666 | tr '\n' '\000'| xargs -0 egrep -i LALALA | tee grep_LALALA.txt
EOF
}
echoq 'grep_stats_from_list_all_logs # long'
grep_stats_from_list_all_logs() {
echo results in grep_stats.txt
# remove empty lines because an empty pattern would match every line of every log
sed -i".bak" '/^[[:space:]]*$/d' stat_patterns.txt
list_all_logs | tr '\n' '\000'| xargs -0 egrep -i -f stat_patterns.txt > grep_stats.txt.tmp
mv grep_stats.txt.tmp grep_stats.txt
}
tests_pattern_filename()
{
run_test test "" = "`pattern_filename`"
run_test test "abcd" = "`pattern_filename abcd`"
run_test test "ab_0123__4567_cd" = "`pattern_filename ab[0123][4567]cd`"
run_test test "ab_cd" = "`pattern_filename ab cd`"
run_test test "ab_cd" = "`pattern_filename ab cd `"
run_test test "abcd" = "`pattern_filename ab\&cd`"
run_test test "abcd" = "`pattern_filename ab""cd`"
run_test test "abcd" = "`pattern_filename ab""cd`"
run_test test "ab_cd" = "`pattern_filename ab" "cd`"
run_test test "ab__cd" = "`pattern_filename ab" "cd`"
run_test test "ab__cd" = "`pattern_filename ab "" cd`"
run_test test "ab___cd" = "`pattern_filename ab " " cd`"
run_test test "ab____cd" = "`pattern_filename ab " " cd`"
run_test test "ab____cd" = "`pattern_filename ab " " cd`"
run_test test "ab_____cd" = "`pattern_filename ab " " cd`"
run_test test "a_b_c_d" = "`pattern_filename a b c d`"
}
pattern_filename()
{
echo "$@" | tr ' .[]' '____' | tr -cd '0-9a-zA-Z_.'
}
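# e.g. pattern_filename "Average bandwidth rate" -> Average_bandwidth_rate
# (spaces, dots and brackets become underscores, other unsafe characters are dropped)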
echoq 'grep_stats_from_list_log_matching lognamepattern # time depending on lognamepattern'
grep_stats_from_list_log_matching() {
pattern="$1"
pattern_filename=`pattern_filename "$pattern"`
results_filename=grep_stats_"$pattern_filename".txt
echo results in "$results_filename"
# remove empty lines because an empty pattern would match every line of every log
sed -i".bak" '/^[[:space:]]*$/d' stat_patterns.txt
list_log_matching "$pattern" | tr '\n' '\000'| xargs -0 egrep -i -f stat_patterns.txt > "$results_filename".tmp
mv "$results_filename".tmp "$results_filename"
}
grep_any() {
file=`statsfile "$1" "$2"`
pattern_filtered=`pattern_filename "$2"`
echo $file
if test -f grep_stats_"$pattern_filtered".txt ; then
egrep -i "$1" grep_stats_"$pattern_filtered".txt > $file.tmp
mv $file.tmp $file
else
echo File not found: grep_stats_"$pattern_filtered".txt
fi
}
grep_load() {
file=`statsfile "Load" "$1"`
pattern_filtered=`pattern_filename "$1"`
echo $file
if test -f grep_stats_"$pattern_filtered".txt ; then
egrep -o 'Load is ..?\... ..?\... ..?\... .*' grep_stats_"$pattern_filtered".txt > $file.tmp
mv $file.tmp $file
else
echo File not found: grep_stats_"$pattern_filtered".txt
fi
}
stat_patterns_list() {
cat stat_patterns.txt | sed '/^[[:space:]]*$/d' | tr -d '^'
}
echoq 'grep_all_stat_from_patterns_list lognamepattern # long'
grep_all_stat_from_patterns_list() {
grep_load "$1"
stat_patterns_list | while read k; do grep_any "$k" "$1"; done
}
sum_first_column_G_HTTP_USER_AGENT_sorted()
{
awk '{sum += $1} END {print sum}' G_HTTP_USER_AGENT_sorted.txt
}
stat_useragent_X()
{
grep -o 'HTTP_USER_AGENT.*' G_HTTP_USER_AGENT.txt \
| tail -10000000 | sort | egrep -o -w 'Mozilla/5.0 \([^;]+' \
| sort | egrep -o '\([a-zA-Z]+' | sort | uniq -c | sort -g \
| grep -v KHTML | tr -d '(' > G_HTTP_USER_AGENT_sorted.txt
}
echoq 'percent_stat_useragent_X'
percent_stat_useragent_X()
{
stat_useragent_X
sum_first_column_G_HTTP_USER_AGENT=`sum_first_column_G_HTTP_USER_AGENT_sorted`
{ while read num_useragent useragent ; do
#echo KK $num_useragent $useragent
PerCent=`echo "scale=2; 100*$num_useragent/$sum_first_column_G_HTTP_USER_AGENT" | bc -l`
echo "$useragent $PerCent % ( $num_useragent / $sum_first_column_G_HTTP_USER_AGENT )"
done
} < G_HTTP_USER_AGENT_sorted.txt
}
stat_load()
{
echo -n 'Load 1 min 5 min 15 min ' ; grep -o 'on.*cores' G_Load.txt|uniq
echo -n 'Load min: ' ; datamash --format=%3.1f -W min 3 min 4 min 5 < G_Load.txt
echo -n 'Load q1: ' ; datamash --format=%3.1f -W q1 3 q1 4 q1 5 < G_Load.txt
echo -n 'Load median: ' ; datamash --format=%3.1f -W median 3 median 4 median 5 < G_Load.txt
echo -n 'Load mean: ' ; datamash --format=%3.1f -W mean 3 mean 4 mean 5 < G_Load.txt
echo -n 'Load q3: ' ; datamash --format=%3.1f -W q3 3 q3 4 q3 5 < G_Load.txt
echo -n 'Load max: ' ; datamash --format=%3.1f -W max 3 max 4 max 5 < G_Load.txt
}
echoq stat_exit_value
stat_exit_value()
{
statsfile=`statsfile Exiting_with_return_value "$1"`
good_lines_nb=`grep '(EX' $statsfile | wc -l | tr -d ' '`
grep '(EX' "$statsfile" \
| datamash --sort groupby 6 -W count 5 \
| awk -v good_lines_nb=$good_lines_nb \
'{ printf "%.2g%% %s\n", 100*$2/good_lines_nb, $1 }' \
| sort -n
}
echoq stat_exit_value_by_value
stat_exit_value_by_value()
{
statsfile=`statsfile Exiting_with_return_value "$1"`
datamash --sort groupby 5 -W count 5 < "$statsfile"
}
datamash_file_op_index() {
file="$1"
op="${2:-mean}"
index="${3:-4}" # the four field by default
format="${4:-%16.1f}" # --format=%16.1f by default
func="${5:-}"
val_datamash_file_op_index=`LC_ALL=C datamash --format="$format" -W "$op" "$index" < "$file"`
func_return=
test -n "$func" && func_return=`eval $func $val_datamash_file_op_index`
echo "$val_datamash_file_op_index" $func_return
}
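# Illustrative call, on a file produced by grep_all_stat_from_patterns_list:
#   datamash_file_op_index G_Transfer_time.txt max 4   # max of column 4, --format=%16.1f
# The optional 5th argument names a function applied to the result (see stat_any).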
stat_any() {
file="$1"
index=${2:-4} # the fourth field by default
func="${3:-}"
for op in \
"min " \
"perc:10 " \
"q1 " \
"median " \
"mean " \
"q3 " \
"perc:90 " \
"max " \
do
echo -n "$file $index $op " ; datamash_file_op_index $file $op $index %16.1f $func
done
echo
}
echoq stat_all
stat_all()
{
stat_load ; echo
# stat_any G_REMOTE_ADDR.txt
# stat_any G_REMOTE_HOST.txt
# stat_any G_HTTP_COOKIE.txt
# stat_any G_HTTP_REFERER.txt
# See various_usefull()
# stat_any G_Host1_IMAP_server.txt
# stat_any G_Host2_IMAP_server.txt
# stat_any G_Host1_banner.txt
# stat_any G_Host2_banner.txt
stat_any G_Host1_Nb_messages.txt
stat_any G_Host2_Nb_messages.txt
stat_any G_Messages_transferred.txt
stat_any G_Messages_skipped.txt
stat_any G_Messages_found_in_host1_not_in_host2.txt 9
stat_any G_Messages_found_in_host2_not_in_host1.txt 9
# stat_any G_Folders_synced.txt
egrep -o '[0-9]+/[0-9]+ synced' G_Folders_synced.txt | egrep -o '^[0-9]+' > G_Folders_synced_.txt
egrep -o '[0-9]+/[0-9]+ synced' G_Folders_synced.txt | egrep -o '[0-9]+/[0-9]+' | egrep -o '[0-9]+$' > G_Folders_total_seen.txt
stat_any G_Folders_synced_.txt 1
stat_any G_Folders_total_seen.txt 1
#
stat_any G_Transfer_time.txt
stat_any G_Host1_Total_size.txt
stat_any G_Host2_Total_size.txt
stat_any G_Total_bytes_transferred.txt 5
stat_any G_Message_rate.txt
stat_any G_Average_bandwidth_rate.txt 5
stat_any G_Biggest_message.txt
stat_any G_Detected_errors.txt 2
#stat_any G_Exiting_with_return_value.txt 5 # GROUP
stat_any G_Memory_consumption_at_the_end.txt 7
#stat_any G_failure_Error_login.txt
percent_stat_useragent_X ; echo
stat_exit_value
echo "Data made at" `date -r grep_stats.txt`
}
stat_transfer_time_mean()
{
statsfile=`statsfile Transfer_time "$1"`
datamash_file_op_index "$statsfile" mean
}
stat_throuput_since_day_one_in_days()
{
number_of_syncs=`number_of_syncs "$1"`
days_since_first_use=`days_since_first_use "$1"`
c "$number_of_syncs / $days_since_first_use"
}
stat_queue_mean_old()
2019-07-03 01:25:47 +02:00
{
stat_throuput_since_day_one_in_days=`stat_throuput_since_day_one_in_days`
stat_transfer_time_mean=`stat_transfer_time_mean "$1"`
stat_queue_mean_raw=`c "$stat_throuput_since_day_one_in_days * $stat_transfer_time_mean / 3600 / 24"`
LC_ALL=C printf "%2.2f\n" $stat_queue_mean_raw
}
stat_queue_mean()
{
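# Little's law sketch: mean number of syncs in the system
# = arrival rate (syncs per second over the observed period) * mean sync duration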
first_log=`first_log "$1"`
last_log=`last_log "$1"`
number_of_syncs=`number_of_syncs "$1"`
seconds_between_files=`seconds_between_files $first_log $last_log`
stat_transfer_time_mean=`stat_transfer_time_mean "$1"`
stat_queue_mean_raw=`c "$number_of_syncs / $seconds_between_files * $stat_transfer_time_mean"`
LC_ALL=C printf "%2.2f\n" $stat_queue_mean_raw
}
first_log()
{
list_all_logs "$1"_ | grep /"$1" | head -1
}
last_log()
{
list_all_logs "$1"_ | grep /"$1" | tail -1
}
start_date()
{
first_log=`first_log "$1"`
date -r "$first_log"
}
end_date()
{
last_log=`last_log "$1"`
date -r "$last_log"
}
echoq dirs_of_syncs_finished_recently
dirs_of_syncs_finished_recently() {
find . -maxdepth 1 -mtime "${1:--1}" | grep -v "385d7a4d8d428d7aa2b57c8982629e2bd67698ed" | egrep '[a-f0-9]{40}' | while read d; do
test -f "$d" && continue
test -f $d/imapsync.pid && continue
echo $d
done
}
echoq 'logfiles_finished_recently -3 # less than 3 days, default is like -1'
logfiles_finished_recently()
{
{
# +2 more than 2 days ago
# -3 less than 3 days ago
# 7 exactly 7 days ago
#set -x
find . -maxdepth 1 -mtime "${1:--1}" | grep -v "385d7a4d8d428d7aa2b57c8982629e2bd67698ed" | egrep '[a-f0-9]{40}' | while read d; do
test -f "$d" && continue
test -f $d/imapsync.pid && continue
test -d $d/ || continue
ls -trb `find $d/ -type f -mtime "${1:--1}" | grep \.txt`
done
}
}
last_dirs_written()
{
ls -tr | tail -1800
}
last_file_written_in_dir()
{
ls -trd $1/*.txt |tail -1
}
is_dir_running_imapsync()
{
test -d "$1" || return 1
test -f "$1/imapsync.pid" && PID=`head -1 "$1/imapsync.pid"` &&
ps -p $PID -o comm= > /dev/null
}
echoq logfiles_running
logfiles_running()
{
last_dirs_written | while read d
do
is_dir_running_imapsync "$d" &&
last_file_written_in_dir "$d"
done
}
epoch_of_file()
{
date -r "$1" +%s
}
epoch_of_now()
{
date +%s
}
is_file_older_than()
{
# return 1 if the file does not exist or is recent
# return 0 if it is older than "$2" seconds (default 900 seconds, i.e. 15 minutes)
test -f "$1" || return 1
epoch_file=`epoch_of_file "$1"`
epoch_now=`epoch_of_now`
epoch_diff=`expr $epoch_now - $epoch_file`
#echo "$epoch_now - $epoch_file = $epoch_diff"
if test "${2:-900}" -lt "$epoch_diff"
then
#echo older than $2
return 0
else
#echo newer than $2
return 1
fi
}
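# e.g. (hypothetical path) is_file_older_than somedir/somelog.txt 1800 && echo stale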
pids_of_imapsync_not_writing_since_x_secondes()
{
x_secondes=${1:-900} # 15 minutes by default
last_dirs_written | while read d
do
is_dir_running_imapsync "$d" &&
is_file_older_than `last_file_written_in_dir "$d"` "$x_secondes" &&
head -1 "$d/imapsync.pid" | tr '\n' ' '
done
}
kill_HUP_pids_of_imapsync_not_writing_since_x_secondes()
{
pids_not_writing=`pids_of_imapsync_not_writing_since_x_secondes ${1:-900}`
test -n "$pids_not_writing" && echo kill -HUP "$pids_not_writing" # && kill -HUP "$pids_not_writing"
}
watch_logfiles_running_old() {
# the "tail --pid=" option does not exist on FreeBSD, it's GNU/Linux
while date; do
inotifywait /var/tmp/imapsync_cgi -e create 2>/dev/null &
PID_inotifywait=$!
logfiles_running | xargs -d'\n' tail --pid=$PID_inotifywait -f -v
echo "NEW SYNC IS RUNNING"
echo "Syncs running: "; number_and_pids_of_imapsync_running
sleep 3
done
}
watch_logfiles_running_old2() {
while date; do
kill $PID_inotifywait
inotifywait /var/tmp/imapsync_cgi -e create 2>/dev/null &
PID_inotifywait=$!
kill_tail_logfiles_running
tail_logfiles_running
wait $PID_inotifywait
kill_tail_logfiles_running
echo "NEW SYNC IS RUNNING"
echo "Syncs running: "; number_and_pids_of_imapsync_running
sleep 3
done
}
tail_logfiles_running() {
logfiles_running=`logfiles_running`
test -n "$logfiles_running" && tail -f $logfiles_running
#PID_tail_logfiles_running=$!
#fg
}
echoq watch_logfiles_running
watch_logfiles_running() {
tail_logfiles_running
}
kill_tail_logfiles_running() {
kill $PID_tail_logfiles_running
}
echoq watch_new_runs
watch_new_runs() {
while { date; echo -n "Nb syncs currently: " ; number_and_pids_of_imapsync_running ; } do
inotifywait . -e create 2>/dev/null | { read path action f
echo $f
sleep 2
test -f $f/imapsync.pid && PID=`head -1 $f/imapsync.pid` && echo PID $PID
echo -e '\a'
}
done
}
echoq pidfiles_running_and_not_running
pidfiles_running_and_not_running() {
ls -tr | while read f; do
test -f $f/imapsync.pid && PID=`head -1 $f/imapsync.pid` && echo -n "$PID " &&
{ ps -p $PID -o comm= | tr '\n' ' ' && { test -f /proc/$PID/oom_score &&
{ echo -12 > /proc/$PID/oom_adj ; } && echo -n "oom_score " && cat /proc/$PID/oom_score | tr '\n' ' ' ; : ; }
} &&
{ ls -tr $f/*.txt |tail -1 ; }
done
}
pidfile_dandling() {
pidfile_dandling_DIR=$1
test -d $pidfile_dandling_DIR || return 2
test -f $pidfile_dandling_DIR/imapsync.pid || return 3
pidfile_dandling_PID=`head -1 $pidfile_dandling_DIR/imapsync.pid`
#echo "$pidfile_dandling_PID"
test -n "$pidfile_dandling_PID" || return 4
test "$pidfile_dandling_PID" -ge 1 || return 5
if ! ps -p "$pidfile_dandling_PID" -o comm= > /dev/null ; then
#echo -n "DANDLING $pidfile_dandling_DIR/imapsync.pid "
#echo "# PID $pidfile_dandling_PID"
return 0
fi
return 99
}
echoq pidfiles_not_running
pidfiles_not_running() {
ls -tr | while read f; do
if pidfile_dandling "$f" ; then
pidfiles_not_running_PID=`head -1 $f/imapsync.pid`
echo -n "rm $f/imapsync.pid # "
{ ls -tr $f/*.txt 2>/dev/null |tail -1 ; } | tr '\n' ' '
echo "# PID $pidfiles_not_running_PID"
#head -2 $f/imapsync.pid
fi
done
}
first_use() {
test -f first_use && cat first_use && return
echo "${1:-2017} ${2:-01} ${3:-09}"
}
filedate()
{
test FreeBSD = `uname -s` && gdate -r "$1" '+%Y %m %d'
test Linux = `uname -s` && date -r "$1" '+%Y %m %d'
}
days_between_files()
{
epoch1=`epoch_of_file "$1"`
epoch2=`epoch_of_file "$2"`
echo epoch1 $epoch1 epoch2 $epoch2
expr \( $epoch2 - $epoch1 \) / 3600 / 24
}
seconds_between_files()
{
epoch1=`epoch_of_file "$1"`
epoch2=`epoch_of_file "$2"`
expr \( $epoch2 - $epoch1 \)
}
days_since_first_use() {
first_use=`first_use "$@"`
#echo $(( ( $(date +%s) - $(epoch_of_y_m_d_h_m_s 2017 01 09 00 00 00) ) / 60 / 60 / 24 ))
echo $(( ( $(date +%s) - $(epoch_of_y_m_d_h_m_s $first_use 00 00 00) ) / 60 / 60 / 24 ))
}
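# The date helpers below probe which date(1) flavor is available:
# "date -v -1d" only succeeds with BSD date (FreeBSD), "date --date=..." only
# with GNU date (Linux); the first variant that works wins and returns.
# e.g. epoch_of_y_m_d_h_m_s 2017 01 09  ->  1483920000 (2017-01-09 00:00:00 UTC)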
epoch_of_y_m_d_h_m_s() {
date -v -1d > /dev/null 2>&1 && date -u -v ${1:-1970}y -v ${2:-1}m -v ${3:-1}d -v ${4:-0}H -v ${5:-0}M -v ${6:-0}S +%s && return
date --date="1 day ago" > /dev/null && date -u -d "${1:-1970}-${2:-1}-${3:-1} ${4:-0}:${5:-0}:${6:-0}" +%s && return
}
date_x_days_ago() {
date -v -1d > /dev/null 2>&1 && date -u -v -${1:-0}d "+%Y-%m-%d %a" && return
date --date="1 day ago" > /dev/null && date -u --date="${1:-0} day ago" "+%Y-%m-%d %a" && return
}
seconds_to_days_hours() {
#eval "echo $(date -ud "@${1:-0}" +'$((%s/3600/24)) days %_H hours %_M min %_S sec')"
date -v -1d > /dev/null 2>&1 && eval "echo $(date -ur "${1:-0}" +'$((%s/3600/24)) days %_H hours %_M min %_S sec')" && return
date --date="1 day ago" > /dev/null && eval "echo $(date -ud "@${1:-0}" +'$((%s/3600/24)) days %_H hours %_M min %_S sec')" && return
}
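# e.g. seconds_to_days_hours 90061 -> 1 days 1 hours 1 min 1 sec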
seconds_to_days_hours_echo() {
date -v -1d > /dev/null 2>&1 && echo "echo $(date -ur "${1:-0}" +'$((%s/3600/24)) days %_H hours %_M min %_S sec')" && return
date --date="1 day ago" > /dev/null && echo "echo $(date -ud "@${1:-0}" +'$((%s/3600/24)) days %_H hours %_M min %_S sec')" && return
}
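# Print $1 right-aligned in a field $1/10 characters wide: the growing
# indentation acts as a crude text bar chart in the runs_per_day output.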
printf_this_one_div10()
{
num=$1
printf "% $((num/10))s\n" $1
}
echoq 'runs_per_day 7 # last 7 days'
runs_per_day() {
historic_start=`days_since_first_use`
start=${1:-$historic_start}
for cc in `count 0 $start`; do
DATE=`date_x_days_ago $cc`
# find on FreeBSD finds nothing with -mtime 0
test FreeBSD = `uname -s` && cc=`expr 1 + $cc`
runs_this_day=`find . -maxdepth 1 -mtime $cc -ls |wc -l`
echo -n "$DATE $cc days ago: " ; printf_this_one_div10 $runs_this_day
done
}
echoq summary_run
summary_run() {
for summary_run_DIR in "$@"; do
echo Analysing $summary_run_DIR
echo -n "Nb logs: "; ls $summary_run_DIR/*.txt | wc -l
summary_run_LOGS_LIST=`ls $summary_run_DIR/*.txt`
echo -n "List logs: "; echo $summary_run_LOGS_LIST
#echo connect failure
summary_run_CONNECT_FAIL=`grep -i 'failure: can not open imap connection on' $summary_run_DIR/*.txt|wc -l`
echo CONN $summary_run_CONNECT_FAIL
#echo login failure
grep -i 'failure: Error login on' $summary_run_DIR/*.txt
#echo Differences
grep -i "difference host2 - host1" $summary_run_DIR/*.txt
done
}
logs_nb() {
logs_nb_DIR="$1"
logs_nb_LOGS_LIST="$logs_nb_DIR"/*.txt
}
vnstat_init() {
test FreeBSD = `uname -s` && VNSTATI_DIR=/usr/local/www/apache24/data/vnstat
test Linux = `uname -s` && VNSTATI_DIR=/var/www/vnstat
test -d $VNSTATI_DIR || mkdir -p $VNSTATI_DIR
}
echoq vnstat_gen
vnstat_gen() {
vnstat_init || return
for opt in s h hg hs d m y t vs 5 5g ; do
test "$1" && echo vnstati -$opt -o $VNSTATI_DIR/vnstat_${opt}.png
vnstati -$opt -o $VNSTATI_DIR/vnstat_${opt}.png
done
}
echoq vnstat_index_hs
vnstat_index_hs()
{
(
vnstat_init || return
cd $VNSTATI_DIR/ || return
for f in `ls -r ./*/vnstat_hs.png`
do
echo '<img src="'$f'" border="0" alt="hourly"><br>'
done > index_hs.html
)
}
echoq vnstat_archive
vnstat_archive() {
(
vnstat_gen "$1" || return
now_ymdhms=`date +%Y_%m_%d_%H_%M_%S` || return
mkdir $VNSTATI_DIR/$now_ymdhms/ || return
cd $VNSTATI_DIR/$now_ymdhms/ || return
test "$1" && pwd
cp -a ../*.png ../*.html .
)
test "$1" && pwd
}
echoq dstat_csv
dstat_csv() {
#dstat -l -n -cdgyms 60 1
dstat -t -l -n -cdgyms --output dstat.csv 60
}
echoq 'ratio_killed_by_TERM -3 # last 3 days'
ratio_killed_by_TERM() {
logfiles_finished_recently=`logfiles_finished_recently $1`
nb_logfiles_finished_recently=`echo $logfiles_finished_recently | wc -w`
echo -n "Got a signal TERM: " && echo $logfiles_finished_recently | xargs grep -i 'Got a signal TERM' | wc -l
echo -n "Got a signal : " && echo $logfiles_finished_recently | xargs grep -i 'Got a signal' | wc -l
echo -n "Among finished : " && echo $nb_logfiles_finished_recently
echo "logfiles_finished_recently $1 | xargs grep -i 'Got a signal TERM' "
}
echoq 'nb_syncs_badly_finished -1 # last 1 day'
nb_syncs_badly_finished()
{
logfiles_finished_recently=`logfiles_finished_recently $1`
nb_logfiles_finished_recently=`echo $logfiles_finished_recently | wc -w | tr -d ' '`
nb_syncs_badly_finished=`echo $logfiles_finished_recently | xargs grep -i 'Exiting with return value' | grep -v 'return value 0' | wc -l `
echo $nb_syncs_badly_finished / $nb_logfiles_finished_recently \
| awk '{ printf "%s %.2g%% %s\n", "Total:", 100*$1/$3, $0 }'
echo $logfiles_finished_recently | xargs grep -i 'Exiting with return value' \
| grep -v 'return value 0' | grep -o 'Exiting with return value.*)' \
| sort | uniq -c | sort -n \
| awk -v nb_logfiles_finished_recently=$nb_logfiles_finished_recently \
'{ printf "%.2g%% %s\n", 100*$1/nb_logfiles_finished_recently, $0 }'
cat <<EOF
logfiles_finished_recently $1 | xargs grep -i 'Exiting with return value' | grep -v 'return value 0 ' | cut -d: -f1 | xargs tail -11
EOF
}
echoq 'referrer_of_x /var/log/apache/access.log_2021*.gz | sort | uniq -c | sort -n'
referrer_of_x() {
zegrep -h -s -o 'GET /X/? .*http[^"]+' "${@:-/var/log/apache/access.log}" | grep -o 'http.*'
}
biggest_message_seen() {
statsfile=`statsfile Biggest_message $1`
cat "$statsfile" | grep -v Memory | datamash -W max 4 | xargs bytestohuman
}
biggest_message_transferred() {
# With this, the "Biggest message" may not have been transferred by imapsync itself.
statsfile=`statsfile Biggest_message $1`
grep 'Host2 Biggest message' < "$statsfile" | datamash -W max 4 | xargs bytestohuman
}
biggest_bandwidth_rate() {
statsfile=`statsfile Average_bandwidth_rate $1`
datamash_file_op_index "$statsfile" max 5 | tr -d ' ' | tr '\n' ' '
echo KiB/s
}
average_bandwidth_rate() {
statsfile=`statsfile Average_bandwidth_rate $1`
datamash_file_op_index "$statsfile" mean 5 | tr -d ' ' | tr '\n' ' '
echo KiB/s
}
max_number_of_messages_transferred() {
statsfile=`statsfile Messages_transferred $1`
datamash_file_op_index "$statsfile" max 4 "%.0f"
}
max_number_of_messages_skipped() {
statsfile=`statsfile Messages_skipped $1`
datamash_file_op_index "$statsfile" max 4 "%.0f"
}
echoq number_of_X_users
number_of_X_users()
{
statsfile=`statsfile REMOTE_ADDR $1`
test -f "$statsfile" || { echo "No such file: $statsfile" ; return ; }
datamash_file_op_index $statsfile unique 3 | tr , '\n' | wc -l
}
summary_compute_old() {
list_all_logs_generate \
&& grep_stats_from_list_all_logs \
&& grep_all_stat_from_patterns_list \
&& summary_display
}
echoq 'summary_compute lognamepattern # time depending on lognamepattern'
summary_compute() {
grep_stats_from_list_log_matching "$1" \
&& grep_all_stat_from_patterns_list "$1" \
&& summary_display "$1"
}
number_of_syncs()
{
list_all_logs | egrep "$1"_ | wc -l
}
count_expression()
{
#echo count_expression "[$1]" "[$2]"
egrep -- "$1" "$2" | wc -l
}
patterns_alone_file_generate()
{
patterns_file="$1"
patterns_alone_file="$2"
> $patterns_alone_file
cat $patterns_file |
while read imap_server pattern
do
echo "$pattern" >> $patterns_alone_file
done
}
count_imap_server_all()
{
count_imap_server_all=0
cat $patterns_file |
while read imap_server pattern
do
#echo count_imap_server "$pattern" "$banners_files"
count_imap_server=`count_expression "$pattern" "$banners_files"`
count_imap_server_all=`expr $count_imap_server_all + $count_imap_server`
echo $count_imap_server_all
done
}
#echoq server_survey_percent
server_survey_percent()
{
banners_files=${1:-G_Host1_banner.txt}
patterns_file=${2:-server_survey_patterns.txt}
patterns_alone_file=${patterns_file}.alone.txt
patterns_alone_file_generate $patterns_file $patterns_alone_file
banners_counted=`egrep -f $patterns_alone_file $banners_files | wc -l | tr -d ' \n'`
banners_not_counted=`egrep -v -f $patterns_alone_file $banners_files | wc -l | tr -d ' \n'`
banners_all=`cat $banners_files | wc -l | tr -d ' \n'`
banners_all_verif=`expr $banners_not_counted + $banners_counted`
cat $patterns_file |
while read imap_server pattern
do
#echo count_imap_server "$pattern" "$banners_files"
count_imap_server=`count_expression "$pattern" "$banners_files"`
percent_imap_server=`echo "scale=2; 100 * $count_imap_server/$banners_all" | bc -l`
echo $percent_imap_server% : $count_imap_server " : $imap_server : " "[$pattern]"
done | sort -n
}
#echoq server_survey
server_survey()
{
banners_files=${1:-G_Host1_banner.txt}
patterns_file=${2:-server_survey_patterns.txt}
server_survey_percent $banners_files $patterns_file
count_imap_server_all=`count_imap_server_all | tail -1`
echo $banners_files
echo "Banners counted sum $count_imap_server_all"
echo "Banners counted $banners_counted"
echo "Banners not counted $banners_not_counted"
echo "Banners all $banners_all"
echo "Banners all verif $banners_all_verif = $banners_not_counted + $banners_counted"
if test $count_imap_server_all != $banners_counted
then echo WARNING count_imap_server_all $count_imap_server_all != $banners_counted banners_counted \
diff `expr $count_imap_server_all - $banners_counted`
fi
echo "server_survey $banners_files # finished"
}
echoq server_survey_next_pattern
server_survey_next_pattern()
{
patterns_alone_file_generate server_survey_patterns.txt server_survey_patterns.txt.alone.txt
grep -h -o 'banner:.*' G_Host?_banner.txt |sort | uniq -c | sort -g > banner_counted_sorted.txt
egrep -v -f server_survey_patterns.txt.alone.txt banner_counted_sorted.txt
}
echoq server_survey_last_pattern
server_survey_last_pattern()
{
banners_files1=${1:-G_Host1_banner.txt}
banners_files2=${2:-G_Host2_banner.txt}
tail -1 server_survey_patterns.txt > pattern_alone_file.txt
server_survey $banners_files1 pattern_alone_file.txt
server_survey $banners_files2 pattern_alone_file.txt
}
echoq server_survey_host1
server_survey_host1()
{
server_survey G_Host1_banner.txt
}
echoq server_survey_host2
server_survey_host2()
{
server_survey G_Host2_banner.txt
}
echoq 'summary_display lognamepattern'
summary_display() {
echo "Start date : " `start_date "$1"`
echo "End date : " `end_date "$1"`
echo -n "Number of /X users: " ; number_of_X_users "$1"
echo -n "Number of /X accounts synced: " ; nb_migrations_launched "$1"
echo -n "Number of /X syncs: " ; number_of_syncs "$1"
echo -n "Total volume /X transferred: " ; total_volume_transferred "$1"
echo -n "Total messages /X transferred: " ; total_messages_transferred "$1"
echo -n "Biggest transfer: " ; biggest_transfer "$1"
echo -n "Biggest message seen: " ; biggest_message_seen "$1"
echo -n "Biggest message transferred: " ; biggest_message_transferred "$1"
echo -n "Biggest bandwidth rate: " ; biggest_bandwidth_rate "$1"
echo -n "Average bandwidth rate: " ; average_bandwidth_rate "$1"
echo -n "Max messages transferred: " ; max_number_of_messages_transferred "$1"
echo -n "Max messages skipped: " ; max_number_of_messages_skipped "$1"
echo -n "Longest transfer: " ; seconds_to_days_hours `longest_transfer "$1"`
echo -n "Queue length mean is: " ; stat_queue_mean "$1"
echo "Data made at" `date -r grep_stats_"$1".txt`
}
echoq sync_ks2_i005
sync_ks2_i005()
{
test "Xks2" = "X`hostname`" \
&& echo Here is ks2 nothing to do \
&& return
test "Xi005" = "X`hostname`" && echo Here is i005 \
&& date \
&& cd /home/imapsync_cgi_ks2/ \
&& rsync -a root@ks2:/var/tmp/imapsync_cgi/ /home/imapsync_cgi_ks2/ \
&& summary_compute \
&& echo sending txt back to ks2 \
&& rsync -av /home/imapsync_cgi_ks2/*txt root@ks2:/var/tmp/imapsync_cgi/ \
&& summary_display \
&& date \
&& pwd
}
date_space()
{
date | tr -d '\n'
echo -n " "
}
#echoq date_if_new_hour
date_if_new_hour()
{
min=`date +%M`
sec=`date +%S`
#echo $min $sec
if test "00" = "$min" && test 6 -ge $sec
then
echo
date_space
sleep 1
fi
}
echoq watch_number_of_imapsync_running
watch_number_of_imapsync_running()
{
date_space
while number_of_imapsync_running | tr -d ' \n'
do
sleep 6
date_if_new_hour
done
}
#echoq number_of_bytes_sent_received_per_second_during
number_of_bytes_sent_received_per_second_during()
{
# $1 : number of seconds to watch
here_is_freebsd && netstat -I em0 -w ${1:-1} -q 1 | tail -1 | awk -v sec="${1:-1}" '{ printf "%.0f", ($4+$7)/sec }'
here_is_linux && number_of_bytes_sent_received_per_second_during_linux ${1:-1}
}
number_of_bytes_sent_received_per_second_during_linux()
{
tx_1=`cat /sys/class/net/eth0/statistics/tx_bytes`
rx_1=`cat /sys/class/net/eth0/statistics/rx_bytes`
sleep ${1:-1}
tx_2=`cat /sys/class/net/eth0/statistics/tx_bytes`
rx_2=`cat /sys/class/net/eth0/statistics/rx_bytes`
c $tx_2 - $tx_1 + $rx_2 - $rx_1
}
div_1_by_2_or_zero()
{
if test X"$2" = X"0"; then
echo "0"
else
echo "$1 $2" | awk '{ printf "%.0f\n", $1/$2 }'
fi
}
echoq number_of_imapsync_running_bandwidth
number_of_imapsync_running_bandwidth()
{
nir=`number_of_imapsync_running`
nbsr=`number_of_bytes_sent_received_per_second_during ${1:-1}`
ratio=`div_1_by_2_or_zero $nbsr $nir`
date=`date_ymdhms`
date_u=`LANG= date -u`
nbsr_human=`bytestohuman $nbsr`
ratio_human=`bytestohuman $ratio`
echo "$date $nir $nbsr $ratio $nbsr_human$ratio_human"
echo "Current number of syncs: $nir; Current total bandwidth: $nbsr_human/s; Current bandwidth per sync: $ratio_human/s; Current date/time: $date_u; ">/var/tmp/imapsync_current.txt
}
echoq loop_number_of_imapsync_running_bandwidth
loop_number_of_imapsync_running_bandwidth()
{
while :
do
:
nirbd=`number_of_imapsync_running_bandwidth ${1:-1}`
echo $nirbd
echo $nirbd >> /var/tmp/number_of_imapsync_running_every_${1:-1}s.txt
done
}
echoq various_usefull
various_usefull() {
cat <<'EOF'
strace -e trace=signal -f `pgrep /usr/sbin/apach | xargs -n1 echo -n " -p "` 2>&1
egrep -o '[0-9]+/[0-9]+' G_Folders_synced.txt | sort -g
egrep -o '[0-9]+/[0-9]+' G_Folders_synced.txt | sort -t/ -g -k2 | uniq -c
egrep -o '* ID .*' G_Read___ID.txt | sort | uniq -c | sort -n
egrep -o 'imapsync_runs=[0-9]+' G_HTTP_COOKIE.txt | egrep -o '[0-9]+' | sort -n | uniq -c | sort -g -k1,2
egrep -o 'HTTP_REFERER is .*' G_HTTP_REFERER.txt | sort -g | uniq -c | sort -g
egrep -o 'REMOTE_HOST is .*' G_REMOTE_HOST.txt | sort -g | uniq -c | sort -g
egrep -o 'REMOTE_ADDR is .*' G_REMOTE_ADDR.txt | sort -g | uniq -c | sort -g
datamash -s -W -g 4 count 4 < G_Host1_IMAP_server.txt | awk '{ print $2 " " $1 }' | sort -g | tail -22
datamash -s -W -g 4 count 4 < G_Host2_IMAP_server.txt | awk '{ print $2 " " $1 }' | sort -g | tail -22
egrep -o '* ID .*' G_Read___ID.txt | sort | awk '{ print $1 " " $2 " " $3 " NIL" }' | datamash -s -W -g 3 count 3 | awk '{ print $2 " " $1 }' | sort -g
locate perl.core | xargs -n 1 gdb -q -x /tmp/gdb_quit.txt -c
zcat /var/log/apache/httpd-access.log.*.gz|egrep -o -w 'Mozilla/5.0 \([^;]+' | sort | egrep -o '\([a-zA-Z]+' | sort | uniq -c | sort -g | grep -v KHTML
zcat /var/log/apache/httpd-access.log.*.gz|grep 'POST /cgi-bin/imapsync' | egrep -o -w 'Mozilla/5.0 \([^;]+' | sort | egrep -o '\([a-zA-Z]+' | sort | uniq -c | sort -g | grep -v KHTML
egrep -o '\[.+@[^]]+]' G_success_login.txt |head -222222 | sort | uniq -c | sort -g
list_all_logs |tail -9999 | xargs grep -i 'Exiting with return value 112' | tee Error_112_last_9999_syncs.txt
cut -d: -f1 Error_112_last_30_days.txt | xargs grep -oih 'Invalid system flag.*' | sort | uniq -c
list_all_logs | xargs grep -i 'Exiting with return value 112' | tee Error_112_all_syncs.txt
cut -d: -f1 Error_112_all_syncs.txt | tail -100 | xargs egrep -oih 'Invalid system flag [^(]+' | sort | uniq -c
cat G_success_login_on.txt | ./domains_servers_map | sort | uniq -c | sort -g
logfiles_finished_recently -300| xargs grep -i 'Exiting with return value 10 ' | grep -v 'return value 0 ' | cut -d: -f1 | xargs tail -11 | grep 'failure: can not open imap connection on' | uniq -c | sort -g | grep http | tee ../http_host_failures.txt
# Searching big messages copied over 500 MB
list_all_logs|tail -50000 | xargs egrep '{.?[56789]........} copied'
# online processes stats
cat /var/tmp/number_of_imapsync_running.txt | datamash -W min 2 max 2 mean 2 median 2 q1 2 q3 2
for v in 2 3 4; do cat /var/tmp/number_of_imapsync_running_every_6s.txt | datamash --format=%10.0f -W min $v max $v mean $v median $v q1 $v q3 $v ; done
netstat -I em0 -b -n -w 6 -q 1
while :; do ssh root@ks5 'cd /var/tmp/imapsync_cgi/ ; . cgi_memo ; loop_number_of_imapsync_running_bandwidth 6' ; echo $?; done
# Search memory eater
cat G_Memory_consumption_at_the_end.txt | sort -g -k7 | grep 202[01] |tail -100 | cut -f1 -d: | while read f; do echo $f ; grep 'Memory consumption at the end' $f; grep 'Host. Nb messages' $f ; grep 'Biggest message' $f ; grep 'Memory/biggest message ratio' $f ; done
cat G_Host2_Nb_messages.txt | sort -g -k4 | grep 202[01] |tail -100 | cut -f1 -d: | while read f; do echo $f ; grep 'Memory consumption at the end' $f; grep 'Host. Nb messages' $f ; grep 'Biggest message' $f ; grep 'Memory/biggest message ratio' $f ; done
cat G_Host1_Nb_messages.txt | sort -g -k4 | grep 202[01] |tail -100 | cut -f1 -d: | while read f; do echo $f ; grep 'Memory consumption at the end' $f; grep 'Host. Nb messages' $f ; grep 'Biggest message' $f ; grep 'Memory/biggest message ratio' $f ; done
# Best bandwidth moments
cat /var/tmp/number_of_imapsync_running_every_60s.txt | sort -k3 -g| tail -66
EOF
}
echoq perf_help
perf_help() {
test FreeBSD = `uname -s` && {
echo FreeBSD here
echo "nload -t 6000 em0 -u K -i 100000 -o 100000"
echo "iftop -i em0 -f 'port imap or port imaps' -B # t p >"
}
test Linux = `uname -s` && {
echo Linux here
echo "nload -t 6000 eth0 -u K -i 100000 -o 100000 # Linux"
echo "iftop -i eth0 -f 'port imap or port imaps' -B # t p >"
}
}
tests_all_verbose_if_failure