Diffstat (limited to 'scripts/test.sh')
-rwxr-xr-x  scripts/test.sh  113
1 file changed, 113 insertions, 0 deletions
diff --git a/scripts/test.sh b/scripts/test.sh
new file mode 100755
index 0000000..beac64f
--- /dev/null
+++ b/scripts/test.sh
@@ -0,0 +1,113 @@
+#!/bin/bash
+
+#
+# A script that evaluates how different onion-grab configurations perform
+# with regard to successful connections, connection errors, found onions,
+# etc. Mullvad VPN is used to conduct these tests from multiple vantage points.
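+#
+# Assumes the mullvad CLI and onion-grab are available in PATH, and that the
+# input file lists one site per line.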
+#
+
+###
+# Options you will likely want to tune yourself
+###
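+# relays: Mullvad relay hostnames to test from (one vantage point per relay).
+# limits: rate limits, in sites/second, passed to onion-grab via -l.
+# num_workers: number of concurrent onion-grab workers (-w).
+# input_file: list of sites to scan (-i), one per line.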
+relays=(\
+ se-sto-wg-001\
+ us-nyc-wg-501\
+ au-mel-wg-301\
+)
+limits=(\
+ 5\
+ 10\
+)
+num_workers=16
+input_file=example.lst
+
+###
+# Other options
+###
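+# tmp_dir: scratch space for per-test output (removed on success).
+# out_dir: where the collected test data ends up.
+# mullvad_wait_s: seconds to wait after connecting to a relay.
+# timeout: timeout value passed to onion-grab via -t.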
+tmp_dir="$(mktemp -d)"
+out_dir="data/$(date +%Y%m%d-%H%M%S)"
+mullvad_wait_s=5
+timeout=30s
+
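+# Exit on errors and on use of undefined variables.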
+set -eu
+function main() {
+ num_sites="$(wc -l $input_file | cut -d' ' -f1)"
+ debug "relays=${#relays[@]}"
+ debug "limits=${#limits[@]}"
+ debug "numWorkers=$num_workers"
+ debug "inputFile=$input_file ($num_sites sites)"
+ debug "tmp_dir=$tmp_dir"
+ debug "out_dir=$out_dir"
+ debug "mullvad_wait_s=$mullvad_wait_s"
+
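+    # One test per (limit, relay) combination.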
+ num_test="$(( ${#limits[@]} * ${#relays[@]} ))"
+ debug "about to start $num_test tests"
+
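+    # Rough runtime estimate: each test processes num_sites at limit sites/s,
+    # so it takes at least num_sites/limit seconds; sum that over all tests.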
+ runtime_s=0
+ for limit in ${limits[@]}; do
+ for relay in ${relays[@]}; do
+ seconds=$(( $num_sites / $limit ))
+ runtime_s=$(( $runtime_s + $seconds ))
+ done
+ done
+ now=$(date +%s)
+ estimated_done=$(( $now + $runtime_s ))
+ debug "estimated done? earliest $(date -d @$estimated_done +"%Y-%m-%d %H:%M:%S %Z")"
+
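+    # For each (limit, relay) pair: switch Mullvad to the relay, look up the
+    # exit IP, and run onion-grab, saving its stdout/stderr for later parsing.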
+ i=1
+ for limit in ${limits[@]}; do
+ for relay in ${relays[@]}; do
+ mullvad disconnect >/dev/null
+ mullvad relay set hostname $relay >/dev/null
+ mullvad connect >/dev/null
+ sleep $mullvad_wait_s
+
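+            # am.i.mullvad.net responds with the current exit IP.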
+ ip=$(curl -s https://am.i.mullvad.net)
+ debug "working on test $i/$num_test at relay $relay ($ip) with limit $limit sites/s"
+
+ stdout_file=$tmp_dir/$relay-$limit.stdout
+ stderr_file=$tmp_dir/$relay-$limit.stderr
+ onion-grab -i $input_file -t $timeout -l $limit -w $num_workers >$stdout_file 2>$stderr_file
+
+ i=$(( $i + 1 ))
+ done
+ done
+
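+    # Summarize results per limit: move each relay's stdout to out_dir and
+    # collect its first column (the site) into dup_file for a cross-relay count.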
+ mkdir -p $out_dir
+ for limit in ${limits[@]}; do
+ dup_file=$tmp_dir/dup.txt
+
+ rm -f $dup_file
+ for relay in ${relays[@]}; do
+ stdout_file=$tmp_dir/$relay-$limit.stdout
+ stderr_file=$tmp_dir/$relay-$limit.stderr
+ cut -d' ' -f1 $stdout_file >> $dup_file
+
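+            # Parse onion-grab's summary counters from stderr; the last
+            # matching line is assumed to hold the final totals.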
+ num_onion=$(wc -l $stdout_file | cut -d' ' -f1)
+ processed=$(grep Processed $stderr_file | tail -n1 | xargs | cut -d' ' -f2)
+ success=$(grep Success $stderr_file | tail -n1 | xargs | cut -d' ' -f2)
+ failure=$(grep Failure $stderr_file | tail -n1 | xargs | cut -d' ' -f2)
+ failure_dns=$(grep DNS $stderr_file | tail -n1 | xargs | cut -d' ' -f2)
+ failure_dns_detailed=$(grep DNS $stderr_file | tail -n1 | xargs | cut -d' ' -f3-)
+ info "relay:$relay limit:$limit onions:$num_onion connected:$success/$processed dns-error:$failure_dns/$failure $failure_dns_detailed"
+
+ output_file="$out_dir/$relay-l$limit.txt"
+ mv $stdout_file $output_file
+ done
+
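+        # Count unique sites (across all relays) where an onion location was found.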
+ num_sites=$(sort -Vu $dup_file | wc -l | cut -d' ' -f1)
+ info "all relays with limit $limit sites/s: $num_sites unique sites with onion locations found"
+ done
+
+ info "stored collected test data in $out_dir"
+ rm -rf $tmp_dir
+}
+
+function debug() {
+    echo "$(date +"%Y-%m-%d %H:%M:%S %Z") [DEBU] $*" >&2
+}
+function info() {
+    echo "$(date +"%Y-%m-%d %H:%M:%S %Z") [INFO] $*" >&2
+}
+
+main "$@"