author     Rasmus Dahlberg <rasmus@rgdd.se>    2023-04-02 09:56:18 +0200
committer  Rasmus Dahlberg <rasmus@rgdd.se>    2023-04-02 09:56:18 +0200
commit     0845fbfd1916b3c153caa14a0719666177e10c3b (patch)
tree       1eb8db856bd2f429cbaf63dc8356b29c3363a9e5 /scripts
parent     e029b6717ee4a20eaf12ac91b98792a9d397c199 (diff)
Fix quoting and minor inconsistencies
Diffstat (limited to 'scripts')
-rwxr-xr-x  scripts/test.sh  55
1 file changed, 26 insertions, 29 deletions
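The patch below mostly adds double quotes around parameter and command expansions. As a quick illustration of the class of bug this guards against (not part of the commit, and using a made-up path), an unquoted expansion is word-split and glob-expanded before the command runs:

    # Hypothetical example only: a temporary directory whose name contains a space.
    tmp_dir="/tmp/test run"
    mkdir -p "$tmp_dir"   # quoted: creates a single directory named "test run"
    mkdir -p $tmp_dir     # unquoted: word-splits and creates /tmp/test and ./run

Plain assignments are not word-split in bash, which is why the patch can drop the quotes from lines like tmp_dir="$(mktemp -d)" without changing behavior.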
diff --git a/scripts/test.sh b/scripts/test.sh
index 809c5af..dbd4b6f 100755
--- a/scripts/test.sh
+++ b/scripts/test.sh
@@ -24,23 +24,24 @@ input_file=example.lst
 ###
 # Other options
 ###
-tmp_dir="$(mktemp -d)"
-out_dir="data/$(date +%Y%m%d-%H%M%S)"
+tmp_dir=$(mktemp -d)
+out_dir=data/$(date +%Y%m%d-%H%M%S)
 mullvad_wait_s=5
 timeout=30s
 
 set -eu
 function main() {
-    num_sites="$(wc -l $input_file | cut -d' ' -f1)"
+    num_sites=$(wc -l $input_file | cut -d' ' -f1)
     debug "relays=${#relays[@]} (${relays[@]})"
     debug "limits=${#limits[@]} (${limits[@]})"
-    debug "numWorkers=$num_workers"
-    debug "inputFile=$input_file ($num_sites sites)"
+    debug "num_workers=$num_workers"
+    debug "input_file=$input_file ($num_sites sites)"
     debug "tmp_dir=$tmp_dir"
     debug "out_dir=$out_dir"
     debug "mullvad_wait_s=$mullvad_wait_s"
+    debug "timeout=$timeout"
 
-    num_test="$(( ${#limits[@]} * ${#relays[@]} ))"
+    num_test=$(( ${#limits[@]} * ${#relays[@]} ))
     debug "about to start $num_test tests"
 
     runtime_s=0
@@ -65,55 +66,51 @@ function main() {
         ip=$(curl -s https://am.i.mullvad.net)
         debug "working on test $i/$num_test at relay $relay ($ip) with limit $limit sites/s"
 
-        shuf -o $tmp_dir/input.lst $input_file
+        shuf -o "$tmp_dir/input.lst" "$input_file"
         stdout_file=$tmp_dir/$relay-$limit.stdout
         stderr_file=$tmp_dir/$relay-$limit.stderr
-        onion-grab -i $tmp_dir/input.lst -t $timeout -l $limit -w $num_workers >$stdout_file 2>$stderr_file
+        onion-grab -i "$tmp_dir/input.lst" -t "$timeout" -l "$limit" -w "$num_workers" >"$stdout_file" 2>"$stderr_file"
 
         i=$(( $i + 1 ))
     done
     done
 
-    mkdir -p $out_dir
+    mkdir -p "$out_dir"
     for limit in ${limits[@]}; do
         dup_file=$tmp_dir/dup.txt
-        rm -f $dup_file
+        rm -f "$dup_file"
 
         for relay in ${relays[@]}; do
             stdout_file=$tmp_dir/$relay-$limit.stdout
             stderr_file=$tmp_dir/$relay-$limit.stderr
-            cut -d' ' -f1 $stdout_file >> $dup_file
+            cut -d' ' -f1 "$stdout_file" >> "$dup_file"
 
-            avg_rate=$(grep 'Average rate:' $stderr_file | tail -n1 | xargs | cut -d' ' -f3)
+            avg_rate=$(grep 'Average rate:' "$stderr_file" | tail -n1 | xargs | cut -d' ' -f3)
             if [[ -z "$avg_rate" ]]; then
                 avg_rate="n/a"
             fi
-            num_onion=$(wc -l $stdout_file | cut -d' ' -f1)
-            processed=$(grep Processed $stderr_file | tail -n1 | xargs | cut -d' ' -f2)
-            success=$(grep Success $stderr_file | tail -n1 | xargs | cut -d' ' -f2)
-            failure=$(grep Failure $stderr_file | tail -n1 | xargs | cut -d' ' -f2)
-            failure_dns=$(grep DNS $stderr_file | tail -n1 | xargs | cut -d' ' -f2)
-            failure_dns_detailed=$(grep DNS $stderr_file | tail -n1 | xargs | cut -d' ' -f3-)
+            num_onion=$(wc -l "$stdout_file" | cut -d' ' -f1)
+            processed=$(grep 'Processed:' "$stderr_file" | tail -n1 | xargs | cut -d' ' -f2)
+            success=$(grep 'Success:' "$stderr_file" | tail -n1 | xargs | cut -d' ' -f2)
+            failure=$(grep 'Failure:' "$stderr_file" | tail -n1 | xargs | cut -d' ' -f2)
+            failure_dns=$(grep 'DNS:' "$stderr_file" | tail -n1 | xargs | cut -d' ' -f2)
+            failure_dns_detailed=$(grep 'DNS:' "$stderr_file" | tail -n1 | xargs | cut -d' ' -f3-)
 
             info "relay:$relay limit:$limit/s avg-rate:$avg_rate/s onions:$num_onion connected:$success/$processed dns-error:$failure_dns/$failure $failure_dns_detailed"
 
-            output_file="$out_dir/$relay-l$limit.txt"
-            mv $stdout_file $output_file
+            output_file=$out_dir/$relay-l$limit.txt
+            mv "$stdout_file" "$output_file"
         done
 
-        num_sites=$(sort -Vu $dup_file | wc -l | cut -d' ' -f1)
-        info "all relays with limit $limit sites/s: $num_sites unique sites with onion locations found"
+        num_sites=$(sort -Vu "$dup_file" | wc -l | cut -d' ' -f1)
+        info "all relays with limit $limit/s: $num_sites unique sites set onion location header or attribute"
     done
 
     info "stored collected test data in $out_dir"
-    rm -rf $tmp_dir
+    rm -rf "$tmp_dir"
 }
 
-function debug() {
-    echo "$(date +"%Y-%m-%d %H:%M:%S %Z") [DEBU] $@" >&2
-}
-function info() {
-    echo "$(date +"%Y-%m-%d %H:%M:%S %Z") [INFO] $@" >&2
-}
+function debug() { echo "$(date +"%Y-%m-%d %H:%M:%S %Z") [DEBU] $@" >&2; }
+function info() { echo "$(date +"%Y-%m-%d %H:%M:%S %Z") [INFO] $@" >&2; }
 
 main $@