#!/usr/bin/env bash

set -ueo pipefail

main_cfg_dir=$1
command=$2
uplink_version=$3
update_access_script_path=$4

bucket="bucket-123"
test_files_dir="${main_cfg_dir}/testfiles"
stage1_dst_dir="${main_cfg_dir}/stage1"
stage2_dst_dir="${main_cfg_dir}/stage2"
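# positional arguments, as read above:
#   $1  storj-sim config directory
#   $2  command to run: "upload", "download", or "cleanup"
#   $3  uplink version under test (for "cleanup": a space-separated list of versions)
#   $4  path to the Go file, run via `go run`, that rewrites an access for the local satellite
#       (for "download" this same position is re-read below as the existing bucket name suffixes)
#
# example invocation (the config path, version tag, and script path are illustrative placeholders):
#   ./test-versions.sh "$HOME/.local/share/storj/local-network" upload v0.30.5 ./update_access.go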
setup(){
    mkdir -p "$test_files_dir" "$stage1_dst_dir" "$stage2_dst_dir"

    random_bytes_file () {
        size=$1
        output=$2
        head -c "$size" </dev/urandom > "$output"
    }

    random_bytes_file "2048"   "$test_files_dir/small-upload-testfile"        # create 2 KiB file of random bytes (inline)
    random_bytes_file "5120"   "$test_files_dir/big-upload-testfile"          # create 5 KiB file of random bytes (remote)
    random_bytes_file "131072" "$test_files_dir/multisegment-upload-testfile" # create 128 KiB file of random bytes (remote)

    echo "test setup completed successfully"
}
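# wait for every background job started by this shell; a failed job is only logged here,
# since the cmp checks below catch any missing or corrupted download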
wait_for_all_background_jobs_to_finish(){
    for job in $(jobs -p)
    do
        echo "wait for $job"
        RESULT=0
        wait "$job" || RESULT=1
        if [ "$RESULT" == "1" ]; then
            echo "job $job failed"
        fi
    done
}
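# log which storj-sim and uplink binaries are in use, plus their checksums, so version
# mismatches are easy to spot in the test output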
echo "Begin test-versions.sh, storj-sim config directory: ${main_cfg_dir}"

echo "which storj-sim: $(which storj-sim)"
echo "Shasum for storj-sim:"
shasum "$(which storj-sim)"

echo -e "\nConfig directory for uplink:"
echo "${main_cfg_dir}/uplink"
echo "which uplink: $(which uplink)"
echo "Shasum for uplink:"
shasum "$(which uplink)"
# for the oldest uplink versions, access is not supported, so we need to configure separate values for the api key, satellite address, and encryption key
if [ ! -d "${main_cfg_dir}/uplink" ]; then
    mkdir -p "${main_cfg_dir}/uplink"
    api_key=$(storj-sim --config-dir="$main_cfg_dir" network env GATEWAY_0_API_KEY)
    sat_addr=$(storj-sim --config-dir="$main_cfg_dir" network env SATELLITE_0_ADDR)
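    # the release tag is split on "v" and "." below, so $2 is the major and $3 the minor version;
    # any tag at v0.30.x or later, or any v1.x tag, uses an access instead of separate api key,
    # satellite address, and encryption key values.
    # e.g. v0.30.5 matches ($2=0, $3=30) while v0.15.1 does not ($2=0, $3=15).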
    should_use_access=$(echo "$uplink_version" | awk 'BEGIN{FS="[v.]"} $3 >= 30 || $2 >= 1 {print $0}')
    if [[ ${#should_use_access} -gt 0 ]]; then
        access=$(storj-sim --config-dir="$main_cfg_dir" network env GATEWAY_0_ACCESS)
        new_access=$(go run "$update_access_script_path" "$(storj-sim --config-dir="$main_cfg_dir" network env SATELLITE_0_DIR)" "$access")
        uplink import --config-dir="${main_cfg_dir}/uplink" "${new_access}" --client.segment-size="64.0 KiB"
    else
        uplink setup --config-dir="${main_cfg_dir}/uplink" --non-interactive --api-key="$api_key" --satellite-addr="$sat_addr" --enc.encryption-key="test" --client.segment-size="64.0 KiB"
    fi
fi
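# v0.29.10 prints its serialized scope via `uplink share`; capture it to access.txt so the
# v0.30.x import step below can reuse the exact same scope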
if [[ $uplink_version = "v0.29.10" ]]; then
    uplink share --config-dir="${main_cfg_dir}/uplink" | grep "Scope" | awk -F ": " '{print $2}' | tee "${main_cfg_dir}/uplink/access.txt"
fi
# from version v0.30.x onwards we need to use an access instead of separate values for the api key, satellite address, and encryption key
should_use_access=$(echo "$uplink_version" | awk 'BEGIN{FS="[v.]"} $3 == 30 {print $0}')
if [[ ${#should_use_access} -gt 0 ]] && [ -e "${main_cfg_dir}/uplink/access.txt" ]
then
    # the access provided by storj-sim uses an empty encryption key; we cannot run `uplink setup` above with an empty encryption key
    # therefore, we use a hack -> get the access from the existing uplink config (the access.txt saved above) and import that same access

    # super hack:
    access=$(head -n 1 "${main_cfg_dir}/uplink/access.txt")
    new_access=$(go run "$update_access_script_path" "$(storj-sim --config-dir="$main_cfg_dir" network env SATELLITE_0_DIR)" "$access")
    uplink import --config-dir="${main_cfg_dir}/uplink" "${new_access}"
    rm -rf "${main_cfg_dir}/uplink/access.txt"
fi
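# log the satellite and storage node binaries in use and their checksums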
echo -e "\nConfig directory for satellite:"
echo "${main_cfg_dir}/satellite/0"
echo "Shasum for satellite:"
shasum "${main_cfg_dir}/satellite/0/satellite"

echo -e "\nStoragenode config directories:"
for i in {0..9}
do
    echo -e "\nConfig directory for sn ${i}:"
    echo "${main_cfg_dir}/storagenode/${i}"
    echo "Shasum for sn ${i} binary:"
    shasum "${main_cfg_dir}/storagenode/${i}/storagenode"
done
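# upload stage: create local test files, upload them to a bucket tagged with the uplink
# version, download them straight back into stage1, and verify every copy byte-for-byte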
if [[ "$command" == "upload" ]]; then
|
|
|
|
setup
|
|
|
|
bucket_name=${bucket}-${uplink_version}
|
|
|
|
download_dst_dir=${stage1_dst_dir}/${uplink_version}
|
|
|
|
mkdir -p "$download_dst_dir"
|
|
|
|
|
|
|
|
uplink mb "sj://$bucket_name/" --config-dir="${main_cfg_dir}/uplink"
|
|
|
|
|
2019-12-20 22:31:00 +00:00
|
|
|
# run each upload in parallel
|
|
|
|
uplink cp --config-dir="${main_cfg_dir}/uplink" --progress=false "${test_files_dir}/small-upload-testfile" "sj://$bucket_name/" &
|
|
|
|
uplink cp --config-dir="${main_cfg_dir}/uplink" --progress=false "${test_files_dir}/big-upload-testfile" "sj://$bucket_name/" &
|
|
|
|
uplink cp --config-dir="${main_cfg_dir}/uplink" --progress=false "${test_files_dir}/multisegment-upload-testfile" "sj://$bucket_name/" &
|
|
|
|
wait_for_all_background_jobs_to_finish
|
2019-09-24 19:45:01 +01:00
|
|
|
|
2019-12-20 22:31:00 +00:00
|
|
|
# run each download in parallel
|
|
|
|
uplink cp --config-dir="${main_cfg_dir}/uplink" --progress=false "sj://$bucket_name/small-upload-testfile" "${download_dst_dir}" &
|
|
|
|
uplink cp --config-dir="${main_cfg_dir}/uplink" --progress=false "sj://$bucket_name/big-upload-testfile" "${download_dst_dir}" &
|
|
|
|
uplink cp --config-dir="${main_cfg_dir}/uplink" --progress=false "sj://$bucket_name/multisegment-upload-testfile" "${download_dst_dir}" &
|
|
|
|
wait_for_all_background_jobs_to_finish
|
2019-09-24 19:45:01 +01:00
|
|
|
|
|
|
|
if cmp "${test_files_dir}/small-upload-testfile" "${download_dst_dir}/small-upload-testfile"
|
|
|
|
then
|
|
|
|
echo "upload test on release tag: small upload testfile matches uploaded file"
|
|
|
|
else
|
|
|
|
echo "upload test on release tag: small upload testfile does not match uploaded file"
|
|
|
|
exit 1
|
|
|
|
fi
|
|
|
|
|
|
|
|
if cmp "${test_files_dir}/big-upload-testfile" "${download_dst_dir}/big-upload-testfile"
|
|
|
|
then
|
|
|
|
echo "upload test on release tag: big upload testfile matches uploaded file"
|
|
|
|
else
|
|
|
|
echo "upload test on release tag: big upload testfile does not match uploaded file"
|
|
|
|
exit 1
|
|
|
|
fi
|
|
|
|
|
|
|
|
if cmp "${test_files_dir}/multisegment-upload-testfile" "${download_dst_dir}/multisegment-upload-testfile"
|
|
|
|
then
|
|
|
|
echo "upload test on release tag: multisegment upload testfile matches uploaded file"
|
|
|
|
else
|
|
|
|
echo "upload test on release tag: multisegment upload testfile does not match uploaded file"
|
|
|
|
exit 1
|
|
|
|
fi
|
|
|
|
|
2019-12-20 22:31:00 +00:00
|
|
|
rm -rf ${test_files_dir}
|
2019-09-24 19:45:01 +01:00
|
|
|
fi
|
|
|
|
|
|
|
|
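# download stage: with the currently selected uplink, download every bucket uploaded in
# stage 1 into stage2 and compare each file against its stage 1 original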
if [[ "$command" == "download" ]]; then
|
2020-01-17 17:36:50 +00:00
|
|
|
existing_bucket_name_suffixes=$4
|
2019-09-24 19:45:01 +01:00
|
|
|
|
|
|
|
# download all uploaded files from stage 1 with currently selected uplink
|
|
|
|
for suffix in ${existing_bucket_name_suffixes}; do
|
|
|
|
bucket_name=${bucket}-${suffix}
|
|
|
|
original_dst_dir=${stage1_dst_dir}/${suffix}
|
|
|
|
download_dst_dir=${stage2_dst_dir}/${suffix}
|
|
|
|
mkdir -p "$download_dst_dir"
|
|
|
|
|
|
|
|
echo "bucket name: ${bucket_name}"
|
|
|
|
echo "download folder name: ${download_dst_dir}"
|
2019-12-20 22:31:00 +00:00
|
|
|
# run each download in parallel
|
|
|
|
uplink cp --config-dir="${main_cfg_dir}/uplink" --progress=false "sj://$bucket_name/small-upload-testfile" "${download_dst_dir}" &
|
|
|
|
uplink cp --config-dir="${main_cfg_dir}/uplink" --progress=false "sj://$bucket_name/big-upload-testfile" "${download_dst_dir}" &
|
|
|
|
uplink cp --config-dir="${main_cfg_dir}/uplink" --progress=false "sj://$bucket_name/multisegment-upload-testfile" "${download_dst_dir}" &
|
|
|
|
wait_for_all_background_jobs_to_finish
|
2019-09-24 19:45:01 +01:00
|
|
|
|
|
|
|
if cmp "${original_dst_dir}/small-upload-testfile" "${download_dst_dir}/small-upload-testfile"
|
|
|
|
then
|
|
|
|
echo "download test on current branch: small upload testfile matches uploaded file"
|
|
|
|
else
|
|
|
|
echo "download test on current branch: small upload testfile does not match uploaded file"
|
|
|
|
exit 1
|
|
|
|
fi
|
|
|
|
|
|
|
|
if cmp "${original_dst_dir}/big-upload-testfile" "${download_dst_dir}/big-upload-testfile"
|
|
|
|
then
|
|
|
|
echo "download test on current branch: big upload testfile matches uploaded file"
|
|
|
|
else
|
|
|
|
echo "download test on current branch: big upload testfile does not match uploaded file"
|
|
|
|
exit 1
|
|
|
|
fi
|
|
|
|
|
|
|
|
if cmp "${original_dst_dir}/multisegment-upload-testfile" "${download_dst_dir}/multisegment-upload-testfile"
|
|
|
|
then
|
|
|
|
echo "download test on current branch: multisegment upload testfile matches uploaded file"
|
|
|
|
else
|
|
|
|
echo "download test on current branch: multisegment upload testfile does not match uploaded file"
|
|
|
|
exit 1
|
|
|
|
fi
|
|
|
|
|
|
|
|
rm -rf ${download_dst_dir}
|
|
|
|
done
|
|
|
|
fi
|
|
|
|
|
|
|
|
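# cleanup stage: delete the test objects and buckets created for every uplink version listed in $3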
if [[ "$command" == "cleanup" ]]; then
|
|
|
|
uplink_versions=$3
|
|
|
|
for ul_version in ${uplink_versions}; do
|
|
|
|
bucket_name=${bucket}-${ul_version}
|
|
|
|
uplink rm --config-dir="${main_cfg_dir}/uplink" "sj://$bucket_name/small-upload-testfile"
|
|
|
|
uplink rm --config-dir="${main_cfg_dir}/uplink" "sj://$bucket_name/big-upload-testfile"
|
|
|
|
uplink rm --config-dir="${main_cfg_dir}/uplink" "sj://$bucket_name/multisegment-upload-testfile"
|
|
|
|
uplink rb --config-dir="${main_cfg_dir}/uplink" "sj://$bucket_name"
|
|
|
|
done
|
|
|
|
fi
|
|
|
|
|
|
|
|
echo "Done with test-versions.sh"