scripts/tests/testversions: make binary installation and upload/download run in parallel

Change-Id: I16d87f7e16e2daf30e4d7ee5490b76c175b06930
Yingrong Zhao authored 2019-12-20 17:31:00 -05:00, committed by Yingrong Zhao
parent 80b41af8f1
commit 71c5c2213f
3 changed files with 87 additions and 50 deletions
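
Both scripts are changed with the same pattern: each independent step is launched in a backgrounded subshell, and the script then waits on every job and fails if any job failed. The following is a minimal standalone sketch of that pattern; set -euo pipefail, the failed flag, the setup_version function, and the version list are illustrative and are not taken from the committed scripts.

#!/usr/bin/env bash
# Sketch: run each independent setup step in a backgrounded subshell,
# then wait on every job and fail if any of them failed.
set -euo pipefail

setup_version() {
    # hypothetical stand-in for the per-version work (worktree add, install, ...)
    local version=$1
    echo "setting up ${version}"
    sleep 1
}

for version in v0.26.2 v0.27.1 master; do
    ( setup_version "${version}" ) &    # one subshell per version
done

failed=0
for job in $(jobs -p); do
    echo "wait for ${job}"
    wait "${job}" || failed=1           # collect every job's exit status
done
exit ${failed}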

Jenkinsfile

@@ -29,7 +29,7 @@ node('node') {
         sh 'docker exec postgres createdb -U postgres teststorj'
         // fetch the remote master branch
         sh 'git fetch --no-tags --progress -- https://github.com/storj/storj.git +refs/heads/master:refs/remotes/origin/master'
-        sh 'docker run -u $(id -u):$(id -g) --rm -i -v $PWD:$PWD -w $PWD --entrypoint $PWD/scripts/test-sim-versions.sh -e STORJ_SIM_POSTGRES -e STORJ_SIM_REDIS --link redis:redis --link postgres:postgres -e CC=gcc storjlabs/golang:1.13.5'
+        sh 'docker run -u $(id -u):$(id -g) --rm -i -v $PWD:$PWD -w $PWD --entrypoint $PWD/scripts/tests/testversions/test-sim-versions.sh -e STORJ_SIM_POSTGRES -e STORJ_SIM_REDIS --link redis:redis --link postgres:postgres -e CC=gcc storjlabs/golang:1.13.5'
       }
       catch(err){
         throw err

scripts/tests/testversions/test-sim-versions.sh

@@ -134,52 +134,70 @@ fi
 if [ -z ${STORJ_SIM_REDIS} ]; then
     echo "STORJ_SIM_REDIS is required for the satellite DB. Example: STORJ_SIM_REDIS=127.0.0.1:[port]"
     exit 1
 fi

 echo "Setting up environments for versions" ${unique_versions}

-# Get latest release tags and clean up git worktree
-git fetch
+# clean up git worktree
+git worktree prune
 for version in ${unique_versions}; do
+    # run in parallel
+    (
     dir=$(version_dir ${version})
     bin_dir=${dir}/bin

     echo -e "\nAdding worktree for ${version} in ${dir}."
     if [[ $version = "master" ]]
     then
         git worktree add -f "$dir" "origin/master"
     else
         git worktree add -f "$dir" "${version}"
     fi
     rm -f ${dir}/private/version/release.go
     rm -f ${dir}/internal/version/release.go

     if [[ $version = $current_release_version || $version = "master" ]]
     then
         echo "Installing storj-sim for ${version} in ${dir}."
         pushd ${dir}
         install_sim ${bin_dir}
         echo "finished installing"
         popd
+        # uncomment for local testing
+        # GOBIN=${bin_dir} make -C ${dir} install-sim >/dev/null 2>&1
         echo "Setting up storj-sim for ${version}. Bin: ${bin_dir}, Config: ${dir}/local-network"
         PATH=${bin_dir}:$PATH storj-sim -x --host="${STORJ_NETWORK_HOST4}" --postgres="${STORJ_SIM_POSTGRES}" --config-dir "${dir}/local-network" network setup > /dev/null 2>&1
         echo "Finished setting up. ${dir}/local-network:" $(ls ${dir}/local-network)
         echo "Binary shasums:"
         shasum ${bin_dir}/satellite
         shasum ${bin_dir}/storagenode
         shasum ${bin_dir}/uplink
         shasum ${bin_dir}/gateway
     else
         echo "Installing uplink and gateway for ${version} in ${dir}."
         pushd ${dir}
         mkdir -p ${bin_dir}
         go install -race -v -o ${bin_dir}/uplink storj.io/storj/cmd/uplink >/dev/null 2>&1
         go install -race -v -o ${bin_dir}/gateway storj.io/storj/cmd/gateway >/dev/null 2>&1
+        # uncomment for local testing
+        # GOBIN=${bin_dir} go install -race -v storj.io/storj/cmd/uplink >/dev/null 2>&1
+        # GOBIN=${bin_dir} go install -race -v storj.io/storj/cmd/gateway >/dev/null 2>&1
         popd
         echo "Finished installing. ${bin_dir}:" $(ls ${bin_dir})
         echo "Binary shasums:"
         shasum ${bin_dir}/uplink
         shasum ${bin_dir}/gateway
     fi
+    ) &
 done
+
+for job in `jobs -p`
+do
+    echo "wait for $job"
+    RESULT=0
+    wait $job || RESULT=1
+    if [ "$RESULT" == "1" ]; then
+        exit $?
+    fi
+done

scripts/tests/testversions/test-versions.sh

@@ -24,6 +24,18 @@ setup(){
     echo "setup test successfully"
 }

+wait_for_all_background_jobs_to_finish(){
+    for job in `jobs -p`
+    do
+        echo "wait for $job"
+        RESULT=0
+        wait $job || RESULT=1
+        if [ "$RESULT" == "1" ]; then
+            exit $?
+        fi
+    done
+}
+
 echo "Begin test-versions.sh, storj-sim config directory:" ${main_cfg_dir}

 echo "which storj-sim: $(which storj-sim)"
@@ -66,13 +78,17 @@ if [[ "$command" == "upload" ]]; then
     uplink mb "sj://$bucket_name/" --config-dir="${main_cfg_dir}/uplink"

-    uplink cp --config-dir="${main_cfg_dir}/uplink" --progress=false "${test_files_dir}/small-upload-testfile" "sj://$bucket_name/"
-    uplink cp --config-dir="${main_cfg_dir}/uplink" --progress=false "${test_files_dir}/big-upload-testfile" "sj://$bucket_name/"
-    uplink cp --config-dir="${main_cfg_dir}/uplink" --progress=false "${test_files_dir}/multisegment-upload-testfile" "sj://$bucket_name/"
+    # run each upload in parallel
+    uplink cp --config-dir="${main_cfg_dir}/uplink" --progress=false "${test_files_dir}/small-upload-testfile" "sj://$bucket_name/" &
+    uplink cp --config-dir="${main_cfg_dir}/uplink" --progress=false "${test_files_dir}/big-upload-testfile" "sj://$bucket_name/" &
+    uplink cp --config-dir="${main_cfg_dir}/uplink" --progress=false "${test_files_dir}/multisegment-upload-testfile" "sj://$bucket_name/" &
+    wait_for_all_background_jobs_to_finish

-    uplink cp --config-dir="${main_cfg_dir}/uplink" --progress=false "sj://$bucket_name/small-upload-testfile" "${download_dst_dir}"
-    uplink cp --config-dir="${main_cfg_dir}/uplink" --progress=false "sj://$bucket_name/big-upload-testfile" "${download_dst_dir}"
-    uplink cp --config-dir="${main_cfg_dir}/uplink" --progress=false "sj://$bucket_name/multisegment-upload-testfile" "${download_dst_dir}"
+    # run each download in parallel
+    uplink cp --config-dir="${main_cfg_dir}/uplink" --progress=false "sj://$bucket_name/small-upload-testfile" "${download_dst_dir}" &
+    uplink cp --config-dir="${main_cfg_dir}/uplink" --progress=false "sj://$bucket_name/big-upload-testfile" "${download_dst_dir}" &
+    uplink cp --config-dir="${main_cfg_dir}/uplink" --progress=false "sj://$bucket_name/multisegment-upload-testfile" "${download_dst_dir}" &
+    wait_for_all_background_jobs_to_finish

     if cmp "${test_files_dir}/small-upload-testfile" "${download_dst_dir}/small-upload-testfile"
     then
@@ -98,6 +114,7 @@ if [[ "$command" == "upload" ]]; then
         exit 1
     fi
+    rm -rf ${test_files_dir}
 fi

 if [[ "$command" == "download" ]]; then
@@ -112,9 +129,11 @@ if [[ "$command" == "download" ]]; then
     echo "bucket name: ${bucket_name}"
     echo "download folder name: ${download_dst_dir}"

-    uplink cp --config-dir="${main_cfg_dir}/uplink" --progress=false "sj://$bucket_name/small-upload-testfile" "${download_dst_dir}"
-    uplink cp --config-dir="${main_cfg_dir}/uplink" --progress=false "sj://$bucket_name/big-upload-testfile" "${download_dst_dir}"
-    uplink cp --config-dir="${main_cfg_dir}/uplink" --progress=false "sj://$bucket_name/multisegment-upload-testfile" "${download_dst_dir}"
+    # run each download in parallel
+    uplink cp --config-dir="${main_cfg_dir}/uplink" --progress=false "sj://$bucket_name/small-upload-testfile" "${download_dst_dir}" &
+    uplink cp --config-dir="${main_cfg_dir}/uplink" --progress=false "sj://$bucket_name/big-upload-testfile" "${download_dst_dir}" &
+    uplink cp --config-dir="${main_cfg_dir}/uplink" --progress=false "sj://$bucket_name/multisegment-upload-testfile" "${download_dst_dir}" &
+    wait_for_all_background_jobs_to_finish

     if cmp "${original_dst_dir}/small-upload-testfile" "${download_dst_dir}/small-upload-testfile"
     then