scripts: reduce segment size for integration tests

This will speed up integration tests.

Change-Id: Ifb1fadd01692390457971bfe7f9ba73a0796abb2
parent c021b35879
commit 884638a294
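The speedup comes from rebuilding the uplink CLI with a much smaller maximum segment size, which the script change below does with Go's -X linker flag: -X replaces the value of a package-level string variable (here storj.io/uplink.maxSegmentSize) at link time, so no source change in the uplink library is needed. A minimal sketch of that mechanism, separate from the commit itself:

# Sketch only, not part of this change: rebuild the uplink CLI with the
# maximum segment size overridden at link time. -X works only on
# package-level string variables, so the value is passed as the string "6MiB".
GOBIN=$(mktemp -d) go install -v \
    -ldflags "-X 'storj.io/uplink.maxSegmentSize=6MiB'" \
    storj.io/storj/cmd/uplink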
go.mod (2 lines changed)

@@ -45,5 +45,5 @@ require (
 	storj.io/drpc v0.0.12-0.20200413163255-debb458a7474
 	storj.io/monkit-jaeger v0.0.0-20200403204040-f5a746eeacca
 	storj.io/private v0.0.0-20200403212157-26f222c154f0
-	storj.io/uplink v1.0.4-0.20200406100056-baa89e6fe434
+	storj.io/uplink v1.0.4-0.20200415152100-8fbdcbad8065
 )
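The go.mod and go.sum edits are the usual fallout of pinning storj.io/uplink to a newer commit; presumably (this is an assumption, the commit does not record the command) they were produced with something like:

# Assumed workflow, not recorded in the commit: pin uplink to the new commit
# and let the go tool compute the pseudo-version and refresh go.sum.
go get storj.io/uplink@8fbdcbad8065
go mod tidy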
go.sum (5 lines changed)

@@ -623,6 +623,9 @@ honnef.co/go/tools v0.0.1-2019.2.3 h1:3JgtbtFHMiCmsznwGVTUWbgGov+pVqnlf1dEJTNAXe
 honnef.co/go/tools v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg=
 rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8=
 storj.io/common v0.0.0-20200402141523-7780ee0cca0d/go.mod h1:pZyXiIE7bGETIRXtfs0nICqMwp7PM8HqnDuyUeldNA0=
+storj.io/common v0.0.0-20200408133928-c307b675fdaa/go.mod h1:pZyXiIE7bGETIRXtfs0nICqMwp7PM8HqnDuyUeldNA0=
+storj.io/common v0.0.0-20200413160743-f212d3029dbf h1:3ltVLnAtkJ/nbHg1aStSRr99doJRqPLl7BmRV9XKWpg=
+storj.io/common v0.0.0-20200413160743-f212d3029dbf/go.mod h1:pZyXiIE7bGETIRXtfs0nICqMwp7PM8HqnDuyUeldNA0=
 storj.io/common v0.0.0-20200416175331-40469cc6b6d5 h1:ApFVw3uGVbUqPPkJbTVq8wcqJFmJRU24uhjdoedsj/M=
 storj.io/common v0.0.0-20200416175331-40469cc6b6d5/go.mod h1:pZyXiIE7bGETIRXtfs0nICqMwp7PM8HqnDuyUeldNA0=
 storj.io/common v0.0.0-20200419143955-e54806ee8c56 h1:SACDryIjBnW4sqkHbzEpAmB0PhJYv5/mmJCHsuStsKY=
@@ -639,3 +642,5 @@ storj.io/private v0.0.0-20200403212157-26f222c154f0 h1:7raQWpuP5poUd2vMYIRE4XcEC
 storj.io/private v0.0.0-20200403212157-26f222c154f0/go.mod h1:s0T6C5sBLRubb5uT+FbE4ZGaLBUs6jZ/qzGp/OvVpCI=
 storj.io/uplink v1.0.4-0.20200406100056-baa89e6fe434 h1:GfuoaAq9VLdC19+T8H//WPQjzOhyU1otpBme/O6R5JM=
 storj.io/uplink v1.0.4-0.20200406100056-baa89e6fe434/go.mod h1:bGqCqd9cuAw3WCXznwN6iwmJhhJE5omGx1C28XevHGQ=
+storj.io/uplink v1.0.4-0.20200415152100-8fbdcbad8065 h1:qmnad330OivpK72+qJfjuo+jW0PFo+0zvCtE/QtHqvo=
+storj.io/uplink v1.0.4-0.20200415152100-8fbdcbad8065/go.mod h1:+5KSvuwpOz4lBcrOa7YuXJ3lb5Z5/3+acENozs9/aIw=
(integration-test shell script; filename not shown in this view)

@@ -4,9 +4,6 @@ set +x
 
 SCRIPTDIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
 
-echo "Running test-sim"
-make -C "$SCRIPTDIR"/.. install-sim
-
 # setup tmpdir for testfiles and cleanup
 TMP=$(mktemp -d -t tmp.XXXXXXXXXX)
 cleanup(){
@@ -14,6 +11,15 @@ cleanup(){
 }
 trap cleanup EXIT
 
+echo "Running test-sim"
+make -C "$SCRIPTDIR"/.. install-sim
+
+echo "Overriding default max segment size to 6MiB"
+GOBIN=$TMP go install -v -ldflags "-X 'storj.io/uplink.maxSegmentSize=6MiB'" storj.io/storj/cmd/uplink
+
+# use modified version of uplink
+export PATH=$TMP:$PATH
+
 export STORJ_NETWORK_DIR=$TMP
 
 STORJ_NETWORK_HOST4=${STORJ_NETWORK_HOST4:-127.0.0.1}
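Because the freshly built binary is installed into $TMP and $TMP is prepended to PATH, it shadows any uplink already present on the machine. A quick sanity check along these lines (not part of the commit) would confirm the override is in effect:

# Sketch only: make sure the test uses the uplink built into $TMP rather
# than a previously installed binary further down the PATH.
if [ "$(command -v uplink)" != "$TMP/uplink" ]; then
    echo "unexpected uplink binary: $(command -v uplink)" >&2
    exit 1
fi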
@@ -37,8 +37,8 @@ compare_files () {
 random_bytes_file "2KiB"   "$SRC_DIR/small-upload-testfile"        # create 2KiB file of random bytes (inline)
 random_bytes_file "5MiB"   "$SRC_DIR/big-upload-testfile"          # create 5MiB file of random bytes (remote)
 # this is special case where we need to test at least one remote segment and inline segment of exact size 0
-random_bytes_file "64MiB"  "$SRC_DIR/multisegment-upload-testfile" # create 64MiB file of random bytes (1 remote segments + inline)
-random_bytes_file "68MiB"  "$SRC_DIR/diff-size-segments"           # create 68MiB file of random bytes (2 remote segments)
+random_bytes_file "12MiB"  "$SRC_DIR/multisegment-upload-testfile" # create 12MiB file of random bytes (1 remote segments + inline)
+random_bytes_file "13MiB"  "$SRC_DIR/diff-size-segments"           # create 13MiB file of random bytes (2 remote segments)
 
 random_bytes_file "100KiB" "$SRC_DIR/put-file"                     # create 100KiB file of random bytes (remote)
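With the maximum segment size forced down to 6MiB, 12MiB and 13MiB files already span multiple segments, so the 64MiB and 68MiB files sized for the default 64MiB segments are no longer needed and the test uploads far less data. random_bytes_file itself is defined elsewhere in the script; a minimal sketch of such a helper (assumed shape, the real one may differ):

# Hypothetical helper for illustration only; the script's real
# random_bytes_file is defined elsewhere and may differ.
random_bytes_file () {
    size="$1"    # size with a binary suffix, e.g. "12MiB" (GNU head accepts KiB/MiB)
    output="$2"  # destination path
    head -c "$size" </dev/urandom >"$output"
}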