add performance benchmarks (#2021)

* update uplink cmd args

* add upload/download benchmark, add script to run benchmarks

* fix lint err

* changes per CR comments

* export env var for linux

* fix lint

* add byte count to bench test
Jess G 2019-05-24 13:25:53 -07:00 committed by GitHub
parent 8f078d2841
commit 6f23aeef18
3 changed files with 242 additions and 7 deletions

cmd/uplink/cmd/cp_test.go (new file, 183 lines)

@@ -0,0 +1,183 @@
// Copyright (C) 2019 Storj Labs, Inc.
// See LICENSE for copying information.
package cmd_test
import (
"encoding/hex"
"errors"
"log"
"math/rand"
"os"
"testing"
"storj.io/storj/internal/memory"
"storj.io/storj/internal/s3client"
)
const (
bucket = "testbucket"
)
var benchmarkCases = []struct {
name string
objectsize memory.Size
}{
{"100B", 100 * memory.B},
{"1MB", 1 * memory.MiB},
{"10MB", 10 * memory.MiB},
{"100MB", 100 * memory.MiB},
{"1G", 1 * memory.GiB},
}
var testObjects = createObjects()
// createObjects generates the objects (i.e. slices of bytes) that
// will be used in the upload/download benchmarks
func createObjects() map[string][]byte {
objects := make(map[string][]byte)
for _, bm := range benchmarkCases {
data := make([]byte, bm.objectsize)
_, err := rand.Read(data)
if err != nil {
log.Fatalf("failed to read random bytes: %+v\n", err)
}
objects[bm.name] = data
}
return objects
}
// uplinkSetup sets up an uplink to use for testing uploads/downloads
func uplinkSetup() s3client.Client {
conf, err := setupConfig()
if err != nil {
log.Fatalf("failed to setup s3client config: %+v\n", err)
}
client, err := s3client.NewUplink(conf)
if err != nil {
log.Fatalf("failed to create s3client NewUplink: %+v\n", err)
}
err = client.MakeBucket(bucket, "")
if err != nil {
log.Fatalf("failed to create bucket with s3client %q: %+v\n", bucket, err)
}
return client
}
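
// setupConfig builds the s3client config for the uplink, reading the
// satellite address and API key from the environment where available.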
func setupConfig() (s3client.Config, error) {
const (
uplinkEncryptionKey = "supersecretkey"
defaultSatelliteAddr = "127.0.0.1:10000"
)
var conf s3client.Config
conf.EncryptionKey = uplinkEncryptionKey
conf.Satellite = getEnvOrDefault("SATELLITE_0_ADDR", defaultSatelliteAddr)
conf.APIKey = getEnvOrDefault("GATEWAY_0_API_KEY", os.Getenv("apiKey"))
if conf.APIKey == "" {
return conf, errors.New("no api key provided. Expecting an env var $GATEWAY_0_API_KEY or $apiKey")
}
return conf, nil
}
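
// getEnvOrDefault returns the value of the environment variable key,
// or fallback if it is not set.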
func getEnvOrDefault(key, fallback string) string {
if value, exists := os.LookupEnv(key); exists {
return value
}
return fallback
}
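
// BenchmarkUpload uploads objects of each benchmark size to the test bucket
// and reports throughput via b.SetBytes.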
func BenchmarkUpload(b *testing.B) {
var client = uplinkSetup()
// uploadedObjects is used to store the names of all objects that are uploaded
// so that we can make sure to delete them all during cleanup
var uploadedObjects = map[string][]string{}
for _, bm := range benchmarkCases {
b.Run(bm.name, func(b *testing.B) {
b.SetBytes(bm.objectsize.Int64())
b.ResetTimer()
for i := 0; i < b.N; i++ {
// make some random bytes so the objectPath is unique
randomBytes := make([]byte, 16)
rand.Read(randomBytes)
uniquePathPart := hex.EncodeToString(randomBytes)
objectPath := "folder/data" + uniquePathPart + "_" + bm.name
err := client.Upload(bucket, objectPath, testObjects[bm.name])
if err != nil {
log.Fatalf("failed to upload object %q: %+v\n", objectPath, err)
}
if uploadedObjects[bm.name] == nil {
uploadedObjects[bm.name] = []string{}
}
uploadedObjects[bm.name] = append(uploadedObjects[bm.name], objectPath)
}
})
}
teardown(client, uploadedObjects)
}
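
// teardown deletes every object uploaded during the benchmark run and
// removes the test bucket.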
func teardown(client s3client.Client, uploadedObjects map[string][]string) {
for _, bm := range benchmarkCases {
for _, objectPath := range uploadedObjects[bm.name] {
err := client.Delete(bucket, objectPath)
if err != nil {
log.Printf("failed to delete object %q: %+v\n", objectPath, err)
}
}
}
err := client.RemoveBucket(bucket)
if err != nil {
log.Fatalf("failed to remove bucket %q: %+v\n", bucket, err)
}
}
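
// BenchmarkDownload downloads the pre-uploaded test objects of each
// benchmark size and reports throughput via b.SetBytes.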
func BenchmarkDownload(b *testing.B) {
var client = uplinkSetup()
// upload some test objects so that there is something to download
uploadTestObjects(client)
for _, bm := range benchmarkCases {
b.Run(bm.name, func(b *testing.B) {
buf := make([]byte, bm.objectsize)
b.SetBytes(bm.objectsize.Int64())
b.ResetTimer()
for i := 0; i < b.N; i++ {
objectName := "folder/data_" + bm.name
_, err := client.Download(bucket, objectName, buf)
if err != nil {
log.Fatalf("failed to download object %q: %+v\n", objectName, err)
}
}
})
}
teardownTestObjects(client)
}
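
// uploadTestObjects uploads one object per benchmark case so that
// BenchmarkDownload has data to fetch.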
func uploadTestObjects(client s3client.Client) {
for name, data := range testObjects {
objectName := "folder/data_" + name
err := client.Upload(bucket, objectName, data)
if err != nil {
log.Fatalf("failed to upload object %q: %+v\n", objectName, err)
}
}
}
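
// teardownTestObjects deletes the objects created by uploadTestObjects
// and removes the test bucket.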
func teardownTestObjects(client s3client.Client) {
for name := range testObjects {
objectName := "folder/data_" + name
err := client.Delete(bucket, objectName)
if err != nil {
log.Fatalf("failed to delete object %q: %+v\n", objectName, err)
}
}
err := client.RemoveBucket(bucket)
if err != nil {
log.Fatalf("failed to remove bucket %q: %+v\n", bucket, err)
}
}
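
These benchmarks expect a storj-sim network to be running and an API key to be available via $GATEWAY_0_API_KEY or $apiKey (see setupConfig above and scripts/test-sim-benchmark.sh below). As a rough sketch, assuming storj-sim has already been set up, a single size class can be run on its own with the usual go test benchmark filter:

$ export apiKey=$(storj-sim network env GATEWAY_0_API_KEY)
$ go test -bench 'BenchmarkUpload/1MB' -benchmem ./cmd/uplink/cmd/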

internal/s3client (uplink client)

@@ -5,11 +5,15 @@ package s3client
import (
"bytes"
"fmt"
"os"
"os/exec"
"path/filepath"
"strings"
"github.com/zeebo/errs"
"storj.io/storj/internal/fpath"
)
// UplinkError is the error class for uplink client errors
@@ -24,16 +28,38 @@ type Uplink struct {
func NewUplink(conf Config) (Client, error) {
client := &Uplink{conf}
defaultConfDir := fpath.ApplicationDir("storj", "uplink")
setupDir, err := filepath.Abs(defaultConfDir)
if err != nil {
return nil, UplinkError.Wrap(fullExitError(err))
}
validForSetup, _ := fpath.IsValidSetupDir(setupDir)
// uplink configuration doesn't exist yet
if validForSetup {
fmt.Printf(`No existing uplink configuration located at (%v)...
Creating uplink configuration with the following settings:
"--non-interactive: true",
"--api-key: %s",
"--enc.key: %s",
"--satellite-addr: %s
`,
setupDir, client.conf.APIKey, client.conf.EncryptionKey, client.conf.Satellite,
)
cmd := client.cmd("setup",
"--overwrite",
"--non-interactive", "true",
"--api-key", client.conf.APIKey,
"--enc-key", client.conf.EncryptionKey,
"--enc.key", client.conf.EncryptionKey,
"--satellite-addr", client.conf.Satellite)
_, err := cmd.Output()
if err != nil {
return nil, UplinkError.Wrap(fullExitError(err))
}
} else {
// if uplink config file already exists, use the current config
fmt.Printf("Using existing uplink configuration from (%v). To pass in new settings, delete existing configs first\n", setupDir)
}
return client, nil
}
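
When no uplink configuration exists, NewUplink now performs a one-time setup; with the defaults from cp_test.go this is roughly equivalent to the following uplink CLI invocation (a sketch with illustrative values; the real values come from the s3client.Config passed in):

$ uplink setup --overwrite --non-interactive true \
      --api-key "$GATEWAY_0_API_KEY" \
      --enc.key supersecretkey \
      --satellite-addr 127.0.0.1:10000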

scripts/test-sim-benchmark.sh (new executable file, 26 lines)

@@ -0,0 +1,26 @@
#!/bin/bash
set -ueo pipefail
# Purpose: This script executes uplink upload and download benchmark tests against storj-sim.
# Setup: Remove any existing uplink configs.
# Usage: from root of storj repo, run
# $ storj-sim network test bash ./scripts/test-sim-benchmark.sh
# To run and filter out storj-sim logs, run:
# $ storj-sim -x network test bash ./scripts/test-sim-benchmark.sh | grep -i "test.out"
SATELLITE_0_ADDR=${SATELLITE_0_ADDR:-127.0.0.1:10000}
apiKey=$(storj-sim network env GATEWAY_0_API_KEY)
export apiKey
echo "apiKey:"
echo "$apiKey"
# run benchmark tests normally
echo
echo "Executing benchmark tests locally"
go test -bench . -benchmem ./cmd/uplink/cmd/
# run s3-benchmark with uplink
echo
echo "Executing s3-benchmark tests with uplink client..."
s3-benchmark --client=uplink --satellite="$SATELLITE_0_ADDR" --apikey="$apiKey"
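
The script runs the full suite; a single benchmark group can also be run through storj-sim directly, for example (a sketch, assuming storj-sim is installed and its network has been configured):

$ storj-sim network test bash -c \
      'export apiKey=$(storj-sim network env GATEWAY_0_API_KEY); go test -bench BenchmarkDownload -benchmem ./cmd/uplink/cmd/'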