storj/storage/postgreskv/client_test.go

// Copyright (C) 2019 Storj Labs, Inc.
// See LICENSE for copying information.

package postgreskv

import (
	"context"
	"strings"
	"testing"

	"github.com/jackc/pgx/v4"
	"github.com/jackc/pgx/v4/stdlib"
	"github.com/stretchr/testify/require"
	"github.com/zeebo/errs"

	"storj.io/common/testcontext"
	"storj.io/storj/private/dbutil/pgtest"
	"storj.io/storj/private/tagsql"
	"storj.io/storj/storage"
	"storj.io/storj/storage/testsuite"
)
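
// newTestPostgres connects to the Postgres database chosen by
// pgtest.PickPostgres (which skips the test when no test database is
// configured for the run) and returns a *Client together with a cleanup
// func that closes it.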
func newTestPostgres(t testing.TB) (store *Client, cleanup func()) {
	connstr := pgtest.PickPostgres(t)

	pgdb, err := New(connstr)
	if err != nil {
		t.Fatalf("init: %v", err)
	}
	return pgdb, func() {
		if err := pgdb.Close(); err != nil {
			t.Fatalf("failed to close db: %v", err)
		}
	}
}
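
// TestSuite migrates a fresh database to the latest schema and then runs the
// shared key/value storage test suite against it.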
func TestSuite(t *testing.T) {
	store, cleanup := newTestPostgres(t)
	defer cleanup()

	ctx := testcontext.New(t)
	defer ctx.Cleanup()

	err := store.MigrateToLatest(ctx)
	require.NoError(t, err)

	// zap := zaptest.NewLogger(t)
	// loggedStore := storelogger.New(zap, store)
	store.SetLookupLimit(500)
	testsuite.RunTests(t, store)
}
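
// TestThatMigrationActuallyHappened looks up the 4-argument list_directory()
// function in pg_catalog and fails if its source still matches the
// pre-migration definition.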
func TestThatMigrationActuallyHappened(t *testing.T) {
	t.Skip()

	ctx := testcontext.New(t)
	defer ctx.Cleanup()

	store, cleanup := newTestPostgres(t)
	defer cleanup()

	rows, err := store.db.Query(ctx, `
		SELECT prosrc
		  FROM pg_catalog.pg_proc p,
		       pg_catalog.pg_namespace n
		 WHERE p.pronamespace = n.oid
		   AND p.proname = 'list_directory'
		   AND n.nspname = ANY(current_schemas(true))
		   AND p.pronargs = 4
	`)
	if err != nil {
		t.Fatalf("failed to get list_directory source: %v", err)
	}
	defer func() {
		if err := rows.Close(); err != nil {
			t.Fatalf("failed to close rows: %v", err)
		}
	}()

	numFound := 0
	for rows.Next() {
		numFound++
		if numFound > 1 {
			t.Fatal("there are multiple eligible list_directory() functions??")
		}
		var source string
		if err := rows.Scan(&source); err != nil {
			t.Fatalf("failed to read list_directory source: %v", err)
		}
		if strings.Contains(source, "distinct_prefix (truncatedpath)") {
			t.Fatal("list_directory() function in pg appears to be the old, pre-migration one")
		}
	}
	// surface any error that terminated the row iteration early
	if err := rows.Err(); err != nil {
		t.Fatalf("failed iterating list_directory rows: %v", err)
	}
}

func BenchmarkSuite(b *testing.B) {
	b.Skip("broken")

	store, cleanup := newTestPostgres(b)
	defer cleanup()

	testsuite.RunBenchmarks(b, store)
}
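
// bulkImportCopyFromSource adapts a storage.Iterator to the Next/Values/Err
// source interface that pgx's CopyFrom expects, so iterator contents can be
// streamed into a table with COPY.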
type bulkImportCopyFromSource struct {
	ctx  context.Context
	iter storage.Iterator
	item storage.ListItem
}

func (bs *bulkImportCopyFromSource) Next() bool {
	return bs.iter.Next(bs.ctx, &bs.item)
}

func (bs *bulkImportCopyFromSource) Values() ([]interface{}, error) {
	return []interface{}{bs.item.Key, bs.item.Value}, nil
}

func (bs *bulkImportCopyFromSource) Err() error {
	// storage.Iterator does not expose an error, so there is nothing to report here.
	return nil
}
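
// Compile-time assertion that bulkImportCopyFromSource satisfies
// pgx.CopyFromSource, the interface pgxConn.CopyFrom below expects for its
// row source.
var _ pgx.CopyFromSource = (*bulkImportCopyFromSource)(nil)

// bulkImport streams the iterator's keys and values into the pathdata table
// via Postgres COPY, borrowing the raw pgx connection from the tagsql DB.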
func bulkImport(ctx context.Context, db tagsql.DB, iter storage.Iterator) (err error) {
	defer mon.Task()(&ctx)(&err)

	pgxConn, err := stdlib.AcquireConn(db.Internal())
	if err != nil {
		return err
	}
	defer func() {
		err = errs.Combine(err, stdlib.ReleaseConn(db.Internal(), pgxConn))
	}()

	// pass ctx so the iterator's Next calls do not receive a nil context
	importSource := &bulkImportCopyFromSource{ctx: ctx, iter: iter}
	_, err = pgxConn.CopyFrom(ctx, pgx.Identifier{"pathdata"}, []string{"fullpath", "metadata"}, importSource)
	return err
}
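
// bulkDeleteAll empties the pathdata table with TRUNCATE so a benchmark run
// can start from a clean store.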
func bulkDeleteAll(ctx context.Context, db tagsql.DB) error {
	_, err := db.Exec(ctx, "TRUNCATE pathdata")
	if err != nil {
		return errs.New("failed to TRUNCATE pathdata table: %v", err)
	}
	return nil
}
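
// pgLongBenchmarkStore wraps Client with the bulk import/delete helpers
// required by the long-running path-operations benchmark.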
type pgLongBenchmarkStore struct {
	*Client
}

func (store *pgLongBenchmarkStore) BulkImport(ctx context.Context, iter storage.Iterator) error {
	return bulkImport(ctx, store.db, iter)
}

func (store *pgLongBenchmarkStore) BulkDeleteAll(ctx context.Context) error {
	return bulkDeleteAll(ctx, store.db)
}

func BenchmarkSuiteLong(b *testing.B) {
	store, cleanup := newTestPostgres(b)
	defer cleanup()

	testsuite.BenchmarkPathOperationsInLargeDb(b, &pgLongBenchmarkStore{store})
}