server side macaroons (#1945)

What: Adds macaroon support to the server side.
Why: So that API keys are now macaroons.

parent b15b6edc58
commit 1bd52b9f90
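In short: an API key is no longer a fixed 24-byte value but a serialized macaroon. The satellite stores only the key's head (the lookup handle) and the secret it was minted from; the client keeps the serialized key and can attenuate it with caveats. A minimal sketch of the lifecycle, using only the macaroon package calls this diff introduces (the main wrapper is illustrative, not part of the change):

    package main

    import (
        "fmt"

        "storj.io/storj/pkg/macaroon"
    )

    func main() {
        // Server side: mint a per-key secret, then derive an API key from it.
        secret, err := macaroon.NewSecret()
        if err != nil {
            panic(err)
        }
        key, err := macaroon.NewAPIKey(secret)
        if err != nil {
            panic(err)
        }

        // The database keeps key.Head() for lookups and the secret for
        // caveat verification; the user is shown key.Serialize() once.
        fmt.Printf("head: %x\n", key.Head())
        fmt.Println("api key:", key.Serialize())

        // Client side: the serialized form round-trips through ParseAPIKey.
        if _, err := macaroon.ParseAPIKey(key.Serialize()); err != nil {
            panic(err)
        }
    }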
@@ -19,6 +19,7 @@ import (
 	"storj.io/storj/pkg/auth/signing"
 	"storj.io/storj/pkg/cfgstruct"
 	"storj.io/storj/pkg/identity"
+	"storj.io/storj/pkg/macaroon"
 	"storj.io/storj/pkg/pb"
 	"storj.io/storj/pkg/peertls/tlsopts"
 	"storj.io/storj/pkg/storage/streams"
@@ -82,7 +83,10 @@ func (planet *Planet) newUplink(name string, storageNodeCount int) (*Uplink, err
 		consoleDB := satellite.DB.Console()
 
 		projectName := fmt.Sprintf("%s_%d", name, j)
-		key := console.APIKeyFromBytes([]byte(projectName))
+		key, err := macaroon.NewAPIKey([]byte("testSecret"))
+		if err != nil {
+			return nil, err
+		}
 
 		project, err := consoleDB.Projects().Insert(
 			context.Background(),
@@ -96,17 +100,18 @@ func (planet *Planet) newUplink(name string, storageNodeCount int) (*Uplink, err
 
 		_, err = consoleDB.APIKeys().Create(
 			context.Background(),
-			*key,
+			key.Head(),
 			console.APIKeyInfo{
 				Name:      "root",
 				ProjectID: project.ID,
+				Secret:    []byte("testSecret"),
 			},
 		)
 		if err != nil {
 			return nil, err
 		}
 
-		apiKeys[satellite.ID()] = key.String()
+		apiKeys[satellite.ID()] = key.Serialize()
 	}
 
 	uplink.APIKey = apiKeys
@@ -18,6 +18,7 @@ import (
 	"storj.io/storj/internal/testcontext"
 	"storj.io/storj/internal/testplanet"
 	"storj.io/storj/pkg/eestream"
+	"storj.io/storj/pkg/macaroon"
 	"storj.io/storj/pkg/metainfo/kvmetainfo"
 	"storj.io/storj/pkg/storage/buckets"
 	ecclient "storj.io/storj/pkg/storage/ec"
@@ -31,8 +32,6 @@ const (
 	TestBucket = "test-bucket"
 )
 
-var TestAPIKey = "test-api-key"
-
 func TestBucketsBasic(t *testing.T) {
 	runTest(t, func(ctx context.Context, planet *testplanet.Planet, db *kvmetainfo.DB, buckets buckets.Store, streams streams.Store) {
 		// Create new bucket
@@ -326,24 +325,28 @@ func newMetainfoParts(planet *testplanet.Planet) (*kvmetainfo.DB, buckets.Store,
 	project, err := planet.Satellites[0].DB.Console().Projects().Insert(context.Background(), &console.Project{
 		Name: "testProject",
 	})
 
 	if err != nil {
 		return nil, nil, nil, err
 	}
 
-	apiKey := console.APIKey{}
+	apiKey, err := macaroon.NewAPIKey([]byte("testSecret"))
+	if err != nil {
+		return nil, nil, nil, err
+	}
+
 	apiKeyInfo := console.APIKeyInfo{
 		ProjectID: project.ID,
 		Name:      "testKey",
+		Secret:    []byte("testSecret"),
 	}
 
 	// add api key to db
-	_, err = planet.Satellites[0].DB.Console().APIKeys().Create(context.Background(), apiKey, apiKeyInfo)
+	_, err = planet.Satellites[0].DB.Console().APIKeys().Create(context.Background(), apiKey.Head(), apiKeyInfo)
 	if err != nil {
 		return nil, nil, nil, err
 	}
 
-	metainfo, err := planet.Uplinks[0].DialMetainfo(context.Background(), planet.Satellites[0], apiKey.String())
+	metainfo, err := planet.Uplinks[0].DialMetainfo(context.Background(), planet.Satellites[0], apiKey.Serialize())
 	if err != nil {
 		return nil, nil, nil, err
 	}
@@ -23,6 +23,7 @@ import (
 	"storj.io/storj/internal/testplanet"
 	libuplink "storj.io/storj/lib/uplink"
 	"storj.io/storj/pkg/eestream"
+	"storj.io/storj/pkg/macaroon"
 	"storj.io/storj/pkg/metainfo/kvmetainfo"
 	"storj.io/storj/pkg/pb"
 	"storj.io/storj/pkg/storage/buckets"
@@ -662,24 +663,28 @@ func initEnv(ctx context.Context, planet *testplanet.Planet) (minio.ObjectLayer,
 	project, err := planet.Satellites[0].DB.Console().Projects().Insert(ctx, &console.Project{
 		Name: "testProject",
 	})
 
 	if err != nil {
 		return nil, nil, nil, err
 	}
 
-	apiKey := console.APIKey{}
+	apiKey, err := macaroon.NewAPIKey([]byte("testSecret"))
+	if err != nil {
+		return nil, nil, nil, err
+	}
+
 	apiKeyInfo := console.APIKeyInfo{
 		ProjectID: project.ID,
 		Name:      "testKey",
+		Secret:    []byte("testSecret"),
 	}
 
 	// add api key to db
-	_, err = planet.Satellites[0].DB.Console().APIKeys().Create(ctx, apiKey, apiKeyInfo)
+	_, err = planet.Satellites[0].DB.Console().APIKeys().Create(ctx, apiKey.Head(), apiKeyInfo)
 	if err != nil {
 		return nil, nil, nil, err
 	}
 
-	metainfo, err := planet.Uplinks[0].DialMetainfo(ctx, planet.Satellites[0], apiKey.String())
+	metainfo, err := planet.Uplinks[0].DialMetainfo(ctx, planet.Satellites[0], apiKey.Serialize())
 	if err != nil {
 		return nil, nil, nil, err
 	}
@@ -722,7 +727,7 @@ func initEnv(ctx context.Context, planet *testplanet.Planet) (minio.ObjectLayer,
 		return nil, nil, nil, err
 	}
 
-	parsedAPIKey, err := libuplink.ParseAPIKey(apiKey.String())
+	parsedAPIKey, err := libuplink.ParseAPIKey(apiKey.Serialize())
 	if err != nil {
 		return nil, nil, nil, err
 	}
@@ -28,6 +28,7 @@ import (
 	libuplink "storj.io/storj/lib/uplink"
 	"storj.io/storj/pkg/cfgstruct"
 	"storj.io/storj/pkg/identity"
+	"storj.io/storj/pkg/macaroon"
 	"storj.io/storj/pkg/miniogw"
 	"storj.io/storj/pkg/storj"
 	"storj.io/storj/satellite/console"
@@ -54,17 +55,19 @@ func TestUploadDownload(t *testing.T) {
 	project, err := planet.Satellites[0].DB.Console().Projects().Insert(context.Background(), &console.Project{
 		Name: "testProject",
 	})
 
 	assert.NoError(t, err)
 
-	apiKey := console.APIKey{}
+	apiKey, err := macaroon.NewAPIKey([]byte("testSecret"))
+	assert.NoError(t, err)
 
 	apiKeyInfo := console.APIKeyInfo{
 		ProjectID: project.ID,
 		Name:      "testKey",
+		Secret:    []byte("testSecret"),
 	}
 
 	// add api key to db
-	_, err = planet.Satellites[0].DB.Console().APIKeys().Create(context.Background(), apiKey, apiKeyInfo)
+	_, err = planet.Satellites[0].DB.Console().APIKeys().Create(context.Background(), apiKey.Head(), apiKeyInfo)
 	assert.NoError(t, err)
 
 	// bind default values to config
@@ -22,6 +22,7 @@ import (
 	"storj.io/storj/internal/testcontext"
 	"storj.io/storj/internal/testplanet"
 	"storj.io/storj/pkg/eestream"
+	"storj.io/storj/pkg/macaroon"
 	"storj.io/storj/pkg/pb"
 	ecclient "storj.io/storj/pkg/storage/ec"
 	"storj.io/storj/pkg/storage/meta"
@@ -246,17 +247,20 @@ func runTest(t *testing.T, test func(t *testing.T, ctx *testcontext.Context, pla
 	})
 	require.NoError(t, err)
 
-	apiKey := console.APIKey{}
+	apiKey, err := macaroon.NewAPIKey([]byte("testSecret"))
+	require.NoError(t, err)
+
 	apiKeyInfo := console.APIKeyInfo{
 		ProjectID: project.ID,
 		Name:      "testKey",
+		Secret:    []byte("testSecret"),
 	}
 
 	// add api key to db
-	_, err = planet.Satellites[0].DB.Console().APIKeys().Create(context.Background(), apiKey, apiKeyInfo)
+	_, err = planet.Satellites[0].DB.Console().APIKeys().Create(context.Background(), apiKey.Head(), apiKeyInfo)
 	require.NoError(t, err)
 
-	TestAPIKey := apiKey.String()
+	TestAPIKey := apiKey.Serialize()
 
 	metainfo, err := planet.Uplinks[0].DialMetainfo(context.Background(), planet.Satellites[0], TestAPIKey)
 	require.NoError(t, err)
@@ -5,13 +5,9 @@ package console
 
 import (
 	"context"
-	"crypto/rand"
-	"encoding/base32"
-	"io"
 	"time"
 
 	"github.com/skyrings/skyring-common/tools/uuid"
-	"github.com/zeebo/errs"
 )
 
 // APIKeys is interface for working with api keys store
@@ -20,10 +16,10 @@ type APIKeys interface {
 	GetByProjectID(ctx context.Context, projectID uuid.UUID) ([]APIKeyInfo, error)
 	// Get retrieves APIKeyInfo with given ID
 	Get(ctx context.Context, id uuid.UUID) (*APIKeyInfo, error)
-	//GetByKey retrieves APIKeyInfo for given key
-	GetByKey(ctx context.Context, key APIKey) (*APIKeyInfo, error)
+	// GetByHead retrieves APIKeyInfo for given key head
+	GetByHead(ctx context.Context, head []byte) (*APIKeyInfo, error)
 	// Create creates and stores new APIKeyInfo
-	Create(ctx context.Context, key APIKey, info APIKeyInfo) (*APIKeyInfo, error)
+	Create(ctx context.Context, head []byte, info APIKeyInfo) (*APIKeyInfo, error)
 	// Update updates APIKeyInfo in store
 	Update(ctx context.Context, key APIKeyInfo) error
 	// Delete deletes APIKeyInfo from store
@@ -32,50 +28,9 @@ type APIKeys interface {
 
 // APIKeyInfo describing api key model in the database
 type APIKeyInfo struct {
-	ID uuid.UUID `json:"id"`
-
-	// Fk on project
-	ProjectID uuid.UUID `json:"projectId"`
-	Name      string    `json:"name"`
-
-	CreatedAt time.Time `json:"createdAt"`
+	ID        uuid.UUID `json:"id"`
+	ProjectID uuid.UUID `json:"projectId"`
+	Name      string    `json:"name"`
+	Secret    []byte    `json:"-"`
+	CreatedAt time.Time `json:"createdAt"`
 }
-
-// APIKey is an api key type
-type APIKey [24]byte
-
-// String implements Stringer
-func (key APIKey) String() string {
-	return base32.HexEncoding.EncodeToString(key[:])
-}
-
-// APIKeyFromBytes creates new key from byte slice
-func APIKeyFromBytes(b []byte) *APIKey {
-	key := new(APIKey)
-	copy(key[:], b)
-	return key
-}
-
-// APIKeyFromBase32 creates new key from base32 string
-func APIKeyFromBase32(s string) (*APIKey, error) {
-	b, err := base32.HexEncoding.DecodeString(s)
-	if err != nil {
-		return nil, err
-	}
-	key := new(APIKey)
-	copy(key[:], b)
-	return key, nil
-}
-
-// CreateAPIKey creates new api key
-func CreateAPIKey() (*APIKey, error) {
-	key := new(APIKey)
-
-	n, err := io.ReadFull(rand.Reader, key[:])
-	if err != nil || n != 24 {
-		return nil, errs.New(internalErrMsg)
-	}
-
-	return key, nil
-}
@@ -10,6 +10,7 @@ import (
 	"github.com/stretchr/testify/assert"
 
 	"storj.io/storj/internal/testcontext"
+	"storj.io/storj/pkg/macaroon"
 	"storj.io/storj/satellite"
 	"storj.io/storj/satellite/console"
 	"storj.io/storj/satellite/satellitedb/satellitedbtest"
@@ -32,15 +33,16 @@ func TestApiKeysRepository(t *testing.T) {
 
 	t.Run("Creation success", func(t *testing.T) {
 		for i := 0; i < 10; i++ {
-			key, err := console.CreateAPIKey()
+			key, err := macaroon.NewAPIKey([]byte("testSecret"))
 			assert.NoError(t, err)
 
 			keyInfo := console.APIKeyInfo{
 				Name:      fmt.Sprintf("key %d", i),
 				ProjectID: project.ID,
+				Secret:    []byte("testSecret"),
 			}
 
-			createdKey, err := apikeys.Create(ctx, *key, keyInfo)
+			createdKey, err := apikeys.Create(ctx, key.Head(), keyInfo)
 			assert.NotNil(t, createdKey)
 			assert.NoError(t, err)
 		}
@@ -55,8 +55,8 @@ func graphqlCreateAPIKey(types *TypeCreator) *graphql.Object {
 	})
 }
 
-// createAPIKey holds satellite.APIKey and satellite.APIKeyInfo
+// createAPIKey holds macaroon.APIKey and console.APIKeyInfo
 type createAPIKey struct {
-	Key     *console.APIKey
+	Key     string
 	KeyInfo *console.APIKeyInfo
 }
@@ -379,7 +379,7 @@ func rootMutation(log *zap.Logger, service *console.Service, mailService *mailse
 			}
 
 			return createAPIKey{
-				Key:     key,
+				Key:     key.Serialize(),
 				KeyInfo: info,
 			}, nil
 		},
@@ -15,6 +15,7 @@ import (
 	"gopkg.in/spacemonkeygo/monkit.v2"
 
 	"storj.io/storj/pkg/auth"
+	"storj.io/storj/pkg/macaroon"
 	"storj.io/storj/satellite/console/consoleauth"
 )
 
@@ -44,7 +45,7 @@ const (
 	credentialsErrMsg            = "Your email or password was incorrect, please try again"
 	oldPassIncorrectErrMsg       = "Old password is incorrect, please try again"
 	passwordIncorrectErrMsg      = "Your password needs at least %d characters long"
 	teamMemberDoesNotExistErrMsg = `There is no account on this Satellite for the user(s) you have entered.
Please add team members with active accounts`
 
 	// TODO: remove after vanguard release
@@ -657,7 +658,7 @@ func (s *Service) GetProjectMembers(ctx context.Context, projectID uuid.UUID, pa
 }
 
 // CreateAPIKey creates new api key
-func (s *Service) CreateAPIKey(ctx context.Context, projectID uuid.UUID, name string) (*APIKeyInfo, *APIKey, error) {
+func (s *Service) CreateAPIKey(ctx context.Context, projectID uuid.UUID, name string) (*APIKeyInfo, *macaroon.APIKey, error) {
 	var err error
 	defer mon.Task()(&ctx)(&err)
 
@@ -671,14 +672,20 @@ func (s *Service) CreateAPIKey(ctx context.Context, projectID uuid.UUID, name st
 		return nil, nil, ErrUnauthorized.Wrap(err)
 	}
 
-	key, err := CreateAPIKey()
+	secret, err := macaroon.NewSecret()
+	if err != nil {
+		return nil, nil, errs.New(internalErrMsg)
+	}
+
+	key, err := macaroon.NewAPIKey(secret)
 	if err != nil {
 		return nil, nil, err
 	}
 
-	info, err := s.store.APIKeys().Create(ctx, *key, APIKeyInfo{
+	info, err := s.store.APIKeys().Create(ctx, key.Head(), APIKeyInfo{
 		Name:      name,
 		ProjectID: projectID,
+		Secret:    secret,
 	})
 	if err != nil {
 		return nil, nil, errs.New(internalErrMsg)
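Note the two-step mint in CreateAPIKey above: a fresh random secret per key via macaroon.NewSecret, then the key derived from it, with the secret persisted in APIKeyInfo so later requests can be verified. A hedged sketch of calling the reworked service (the example function and its arguments are assumptions, not part of the diff):

    package example

    import (
        "context"
        "fmt"

        "github.com/skyrings/skyring-common/tools/uuid"

        "storj.io/storj/satellite/console"
    )

    // showNewKey creates an API key through the reworked service and prints
    // the only copy of the serialized key the user will ever see; the DB
    // stores just the head and secret, not the macaroon itself.
    func showNewKey(ctx context.Context, service *console.Service, projectID uuid.UUID) error {
        info, key, err := service.CreateAPIKey(ctx, projectID, "my-key")
        if err != nil {
            return err
        }
        fmt.Println("created:", info.Name)
        fmt.Println("api key:", key.Serialize()) // not recoverable from the DB
        return nil
    }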
@@ -23,6 +23,7 @@ import (
 	"storj.io/storj/pkg/auth"
 	"storj.io/storj/pkg/eestream"
 	"storj.io/storj/pkg/identity"
+	"storj.io/storj/pkg/macaroon"
 	"storj.io/storj/pkg/overlay"
 	"storj.io/storj/pkg/pb"
 	"storj.io/storj/pkg/storj"
@@ -39,7 +40,12 @@ var (
 
 // APIKeys is api keys store methods used by endpoint
 type APIKeys interface {
-	GetByKey(ctx context.Context, key console.APIKey) (*console.APIKeyInfo, error)
+	GetByHead(ctx context.Context, head []byte) (*console.APIKeyInfo, error)
+}
+
+// Revocations is the revocations store methods used by the endpoint
+type Revocations interface {
+	GetByProjectID(ctx context.Context, projectID uuid.UUID) ([][]byte, error)
 }
 
 // Endpoint metainfo endpoint
@@ -56,7 +62,11 @@ type Endpoint struct {
 }
 
 // NewEndpoint creates new metainfo endpoint instance
-func NewEndpoint(log *zap.Logger, metainfo *Service, orders *orders.Service, cache *overlay.Cache, apiKeys APIKeys, sdb accounting.StoragenodeAccounting, pdb accounting.ProjectAccounting, liveAccounting live.Service, maxAlphaUsage memory.Size) *Endpoint {
+func NewEndpoint(log *zap.Logger, metainfo *Service, orders *orders.Service, cache *overlay.Cache,
+	apiKeys APIKeys, sdb accounting.StoragenodeAccounting,
+	pdb accounting.ProjectAccounting, liveAccounting live.Service,
+	maxAlphaUsage memory.Size) *Endpoint {
 	// TODO do something with too many params
 	return &Endpoint{
 		log: log,
@@ -74,22 +84,29 @@ func NewEndpoint(log *zap.Logger, metainfo *Service, orders *orders.Service, cac
 // Close closes resources
 func (endpoint *Endpoint) Close() error { return nil }
 
-func (endpoint *Endpoint) validateAuth(ctx context.Context) (*console.APIKeyInfo, error) {
-	APIKey, ok := auth.GetAPIKey(ctx)
+func (endpoint *Endpoint) validateAuth(ctx context.Context, action macaroon.Action) (*console.APIKeyInfo, error) {
+	keyData, ok := auth.GetAPIKey(ctx)
 	if !ok {
-		endpoint.log.Error("unauthorized request: ", zap.Error(status.Errorf(codes.Unauthenticated, "Invalid API credential")))
+		endpoint.log.Error("unauthorized request", zap.Error(status.Errorf(codes.Unauthenticated, "Invalid API credential")))
 		return nil, status.Errorf(codes.Unauthenticated, "Invalid API credential")
 	}
 
-	key, err := console.APIKeyFromBase32(string(APIKey))
+	key, err := macaroon.ParseAPIKey(string(keyData))
 	if err != nil {
-		endpoint.log.Error("unauthorized request: ", zap.Error(status.Errorf(codes.Unauthenticated, "Invalid API credential")))
+		endpoint.log.Error("unauthorized request", zap.Error(status.Errorf(codes.Unauthenticated, "Invalid API credential")))
 		return nil, status.Errorf(codes.Unauthenticated, "Invalid API credential")
 	}
 
-	keyInfo, err := endpoint.apiKeys.GetByKey(ctx, *key)
+	keyInfo, err := endpoint.apiKeys.GetByHead(ctx, key.Head())
 	if err != nil {
-		endpoint.log.Error("unauthorized request: ", zap.Error(status.Errorf(codes.Unauthenticated, err.Error())))
+		endpoint.log.Error("unauthorized request", zap.Error(status.Errorf(codes.Unauthenticated, err.Error())))
+		return nil, status.Errorf(codes.Unauthenticated, "Invalid API credential")
+	}
+
+	// Revocations are currently handled by just deleting the key.
+	err = key.Check(keyInfo.Secret, action, nil)
+	if err != nil {
+		endpoint.log.Error("unauthorized request", zap.Error(status.Errorf(codes.Unauthenticated, err.Error())))
 		return nil, status.Errorf(codes.Unauthenticated, "Invalid API credential")
 	}
 
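validateAuth now authenticates in three steps: macaroon.ParseAPIKey decodes the presented key, GetByHead resolves the stored record (including its secret), and key.Check verifies the macaroon against that secret plus every caveat for the concrete action. A sketch of the effect from the key holder's side, mirroring the restricted-key test added further down (the demo function, key, and secret are assumptions; key and secret are taken to come from macaroon.NewAPIKey):

    package example

    import (
        "fmt"
        "time"

        "storj.io/storj/pkg/macaroon"
    )

    // demoRestriction attenuates a key with caveats and shows how Check
    // reacts — the same check validateAuth runs with the stored secret.
    func demoRestriction(key *macaroon.APIKey, secret []byte) error {
        readOnly, err := key.Restrict(macaroon.Caveat{
            DisallowWrites:  true,
            DisallowDeletes: true,
        })
        if err != nil {
            return err
        }

        // A write action fails the caveat check...
        err = readOnly.Check(secret, macaroon.Action{
            Op:     macaroon.ActionWrite,
            Bucket: []byte("testbucket"),
            Time:   time.Now(),
        }, nil)
        fmt.Println("write allowed:", err == nil) // false

        // ...while a read action still passes.
        err = readOnly.Check(secret, macaroon.Action{
            Op:     macaroon.ActionRead,
            Bucket: []byte("testbucket"),
            Time:   time.Now(),
        }, nil)
        fmt.Println("read allowed:", err == nil) // true
        return nil
    }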
@@ -100,7 +117,12 @@ func (endpoint *Endpoint) validateAuth(ctx context.Context) (*console.APIKeyInfo
 func (endpoint *Endpoint) SegmentInfo(ctx context.Context, req *pb.SegmentInfoRequest) (resp *pb.SegmentInfoResponse, err error) {
 	defer mon.Task()(&ctx)(&err)
 
-	keyInfo, err := endpoint.validateAuth(ctx)
+	keyInfo, err := endpoint.validateAuth(ctx, macaroon.Action{
+		Op:            macaroon.ActionRead,
+		Bucket:        req.Bucket,
+		EncryptedPath: req.Path,
+		Time:          time.Now(),
+	})
 	if err != nil {
 		return nil, status.Errorf(codes.Unauthenticated, err.Error())
 	}
@@ -131,7 +153,12 @@ func (endpoint *Endpoint) SegmentInfo(ctx context.Context, req *pb.SegmentInfoRe
 func (endpoint *Endpoint) CreateSegment(ctx context.Context, req *pb.SegmentWriteRequest) (resp *pb.SegmentWriteResponse, err error) {
 	defer mon.Task()(&ctx)(&err)
 
-	keyInfo, err := endpoint.validateAuth(ctx)
+	keyInfo, err := endpoint.validateAuth(ctx, macaroon.Action{
+		Op:            macaroon.ActionWrite,
+		Bucket:        req.Bucket,
+		EncryptedPath: req.Path,
+		Time:          time.Now(),
+	})
 	if err != nil {
 		return nil, status.Errorf(codes.Unauthenticated, err.Error())
 	}
@@ -226,7 +253,12 @@ func calculateSpaceUsed(ptr *pb.Pointer) (inlineSpace, remoteSpace int64) {
 func (endpoint *Endpoint) CommitSegment(ctx context.Context, req *pb.SegmentCommitRequest) (resp *pb.SegmentCommitResponse, err error) {
 	defer mon.Task()(&ctx)(&err)
 
-	keyInfo, err := endpoint.validateAuth(ctx)
+	keyInfo, err := endpoint.validateAuth(ctx, macaroon.Action{
+		Op:            macaroon.ActionWrite,
+		Bucket:        req.Bucket,
+		EncryptedPath: req.Path,
+		Time:          time.Now(),
+	})
 	if err != nil {
 		return nil, status.Errorf(codes.Unauthenticated, err.Error())
 	}
@@ -284,7 +316,12 @@ func (endpoint *Endpoint) CommitSegment(ctx context.Context, req *pb.SegmentComm
 func (endpoint *Endpoint) DownloadSegment(ctx context.Context, req *pb.SegmentDownloadRequest) (resp *pb.SegmentDownloadResponse, err error) {
 	defer mon.Task()(&ctx)(&err)
 
-	keyInfo, err := endpoint.validateAuth(ctx)
+	keyInfo, err := endpoint.validateAuth(ctx, macaroon.Action{
+		Op:            macaroon.ActionRead,
+		Bucket:        req.Bucket,
+		EncryptedPath: req.Path,
+		Time:          time.Now(),
+	})
 	if err != nil {
 		return nil, status.Errorf(codes.Unauthenticated, err.Error())
 	}
@@ -352,7 +389,12 @@ func (endpoint *Endpoint) DownloadSegment(ctx context.Context, req *pb.SegmentDo
 func (endpoint *Endpoint) DeleteSegment(ctx context.Context, req *pb.SegmentDeleteRequest) (resp *pb.SegmentDeleteResponse, err error) {
 	defer mon.Task()(&ctx)(&err)
 
-	keyInfo, err := endpoint.validateAuth(ctx)
+	keyInfo, err := endpoint.validateAuth(ctx, macaroon.Action{
+		Op:            macaroon.ActionDelete,
+		Bucket:        req.Bucket,
+		EncryptedPath: req.Path,
+		Time:          time.Now(),
+	})
 	if err != nil {
 		return nil, status.Errorf(codes.Unauthenticated, err.Error())
 	}
@@ -403,7 +445,12 @@ func (endpoint *Endpoint) DeleteSegment(ctx context.Context, req *pb.SegmentDele
 func (endpoint *Endpoint) ListSegments(ctx context.Context, req *pb.ListSegmentsRequest) (resp *pb.ListSegmentsResponse, err error) {
 	defer mon.Task()(&ctx)(&err)
 
-	keyInfo, err := endpoint.validateAuth(ctx)
+	keyInfo, err := endpoint.validateAuth(ctx, macaroon.Action{
+		Op:            macaroon.ActionList,
+		Bucket:        req.Bucket,
+		EncryptedPath: req.Prefix,
+		Time:          time.Now(),
+	})
 	if err != nil {
 		return nil, status.Errorf(codes.Unauthenticated, err.Error())
 	}
@@ -5,7 +5,6 @@ package metainfo_test
 
 import (
 	"context"
-	"fmt"
 	"sort"
 	"testing"
 	"time"
@@ -19,6 +18,7 @@ import (
 
 	"storj.io/storj/internal/testcontext"
 	"storj.io/storj/internal/testplanet"
+	"storj.io/storj/pkg/macaroon"
 	"storj.io/storj/pkg/pb"
 	"storj.io/storj/pkg/storj"
 	"storj.io/storj/satellite/console"
@@ -31,7 +31,7 @@ type mockAPIKeys struct {
 }
 
 // GetByKey return api key info for given key
-func (keys *mockAPIKeys) GetByKey(ctx context.Context, key console.APIKey) (*console.APIKeyInfo, error) {
+func (keys *mockAPIKeys) GetByKey(ctx context.Context, key macaroon.APIKey) (*console.APIKeyInfo, error) {
 	return &keys.info, keys.err
 }
 
@@ -50,31 +50,142 @@ func TestInvalidAPIKey(t *testing.T) {
 		require.NoError(t, err)
 
 		_, _, err = client.CreateSegment(ctx, "hello", "world", 1, &pb.RedundancyScheme{}, 123, time.Now())
-		assertUnauthenticated(t, err)
+		assertUnauthenticated(t, err, false)
 
 		_, err = client.CommitSegment(ctx, "testbucket", "testpath", 0, &pb.Pointer{}, nil)
-		assertUnauthenticated(t, err)
+		assertUnauthenticated(t, err, false)
 
 		_, err = client.SegmentInfo(ctx, "testbucket", "testpath", 0)
-		assertUnauthenticated(t, err)
+		assertUnauthenticated(t, err, false)
 
 		_, _, err = client.ReadSegment(ctx, "testbucket", "testpath", 0)
-		assertUnauthenticated(t, err)
+		assertUnauthenticated(t, err, false)
 
 		_, err = client.DeleteSegment(ctx, "testbucket", "testpath", 0)
-		assertUnauthenticated(t, err)
+		assertUnauthenticated(t, err, false)
 
 		_, _, err = client.ListSegments(ctx, "testbucket", "", "", "", true, 1, 0)
-		assertUnauthenticated(t, err)
+		assertUnauthenticated(t, err, false)
 	}
 }
 
-func assertUnauthenticated(t *testing.T, err error) {
+func TestRestrictedAPIKey(t *testing.T) {
+	ctx := testcontext.New(t)
+	defer ctx.Cleanup()
+
+	planet, err := testplanet.New(t, 1, 1, 1)
+	require.NoError(t, err)
+	defer ctx.Check(planet.Shutdown)
+
+	planet.Start(ctx)
+
+	key, err := macaroon.ParseAPIKey(planet.Uplinks[0].APIKey[planet.Satellites[0].ID()])
+	require.NoError(t, err)
+
+	tests := []struct {
+		Caveat               macaroon.Caveat
+		CreateSegmentAllowed bool
+		CommitSegmentAllowed bool
+		SegmentInfoAllowed   bool
+		ReadSegmentAllowed   bool
+		DeleteSegmentAllowed bool
+		ListSegmentsAllowed  bool
+	}{
+		{ // Everything disallowed
+			Caveat: macaroon.Caveat{
+				DisallowReads:   true,
+				DisallowWrites:  true,
+				DisallowLists:   true,
+				DisallowDeletes: true,
+			},
+		},
+
+		{ // Read only
+			Caveat: macaroon.Caveat{
+				DisallowWrites:  true,
+				DisallowDeletes: true,
+			},
+			SegmentInfoAllowed:  true,
+			ReadSegmentAllowed:  true,
+			ListSegmentsAllowed: true,
+		},
+
+		{ // Write only
+			Caveat: macaroon.Caveat{
+				DisallowReads: true,
+				DisallowLists: true,
+			},
+			CreateSegmentAllowed: true,
+			CommitSegmentAllowed: true,
+			DeleteSegmentAllowed: true,
+		},
+
+		{ // Bucket restriction
+			Caveat: macaroon.Caveat{
+				AllowedPaths: []*macaroon.Caveat_Path{{
+					Bucket: []byte("otherbucket"),
+				}},
+			},
+		},
+
+		{ // Path restriction
+			Caveat: macaroon.Caveat{
+				AllowedPaths: []*macaroon.Caveat_Path{{
+					Bucket:              []byte("testbucket"),
+					EncryptedPathPrefix: []byte("otherpath"),
+				}},
+			},
+		},
+
+		{ // Time restriction after
+			Caveat: macaroon.Caveat{
+				NotAfter: func(x time.Time) *time.Time { return &x }(time.Now()),
+			},
+		},
+
+		{ // Time restriction before
+			Caveat: macaroon.Caveat{
+				NotBefore: func(x time.Time) *time.Time { return &x }(time.Now().Add(time.Hour)),
+			},
+		},
+	}
+
+	for _, test := range tests {
+		restrictedKey, err := key.Restrict(test.Caveat)
+		require.NoError(t, err)
+
+		client, err := planet.Uplinks[0].DialMetainfo(ctx, planet.Satellites[0], restrictedKey.Serialize())
+		require.NoError(t, err)
+
+		_, _, err = client.CreateSegment(ctx, "testbucket", "testpath", 1, &pb.RedundancyScheme{}, 123, time.Now())
+		assertUnauthenticated(t, err, test.CreateSegmentAllowed)
+
+		_, err = client.CommitSegment(ctx, "testbucket", "testpath", 0, &pb.Pointer{}, nil)
+		assertUnauthenticated(t, err, test.CommitSegmentAllowed)
+
+		_, err = client.SegmentInfo(ctx, "testbucket", "testpath", 0)
+		assertUnauthenticated(t, err, test.SegmentInfoAllowed)
+
+		_, _, err = client.ReadSegment(ctx, "testbucket", "testpath", 0)
+		assertUnauthenticated(t, err, test.ReadSegmentAllowed)
+
+		_, err = client.DeleteSegment(ctx, "testbucket", "testpath", 0)
+		assertUnauthenticated(t, err, test.DeleteSegmentAllowed)
+
+		_, _, err = client.ListSegments(ctx, "testbucket", "testpath", "", "", true, 1, 0)
+		assertUnauthenticated(t, err, test.ListSegmentsAllowed)
+	}
+}
+
+func assertUnauthenticated(t *testing.T, err error, allowed bool) {
 	t.Helper()
 
+	// If it's allowed, we allow any non-unauthenticated error because
+	// some calls error after authentication checks.
 	if err, ok := status.FromError(errs.Unwrap(err)); ok {
-		assert.Equal(t, codes.Unauthenticated, err.Code())
-	} else {
+		assert.Equal(t, codes.Unauthenticated == err.Code(), !allowed)
+	} else if !allowed {
 		assert.Fail(t, "got unexpected error", "%T", err)
 	}
 }
@@ -153,7 +264,7 @@ func TestServiceList(t *testing.T) {
 		return list.Items[i].Path < list.Items[k].Path
 	})
 	for i, item := range expected {
-		fmt.Println(item.Path, list.Items[i].Path)
+		t.Log(item.Path, list.Items[i].Path)
 		require.Equal(t, item.Path, list.Items[i].Path)
 		require.Equal(t, item.IsPrefix, list.Items[i].IsPrefix)
 	}
@@ -163,9 +274,7 @@ func TestCommitSegment(t *testing.T) {
 	testplanet.Run(t, testplanet.Config{
 		SatelliteCount: 1, StorageNodeCount: 6, UplinkCount: 1,
 	}, func(t *testing.T, ctx *testcontext.Context, planet *testplanet.Planet) {
-		projects, err := planet.Satellites[0].DB.Console().Projects().GetAll(ctx)
-		require.NoError(t, err)
-		apiKey := console.APIKeyFromBytes([]byte(projects[0].Name)).String()
+		apiKey := planet.Uplinks[0].APIKey[planet.Satellites[0].ID()]
 
 		metainfo, err := planet.Uplinks[0].DialMetainfo(ctx, planet.Satellites[0], apiKey)
 		require.NoError(t, err)
@@ -55,9 +55,9 @@ func (keys *apikeys) Get(ctx context.Context, id uuid.UUID) (*console.APIKeyInfo
 	return fromDBXAPIKey(dbKey)
 }
 
-// GetByKey implements satellite.APIKeys
-func (keys *apikeys) GetByKey(ctx context.Context, key console.APIKey) (*console.APIKeyInfo, error) {
-	dbKey, err := keys.db.Get_ApiKey_By_Key(ctx, dbx.ApiKey_Key(key[:]))
+// GetByHead implements satellite.APIKeys
+func (keys *apikeys) GetByHead(ctx context.Context, head []byte) (*console.APIKeyInfo, error) {
+	dbKey, err := keys.db.Get_ApiKey_By_Head(ctx, dbx.ApiKey_Head(head))
 	if err != nil {
 		return nil, err
 	}
@@ -66,7 +66,7 @@ func (keys *apikeys) GetByKey(ctx context.Context, key console.APIKey) (*console
 }
 
 // Create implements satellite.APIKeys
-func (keys *apikeys) Create(ctx context.Context, key console.APIKey, info console.APIKeyInfo) (*console.APIKeyInfo, error) {
+func (keys *apikeys) Create(ctx context.Context, head []byte, info console.APIKeyInfo) (*console.APIKeyInfo, error) {
 	id, err := uuid.New()
 	if err != nil {
 		return nil, err
@@ -76,8 +76,9 @@ func (keys *apikeys) Create(ctx context.Context, key console.APIKey, info consol
 		ctx,
 		dbx.ApiKey_Id(id[:]),
 		dbx.ApiKey_ProjectId(info.ProjectID[:]),
-		dbx.ApiKey_Key(key[:]),
+		dbx.ApiKey_Head(head),
 		dbx.ApiKey_Name(info.Name),
+		dbx.ApiKey_Secret(info.Secret),
 	)
 
 	if err != nil {
@@ -123,5 +124,6 @@ func fromDBXAPIKey(key *dbx.ApiKey) (*console.APIKeyInfo, error) {
 		ProjectID: projectID,
 		Name:      key.Name,
 		CreatedAt: key.CreatedAt,
+		Secret:    key.Secret,
 	}, nil
 }
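At the storage layer the unique key column becomes a unique head column and a secret column is added, as the dbx model and schema hunks below show. A short sketch of exercising the reworked store (the example function is an assumption; db is assumed to be the satellite's console database):

    package example

    import (
        "context"

        "storj.io/storj/pkg/macaroon"
        "storj.io/storj/satellite/console"
    )

    // storeAndLookup mirrors the store calls in this diff: Create persists
    // only the head plus metadata, and GetByHead resolves a later request.
    func storeAndLookup(ctx context.Context, db console.DB, project *console.Project, secret []byte) error {
        key, err := macaroon.NewAPIKey(secret)
        if err != nil {
            return err
        }

        // The full macaroon is never written to the database.
        _, err = db.APIKeys().Create(ctx, key.Head(), console.APIKeyInfo{
            ProjectID: project.ID,
            Name:      "example",
            Secret:    secret,
        })
        if err != nil {
            return err
        }

        // The stored record (and its secret) is found by head alone.
        _, err = db.APIKeys().GetByHead(ctx, key.Head())
        return err
    }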
@@ -261,16 +261,14 @@ delete project_member (
 
 model api_key (
 	key id
-	unique key
+	unique head
 	unique name project_id
 
 	field id         blob
 	field project_id project.id cascade
+	field head       blob
-	field key        blob
 	field name       text      (updatable)
+	field secret     blob
 	field created_at timestamp (autoinsert)
 )
 
@@ -284,7 +282,7 @@ read one (
 )
 read one (
 	select api_key
-	where api_key.key = ?
+	where api_key.head = ?
 )
 read all (
 	select api_key
@@ -483,7 +481,7 @@ read all (
 
 model storagenode_storage_tally (
 	key id
 
 	field id serial64
 	field node_id blob
 	field interval_end_time timestamp
@@ -18,9 +18,8 @@ import (
 
 	"github.com/lib/pq"
 
-	"math/rand"
-
 	"github.com/mattn/go-sqlite3"
+	"math/rand"
 )
 
 // Prevent conditional imports from causing build failures
@@ -470,11 +469,12 @@ CREATE TABLE users (
 CREATE TABLE api_keys (
 	id bytea NOT NULL,
 	project_id bytea NOT NULL REFERENCES projects( id ) ON DELETE CASCADE,
-	key bytea NOT NULL,
+	head bytea NOT NULL,
 	name text NOT NULL,
+	secret bytea NOT NULL,
 	created_at timestamp with time zone NOT NULL,
 	PRIMARY KEY ( id ),
-	UNIQUE ( key ),
+	UNIQUE ( head ),
 	UNIQUE ( name, project_id )
 );
 CREATE TABLE project_members (
@@ -754,11 +754,12 @@ CREATE TABLE users (
 CREATE TABLE api_keys (
 	id BLOB NOT NULL,
 	project_id BLOB NOT NULL REFERENCES projects( id ) ON DELETE CASCADE,
-	key BLOB NOT NULL,
+	head BLOB NOT NULL,
 	name TEXT NOT NULL,
+	secret BLOB NOT NULL,
 	created_at TIMESTAMP NOT NULL,
 	PRIMARY KEY ( id ),
-	UNIQUE ( key ),
+	UNIQUE ( head ),
 	UNIQUE ( name, project_id )
 );
 CREATE TABLE project_members (
@@ -3818,8 +3819,9 @@ func (User_CreatedAt_Field) _Column() string { return "created_at" }
 type ApiKey struct {
 	Id        []byte
 	ProjectId []byte
-	Key       []byte
+	Head      []byte
 	Name      string
+	Secret    []byte
 	CreatedAt time.Time
 }
 
@@ -3867,24 +3869,24 @@ func (f ApiKey_ProjectId_Field) value() interface{} {
 
 func (ApiKey_ProjectId_Field) _Column() string { return "project_id" }
 
-type ApiKey_Key_Field struct {
+type ApiKey_Head_Field struct {
 	_set   bool
 	_null  bool
 	_value []byte
 }
 
-func ApiKey_Key(v []byte) ApiKey_Key_Field {
-	return ApiKey_Key_Field{_set: true, _value: v}
+func ApiKey_Head(v []byte) ApiKey_Head_Field {
+	return ApiKey_Head_Field{_set: true, _value: v}
 }
 
-func (f ApiKey_Key_Field) value() interface{} {
+func (f ApiKey_Head_Field) value() interface{} {
 	if !f._set || f._null {
 		return nil
 	}
 	return f._value
 }
 
-func (ApiKey_Key_Field) _Column() string { return "key" }
+func (ApiKey_Head_Field) _Column() string { return "head" }
 
 type ApiKey_Name_Field struct {
 	_set   bool
@@ -3905,6 +3907,25 @@ func (f ApiKey_Name_Field) value() interface{} {
 
 func (ApiKey_Name_Field) _Column() string { return "name" }
 
+type ApiKey_Secret_Field struct {
+	_set   bool
+	_null  bool
+	_value []byte
+}
+
+func ApiKey_Secret(v []byte) ApiKey_Secret_Field {
+	return ApiKey_Secret_Field{_set: true, _value: v}
+}
+
+func (f ApiKey_Secret_Field) value() interface{} {
+	if !f._set || f._null {
+		return nil
+	}
+	return f._value
+}
+
+func (ApiKey_Secret_Field) _Column() string { return "secret" }
+
 type ApiKey_CreatedAt_Field struct {
 	_set  bool
 	_null bool
@@ -4527,24 +4548,26 @@ func (obj *postgresImpl) Create_ProjectMember(ctx context.Context,
 func (obj *postgresImpl) Create_ApiKey(ctx context.Context,
 	api_key_id ApiKey_Id_Field,
 	api_key_project_id ApiKey_ProjectId_Field,
-	api_key_key ApiKey_Key_Field,
-	api_key_name ApiKey_Name_Field) (
+	api_key_head ApiKey_Head_Field,
+	api_key_name ApiKey_Name_Field,
+	api_key_secret ApiKey_Secret_Field) (
 	api_key *ApiKey, err error) {
 
 	__now := obj.db.Hooks.Now().UTC()
 	__id_val := api_key_id.value()
 	__project_id_val := api_key_project_id.value()
-	__key_val := api_key_key.value()
+	__head_val := api_key_head.value()
 	__name_val := api_key_name.value()
+	__secret_val := api_key_secret.value()
 	__created_at_val := __now
 
-	var __embed_stmt = __sqlbundle_Literal("INSERT INTO api_keys ( id, project_id, key, name, created_at ) VALUES ( ?, ?, ?, ?, ? ) RETURNING api_keys.id, api_keys.project_id, api_keys.key, api_keys.name, api_keys.created_at")
+	var __embed_stmt = __sqlbundle_Literal("INSERT INTO api_keys ( id, project_id, head, name, secret, created_at ) VALUES ( ?, ?, ?, ?, ?, ? ) RETURNING api_keys.id, api_keys.project_id, api_keys.head, api_keys.name, api_keys.secret, api_keys.created_at")
 
 	var __stmt = __sqlbundle_Render(obj.dialect, __embed_stmt)
-	obj.logStmt(__stmt, __id_val, __project_id_val, __key_val, __name_val, __created_at_val)
+	obj.logStmt(__stmt, __id_val, __project_id_val, __head_val, __name_val, __secret_val, __created_at_val)
 
 	api_key = &ApiKey{}
-	err = obj.driver.QueryRow(__stmt, __id_val, __project_id_val, __key_val, __name_val, __created_at_val).Scan(&api_key.Id, &api_key.ProjectId, &api_key.Key, &api_key.Name, &api_key.CreatedAt)
+	err = obj.driver.QueryRow(__stmt, __id_val, __project_id_val, __head_val, __name_val, __secret_val, __created_at_val).Scan(&api_key.Id, &api_key.ProjectId, &api_key.Head, &api_key.Name, &api_key.Secret, &api_key.CreatedAt)
 	if err != nil {
 		return nil, obj.makeErr(err)
 	}
@@ -5273,7 +5296,7 @@ func (obj *postgresImpl) Get_ApiKey_By_Id(ctx context.Context,
 	api_key_id ApiKey_Id_Field) (
 	api_key *ApiKey, err error) {
 
-	var __embed_stmt = __sqlbundle_Literal("SELECT api_keys.id, api_keys.project_id, api_keys.key, api_keys.name, api_keys.created_at FROM api_keys WHERE api_keys.id = ?")
+	var __embed_stmt = __sqlbundle_Literal("SELECT api_keys.id, api_keys.project_id, api_keys.head, api_keys.name, api_keys.secret, api_keys.created_at FROM api_keys WHERE api_keys.id = ?")
 
 	var __values []interface{}
 	__values = append(__values, api_key_id.value())
@@ -5282,7 +5305,7 @@ func (obj *postgresImpl) Get_ApiKey_By_Id(ctx context.Context,
 	obj.logStmt(__stmt, __values...)
 
 	api_key = &ApiKey{}
-	err = obj.driver.QueryRow(__stmt, __values...).Scan(&api_key.Id, &api_key.ProjectId, &api_key.Key, &api_key.Name, &api_key.CreatedAt)
+	err = obj.driver.QueryRow(__stmt, __values...).Scan(&api_key.Id, &api_key.ProjectId, &api_key.Head, &api_key.Name, &api_key.Secret, &api_key.CreatedAt)
 	if err != nil {
 		return nil, obj.makeErr(err)
 	}
|
|||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
func (obj *postgresImpl) Get_ApiKey_By_Key(ctx context.Context,
|
func (obj *postgresImpl) Get_ApiKey_By_Head(ctx context.Context,
|
||||||
api_key_key ApiKey_Key_Field) (
|
api_key_head ApiKey_Head_Field) (
|
||||||
api_key *ApiKey, err error) {
|
api_key *ApiKey, err error) {
|
||||||
|
|
||||||
var __embed_stmt = __sqlbundle_Literal("SELECT api_keys.id, api_keys.project_id, api_keys.key, api_keys.name, api_keys.created_at FROM api_keys WHERE api_keys.key = ?")
|
var __embed_stmt = __sqlbundle_Literal("SELECT api_keys.id, api_keys.project_id, api_keys.head, api_keys.name, api_keys.secret, api_keys.created_at FROM api_keys WHERE api_keys.head = ?")
|
||||||
|
|
||||||
var __values []interface{}
|
var __values []interface{}
|
||||||
__values = append(__values, api_key_key.value())
|
__values = append(__values, api_key_head.value())
|
||||||
|
|
||||||
var __stmt = __sqlbundle_Render(obj.dialect, __embed_stmt)
|
var __stmt = __sqlbundle_Render(obj.dialect, __embed_stmt)
|
||||||
obj.logStmt(__stmt, __values...)
|
obj.logStmt(__stmt, __values...)
|
||||||
|
|
||||||
api_key = &ApiKey{}
|
api_key = &ApiKey{}
|
||||||
err = obj.driver.QueryRow(__stmt, __values...).Scan(&api_key.Id, &api_key.ProjectId, &api_key.Key, &api_key.Name, &api_key.CreatedAt)
|
err = obj.driver.QueryRow(__stmt, __values...).Scan(&api_key.Id, &api_key.ProjectId, &api_key.Head, &api_key.Name, &api_key.Secret, &api_key.CreatedAt)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, obj.makeErr(err)
|
return nil, obj.makeErr(err)
|
||||||
}
|
}
|
||||||
@@ -5315,7 +5338,7 @@ func (obj *postgresImpl) All_ApiKey_By_ProjectId_OrderBy_Asc_Name(ctx context.Co
 	api_key_project_id ApiKey_ProjectId_Field) (
 	rows []*ApiKey, err error) {
 
-	var __embed_stmt = __sqlbundle_Literal("SELECT api_keys.id, api_keys.project_id, api_keys.key, api_keys.name, api_keys.created_at FROM api_keys WHERE api_keys.project_id = ? ORDER BY api_keys.name")
+	var __embed_stmt = __sqlbundle_Literal("SELECT api_keys.id, api_keys.project_id, api_keys.head, api_keys.name, api_keys.secret, api_keys.created_at FROM api_keys WHERE api_keys.project_id = ? ORDER BY api_keys.name")
 
 	var __values []interface{}
 	__values = append(__values, api_key_project_id.value())
@@ -5331,7 +5354,7 @@ func (obj *postgresImpl) All_ApiKey_By_ProjectId_OrderBy_Asc_Name(ctx context.Co
 
 	for __rows.Next() {
 		api_key := &ApiKey{}
-		err = __rows.Scan(&api_key.Id, &api_key.ProjectId, &api_key.Key, &api_key.Name, &api_key.CreatedAt)
+		err = __rows.Scan(&api_key.Id, &api_key.ProjectId, &api_key.Head, &api_key.Name, &api_key.Secret, &api_key.CreatedAt)
 		if err != nil {
 			return nil, obj.makeErr(err)
 		}
@@ -6334,7 +6357,7 @@ func (obj *postgresImpl) Update_ApiKey_By_Id(ctx context.Context,
 	api_key *ApiKey, err error) {
 	var __sets = &__sqlbundle_Hole{}
 
-	var __embed_stmt = __sqlbundle_Literals{Join: "", SQLs: []__sqlbundle_SQL{__sqlbundle_Literal("UPDATE api_keys SET "), __sets, __sqlbundle_Literal(" WHERE api_keys.id = ? RETURNING api_keys.id, api_keys.project_id, api_keys.key, api_keys.name, api_keys.created_at")}}
+	var __embed_stmt = __sqlbundle_Literals{Join: "", SQLs: []__sqlbundle_SQL{__sqlbundle_Literal("UPDATE api_keys SET "), __sets, __sqlbundle_Literal(" WHERE api_keys.id = ? RETURNING api_keys.id, api_keys.project_id, api_keys.head, api_keys.name, api_keys.secret, api_keys.created_at")}}
 
 	__sets_sql := __sqlbundle_Literals{Join: ", "}
 	var __values []interface{}
@@ -6358,7 +6381,7 @@ func (obj *postgresImpl) Update_ApiKey_By_Id(ctx context.Context,
 	obj.logStmt(__stmt, __values...)
 
 	api_key = &ApiKey{}
-	err = obj.driver.QueryRow(__stmt, __values...).Scan(&api_key.Id, &api_key.ProjectId, &api_key.Key, &api_key.Name, &api_key.CreatedAt)
+	err = obj.driver.QueryRow(__stmt, __values...).Scan(&api_key.Id, &api_key.ProjectId, &api_key.Head, &api_key.Name, &api_key.Secret, &api_key.CreatedAt)
 	if err == sql.ErrNoRows {
 		return nil, nil
 	}
@@ -7421,23 +7444,25 @@ func (obj *sqlite3Impl) Create_ProjectMember(ctx context.Context,
 func (obj *sqlite3Impl) Create_ApiKey(ctx context.Context,
 	api_key_id ApiKey_Id_Field,
 	api_key_project_id ApiKey_ProjectId_Field,
-	api_key_key ApiKey_Key_Field,
-	api_key_name ApiKey_Name_Field) (
+	api_key_head ApiKey_Head_Field,
+	api_key_name ApiKey_Name_Field,
+	api_key_secret ApiKey_Secret_Field) (
 	api_key *ApiKey, err error) {
 
 	__now := obj.db.Hooks.Now().UTC()
 	__id_val := api_key_id.value()
 	__project_id_val := api_key_project_id.value()
-	__key_val := api_key_key.value()
+	__head_val := api_key_head.value()
 	__name_val := api_key_name.value()
+	__secret_val := api_key_secret.value()
 	__created_at_val := __now
 
-	var __embed_stmt = __sqlbundle_Literal("INSERT INTO api_keys ( id, project_id, key, name, created_at ) VALUES ( ?, ?, ?, ?, ? )")
+	var __embed_stmt = __sqlbundle_Literal("INSERT INTO api_keys ( id, project_id, head, name, secret, created_at ) VALUES ( ?, ?, ?, ?, ?, ? )")
 
 	var __stmt = __sqlbundle_Render(obj.dialect, __embed_stmt)
-	obj.logStmt(__stmt, __id_val, __project_id_val, __key_val, __name_val, __created_at_val)
+	obj.logStmt(__stmt, __id_val, __project_id_val, __head_val, __name_val, __secret_val, __created_at_val)
 
-	__res, err := obj.driver.Exec(__stmt, __id_val, __project_id_val, __key_val, __name_val, __created_at_val)
+	__res, err := obj.driver.Exec(__stmt, __id_val, __project_id_val, __head_val, __name_val, __secret_val, __created_at_val)
 	if err != nil {
 		return nil, obj.makeErr(err)
 	}
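
For orientation, the regenerated Create_ApiKey is easiest to read from the caller's side. Below is a minimal sketch against the dbx.Methods interface shown further down in this diff; the wrapper function, its package name, the dbx import path, and the raw byte arguments are illustrative assumptions, not part of the patch:

package apikeysketch // hypothetical package, for illustration only

import (
	"context"

	dbx "storj.io/storj/satellite/satellitedb/dbx" // assumed import path for the generated code
)

// createAPIKey inserts a row under the new schema: the macaroon head
// identifies the key, and the per-key secret is stored alongside it so the
// satellite can later verify macaroon signatures server side.
func createAPIKey(ctx context.Context, db dbx.Methods, id, projectID, head, secret []byte, name string) (*dbx.ApiKey, error) {
	return db.Create_ApiKey(ctx,
		dbx.ApiKey_Id(id),
		dbx.ApiKey_ProjectId(projectID),
		dbx.ApiKey_Head(head),
		dbx.ApiKey_Name(name),
		dbx.ApiKey_Secret(secret),
	)
}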
@@ -8197,7 +8222,7 @@ func (obj *sqlite3Impl) Get_ApiKey_By_Id(ctx context.Context,
 	api_key_id ApiKey_Id_Field) (
 	api_key *ApiKey, err error) {
 
-	var __embed_stmt = __sqlbundle_Literal("SELECT api_keys.id, api_keys.project_id, api_keys.key, api_keys.name, api_keys.created_at FROM api_keys WHERE api_keys.id = ?")
+	var __embed_stmt = __sqlbundle_Literal("SELECT api_keys.id, api_keys.project_id, api_keys.head, api_keys.name, api_keys.secret, api_keys.created_at FROM api_keys WHERE api_keys.id = ?")
 
 	var __values []interface{}
 	__values = append(__values, api_key_id.value())
@@ -8206,7 +8231,7 @@ func (obj *sqlite3Impl) Get_ApiKey_By_Id(ctx context.Context,
 	obj.logStmt(__stmt, __values...)
 
 	api_key = &ApiKey{}
-	err = obj.driver.QueryRow(__stmt, __values...).Scan(&api_key.Id, &api_key.ProjectId, &api_key.Key, &api_key.Name, &api_key.CreatedAt)
+	err = obj.driver.QueryRow(__stmt, __values...).Scan(&api_key.Id, &api_key.ProjectId, &api_key.Head, &api_key.Name, &api_key.Secret, &api_key.CreatedAt)
 	if err != nil {
 		return nil, obj.makeErr(err)
 	}
@@ -8214,20 +8239,20 @@ func (obj *sqlite3Impl) Get_ApiKey_By_Id(ctx context.Context,
 
 }
 
-func (obj *sqlite3Impl) Get_ApiKey_By_Key(ctx context.Context,
-	api_key_key ApiKey_Key_Field) (
+func (obj *sqlite3Impl) Get_ApiKey_By_Head(ctx context.Context,
+	api_key_head ApiKey_Head_Field) (
 	api_key *ApiKey, err error) {
 
-	var __embed_stmt = __sqlbundle_Literal("SELECT api_keys.id, api_keys.project_id, api_keys.key, api_keys.name, api_keys.created_at FROM api_keys WHERE api_keys.key = ?")
+	var __embed_stmt = __sqlbundle_Literal("SELECT api_keys.id, api_keys.project_id, api_keys.head, api_keys.name, api_keys.secret, api_keys.created_at FROM api_keys WHERE api_keys.head = ?")
 
 	var __values []interface{}
-	__values = append(__values, api_key_key.value())
+	__values = append(__values, api_key_head.value())
 
 	var __stmt = __sqlbundle_Render(obj.dialect, __embed_stmt)
 	obj.logStmt(__stmt, __values...)
 
 	api_key = &ApiKey{}
-	err = obj.driver.QueryRow(__stmt, __values...).Scan(&api_key.Id, &api_key.ProjectId, &api_key.Key, &api_key.Name, &api_key.CreatedAt)
+	err = obj.driver.QueryRow(__stmt, __values...).Scan(&api_key.Id, &api_key.ProjectId, &api_key.Head, &api_key.Name, &api_key.Secret, &api_key.CreatedAt)
 	if err != nil {
 		return nil, obj.makeErr(err)
 	}
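
Get_ApiKey_By_Head is the query that makes server-side macaroons work: the uplink presents a serialized macaroon, and the satellite resolves it to a stored key by the macaroon's head, which stays constant even when caveats are added. A rough sketch of that flow follows, assuming ParseAPIKey/Head from pkg/macaroon and the console.APIKeys store changed later in this diff (signature and caveat verification are elided):

package apikeysketch // hypothetical package, for illustration only

import (
	"context"

	"storj.io/storj/pkg/macaroon"
	"storj.io/storj/satellite/console"
)

// lookupByHead resolves an incoming serialized macaroon to its stored
// APIKeyInfo; any restricted macaroon derived from the same root key shares
// the same head and therefore maps to the same row.
func lookupByHead(ctx context.Context, keys console.APIKeys, serialized string) (*console.APIKeyInfo, error) {
	key, err := macaroon.ParseAPIKey(serialized)
	if err != nil {
		return nil, err
	}
	info, err := keys.GetByHead(ctx, key.Head())
	if err != nil {
		return nil, err
	}
	// Next step (not shown): verify the macaroon's chained HMAC using
	// info.Secret and evaluate its caveats before authorizing the request.
	return info, nil
}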
@@ -8239,7 +8264,7 @@ func (obj *sqlite3Impl) All_ApiKey_By_ProjectId_OrderBy_Asc_Name(ctx context.Con
 	api_key_project_id ApiKey_ProjectId_Field) (
 	rows []*ApiKey, err error) {
 
-	var __embed_stmt = __sqlbundle_Literal("SELECT api_keys.id, api_keys.project_id, api_keys.key, api_keys.name, api_keys.created_at FROM api_keys WHERE api_keys.project_id = ? ORDER BY api_keys.name")
+	var __embed_stmt = __sqlbundle_Literal("SELECT api_keys.id, api_keys.project_id, api_keys.head, api_keys.name, api_keys.secret, api_keys.created_at FROM api_keys WHERE api_keys.project_id = ? ORDER BY api_keys.name")
 
 	var __values []interface{}
 	__values = append(__values, api_key_project_id.value())
@@ -8255,7 +8280,7 @@ func (obj *sqlite3Impl) All_ApiKey_By_ProjectId_OrderBy_Asc_Name(ctx context.Con
 
 	for __rows.Next() {
 		api_key := &ApiKey{}
-		err = __rows.Scan(&api_key.Id, &api_key.ProjectId, &api_key.Key, &api_key.Name, &api_key.CreatedAt)
+		err = __rows.Scan(&api_key.Id, &api_key.ProjectId, &api_key.Head, &api_key.Name, &api_key.Secret, &api_key.CreatedAt)
 		if err != nil {
 			return nil, obj.makeErr(err)
 		}
@@ -9347,12 +9372,12 @@ func (obj *sqlite3Impl) Update_ApiKey_By_Id(ctx context.Context,
 		return nil, obj.makeErr(err)
 	}
 
-	var __embed_stmt_get = __sqlbundle_Literal("SELECT api_keys.id, api_keys.project_id, api_keys.key, api_keys.name, api_keys.created_at FROM api_keys WHERE api_keys.id = ?")
+	var __embed_stmt_get = __sqlbundle_Literal("SELECT api_keys.id, api_keys.project_id, api_keys.head, api_keys.name, api_keys.secret, api_keys.created_at FROM api_keys WHERE api_keys.id = ?")
 
 	var __stmt_get = __sqlbundle_Render(obj.dialect, __embed_stmt_get)
 	obj.logStmt("(IMPLIED) "+__stmt_get, __args...)
 
-	err = obj.driver.QueryRow(__stmt_get, __args...).Scan(&api_key.Id, &api_key.ProjectId, &api_key.Key, &api_key.Name, &api_key.CreatedAt)
+	err = obj.driver.QueryRow(__stmt_get, __args...).Scan(&api_key.Id, &api_key.ProjectId, &api_key.Head, &api_key.Name, &api_key.Secret, &api_key.CreatedAt)
 	if err == sql.ErrNoRows {
 		return nil, nil
 	}
@@ -10066,13 +10091,13 @@ func (obj *sqlite3Impl) getLastApiKey(ctx context.Context,
 	pk int64) (
 	api_key *ApiKey, err error) {
 
-	var __embed_stmt = __sqlbundle_Literal("SELECT api_keys.id, api_keys.project_id, api_keys.key, api_keys.name, api_keys.created_at FROM api_keys WHERE _rowid_ = ?")
+	var __embed_stmt = __sqlbundle_Literal("SELECT api_keys.id, api_keys.project_id, api_keys.head, api_keys.name, api_keys.secret, api_keys.created_at FROM api_keys WHERE _rowid_ = ?")
 
 	var __stmt = __sqlbundle_Render(obj.dialect, __embed_stmt)
 	obj.logStmt(__stmt, pk)
 
 	api_key = &ApiKey{}
-	err = obj.driver.QueryRow(__stmt, pk).Scan(&api_key.Id, &api_key.ProjectId, &api_key.Key, &api_key.Name, &api_key.CreatedAt)
+	err = obj.driver.QueryRow(__stmt, pk).Scan(&api_key.Id, &api_key.ProjectId, &api_key.Head, &api_key.Name, &api_key.Secret, &api_key.CreatedAt)
 	if err != nil {
 		return nil, obj.makeErr(err)
 	}
@@ -10669,14 +10694,15 @@ func (rx *Rx) Create_AccountingTimestamps(ctx context.Context,
 func (rx *Rx) Create_ApiKey(ctx context.Context,
 	api_key_id ApiKey_Id_Field,
 	api_key_project_id ApiKey_ProjectId_Field,
-	api_key_key ApiKey_Key_Field,
-	api_key_name ApiKey_Name_Field) (
+	api_key_head ApiKey_Head_Field,
+	api_key_name ApiKey_Name_Field,
+	api_key_secret ApiKey_Secret_Field) (
 	api_key *ApiKey, err error) {
 	var tx *Tx
 	if tx, err = rx.getTx(ctx); err != nil {
 		return
 	}
-	return tx.Create_ApiKey(ctx, api_key_id, api_key_project_id, api_key_key, api_key_name)
+	return tx.Create_ApiKey(ctx, api_key_id, api_key_project_id, api_key_head, api_key_name, api_key_secret)
 
 }
@@ -11126,6 +11152,16 @@ func (rx *Rx) Get_AccountingRollup_By_Id(ctx context.Context,
 	return tx.Get_AccountingRollup_By_Id(ctx, accounting_rollup_id)
 }
 
+func (rx *Rx) Get_ApiKey_By_Head(ctx context.Context,
+	api_key_head ApiKey_Head_Field) (
+	api_key *ApiKey, err error) {
+	var tx *Tx
+	if tx, err = rx.getTx(ctx); err != nil {
+		return
+	}
+	return tx.Get_ApiKey_By_Head(ctx, api_key_head)
+}
+
 func (rx *Rx) Get_ApiKey_By_Id(ctx context.Context,
 	api_key_id ApiKey_Id_Field) (
 	api_key *ApiKey, err error) {
@@ -11136,16 +11172,6 @@ func (rx *Rx) Get_ApiKey_By_Id(ctx context.Context,
 	return tx.Get_ApiKey_By_Id(ctx, api_key_id)
 }
 
-func (rx *Rx) Get_ApiKey_By_Key(ctx context.Context,
-	api_key_key ApiKey_Key_Field) (
-	api_key *ApiKey, err error) {
-	var tx *Tx
-	if tx, err = rx.getTx(ctx); err != nil {
-		return
-	}
-	return tx.Get_ApiKey_By_Key(ctx, api_key_key)
-}
-
 func (rx *Rx) Get_BucketUsage_By_Id(ctx context.Context,
 	bucket_usage_id BucketUsage_Id_Field) (
 	bucket_usage *BucketUsage, err error) {
@@ -11529,8 +11555,9 @@ type Methods interface {
 	Create_ApiKey(ctx context.Context,
 		api_key_id ApiKey_Id_Field,
 		api_key_project_id ApiKey_ProjectId_Field,
-		api_key_key ApiKey_Key_Field,
-		api_key_name ApiKey_Name_Field) (
+		api_key_head ApiKey_Head_Field,
+		api_key_name ApiKey_Name_Field,
+		api_key_secret ApiKey_Secret_Field) (
 		api_key *ApiKey, err error)
 
 	Create_BucketStorageTally(ctx context.Context,
@@ -11753,12 +11780,12 @@ type Methods interface {
 		accounting_rollup_id AccountingRollup_Id_Field) (
 		accounting_rollup *AccountingRollup, err error)
 
-	Get_ApiKey_By_Id(ctx context.Context,
-		api_key_id ApiKey_Id_Field) (
+	Get_ApiKey_By_Head(ctx context.Context,
+		api_key_head ApiKey_Head_Field) (
 		api_key *ApiKey, err error)
 
-	Get_ApiKey_By_Key(ctx context.Context,
-		api_key_key ApiKey_Key_Field) (
+	Get_ApiKey_By_Id(ctx context.Context,
+		api_key_id ApiKey_Id_Field) (
 		api_key *ApiKey, err error)
 
 	Get_BucketUsage_By_Id(ctx context.Context,
@@ -197,11 +197,12 @@ CREATE TABLE users (
 CREATE TABLE api_keys (
 	id bytea NOT NULL,
 	project_id bytea NOT NULL REFERENCES projects( id ) ON DELETE CASCADE,
-	key bytea NOT NULL,
+	head bytea NOT NULL,
 	name text NOT NULL,
+	secret bytea NOT NULL,
 	created_at timestamp with time zone NOT NULL,
 	PRIMARY KEY ( id ),
-	UNIQUE ( key ),
+	UNIQUE ( head ),
 	UNIQUE ( name, project_id )
 );
 CREATE TABLE project_members (
@@ -197,11 +197,12 @@ CREATE TABLE users (
 CREATE TABLE api_keys (
 	id BLOB NOT NULL,
 	project_id BLOB NOT NULL REFERENCES projects( id ) ON DELETE CASCADE,
-	key BLOB NOT NULL,
+	head BLOB NOT NULL,
 	name TEXT NOT NULL,
+	secret BLOB NOT NULL,
 	created_at TIMESTAMP NOT NULL,
 	PRIMARY KEY ( id ),
-	UNIQUE ( key ),
+	UNIQUE ( head ),
 	UNIQUE ( name, project_id )
 );
 CREATE TABLE project_members (
@@ -147,10 +147,10 @@ type lockedAPIKeys struct {
 }
 
 // Create creates and stores new APIKeyInfo
-func (m *lockedAPIKeys) Create(ctx context.Context, key console.APIKey, info console.APIKeyInfo) (*console.APIKeyInfo, error) {
+func (m *lockedAPIKeys) Create(ctx context.Context, head []byte, info console.APIKeyInfo) (*console.APIKeyInfo, error) {
 	m.Lock()
 	defer m.Unlock()
-	return m.db.Create(ctx, key, info)
+	return m.db.Create(ctx, head, info)
 }
 
 // Delete deletes APIKeyInfo from store
@@ -167,11 +167,11 @@ func (m *lockedAPIKeys) Get(ctx context.Context, id uuid.UUID) (*console.APIKeyI
 	return m.db.Get(ctx, id)
 }
 
-// GetByKey retrieves APIKeyInfo for given key
-func (m *lockedAPIKeys) GetByKey(ctx context.Context, key console.APIKey) (*console.APIKeyInfo, error) {
+// GetByHead retrieves APIKeyInfo for given key head
+func (m *lockedAPIKeys) GetByHead(ctx context.Context, head []byte) (*console.APIKeyInfo, error) {
 	m.Lock()
 	defer m.Unlock()
-	return m.db.GetByKey(ctx, key)
+	return m.db.GetByHead(ctx, head)
 }
 
 // GetByProjectID retrieves list of APIKeys for given projectID
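
The wrapper above implies the following shape for the underlying store. This is a sketch reconstructed from the locked signatures, not a verbatim copy of the console package; the uuid import is assumed to match the repo's existing dependency:

package console // sketch of the assumed interface shape, not the actual file

import (
	"context"

	"github.com/skyrings/skyring-common/tools/uuid" // assumed uuid package
)

// APIKeys is now keyed by the macaroon head rather than a full APIKey value.
type APIKeys interface {
	// GetByProjectID retrieves the list of keys for a given project.
	GetByProjectID(ctx context.Context, projectID uuid.UUID) ([]APIKeyInfo, error)
	// Get retrieves key info by id.
	Get(ctx context.Context, id uuid.UUID) (*APIKeyInfo, error)
	// GetByHead retrieves key info by the macaroon head (replaces GetByKey).
	GetByHead(ctx context.Context, head []byte) (*APIKeyInfo, error)
	// Create stores key info under the given head.
	Create(ctx context.Context, head []byte, info APIKeyInfo) (*APIKeyInfo, error)
	// Delete removes key info by id.
	Delete(ctx context.Context, id uuid.UUID) error
}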
@@ -694,6 +694,24 @@ func (db *DB) PostgresMigration() *migrate.Migration {
 					);`,
 				},
 			},
+			{
+				Description: "Drops and recreates api key table to handle macaroons and adds revocation table",
+				Version:     23,
+				Action: migrate.SQL{
+					`DROP TABLE api_keys CASCADE`,
+					`CREATE TABLE api_keys (
+						id bytea NOT NULL,
+						project_id bytea NOT NULL REFERENCES projects( id ) ON DELETE CASCADE,
+						head bytea NOT NULL,
+						name text NOT NULL,
+						secret bytea NOT NULL,
+						created_at timestamp with time zone NOT NULL,
+						PRIMARY KEY ( id ),
+						UNIQUE ( head ),
+						UNIQUE ( name, project_id )
+					);`,
+				},
+			},
 		},
 	}
 }
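
Note that this migration step is destructive: DROP TABLE api_keys CASCADE discards every existing API key, and there is no in-place conversion because the old random key value cannot be turned into a head/secret pair, so keys issued before version 23 must be re-created as macaroons. A minimal sketch of such a step, with field names mirroring the diff (the migrate.Step type and import path are assumed):

package migrationsketch // hypothetical package, for illustration only

import "storj.io/storj/internal/migrate" // assumed import path of the migrate package used above

// destructiveStep mirrors the version-23 step: drop, then recreate with the
// macaroon-oriented columns. Running it invalidates all pre-existing keys.
var destructiveStep = migrate.Step{
	Description: "Drops and recreates api key table to handle macaroons and adds revocation table",
	Version:     23,
	Action: migrate.SQL{
		`DROP TABLE api_keys CASCADE`,
		`CREATE TABLE api_keys (
			id bytea NOT NULL,
			project_id bytea NOT NULL REFERENCES projects( id ) ON DELETE CASCADE,
			head bytea NOT NULL,
			name text NOT NULL,
			secret bytea NOT NULL,
			created_at timestamp with time zone NOT NULL,
			PRIMARY KEY ( id ),
			UNIQUE ( head ),
			UNIQUE ( name, project_id )
		);`,
	},
}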
@@ -96,7 +96,7 @@ CREATE TABLE irreparabledbs (
 CREATE TABLE nodes (
 	id bytea NOT NULL,
 	address text NOT NULL,
 	last_ip text NOT NULL,
 	protocol integer NOT NULL,
 	type integer NOT NULL,
 	email text NOT NULL,
@@ -252,4 +252,4 @@ INSERT INTO "reset_password_tokens" ("secret", "owner_id", "created_at") VALUES
 
 INSERT INTO "pending_audits" ("node_id", "piece_id", "stripe_index", "share_size", "expected_share_hash", "reverify_count") VALUES (E'\\153\\313\\233\\074\\327\\177\\136\\070\\346\\001'::bytea, E'\\363\\311\\033w\\222\\303Ci\\265\\343U\\303\\312\\204",'::bytea, 5, 1024, E'\\070\\127\\144\\013\\332\\344\\102\\376\\306\\056\\303\\130\\106\\132\\321\\276\\321\\274\\170\\264\\054\\333\\221\\116\\154\\221\\335\\070\\220\\146\\344\\216'::bytea, 1);
 
 -- NEW DATA --
@@ -270,4 +270,4 @@ INSERT INTO "pending_audits" ("node_id", "piece_id", "stripe_index", "share_size
 
 -- NEW DATA --
 
 INSERT INTO "offers" ("id", "name", "description", "type", "credit_in_cents", "award_credit_duration_days", "invitee_credit_duration_days", "redeemable_cap", "expires_at", "created_at", "num_redeemed", "status") VALUES (1, 'testOffer', 'Test offer 1', 0, 1000, 14, 14, 50, '2019-03-14 08:28:24.636949+00', '2019-02-14 08:28:24.636949+00', 0, 0);
satellite/satellitedb/testdata/postgres.v23.sql (new file, vendored, 273 lines)
@@ -0,0 +1,273 @@
+CREATE TABLE accounting_rollups (
+	id bigserial NOT NULL,
+	node_id bytea NOT NULL,
+	start_time timestamp with time zone NOT NULL,
+	put_total bigint NOT NULL,
+	get_total bigint NOT NULL,
+	get_audit_total bigint NOT NULL,
+	get_repair_total bigint NOT NULL,
+	put_repair_total bigint NOT NULL,
+	at_rest_total double precision NOT NULL,
+	PRIMARY KEY ( id )
+);
+CREATE TABLE accounting_timestamps (
+	name text NOT NULL,
+	value timestamp with time zone NOT NULL,
+	PRIMARY KEY ( name )
+);
+CREATE TABLE bucket_bandwidth_rollups (
+	bucket_name bytea NOT NULL,
+	project_id bytea NOT NULL,
+	interval_start timestamp NOT NULL,
+	interval_seconds integer NOT NULL,
+	action integer NOT NULL,
+	inline bigint NOT NULL,
+	allocated bigint NOT NULL,
+	settled bigint NOT NULL,
+	PRIMARY KEY ( bucket_name, project_id, interval_start, action )
+);
+CREATE TABLE bucket_storage_tallies (
+	bucket_name bytea NOT NULL,
+	project_id bytea NOT NULL,
+	interval_start timestamp NOT NULL,
+	inline bigint NOT NULL,
+	remote bigint NOT NULL,
+	remote_segments_count integer NOT NULL,
+	inline_segments_count integer NOT NULL,
+	object_count integer NOT NULL,
+	metadata_size bigint NOT NULL,
+	PRIMARY KEY ( bucket_name, project_id, interval_start )
+);
+CREATE TABLE bucket_usages (
+	id bytea NOT NULL,
+	bucket_id bytea NOT NULL,
+	rollup_end_time timestamp with time zone NOT NULL,
+	remote_stored_data bigint NOT NULL,
+	inline_stored_data bigint NOT NULL,
+	remote_segments integer NOT NULL,
+	inline_segments integer NOT NULL,
+	objects integer NOT NULL,
+	metadata_size bigint NOT NULL,
+	repair_egress bigint NOT NULL,
+	get_egress bigint NOT NULL,
+	audit_egress bigint NOT NULL,
+	PRIMARY KEY ( id )
+);
+CREATE TABLE bwagreements (
+	serialnum text NOT NULL,
+	storage_node_id bytea NOT NULL,
+	uplink_id bytea NOT NULL,
+	action bigint NOT NULL,
+	total bigint NOT NULL,
+	created_at timestamp with time zone NOT NULL,
+	expires_at timestamp with time zone NOT NULL,
+	PRIMARY KEY ( serialnum )
+);
+CREATE TABLE certRecords (
+	publickey bytea NOT NULL,
+	id bytea NOT NULL,
+	update_at timestamp with time zone NOT NULL,
+	PRIMARY KEY ( id )
+);
+CREATE TABLE injuredsegments (
+	path text NOT NULL,
+	data bytea NOT NULL,
+	attempted timestamp,
+	PRIMARY KEY ( path )
+);
+CREATE TABLE irreparabledbs (
+	segmentpath bytea NOT NULL,
+	segmentdetail bytea NOT NULL,
+	pieces_lost_count bigint NOT NULL,
+	seg_damaged_unix_sec bigint NOT NULL,
+	repair_attempt_count bigint NOT NULL,
+	PRIMARY KEY ( segmentpath )
+);
+CREATE TABLE nodes (
+	id bytea NOT NULL,
+	address text NOT NULL,
+	last_ip text NOT NULL,
+	protocol integer NOT NULL,
+	type integer NOT NULL,
+	email text NOT NULL,
+	wallet text NOT NULL,
+	free_bandwidth bigint NOT NULL,
+	free_disk bigint NOT NULL,
+	major bigint NOT NULL,
+	minor bigint NOT NULL,
+	patch bigint NOT NULL,
+	hash text NOT NULL,
+	timestamp timestamp with time zone NOT NULL,
+	release boolean NOT NULL,
+	latency_90 bigint NOT NULL,
+	audit_success_count bigint NOT NULL,
+	total_audit_count bigint NOT NULL,
+	audit_success_ratio double precision NOT NULL,
+	uptime_success_count bigint NOT NULL,
+	total_uptime_count bigint NOT NULL,
+	uptime_ratio double precision NOT NULL,
+	created_at timestamp with time zone NOT NULL,
+	updated_at timestamp with time zone NOT NULL,
+	last_contact_success timestamp with time zone NOT NULL,
+	last_contact_failure timestamp with time zone NOT NULL,
+	contained boolean NOT NULL,
+	PRIMARY KEY ( id )
+);
+CREATE TABLE offers (
+	id serial NOT NULL,
+	name text NOT NULL,
+	description text NOT NULL,
+	type integer NOT NULL,
+	credit_in_cents integer NOT NULL,
+	award_credit_duration_days integer NOT NULL,
+	invitee_credit_duration_days integer NOT NULL,
+	redeemable_cap integer NOT NULL,
+	num_redeemed integer NOT NULL,
+	expires_at timestamp with time zone,
+	created_at timestamp with time zone NOT NULL,
+	status integer NOT NULL,
+	PRIMARY KEY ( id )
+);
+CREATE TABLE pending_audits (
+	node_id bytea NOT NULL,
+	piece_id bytea NOT NULL,
+	stripe_index bigint NOT NULL,
+	share_size bigint NOT NULL,
+	expected_share_hash bytea NOT NULL,
+	reverify_count bigint NOT NULL,
+	PRIMARY KEY ( node_id )
+);
+CREATE TABLE projects (
+	id bytea NOT NULL,
+	name text NOT NULL,
+	description text NOT NULL,
+	created_at timestamp with time zone NOT NULL,
+	PRIMARY KEY ( id )
+);
+CREATE TABLE registration_tokens (
+	secret bytea NOT NULL,
+	owner_id bytea,
+	project_limit integer NOT NULL,
+	created_at timestamp with time zone NOT NULL,
+	PRIMARY KEY ( secret ),
+	UNIQUE ( owner_id )
+);
+CREATE TABLE reset_password_tokens (
+	secret bytea NOT NULL,
+	owner_id bytea NOT NULL,
+	created_at timestamp with time zone NOT NULL,
+	PRIMARY KEY ( secret ),
+	UNIQUE ( owner_id )
+);
+CREATE TABLE serial_numbers (
+	id serial NOT NULL,
+	serial_number bytea NOT NULL,
+	bucket_id bytea NOT NULL,
+	expires_at timestamp NOT NULL,
+	PRIMARY KEY ( id )
+);
+CREATE TABLE storagenode_bandwidth_rollups (
+	storagenode_id bytea NOT NULL,
+	interval_start timestamp NOT NULL,
+	interval_seconds integer NOT NULL,
+	action integer NOT NULL,
+	allocated bigint NOT NULL,
+	settled bigint NOT NULL,
+	PRIMARY KEY ( storagenode_id, interval_start, action )
+);
+CREATE TABLE storagenode_storage_tallies (
+	id bigserial NOT NULL,
+	node_id bytea NOT NULL,
+	interval_end_time timestamp with time zone NOT NULL,
+	data_total double precision NOT NULL,
+	PRIMARY KEY ( id )
+);
+CREATE TABLE users (
+	id bytea NOT NULL,
+	full_name text NOT NULL,
+	short_name text,
+	email text NOT NULL,
+	password_hash bytea NOT NULL,
+	status integer NOT NULL,
+	created_at timestamp with time zone NOT NULL,
+	PRIMARY KEY ( id )
+);
+CREATE TABLE api_keys (
+	id bytea NOT NULL,
+	project_id bytea NOT NULL REFERENCES projects( id ) ON DELETE CASCADE,
+	head bytea NOT NULL,
+	name text NOT NULL,
+	secret bytea NOT NULL,
+	created_at timestamp with time zone NOT NULL,
+	PRIMARY KEY ( id ),
+	UNIQUE ( head ),
+	UNIQUE ( name, project_id )
+);
+CREATE TABLE project_members (
+	member_id bytea NOT NULL REFERENCES users( id ) ON DELETE CASCADE,
+	project_id bytea NOT NULL REFERENCES projects( id ) ON DELETE CASCADE,
+	created_at timestamp with time zone NOT NULL,
+	PRIMARY KEY ( member_id, project_id )
+);
+CREATE TABLE used_serials (
+	serial_number_id integer NOT NULL REFERENCES serial_numbers( id ) ON DELETE CASCADE,
+	storage_node_id bytea NOT NULL,
+	PRIMARY KEY ( serial_number_id, storage_node_id )
+);
+CREATE INDEX bucket_name_project_id_interval_start_interval_seconds ON bucket_bandwidth_rollups ( bucket_name, project_id, interval_start, interval_seconds );
+CREATE UNIQUE INDEX bucket_id_rollup ON bucket_usages ( bucket_id, rollup_end_time );
+CREATE INDEX node_last_ip ON nodes ( last_ip );
+CREATE UNIQUE INDEX serial_number ON serial_numbers ( serial_number );
+CREATE INDEX serial_numbers_expires_at_index ON serial_numbers ( expires_at );
+CREATE INDEX storagenode_id_interval_start_interval_seconds ON storagenode_bandwidth_rollups ( storagenode_id, interval_start, interval_seconds );
+
+---
+
+INSERT INTO "accounting_rollups"("id", "node_id", "start_time", "put_total", "get_total", "get_audit_total", "get_repair_total", "put_repair_total", "at_rest_total") VALUES (1, E'\\367M\\177\\251]t/\\022\\256\\214\\265\\025\\224\\204:\\217\\212\\0102<\\321\\374\\020&\\271Qc\\325\\261\\354\\246\\233'::bytea, '2019-02-09 00:00:00+00', 1000, 2000, 3000, 4000, 0, 5000);
+
+INSERT INTO "accounting_timestamps" VALUES ('LastAtRestTally', '0001-01-01 00:00:00+00');
+INSERT INTO "accounting_timestamps" VALUES ('LastRollup', '0001-01-01 00:00:00+00');
+INSERT INTO "accounting_timestamps" VALUES ('LastBandwidthTally', '0001-01-01 00:00:00+00');
+
+INSERT INTO "nodes"("id", "address", "last_ip", "protocol", "type", "email", "wallet", "free_bandwidth", "free_disk", "major", "minor", "patch", "hash", "timestamp", "release","latency_90", "audit_success_count", "total_audit_count", "audit_success_ratio", "uptime_success_count", "total_uptime_count", "uptime_ratio", "created_at", "updated_at", "last_contact_success", "last_contact_failure", "contained") VALUES (E'\\153\\313\\233\\074\\327\\177\\136\\070\\346\\001', '127.0.0.1:55516', '', 0, 4, '', '', -1, -1, 0, 1, 0, '', 'epoch', false, 0, 0, 5, 0, 0, 5, 0, '2019-02-14 08:07:31.028103+00', '2019-02-14 08:07:31.108963+00', 'epoch', 'epoch', false);
+INSERT INTO "nodes"("id", "address", "last_ip", "protocol", "type", "email", "wallet", "free_bandwidth", "free_disk", "major", "minor", "patch", "hash", "timestamp", "release","latency_90", "audit_success_count", "total_audit_count", "audit_success_ratio", "uptime_success_count", "total_uptime_count", "uptime_ratio", "created_at", "updated_at", "last_contact_success", "last_contact_failure", "contained") VALUES (E'\\006\\223\\250R\\221\\005\\365\\377v>0\\266\\365\\216\\255?\\347\\244\\371?2\\264\\262\\230\\007<\\001\\262\\263\\237\\247n', '127.0.0.1:55518', '', 0, 4, '', '', -1, -1, 0, 1, 0, '', 'epoch', false, 0, 0, 0, 1, 3, 3, 1, '2019-02-14 08:07:31.028103+00', '2019-02-14 08:07:31.108963+00', 'epoch', 'epoch', false);
+INSERT INTO "nodes"("id", "address", "last_ip", "protocol", "type", "email", "wallet", "free_bandwidth", "free_disk", "major", "minor", "patch", "hash", "timestamp", "release","latency_90", "audit_success_count", "total_audit_count", "audit_success_ratio", "uptime_success_count", "total_uptime_count", "uptime_ratio", "created_at", "updated_at", "last_contact_success", "last_contact_failure", "contained") VALUES (E'\\363\\342\\363\\371>+F\\256\\263\\300\\273|\\342N\\347\\014', '127.0.0.1:55517', '', 0, 4, '', '', -1, -1, 0, 1, 0, '', 'epoch', false, 0, 0, 0, 1, 0, 0, 1, '2019-02-14 08:07:31.028103+00', '2019-02-14 08:07:31.108963+00', 'epoch', 'epoch', false);
+
+INSERT INTO "projects"("id", "name", "description", "created_at") VALUES (E'\\022\\217/\\014\\376!K\\023\\276\\031\\311}m\\236\\205\\300'::bytea, 'ProjectName', 'projects description', '2019-02-14 08:28:24.254934+00');
+
+INSERT INTO "users"("id", "full_name", "short_name", "email", "password_hash", "status", "created_at") VALUES (E'\\363\\311\\033w\\222\\303Ci\\265\\343U\\303\\312\\204",'::bytea, 'Noahson', 'William', '1email1@ukr.net', E'some_readable_hash'::bytea, 1, '2019-02-14 08:28:24.614594+00');
+INSERT INTO "projects"("id", "name", "description", "created_at") VALUES (E'\\363\\342\\363\\371>+F\\256\\263\\300\\273|\\342N\\347\\014'::bytea, 'projName1', 'Test project 1', '2019-02-14 08:28:24.636949+00');
+INSERT INTO "project_members"("member_id", "project_id", "created_at") VALUES (E'\\363\\311\\033w\\222\\303Ci\\265\\343U\\303\\312\\204",'::bytea, E'\\363\\342\\363\\371>+F\\256\\263\\300\\273|\\342N\\347\\014'::bytea, '2019-02-14 08:28:24.677953+00');
+
+INSERT INTO "bwagreements"("serialnum", "storage_node_id", "action", "total", "created_at", "expires_at", "uplink_id") VALUES ('8fc0ceaa-984c-4d52-bcf4-b5429e1e35e812FpiifDbcJkePa12jxjDEutKrfLmwzT7sz2jfVwpYqgtM8B74c', E'\\245Z[/\\333\\022\\011\\001\\036\\003\\204\\005\\032.\\206\\333E\\261\\342\\227=y,}aRaH6\\240\\370\\000'::bytea, 1, 666, '2019-02-14 15:09:54.420181+00', '2019-02-14 16:09:54+00', E'\\253Z+\\374eFm\\245$\\036\\206\\335\\247\\263\\350x\\\\\\304+\\364\\343\\364+\\276fIJQ\\361\\014\\232\\000'::bytea);
+INSERT INTO "irreparabledbs" ("segmentpath", "segmentdetail", "pieces_lost_count", "seg_damaged_unix_sec", "repair_attempt_count") VALUES ('\x49616d5365676d656e746b6579696e666f30', '\x49616d5365676d656e7464657461696c696e666f30', 10, 1550159554, 10);
+
+INSERT INTO "injuredsegments" ("path", "data") VALUES ('0', '\x0a0130120100');
+INSERT INTO "injuredsegments" ("path", "data") VALUES ('here''s/a/great/path', '\x0a136865726527732f612f67726561742f70617468120a0102030405060708090a');
+INSERT INTO "injuredsegments" ("path", "data") VALUES ('yet/another/cool/path', '\x0a157965742f616e6f746865722f636f6f6c2f70617468120a0102030405060708090a');
+INSERT INTO "injuredsegments" ("path", "data") VALUES ('so/many/iconic/paths/to/choose/from', '\x0a23736f2f6d616e792f69636f6e69632f70617468732f746f2f63686f6f73652f66726f6d120a0102030405060708090a');
+
+INSERT INTO "certrecords" VALUES (E'0Y0\\023\\006\\007*\\206H\\316=\\002\\001\\006\\010*\\206H\\316=\\003\\001\\007\\003B\\000\\004\\360\\267\\227\\377\\253u\\222\\337Y\\324C:GQ\\010\\277v\\010\\315D\\271\\333\\337.\\203\\023=C\\343\\014T%6\\027\\362?\\214\\326\\017U\\334\\000\\260\\224\\260J\\221\\304\\331F\\304\\221\\236zF,\\325\\326l\\215\\306\\365\\200\\022', E'L\\301|\\200\\247}F|1\\320\\232\\037n\\335\\241\\206\\244\\242\\207\\204.\\253\\357\\326\\352\\033Dt\\202`\\022\\325', '2019-02-14 08:07:31.335028+00');
+
+INSERT INTO "bucket_usages" ("id", "bucket_id", "rollup_end_time", "remote_stored_data", "inline_stored_data", "remote_segments", "inline_segments", "objects", "metadata_size", "repair_egress", "get_egress", "audit_egress") VALUES (E'\\153\\313\\233\\074\\327\\177\\136\\070\\346\\001",'::bytea, E'\\366\\146\\032\\321\\316\\161\\070\\133\\302\\271",'::bytea, '2019-03-06 08:28:24.677953+00', 10, 11, 12, 13, 14, 15, 16, 17, 18);
+
+INSERT INTO "registration_tokens" ("secret", "owner_id", "project_limit", "created_at") VALUES (E'\\070\\127\\144\\013\\332\\344\\102\\376\\306\\056\\303\\130\\106\\132\\321\\276\\321\\274\\170\\264\\054\\333\\221\\116\\154\\221\\335\\070\\220\\146\\344\\216'::bytea, null, 1, '2019-02-14 08:28:24.677953+00');
+
+INSERT INTO "serial_numbers" ("id", "serial_number", "bucket_id", "expires_at") VALUES (1, E'0123456701234567'::bytea, E'\\363\\342\\363\\371>+F\\256\\263\\300\\273|\\342N\\347\\014/testbucket'::bytea, '2019-03-06 08:28:24.677953+00');
+INSERT INTO "used_serials" ("serial_number_id", "storage_node_id") VALUES (1, E'\\006\\223\\250R\\221\\005\\365\\377v>0\\266\\365\\216\\255?\\347\\244\\371?2\\264\\262\\230\\007<\\001\\262\\263\\237\\247n');
+
+INSERT INTO "storagenode_bandwidth_rollups" ("storagenode_id", "interval_start", "interval_seconds", "action", "allocated", "settled") VALUES (E'\\006\\223\\250R\\221\\005\\365\\377v>0\\266\\365\\216\\255?\\347\\244\\371?2\\264\\262\\230\\007<\\001\\262\\263\\237\\247n', '2019-03-06 08:00:00.000000+00', 3600, 1, 1024, 2024);
+INSERT INTO "storagenode_storage_tallies" VALUES (1, E'\\3510\\323\\225"~\\036<\\342\\330m\\0253Jhr\\246\\233K\\246#\\2303\\351\\256\\275j\\212UM\\362\\207', '2019-02-14 08:16:57.812849+00', 1000);
+
+INSERT INTO "bucket_bandwidth_rollups" ("bucket_name", "project_id", "interval_start", "interval_seconds", "action", "inline", "allocated", "settled") VALUES (E'testbucket'::bytea, E'\\363\\342\\363\\371>+F\\256\\263\\300\\273|\\342N\\347\\014'::bytea,'2019-03-06 08:00:00.000000+00', 3600, 1, 1024, 2024, 3024);
+INSERT INTO "bucket_storage_tallies" ("bucket_name", "project_id", "interval_start", "inline", "remote", "remote_segments_count", "inline_segments_count", "object_count", "metadata_size") VALUES (E'testbucket'::bytea, E'\\363\\342\\363\\371>+F\\256\\263\\300\\273|\\342N\\347\\014'::bytea,'2019-03-06 08:00:00.000000+00', 4024, 5024, 0, 0, 0, 0);
+
+INSERT INTO "reset_password_tokens" ("secret", "owner_id", "created_at") VALUES (E'\\070\\127\\144\\013\\332\\344\\102\\376\\306\\056\\303\\130\\106\\132\\321\\276\\321\\274\\170\\264\\054\\333\\221\\116\\154\\221\\335\\070\\220\\146\\344\\216'::bytea, E'\\363\\311\\033w\\222\\303Ci\\265\\343U\\303\\312\\204",'::bytea, '2019-05-08 08:28:24.677953+00');
+
+INSERT INTO "pending_audits" ("node_id", "piece_id", "stripe_index", "share_size", "expected_share_hash", "reverify_count") VALUES (E'\\153\\313\\233\\074\\327\\177\\136\\070\\346\\001'::bytea, E'\\363\\311\\033w\\222\\303Ci\\265\\343U\\303\\312\\204",'::bytea, 5, 1024, E'\\070\\127\\144\\013\\332\\344\\102\\376\\306\\056\\303\\130\\106\\132\\321\\276\\321\\274\\170\\264\\054\\333\\221\\116\\154\\221\\335\\070\\220\\146\\344\\216'::bytea, 1);
+
+INSERT INTO "offers" ("id", "name", "description", "type", "credit_in_cents", "award_credit_duration_days", "invitee_credit_duration_days", "redeemable_cap", "expires_at", "created_at", "num_redeemed", "status") VALUES (1, 'testOffer', 'Test offer 1', 0, 1000, 14, 14, 50, '2019-03-14 08:28:24.636949+00', '2019-02-14 08:28:24.636949+00', 0, 0);
+
+-- NEW DATA --
+
+INSERT INTO "api_keys"("id", "project_id", "head", "name", "secret", "created_at") VALUES (E'\\334/\\302;\\225\\355O\\323\\276f\\247\\354/6\\241\\033'::bytea, E'\\022\\217/\\014\\376!K\\023\\276\\031\\311}m\\236\\205\\300'::bytea, E'\\111\\142\\147\\304\\132\\375\\070\\163\\270\\160\\251\\370\\126\\063\\351\\037\\257\\071\\143\\375\\351\\320\\253\\232\\220\\260\\075\\173\\306\\307\\115\\136'::bytea, 'key 2', E'\\254\\011\\315\\333\\273\\365\\001\\071\\024\\154\\253\\332\\301\\216\\361\\074\\221\\367\\251\\231\\274\\333\\300\\367\\001\\272\\327\\111\\315\\123\\042\\016'::bytea, '2019-02-14 08:28:24.267934+00');
@@ -1,4 +1,4 @@
-#!/bin/bash
+#!/usr/bin/env bash
 
 CHANGES=$(grep -r --include="*.dbx.go" regexp.MustCompile .)
 
@@ -8,4 +8,4 @@ then
 else
 	echo "please use latest dbx tool to generate code"
 	exit 1
 fi
@@ -1,4 +1,4 @@
-#!/bin/bash
+#!/usr/bin/env bash
 set -euo pipefail
 
 
@@ -1,4 +1,4 @@
-#!/bin/bash -
+#!/usr/bin/env bash
 
 # NOTE this script MUST BE EXECUTED from the same directory where it's located
 # to always obtain the same paths in the satellite configuration file.
@@ -1,4 +1,4 @@
-#!/bin/bash
+#!/usr/bin/env bash
 set -ueo pipefail
 
 #setup tmpdir for testfiles and cleanup
@@ -1,4 +1,4 @@
-#!/bin/bash
+#!/usr/bin/env bash
 set -ueo pipefail
 set +x
 
@@ -1,4 +1,4 @@
-#!/bin/bash
+#!/usr/bin/env bash
 set -ueo pipefail
 
 TMPDIR=$(mktemp -d -t tmp.XXXXXXXXXX)
@@ -1,4 +1,4 @@
-#!/bin/bash
+#!/usr/bin/env bash
 
 # NOTE this script MUST BE EXECUTED from the same directory where it's located
 # to always obtain the same paths in the satellite configuration file.