satellite/{satellitedb,attribution,console}: value attribution changes that add userAgent field to buckets table and all tables that have partner_id

Change-Id: I36a13bb651b86bfc14fe5a0a2258f719e6cd2b48
dlamarmorgan 2021-09-22 16:38:18 -07:00 committed by Damein Morgan
parent 8a0a233537
commit 4bbf667ad1
27 changed files with 1098 additions and 217 deletions

View File

@@ -646,6 +646,7 @@ func cmdValueAttribution(cmd *cobra.Command, args []string) (err error) {
 	if err != nil {
 		return errs.Combine(errs.New("Invalid Partner ID format. %s", args[0]), err)
 	}
+	userAgent := []byte(args[0])

 	start, end, err := reports.ParseRange(args[1], args[2])
 	if err != nil {
@@ -654,7 +655,7 @@ func cmdValueAttribution(cmd *cobra.Command, args []string) (err error) {
 	// send output to stdout
 	if partnerAttribtionCfg.Output == "" {
-		return reports.GenerateAttributionCSV(ctx, partnerAttribtionCfg.Database, partnerID, start, end, os.Stdout)
+		return reports.GenerateAttributionCSV(ctx, partnerAttribtionCfg.Database, partnerID, userAgent, start, end, os.Stdout)
 	}

 	// send output to file
@@ -673,7 +674,7 @@ func cmdValueAttribution(cmd *cobra.Command, args []string) (err error) {
 		}
 	}()

-	return reports.GenerateAttributionCSV(ctx, partnerAttribtionCfg.Database, partnerID, start, end, file)
+	return reports.GenerateAttributionCSV(ctx, partnerAttribtionCfg.Database, partnerID, userAgent, start, end, file)
 }

 func cmdPrepareCustomerInvoiceRecords(cmd *cobra.Command, args []string) (err error) {
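Worth noting in the hunk above: the single CLI argument now does double duty. It must still parse as a partner UUID, and the same string is reused verbatim as the user-agent bytes passed down to the report. A minimal sketch of that dual parse, assuming the storj.io/common/uuid package; parseAttributionArg is a hypothetical helper, not part of this commit:

    package main

    import "storj.io/common/uuid"

    // parseAttributionArg mirrors the hunk above: one argument is parsed as a
    // partner UUID and also kept verbatim as the user-agent filter bytes.
    func parseAttributionArg(arg string) (uuid.UUID, []byte, error) {
        partnerID, err := uuid.FromString(arg)
        if err != nil {
            return uuid.UUID{}, nil, err
        }
        return partnerID, []byte(arg), nil
    }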

View File

@@ -29,7 +29,7 @@ var headers = []string{
 }

 // GenerateAttributionCSV creates a report with.
-func GenerateAttributionCSV(ctx context.Context, database string, partnerID uuid.UUID, start time.Time, end time.Time, output io.Writer) error {
+func GenerateAttributionCSV(ctx context.Context, database string, partnerID uuid.UUID, userAgent []byte, start time.Time, end time.Time, output io.Writer) error {
 	log := zap.L().Named("db")
 	db, err := satellitedb.Open(ctx, log, database, satellitedb.Options{ApplicationName: "satellite-attribution"})
 	if err != nil {
@@ -42,7 +42,7 @@ func GenerateAttributionCSV(ctx context.Context, database string, partnerID uuid
 		}
 	}()

-	rows, err := db.Attribution().QueryAttribution(ctx, partnerID, start, end)
+	rows, err := db.Attribution().QueryAttribution(ctx, partnerID, userAgent, start, end)
 	if err != nil {
 		return errs.Wrap(err)
 	}

View File

@@ -45,7 +45,7 @@ func TestProjectGet(t *testing.T) {
 	t.Run("OK", func(t *testing.T) {
 		link := "http://" + address.String() + "/api/projects/" + project.ID.String()
 		expected := fmt.Sprintf(
-			`{"id":"%s","name":"%s","description":"%s","partnerId":"%s","ownerId":"%s","rateLimit":null,"burstLimit":null,"maxBuckets":null,"createdAt":"%s","memberCount":0,"storageLimit":"25.00 GB","bandwidthLimit":"25.00 GB"}`,
+			`{"id":"%s","name":"%s","description":"%s","partnerId":"%s","userAgent":null,"ownerId":"%s","rateLimit":null,"burstLimit":null,"maxBuckets":null,"createdAt":"%s","memberCount":0,"storageLimit":"25.00 GB","bandwidthLimit":"25.00 GB"}`,
 			project.ID.String(),
 			project.Name,
 			project.Description,

View File

@@ -21,12 +21,14 @@ type Info struct {
 	ProjectID  uuid.UUID
 	BucketName []byte
 	PartnerID  uuid.UUID
+	UserAgent  []byte
 	CreatedAt  time.Time
 }

 // CSVRow represents data from QueryAttribution without exposing dbx.
 type CSVRow struct {
 	PartnerID         []byte
+	UserAgent         []byte
 	ProjectID         []byte
 	BucketName        []byte
 	TotalBytesPerHour float64
@@ -42,5 +44,5 @@ type DB interface {
 	// Insert creates and stores new Info
 	Insert(ctx context.Context, info *Info) (*Info, error)
 	// QueryAttribution queries partner bucket attribution data
-	QueryAttribution(ctx context.Context, partnerID uuid.UUID, start time.Time, end time.Time) ([]*CSVRow, error)
+	QueryAttribution(ctx context.Context, partnerID uuid.UUID, userAgent []byte, start time.Time, end time.Time) ([]*CSVRow, error)
 }

View File

@@ -29,6 +29,7 @@ const (
 type AttributionTestData struct {
 	name       string
 	partnerID  uuid.UUID
+	userAgent  []byte
 	projectID  uuid.UUID
 	bucketName []byte
 	bucketID   []byte
@@ -64,12 +65,13 @@ func TestDB(t *testing.T) {
 		attributionDB := db.Attribution()
 		project1, project2 := testrand.UUID(), testrand.UUID()
 		partner1, partner2 := testrand.UUID(), testrand.UUID()
+		agent1, agent2 := []byte("agent1"), []byte("agent2")

 		infos := []*attribution.Info{
-			{project1, []byte("alpha"), partner1, time.Time{}},
-			{project1, []byte("beta"), partner2, time.Time{}},
-			{project2, []byte("alpha"), partner2, time.Time{}},
-			{project2, []byte("beta"), partner1, time.Time{}},
+			{project1, []byte("alpha"), partner1, agent1, time.Time{}},
+			{project1, []byte("beta"), partner2, agent2, time.Time{}},
+			{project2, []byte("alpha"), partner2, agent2, time.Time{}},
+			{project2, []byte("beta"), partner1, agent1, time.Time{}},
 		}

 		for _, info := range infos {
@@ -84,6 +86,7 @@ func TestDB(t *testing.T) {
 			got, err := attributionDB.Get(ctx, info.ProjectID, info.BucketName)
 			require.NoError(t, err)
 			assert.Equal(t, info.PartnerID, got.PartnerID)
+			assert.Equal(t, info.UserAgent, got.UserAgent)
 		}
 	})
 }
@@ -94,12 +97,14 @@ func TestQueryAttribution(t *testing.T) {
 		projectID := testrand.UUID()
 		partnerID := testrand.UUID()
+		userAgent := []byte("agent1")
 		alphaBucket := []byte("alpha")
 		betaBucket := []byte("beta")
 		testData := []AttributionTestData{
 			{
-				name:      "new partnerID, projectID, alpha",
+				name:      "new partnerID, userAgent, projectID, alpha",
 				partnerID: testrand.UUID(),
+				userAgent: []byte("agent2"),
 				projectID: projectID,
 				bucketName: alphaBucket,
@@ -112,8 +117,9 @@ func TestQueryAttribution(t *testing.T) {
 				padding: 2,
 			},
 			{
-				name:      "partnerID, new projectID, alpha",
+				name:      "partnerID, userAgent, new projectID, alpha",
 				partnerID: partnerID,
+				userAgent: userAgent,
 				projectID: testrand.UUID(),
 				bucketName: alphaBucket,
@@ -126,8 +132,9 @@ func TestQueryAttribution(t *testing.T) {
 				padding: 2,
 			},
 			{
-				name:      "new partnerID, projectID, beta",
+				name:      "new partnerID, userAgent, projectID, beta",
 				partnerID: testrand.UUID(),
+				userAgent: []byte("agent3"),
 				projectID: projectID,
 				bucketName: betaBucket,
@@ -140,8 +147,9 @@ func TestQueryAttribution(t *testing.T) {
 				padding: 2,
 			},
 			{
-				name:      "partnerID, new projectID, beta",
+				name:      "partnerID, userAgent new projectID, beta",
 				partnerID: partnerID,
+				userAgent: userAgent,
 				projectID: testrand.UUID(),
 				bucketName: betaBucket,
@@ -157,7 +165,7 @@ func TestQueryAttribution(t *testing.T) {
 		for _, td := range testData {
 			td := td
 			td.init()
-			info := attribution.Info{td.projectID, td.bucketName, td.partnerID, time.Time{}}
+			info := attribution.Info{td.projectID, td.bucketName, td.partnerID, td.userAgent, time.Time{}}
 			_, err := db.Attribution().Insert(ctx, &info)
 			require.NoError(t, err)
@@ -171,7 +179,7 @@ func TestQueryAttribution(t *testing.T) {
 }

 func verifyData(ctx *testcontext.Context, t *testing.T, attributionDB attribution.DB, testData *AttributionTestData) {
-	results, err := attributionDB.QueryAttribution(ctx, testData.partnerID, testData.start, testData.end)
+	results, err := attributionDB.QueryAttribution(ctx, testData.partnerID, testData.userAgent, testData.start, testData.end)
 	require.NoError(t, err)
 	require.NotEqual(t, 0, len(results), "Results must not be empty.")
 	count := 0
@@ -184,6 +192,7 @@ func verifyData(ctx *testcontext.Context, t *testing.T, attributionDB attributio
 		count++

 		assert.Equal(t, testData.partnerID[:], r.PartnerID, testData.name)
+		assert.Equal(t, testData.userAgent, r.UserAgent, testData.name)
 		assert.Equal(t, testData.projectID[:], r.ProjectID, testData.name)
 		assert.Equal(t, testData.bucketName, r.BucketName, testData.name)
 		assert.Equal(t, float64(testData.expectedTotalBytes/testData.hours), r.TotalBytesPerHour, testData.name)

View File

@@ -35,6 +35,7 @@ type APIKeyInfo struct {
 	ID        uuid.UUID `json:"id"`
 	ProjectID uuid.UUID `json:"projectId"`
 	PartnerID uuid.UUID `json:"partnerId"`
+	UserAgent []byte    `json:"userAgent"`
 	Name      string    `json:"name"`
 	Secret    []byte    `json:"-"`
 	CreatedAt time.Time `json:"createdAt"`

View File

@@ -24,7 +24,7 @@ func TestUsers(t *testing.T) {
 	// create user
 	userPassHash := testrand.Bytes(8)

-	// create an user with partnerID
+	// create a user with partnerID
 	_, err := consoleDB.Users().Insert(ctx, &console.User{
 		ID:       testrand.UUID(),
 		FullName: "John Doe",

View File

@@ -136,6 +136,7 @@ func (a *Auth) Register(w http.ResponseWriter, r *http.Request) {
 		Email            string `json:"email"`
 		Partner          string `json:"partner"`
 		PartnerID        string `json:"partnerId"`
+		UserAgent        []byte `json:"userAgent"`
 		Password         string `json:"password"`
 		SecretInput      string `json:"secret"`
 		ReferrerUserID   string `json:"referrerUserId"`
@@ -160,12 +161,7 @@ func (a *Auth) Register(w http.ResponseWriter, r *http.Request) {
 	}

 	if registerData.Partner != "" {
-		info, err := a.partners.ByName(ctx, registerData.Partner)
-		if err != nil {
-			a.log.Warn("Invalid partner name", zap.String("Partner name", registerData.Partner), zap.String("User email", registerData.Email), zap.Error(err))
-		} else {
-			registerData.PartnerID = info.ID
-		}
+		registerData.UserAgent = []byte(registerData.Partner)
 	}

 	ip, err := web.GetRequestIP(r)
@@ -180,6 +176,7 @@ func (a *Auth) Register(w http.ResponseWriter, r *http.Request) {
 			ShortName:      registerData.ShortName,
 			Email:          registerData.Email,
 			PartnerID:      registerData.PartnerID,
+			UserAgent:      registerData.UserAgent,
 			Password:       registerData.Password,
 			IsProfessional: registerData.IsProfessional,
 			Position:       registerData.Position,
@@ -286,6 +283,7 @@ func (a *Auth) GetAccount(w http.ResponseWriter, r *http.Request) {
 		ShortName      string    `json:"shortName"`
 		Email          string    `json:"email"`
 		PartnerID      uuid.UUID `json:"partnerId"`
+		UserAgent      []byte    `json:"userAgent"`
 		ProjectLimit   int       `json:"projectLimit"`
 		IsProfessional bool      `json:"isProfessional"`
 		Position       string    `json:"position"`
@@ -308,6 +306,7 @@ func (a *Auth) GetAccount(w http.ResponseWriter, r *http.Request) {
 	user.Email = auth.User.Email
 	user.ID = auth.User.ID
 	user.PartnerID = auth.User.PartnerID
+	user.UserAgent = auth.User.UserAgent
 	user.ProjectLimit = auth.User.ProjectLimit
 	user.IsProfessional = auth.User.IsProfessional
 	user.CompanyName = auth.User.CompanyName

View File

@@ -58,7 +58,7 @@ func TestAuth_Register(t *testing.T) {
 				ShortName      string `json:"shortName"`
 				Email          string `json:"email"`
 				Partner        string `json:"partner"`
-				PartnerID      string `json:"partnerId"`
+				UserAgent      []byte `json:"userAgent"`
 				Password       string `json:"password"`
 				SecretInput    string `json:"secret"`
 				ReferrerUserID string `json:"referrerUserId"`
@@ -103,14 +103,7 @@ func TestAuth_Register(t *testing.T) {
 				user, err := planet.Satellites[0].API.Console.Service.GetUser(ctx, userID)
 				require.NoError(t, err)
-
-				if test.ValidPartner {
-					info, err := planet.Satellites[0].API.Marketing.PartnersService.ByName(ctx, test.Partner)
-					require.NoError(t, err)
-					require.Equal(t, info.UUID, user.PartnerID)
-				} else {
-					require.Equal(t, uuid.UUID{}, user.PartnerID)
-				}
+				require.Equal(t, []byte(test.Partner), user.UserAgent)
 			}()
 		}
 	})

View File

@@ -125,7 +125,7 @@ func TestGraphqlMutation(t *testing.T) {
 		FullName:  "John Roll",
 		ShortName: "Roll",
 		Email:     "test@mail.test",
-		PartnerID: "120bf202-8252-437e-ac12-0e364bee852e",
+		UserAgent: []byte("120bf202-8252-437e-ac12-0e364bee852e"),
 		Password:  "123a123",
 	}

@@ -134,7 +134,7 @@ func TestGraphqlMutation(t *testing.T) {
 	rootUser, err := service.CreateUser(ctx, createUser, regToken.Secret)
 	require.NoError(t, err)
-	require.Equal(t, createUser.PartnerID, rootUser.PartnerID.String())
+	require.Equal(t, createUser.UserAgent, rootUser.UserAgent)

 	err = paymentsService.Accounts().Setup(ctx, rootUser.ID, rootUser.Email)
 	require.NoError(t, err)

View File

@@ -74,6 +74,7 @@ type Project struct {
 	Name        string    `json:"name"`
 	Description string    `json:"description"`
 	PartnerID   uuid.UUID `json:"partnerId"`
+	UserAgent   []byte    `json:"userAgent"`
 	OwnerID     uuid.UUID `json:"ownerId"`
 	RateLimit   *int      `json:"rateLimit"`
 	BurstLimit  *int      `json:"burstLimit"`

View File

@@ -622,11 +622,8 @@ func (s *Service) CreateUser(ctx context.Context, user CreateUser, tokenSecret R
 		HaveSalesContact: user.HaveSalesContact,
 	}

-	if user.PartnerID != "" {
-		newUser.PartnerID, err = uuid.FromString(user.PartnerID)
-		if err != nil {
-			return Error.Wrap(err)
-		}
+	if user.UserAgent != nil {
+		newUser.UserAgent = user.UserAgent
 	}

 	if registrationToken != nil {
@@ -1111,6 +1108,7 @@ func (s *Service) CreateProject(ctx context.Context, projectInfo ProjectInfo) (p
 			Name:           projectInfo.Name,
 			OwnerID:        auth.User.ID,
 			PartnerID:      auth.User.PartnerID,
+			UserAgent:      auth.User.UserAgent,
 			StorageLimit:   &storageLimit,
 			BandwidthLimit: &bandwidthLimit,
 		},
@@ -1392,6 +1390,7 @@ func (s *Service) CreateAPIKey(ctx context.Context, projectID uuid.UUID, name st
 		ProjectID: projectID,
 		Secret:    secret,
 		PartnerID: auth.User.PartnerID,
+		UserAgent: auth.User.UserAgent,
 	}

 	info, err := s.store.APIKeys().Create(ctx, key.Head(), apikey)

View File

@@ -55,6 +55,7 @@ type CreateUser struct {
 	ShortName      string `json:"shortName"`
 	Email          string `json:"email"`
 	PartnerID      string `json:"partnerId"`
+	UserAgent      []byte `json:"userAgent"`
 	Password       string `json:"password"`
 	IsProfessional bool   `json:"isProfessional"`
 	Position       string `json:"position"`
@@ -119,6 +120,7 @@ type User struct {
 	Status    UserStatus `json:"status"`
 	PartnerID uuid.UUID  `json:"partnerId"`
+	UserAgent []byte     `json:"userAgent"`
 	CreatedAt time.Time  `json:"createdAt"`

View File

@@ -29,7 +29,7 @@ func (endpoint *Endpoint) ensureAttribution(ctx context.Context, header *pb.Requ
 	if header == nil {
 		return rpcstatus.Error(rpcstatus.InvalidArgument, "header is nil")
 	}
-	if len(header.UserAgent) == 0 && keyInfo.PartnerID.IsZero() {
+	if len(header.UserAgent) == 0 && keyInfo.PartnerID.IsZero() && keyInfo.UserAgent == nil {
 		return nil
 	}
@@ -43,19 +43,18 @@ func (endpoint *Endpoint) ensureAttribution(ctx context.Context, header *pb.Requ
 		}
 	}

-	var err error
 	partnerID := keyInfo.PartnerID
-	if partnerID.IsZero() {
-		partnerID, err = endpoint.ResolvePartnerID(ctx, header)
-		if err != nil {
-			return err
-		}
-		if partnerID.IsZero() {
+	userAgent := keyInfo.UserAgent
+	// first check keyInfo (user) attribution
+	if partnerID.IsZero() && userAgent == nil {
+		// otherwise, use header (partner tool) as attribution
+		userAgent = header.UserAgent
+		if userAgent == nil {
 			return nil
 		}
 	}

-	err = endpoint.tryUpdateBucketAttribution(ctx, header, keyInfo.ProjectID, bucketName, partnerID)
+	err := endpoint.tryUpdateBucketAttribution(ctx, header, keyInfo.ProjectID, bucketName, partnerID, userAgent)
 	if errs2.IsRPC(err, rpcstatus.NotFound) || errs2.IsRPC(err, rpcstatus.AlreadyExists) {
 		return nil
 	}
@@ -116,7 +115,7 @@ func removeUplinkUserAgent(entries []useragent.Entry) []useragent.Entry {
 	return xs
 }

-func (endpoint *Endpoint) tryUpdateBucketAttribution(ctx context.Context, header *pb.RequestHeader, projectID uuid.UUID, bucketName []byte, partnerID uuid.UUID) error {
+func (endpoint *Endpoint) tryUpdateBucketAttribution(ctx context.Context, header *pb.RequestHeader, projectID uuid.UUID, bucketName []byte, partnerID uuid.UUID, userAgent []byte) error {
 	if header == nil {
 		return rpcstatus.Error(rpcstatus.InvalidArgument, "header is nil")
 	}
@@ -151,12 +150,13 @@ func (endpoint *Endpoint) tryUpdateBucketAttribution(ctx context.Context, header
 		endpoint.log.Error("error while getting bucket", zap.ByteString("bucketName", bucketName), zap.Error(err))
 		return rpcstatus.Error(rpcstatus.Internal, "unable to set bucket attribution")
 	}
-	if !bucket.PartnerID.IsZero() {
+	if !bucket.PartnerID.IsZero() || bucket.UserAgent != nil {
 		return rpcstatus.Errorf(rpcstatus.AlreadyExists, "bucket %q already has attribution, PartnerID %q cannot be attributed", bucketName, partnerID)
 	}

 	// update bucket information
 	bucket.PartnerID = partnerID
+	bucket.UserAgent = userAgent
 	_, err = endpoint.buckets.UpdateBucket(ctx, bucket)
 	if err != nil {
 		endpoint.log.Error("error while updating bucket", zap.ByteString("bucketName", bucketName), zap.Error(err))
@@ -168,6 +168,7 @@ func (endpoint *Endpoint) tryUpdateBucketAttribution(ctx context.Context, header
 		ProjectID:  projectID,
 		BucketName: bucketName,
 		PartnerID:  partnerID,
+		UserAgent:  userAgent,
 	})
 	if err != nil {
 		endpoint.log.Error("error while inserting attribution to DB", zap.Error(err))

View File

@@ -82,31 +82,25 @@ func TestBucketAttribution(t *testing.T) {
 		UplinkCount: 1,
 	}, func(t *testing.T, ctx *testcontext.Context, planet *testplanet.Planet) {
 		for i, tt := range []struct {
-			signupPartner       string
-			userAgent           string
-			expectedAttribution string
+			signupPartner       []byte
+			userAgent           []byte
+			expectedAttribution []byte
 		}{
-			{signupPartner: "", userAgent: "", expectedAttribution: ""},
-			{signupPartner: "Minio", userAgent: "", expectedAttribution: "Minio"},
-			{signupPartner: "Minio", userAgent: "Minio", expectedAttribution: "Minio"},
-			{signupPartner: "Minio", userAgent: "Zenko", expectedAttribution: "Minio"},
-			{signupPartner: "", userAgent: "Zenko", expectedAttribution: "Zenko"},
+			{signupPartner: nil, userAgent: nil, expectedAttribution: nil},
+			{signupPartner: []byte("Minio"), userAgent: nil, expectedAttribution: []byte("Minio")},
+			{signupPartner: []byte("Minio"), userAgent: []byte("Minio"), expectedAttribution: []byte("Minio")},
+			{signupPartner: []byte("Minio"), userAgent: []byte("Zenko"), expectedAttribution: []byte("Minio")},
+			{signupPartner: nil, userAgent: []byte("Zenko"), expectedAttribution: []byte("Zenko")},
 		} {
 			errTag := fmt.Sprintf("%d. %+v", i, tt)

 			satellite := planet.Satellites[0]

-			var signupPartnerID string
-			if tt.signupPartner != "" {
-				partner, err := satellite.API.Marketing.PartnersService.ByName(ctx, tt.signupPartner)
-				require.NoError(t, err, errTag)
-				signupPartnerID = partner.ID
-			}
-
 			user, err := satellite.AddUser(ctx, console.CreateUser{
 				FullName:  "Test User " + strconv.Itoa(i),
 				Email:     "user@test" + strconv.Itoa(i),
-				PartnerID: signupPartnerID,
+				PartnerID: "",
+				UserAgent: tt.signupPartner,
 			}, 1)
 			require.NoError(t, err, errTag)
@@ -120,7 +114,7 @@ func TestBucketAttribution(t *testing.T) {
 			require.NoError(t, err, errTag)

 			config := uplink.Config{
-				UserAgent: tt.userAgent,
+				UserAgent: string(tt.userAgent),
 			}
 			access, err := config.RequestAccessWithPassphrase(ctx, satellite.NodeURL().String(), apiKeyInfo.Serialize(), "mypassphrase")
 			require.NoError(t, err, errTag)
@@ -131,23 +125,16 @@ func TestBucketAttribution(t *testing.T) {
 			_, err = project.CreateBucket(ctx, "bucket")
 			require.NoError(t, err, errTag)

-			var expectedPartnerID uuid.UUID
-			if tt.expectedAttribution != "" {
-				expectedPartner, err := planet.Satellites[0].API.Marketing.PartnersService.ByName(ctx, tt.expectedAttribution)
-				require.NoError(t, err, errTag)
-				expectedPartnerID = expectedPartner.UUID
-			}
-
 			bucketInfo, err := satellite.DB.Buckets().GetBucket(ctx, []byte("bucket"), satProject.ID)
 			require.NoError(t, err, errTag)
-			assert.Equal(t, expectedPartnerID, bucketInfo.PartnerID, errTag)
+			assert.Equal(t, tt.expectedAttribution, bucketInfo.UserAgent, errTag)

 			attributionInfo, err := planet.Satellites[0].DB.Attribution().Get(ctx, satProject.ID, []byte("bucket"))
-			if tt.expectedAttribution == "" {
+			if tt.expectedAttribution == nil {
 				assert.True(t, attribution.ErrBucketNotAttributed.Has(err), errTag)
 			} else {
 				require.NoError(t, err, errTag)
-				assert.Equal(t, expectedPartnerID, attributionInfo.PartnerID, errTag)
+				assert.Equal(t, tt.expectedAttribution, attributionInfo.UserAgent, errTag)
 			}
 		}
 	})
@@ -168,13 +155,13 @@ func TestQueryAttribution(t *testing.T) {
 		now := time.Now()
 		tomorrow := now.Add(24 * time.Hour)

-		partner, err := satellite.API.Marketing.PartnersService.ByName(ctx, "Minio")
-		require.NoError(t, err)
+		userAgent := "Minio"

 		user, err := satellite.AddUser(ctx, console.CreateUser{
 			FullName:  "user@test",
 			Email:     "user@test",
-			PartnerID: partner.ID,
+			PartnerID: "",
+			UserAgent: []byte(userAgent),
 		}, 1)
 		require.NoError(t, err)
@@ -240,10 +227,12 @@ func TestQueryAttribution(t *testing.T) {
 			require.NoError(t, err)
 			require.NotZero(t, usage.Egress)

-			partner, err := planet.Satellites[0].API.Marketing.PartnersService.ByName(ctx, "Minio")
+			partner, _ := planet.Satellites[0].API.Marketing.PartnersService.ByName(ctx, "")
+			userAgent := []byte("Minio")
 			require.NoError(t, err)

-			rows, err := planet.Satellites[0].DB.Attribution().QueryAttribution(ctx, partner.UUID, before, after)
+			rows, err := planet.Satellites[0].DB.Attribution().QueryAttribution(ctx, partner.UUID, userAgent, before, after)
 			require.NoError(t, err)
 			require.NotZero(t, rows[0].TotalBytesPerHour)
 			require.Equal(t, rows[0].EgressData, usage.Egress)
@@ -314,19 +303,19 @@ func TestAttributionReport(t *testing.T) {
 			require.NoError(t, err)
 			require.NotZero(t, usage.Egress)

-			partner, err := planet.Satellites[0].API.Marketing.PartnersService.ByUserAgent(ctx, "Zenko")
-			require.NoError(t, err)
+			partner, _ := planet.Satellites[0].API.Marketing.PartnersService.ByUserAgent(ctx, "")
+			userAgent := []byte("Zenko/1.0")

-			rows, err := planet.Satellites[0].DB.Attribution().QueryAttribution(ctx, partner.UUID, before, after)
+			rows, err := planet.Satellites[0].DB.Attribution().QueryAttribution(ctx, partner.UUID, userAgent, before, after)
 			require.NoError(t, err)
 			require.NotZero(t, rows[0].TotalBytesPerHour)
 			require.Equal(t, rows[0].EgressData, usage.Egress)

 			// Minio should have no attribution because bucket was created by Zenko
-			partner, err = planet.Satellites[0].API.Marketing.PartnersService.ByUserAgent(ctx, "Minio")
-			require.NoError(t, err)
+			partner, _ = planet.Satellites[0].API.Marketing.PartnersService.ByUserAgent(ctx, "")
+			userAgent = []byte("Minio/1.0")

-			rows, err = planet.Satellites[0].DB.Attribution().QueryAttribution(ctx, partner.UUID, before, after)
+			rows, err = planet.Satellites[0].DB.Attribution().QueryAttribution(ctx, partner.UUID, userAgent, before, after)
 			require.NoError(t, err)
 			require.Empty(t, rows)
 		}
@@ -367,13 +356,8 @@ func TestBucketAttributionConcurrentUpload(t *testing.T) {
 		ctx.Wait()

-		expectedPartnerID, err := satellite.Metainfo.Endpoint.ResolvePartnerID(ctx, &pb.RequestHeader{
-			UserAgent: []byte("Minio"),
-		})
-		require.NoError(t, err)
-
 		attributionInfo, err := planet.Satellites[0].DB.Attribution().Get(ctx, planet.Uplinks[0].Projects[0].ID, []byte("attr-bucket"))
 		require.NoError(t, err)
-		require.Equal(t, expectedPartnerID, attributionInfo.PartnerID)
+		require.Equal(t, []byte(config.UserAgent), attributionInfo.UserAgent)
 	})
 }

View File

@@ -176,6 +176,10 @@ func (keys *apikeys) Create(ctx context.Context, head []byte, info console.APIKe
 		optional.PartnerId = dbx.ApiKey_PartnerId(info.PartnerID[:])
 	}

+	if info.UserAgent != nil {
+		optional.UserAgent = dbx.ApiKey_UserAgent(info.UserAgent)
+	}
+
 	dbKey, err := keys.methods.Create_ApiKey(
 		ctx,
 		dbx.ApiKey_Id(id[:]),
@@ -240,6 +244,10 @@ func fromDBXAPIKey(ctx context.Context, key *dbx.ApiKey) (_ *console.APIKeyInfo,
 		}
 	}

+	if key.UserAgent != nil {
+		result.UserAgent = key.UserAgent
+	}
+
 	return result, nil
 }

View File

@@ -22,6 +22,7 @@ const (
 	-- Should be 1 row per project/bucket by partner within the timeframe specified
 	SELECT
 		o.partner_id as partner_id,
+		o.user_agent as user_agent,
 		o.project_id as project_id,
 		o.bucket_name as bucket_name,
 		SUM(o.total) / SUM(o.hours) as total,
@@ -34,6 +35,7 @@ const (
 		-- Hours are used to calculate byte hours above
 		SELECT
 			bsti.partner_id as partner_id,
+			bsti.user_agent as user_agent,
 			bsto.project_id as project_id,
 			bsto.bucket_name as bucket_name,
 			SUM(bsto.total_bytes) as total,
@@ -47,6 +49,7 @@ const (
 			-- If there are more than 1 records within the hour, only the latest will be considered
 			SELECT
 				va.partner_id,
+				va.user_agent,
 				date_trunc('hour', bst.interval_start) as hours,
 				bst.project_id,
 				bst.bucket_name,
@@ -59,10 +62,12 @@ const (
 			)
 			WHERE
 				va.partner_id = ?
+				AND va.user_agent = ?
 				AND bst.interval_start >= ?
 				AND bst.interval_start < ?
 			GROUP BY
 				va.partner_id,
+				va.user_agent,
 				bst.project_id,
 				bst.bucket_name,
 				date_trunc('hour', bst.interval_start)
@@ -76,12 +81,14 @@ const (
 		)
 		GROUP BY
 			bsti.partner_id,
+			bsti.user_agent,
 			bsto.project_id,
 			bsto.bucket_name
 		UNION
-		-- SUM the bandwidth for the timeframe specified grouping by the partner_id, project_id, and bucket_name
+		-- SUM the bandwidth for the timeframe specified grouping by the partner_id, user_agent, project_id, and bucket_name
 		SELECT
 			va.partner_id as partner_id,
+			va.user_agent as user_agent,
 			bbr.project_id as project_id,
 			bbr.bucket_name as bucket_name,
 			0 as total,
@@ -97,16 +104,19 @@ const (
 		)
 		WHERE
 			va.partner_id = ?
+			AND va.user_agent = ?
 			AND bbr.interval_start >= ?
 			AND bbr.interval_start < ?
 			AND bbr.action = 2
 		GROUP BY
 			va.partner_id,
+			va.user_agent,
 			bbr.project_id,
 			bbr.bucket_name
 	) AS o
 	GROUP BY
 		o.partner_id,
+		o.user_agent,
 		o.project_id,
 		o.bucket_name;
 	`
@@ -139,11 +149,11 @@ func (keys *attributionDB) Insert(ctx context.Context, info *attribution.Info) (
 	defer mon.Task()(&ctx)(&err)

 	err = keys.db.QueryRowContext(ctx, `
-		INSERT INTO value_attributions (project_id, bucket_name, partner_id, last_updated)
-		VALUES ($1, $2, $3, now())
+		INSERT INTO value_attributions (project_id, bucket_name, partner_id, user_agent, last_updated)
+		VALUES ($1, $2, $3, $4, now())
 		ON CONFLICT (project_id, bucket_name) DO NOTHING
 		RETURNING last_updated
-	`, info.ProjectID[:], info.BucketName, info.PartnerID[:]).Scan(&info.CreatedAt)
+	`, info.ProjectID[:], info.BucketName, info.PartnerID[:], info.UserAgent).Scan(&info.CreatedAt)
 	// TODO when sql.ErrNoRows is returned then CreatedAt is not set
 	if errors.Is(err, sql.ErrNoRows) {
 		return info, nil
@@ -156,10 +166,10 @@ func (keys *attributionDB) Insert(ctx context.Context, info *attribution.Info) (
 }

 // QueryAttribution queries partner bucket attribution data.
-func (keys *attributionDB) QueryAttribution(ctx context.Context, partnerID uuid.UUID, start time.Time, end time.Time) (_ []*attribution.CSVRow, err error) {
+func (keys *attributionDB) QueryAttribution(ctx context.Context, partnerID uuid.UUID, userAgent []byte, start time.Time, end time.Time) (_ []*attribution.CSVRow, err error) {
 	defer mon.Task()(&ctx)(&err)

-	rows, err := keys.db.DB.QueryContext(ctx, keys.db.Rebind(valueAttrQuery), partnerID[:], start.UTC(), end.UTC(), partnerID[:], start.UTC(), end.UTC())
+	rows, err := keys.db.DB.QueryContext(ctx, keys.db.Rebind(valueAttrQuery), partnerID[:], userAgent, start.UTC(), end.UTC(), partnerID[:], userAgent, start.UTC(), end.UTC())
 	if err != nil {
 		return nil, Error.Wrap(err)
 	}
@@ -169,7 +179,7 @@ func (keys *attributionDB) QueryAttribution(ctx context.Context, partnerID uuid.
 	for rows.Next() {
 		r := &attribution.CSVRow{}
 		var inline, remote float64
-		err := rows.Scan(&r.PartnerID, &r.ProjectID, &r.BucketName, &r.TotalBytesPerHour, &inline, &remote, &r.EgressData)
+		err := rows.Scan(&r.PartnerID, &r.UserAgent, &r.ProjectID, &r.BucketName, &r.TotalBytesPerHour, &inline, &remote, &r.EgressData)
 		if err != nil {
 			return results, Error.Wrap(err)
 		}
@@ -188,6 +198,10 @@ func attributionFromDBX(info *dbx.ValueAttribution) (*attribution.Info, error) {
 	if err != nil {
 		return nil, Error.Wrap(err)
 	}
+	userAgent := info.UserAgent
+	if err != nil {
+		return nil, Error.Wrap(err)
+	}
 	projectID, err := uuid.FromBytes(info.ProjectId)
 	if err != nil {
 		return nil, Error.Wrap(err)
@@ -197,6 +211,7 @@ func attributionFromDBX(info *dbx.ValueAttribution) (*attribution.Info, error) {
 		ProjectID:  projectID,
 		BucketName: info.BucketName,
 		PartnerID:  partnerID,
+		UserAgent:  userAgent,
 		CreatedAt:  info.LastUpdated,
 	}, nil
 }
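Because valueAttrQuery is a UNION of a storage-tally branch and a bandwidth-rollup branch, each with its own WHERE clause, the (partnerID, userAgent, start, end) tuple is bound twice, as the QueryContext call above shows. A hypothetical caller, for illustration only, assuming the interfaces from satellite/attribution and the storj.io/common/uuid package:

    package main

    import (
        "context"
        "fmt"
        "time"

        "storj.io/common/uuid"
        "storj.io/storj/satellite/attribution"
    )

    // printRows sketches the new userAgent argument threaded through
    // QueryAttribution; the DB layer binds it into both UNION branches.
    func printRows(ctx context.Context, db attribution.DB, partnerID uuid.UUID, start, end time.Time) error {
        rows, err := db.QueryAttribution(ctx, partnerID, []byte("Minio"), start, end)
        if err != nil {
            return err
        }
        for _, r := range rows {
            fmt.Printf("%s %s %.2f byte-hours\n", r.UserAgent, r.BucketName, r.TotalBytesPerHour)
        }
        return nil
    }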

View File

@@ -24,9 +24,10 @@ func (db *bucketsDB) CreateBucket(ctx context.Context, bucket storj.Bucket) (_ s
 	defer mon.Task()(&ctx)(&err)

 	partnerID := dbx.BucketMetainfo_Create_Fields{}
-	if !bucket.PartnerID.IsZero() {
+	if !bucket.PartnerID.IsZero() || bucket.UserAgent != nil {
 		partnerID = dbx.BucketMetainfo_Create_Fields{
 			PartnerId: dbx.BucketMetainfo_PartnerId(bucket.PartnerID[:]),
+			UserAgent: dbx.BucketMetainfo_UserAgent(bucket.UserAgent),
 		}
 	}
@@ -109,12 +110,18 @@ func (db *bucketsDB) GetBucketID(ctx context.Context, bucket metabase.BucketLoca
 func (db *bucketsDB) UpdateBucket(ctx context.Context, bucket storj.Bucket) (_ storj.Bucket, err error) {
 	defer mon.Task()(&ctx)(&err)

-	if bucket.PartnerID.IsZero() {
-		return storj.Bucket{}, Error.New("partnerId is zero")
+	if bucket.PartnerID.IsZero() && bucket.UserAgent == nil {
+		return storj.Bucket{}, Error.New("no partner ID or user agent found")
 	}

 	var updateFields dbx.BucketMetainfo_Update_Fields
-	updateFields.PartnerId = dbx.BucketMetainfo_PartnerId(bucket.PartnerID[:])
+	if !bucket.PartnerID.IsZero() {
+		updateFields.PartnerId = dbx.BucketMetainfo_PartnerId(bucket.PartnerID[:])
+	}
+
+	if bucket.UserAgent != nil {
+		updateFields.UserAgent = dbx.BucketMetainfo_UserAgent(bucket.UserAgent)
+	}

 	dbxBucket, err := db.db.Update_BucketMetainfo_By_ProjectId_And_Name(ctx, dbx.BucketMetainfo_ProjectId(bucket.ProjectID[:]), dbx.BucketMetainfo_Name([]byte(bucket.Name)), updateFields)
 	if err != nil {
@@ -264,5 +271,9 @@ func convertDBXtoBucket(dbxBucket *dbx.BucketMetainfo) (bucket storj.Bucket, err
 		bucket.PartnerID = partnerID
 	}

+	if dbxBucket.UserAgent != nil {
+		bucket.UserAgent = dbxBucket.UserAgent
+	}
+
 	return bucket, nil
 }

View File

@@ -7,6 +7,7 @@ model value_attribution (
 	field project_id   blob
 	field bucket_name  blob
 	field partner_id   blob
+	field user_agent   blob ( nullable )
 	field last_updated timestamp ( autoinsert, autoupdate )
 )
@@ -283,6 +284,7 @@ model user (
 	field status        int ( updatable, autoinsert )
 	field partner_id    blob ( nullable )
+	field user_agent    blob ( nullable )
 	field created_at    timestamp ( autoinsert )
 	field project_limit int ( updatable, default 0 )
 	field paid_tier     bool ( updatable, default false )
@@ -331,6 +333,7 @@ model project (
 	field burst_limit int ( nullable, updatable )
 	field max_buckets int ( nullable, updatable )
 	field partner_id  blob ( nullable )
+	field user_agent  blob ( nullable )
 	field owner_id    blob

 	field created_at timestamp ( autoinsert )
@@ -418,6 +421,7 @@ model api_key (
 	field name       text (updatable)
 	field secret     blob
 	field partner_id blob (nullable)
+	field user_agent blob (nullable)
 	field created_at timestamp (autoinsert)
 )
@@ -897,6 +901,7 @@ model bucket_metainfo (
 	field project_id project.id restrict
 	field name       blob
 	field partner_id blob (nullable, updatable)
+	field user_agent blob (nullable, updatable)
 	field path_cipher int

File diff suppressed because it is too large

View File

@@ -194,6 +194,7 @@ CREATE TABLE projects (
 	burst_limit integer,
 	max_buckets integer,
 	partner_id bytea,
+	user_agent bytea,
 	owner_id bytea NOT NULL,
 	created_at timestamp with time zone NOT NULL,
 	PRIMARY KEY ( id )
@@ -375,6 +376,7 @@ CREATE TABLE users (
 	password_hash bytea NOT NULL,
 	status integer NOT NULL,
 	partner_id bytea,
+	user_agent bytea,
 	created_at timestamp with time zone NOT NULL,
 	project_limit integer NOT NULL DEFAULT 0,
 	paid_tier boolean NOT NULL DEFAULT false,
@@ -394,6 +396,7 @@ CREATE TABLE value_attributions (
 	project_id bytea NOT NULL,
 	bucket_name bytea NOT NULL,
 	partner_id bytea NOT NULL,
+	user_agent bytea,
 	last_updated timestamp with time zone NOT NULL,
 	PRIMARY KEY ( project_id, bucket_name )
 );
@@ -404,6 +407,7 @@ CREATE TABLE api_keys (
 	name text NOT NULL,
 	secret bytea NOT NULL,
 	partner_id bytea,
+	user_agent bytea,
 	created_at timestamp with time zone NOT NULL,
 	PRIMARY KEY ( id ),
 	UNIQUE ( head ),
@@ -414,6 +418,7 @@ CREATE TABLE bucket_metainfos (
 	project_id bytea NOT NULL REFERENCES projects( id ),
 	name bytea NOT NULL,
 	partner_id bytea,
+	user_agent bytea,
 	path_cipher integer NOT NULL,
 	created_at timestamp with time zone NOT NULL,
 	default_segment_size integer NOT NULL,

View File

@@ -194,6 +194,7 @@ CREATE TABLE projects (
 	burst_limit integer,
 	max_buckets integer,
 	partner_id bytea,
+	user_agent bytea,
 	owner_id bytea NOT NULL,
 	created_at timestamp with time zone NOT NULL,
 	PRIMARY KEY ( id )
@@ -375,6 +376,7 @@ CREATE TABLE users (
 	password_hash bytea NOT NULL,
 	status integer NOT NULL,
 	partner_id bytea,
+	user_agent bytea,
 	created_at timestamp with time zone NOT NULL,
 	project_limit integer NOT NULL DEFAULT 0,
 	paid_tier boolean NOT NULL DEFAULT false,
@@ -394,6 +396,7 @@ CREATE TABLE value_attributions (
 	project_id bytea NOT NULL,
 	bucket_name bytea NOT NULL,
 	partner_id bytea NOT NULL,
+	user_agent bytea,
 	last_updated timestamp with time zone NOT NULL,
 	PRIMARY KEY ( project_id, bucket_name )
 );
@@ -404,6 +407,7 @@ CREATE TABLE api_keys (
 	name text NOT NULL,
 	secret bytea NOT NULL,
 	partner_id bytea,
+	user_agent bytea,
 	created_at timestamp with time zone NOT NULL,
 	PRIMARY KEY ( id ),
 	UNIQUE ( head ),
@@ -414,6 +418,7 @@ CREATE TABLE bucket_metainfos (
 	project_id bytea NOT NULL REFERENCES projects( id ),
 	name bytea NOT NULL,
 	partner_id bytea,
+	user_agent bytea,
 	path_cipher integer NOT NULL,
 	created_at timestamp with time zone NOT NULL,
 	default_segment_size integer NOT NULL,

View File

@@ -1643,6 +1643,18 @@ func (db *satelliteDB) PostgresMigration() *migrate.Migration {
 					`DROP TABLE graceful_exit_transfer_queue`,
 				},
 			},
+			{
+				DB:          &db.migrationDB,
+				Description: "add user_agent bytes to the value_attributions, users, projects, api_keys and bucket_metainfos tables",
+				Version:     174,
+				Action: migrate.SQL{
+					`ALTER TABLE value_attributions ADD COLUMN user_agent bytea;`,
+					`ALTER TABLE users ADD COLUMN user_agent bytea;`,
+					`ALTER TABLE projects ADD COLUMN user_agent bytea;`,
+					`ALTER TABLE api_keys ADD COLUMN user_agent bytea;`,
+					`ALTER TABLE bucket_metainfos ADD COLUMN user_agent bytea;`,
+				},
+			},
 			// NB: after updating testdata in `testdata`, run
 			//     `go generate` to update `migratez.go`.
 		},

View File

@@ -13,7 +13,7 @@ func (db *satelliteDB) testMigration() *migrate.Migration {
 			{
 				DB:          &db.migrationDB,
 				Description: "Testing setup",
-				Version:     173,
+				Version:     174,
 				Action: migrate.SQL{`-- AUTOGENERATED BY storj.io/dbx
 -- DO NOT EDIT
 CREATE TABLE accounting_rollups (
@@ -211,6 +211,7 @@ CREATE TABLE projects (
 	burst_limit integer,
 	max_buckets integer,
 	partner_id bytea,
+	user_agent bytea,
 	owner_id bytea NOT NULL,
 	created_at timestamp with time zone NOT NULL,
 	PRIMARY KEY ( id )
@@ -393,6 +394,7 @@ CREATE TABLE users (
 	password_hash bytea NOT NULL,
 	status integer NOT NULL,
 	partner_id bytea,
+	user_agent bytea,
 	created_at timestamp with time zone NOT NULL,
 	project_limit integer NOT NULL DEFAULT 0,
 	paid_tier boolean NOT NULL DEFAULT false,
@@ -412,6 +414,7 @@ CREATE TABLE value_attributions (
 	project_id bytea NOT NULL,
 	bucket_name bytea NOT NULL,
 	partner_id bytea NOT NULL,
+	user_agent bytea,
 	last_updated timestamp with time zone NOT NULL,
 	PRIMARY KEY ( project_id, bucket_name )
 );
@@ -422,6 +425,7 @@ CREATE TABLE api_keys (
 	name text NOT NULL,
 	secret bytea NOT NULL,
 	partner_id bytea,
+	user_agent bytea,
 	created_at timestamp with time zone NOT NULL,
 	PRIMARY KEY ( id ),
 	UNIQUE ( head ),
@@ -432,6 +436,7 @@ CREATE TABLE bucket_metainfos (
 	project_id bytea NOT NULL REFERENCES projects( id ),
 	name bytea NOT NULL,
 	partner_id bytea,
+	user_agent bytea,
 	path_cipher integer NOT NULL,
 	created_at timestamp with time zone NOT NULL,
 	default_segment_size integer NOT NULL,

View File

@@ -97,6 +97,9 @@ func (projects *projects) Insert(ctx context.Context, project *console.Project)
 	if !project.PartnerID.IsZero() {
 		createFields.PartnerId = dbx.Project_PartnerId(project.PartnerID[:])
 	}
+	if project.UserAgent != nil {
+		createFields.UserAgent = dbx.Project_UserAgent(project.UserAgent)
+	}
 	if project.StorageLimit != nil {
 		createFields.UsageLimit = dbx.Project_UsageLimit(project.StorageLimit.Int64())
 	}
@@ -328,6 +331,11 @@ func projectFromDBX(ctx context.Context, project *dbx.Project) (_ *console.Proje
 		}
 	}

+	var userAgent []byte
+	if len(project.UserAgent) > 0 {
+		userAgent = project.UserAgent
+	}
+
 	ownerID, err := uuid.FromBytes(project.OwnerId)
 	if err != nil {
 		return nil, err
@@ -338,6 +346,7 @@ func projectFromDBX(ctx context.Context, project *dbx.Project) (_ *console.Proje
 		Name:        project.Name,
 		Description: project.Description,
 		PartnerID:   partnerID,
+		UserAgent:   userAgent,
 		OwnerID:     ownerID,
 		RateLimit:   project.RateLimit,
 		BurstLimit:  project.BurstLimit,

View File

@@ -0,0 +1,599 @@
-- AUTOGENERATED BY storj.io/dbx
-- DO NOT EDIT
CREATE TABLE accounting_rollups (
node_id bytea NOT NULL,
start_time timestamp with time zone NOT NULL,
put_total bigint NOT NULL,
get_total bigint NOT NULL,
get_audit_total bigint NOT NULL,
get_repair_total bigint NOT NULL,
put_repair_total bigint NOT NULL,
at_rest_total double precision NOT NULL,
PRIMARY KEY ( node_id, start_time )
);
CREATE TABLE accounting_timestamps (
name text NOT NULL,
value timestamp with time zone NOT NULL,
PRIMARY KEY ( name )
);
CREATE TABLE bucket_bandwidth_rollups (
bucket_name bytea NOT NULL,
project_id bytea NOT NULL,
interval_start timestamp with time zone NOT NULL,
interval_seconds integer NOT NULL,
action integer NOT NULL,
inline bigint NOT NULL,
allocated bigint NOT NULL,
settled bigint NOT NULL,
PRIMARY KEY ( bucket_name, project_id, interval_start, action )
);
CREATE TABLE bucket_bandwidth_rollup_archives (
bucket_name bytea NOT NULL,
project_id bytea NOT NULL,
interval_start timestamp with time zone NOT NULL,
interval_seconds integer NOT NULL,
action integer NOT NULL,
inline bigint NOT NULL,
allocated bigint NOT NULL,
settled bigint NOT NULL,
PRIMARY KEY ( bucket_name, project_id, interval_start, action )
);
CREATE TABLE bucket_storage_tallies (
bucket_name bytea NOT NULL,
project_id bytea NOT NULL,
interval_start timestamp with time zone NOT NULL,
total_bytes bigint NOT NULL DEFAULT 0,
inline bigint NOT NULL,
remote bigint NOT NULL,
total_segments_count integer NOT NULL DEFAULT 0,
remote_segments_count integer NOT NULL,
inline_segments_count integer NOT NULL,
object_count integer NOT NULL,
metadata_size bigint NOT NULL,
PRIMARY KEY ( bucket_name, project_id, interval_start )
);
CREATE TABLE coinpayments_transactions (
id text NOT NULL,
user_id bytea NOT NULL,
address text NOT NULL,
amount bytea NOT NULL,
received bytea NOT NULL,
status integer NOT NULL,
key text NOT NULL,
timeout integer NOT NULL,
created_at timestamp with time zone NOT NULL,
PRIMARY KEY ( id )
);
CREATE TABLE coupons (
id bytea NOT NULL,
user_id bytea NOT NULL,
amount bigint NOT NULL,
description text NOT NULL,
type integer NOT NULL,
status integer NOT NULL,
duration bigint NOT NULL,
billing_periods bigint,
coupon_code_name text,
created_at timestamp with time zone NOT NULL,
PRIMARY KEY ( id )
);
CREATE TABLE coupon_codes (
id bytea NOT NULL,
name text NOT NULL,
amount bigint NOT NULL,
description text NOT NULL,
type integer NOT NULL,
billing_periods bigint,
created_at timestamp with time zone NOT NULL,
PRIMARY KEY ( id ),
UNIQUE ( name )
);
CREATE TABLE coupon_usages (
coupon_id bytea NOT NULL,
amount bigint NOT NULL,
status integer NOT NULL,
period timestamp with time zone NOT NULL,
PRIMARY KEY ( coupon_id, period )
);
CREATE TABLE graceful_exit_progress (
node_id bytea NOT NULL,
bytes_transferred bigint NOT NULL,
pieces_transferred bigint NOT NULL DEFAULT 0,
pieces_failed bigint NOT NULL DEFAULT 0,
updated_at timestamp with time zone NOT NULL,
uses_segment_transfer_queue boolean NOT NULL DEFAULT false,
PRIMARY KEY ( node_id )
);
CREATE TABLE graceful_exit_segment_transfer_queue (
node_id bytea NOT NULL,
stream_id bytea NOT NULL,
position bigint NOT NULL,
piece_num integer NOT NULL,
root_piece_id bytea,
durability_ratio double precision NOT NULL,
queued_at timestamp with time zone NOT NULL,
requested_at timestamp with time zone,
last_failed_at timestamp with time zone,
last_failed_code integer,
failed_count integer,
finished_at timestamp with time zone,
order_limit_send_count integer NOT NULL DEFAULT 0,
PRIMARY KEY ( node_id, stream_id, position, piece_num )
);
CREATE TABLE nodes (
id bytea NOT NULL,
address text NOT NULL DEFAULT '',
last_net text NOT NULL,
last_ip_port text,
protocol integer NOT NULL DEFAULT 0,
type integer NOT NULL DEFAULT 0,
email text NOT NULL,
wallet text NOT NULL,
wallet_features text NOT NULL DEFAULT '',
free_disk bigint NOT NULL DEFAULT -1,
piece_count bigint NOT NULL DEFAULT 0,
major bigint NOT NULL DEFAULT 0,
minor bigint NOT NULL DEFAULT 0,
patch bigint NOT NULL DEFAULT 0,
hash text NOT NULL DEFAULT '',
timestamp timestamp with time zone NOT NULL DEFAULT '0001-01-01 00:00:00+00',
release boolean NOT NULL DEFAULT false,
latency_90 bigint NOT NULL DEFAULT 0,
vetted_at timestamp with time zone,
created_at timestamp with time zone NOT NULL DEFAULT current_timestamp,
updated_at timestamp with time zone NOT NULL DEFAULT current_timestamp,
last_contact_success timestamp with time zone NOT NULL DEFAULT 'epoch',
last_contact_failure timestamp with time zone NOT NULL DEFAULT 'epoch',
contained boolean NOT NULL DEFAULT false,
disqualified timestamp with time zone,
suspended timestamp with time zone,
unknown_audit_suspended timestamp with time zone,
offline_suspended timestamp with time zone,
under_review timestamp with time zone,
exit_initiated_at timestamp with time zone,
exit_loop_completed_at timestamp with time zone,
exit_finished_at timestamp with time zone,
exit_success boolean NOT NULL DEFAULT false,
PRIMARY KEY ( id )
);
CREATE TABLE node_api_versions (
id bytea NOT NULL,
api_version integer NOT NULL,
created_at timestamp with time zone NOT NULL,
updated_at timestamp with time zone NOT NULL,
PRIMARY KEY ( id )
);
CREATE TABLE offers (
id serial NOT NULL,
name text NOT NULL,
description text NOT NULL,
award_credit_in_cents integer NOT NULL DEFAULT 0,
invitee_credit_in_cents integer NOT NULL DEFAULT 0,
award_credit_duration_days integer,
invitee_credit_duration_days integer,
redeemable_cap integer,
expires_at timestamp with time zone NOT NULL,
created_at timestamp with time zone NOT NULL,
status integer NOT NULL,
type integer NOT NULL,
PRIMARY KEY ( id )
);
CREATE TABLE peer_identities (
node_id bytea NOT NULL,
leaf_serial_number bytea NOT NULL,
chain bytea NOT NULL,
updated_at timestamp with time zone NOT NULL,
PRIMARY KEY ( node_id )
);
CREATE TABLE projects (
id bytea NOT NULL,
name text NOT NULL,
description text NOT NULL,
usage_limit bigint,
bandwidth_limit bigint,
rate_limit integer,
burst_limit integer,
max_buckets integer,
partner_id bytea,
user_agent bytea,
owner_id bytea NOT NULL,
created_at timestamp with time zone NOT NULL,
PRIMARY KEY ( id )
);
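-- The user_agent column above is the new nullable companion to partner_id; the
-- same pair now appears on every table that previously carried partner_id
-- (users, value_attributions, api_keys, bucket_metainfos). A minimal sketch of
-- the migration step that yields this end state (an assumption for
-- illustration; the migration file itself is outside this excerpt):
--   ALTER TABLE projects ADD COLUMN user_agent bytea;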
CREATE TABLE project_bandwidth_daily_rollups (
project_id bytea NOT NULL,
interval_day date NOT NULL,
egress_allocated bigint NOT NULL,
egress_settled bigint NOT NULL,
egress_dead bigint NOT NULL DEFAULT 0,
PRIMARY KEY ( project_id, interval_day )
);
CREATE TABLE project_bandwidth_rollups (
project_id bytea NOT NULL,
interval_month date NOT NULL,
egress_allocated bigint NOT NULL,
PRIMARY KEY ( project_id, interval_month )
);
CREATE TABLE registration_tokens (
secret bytea NOT NULL,
owner_id bytea,
project_limit integer NOT NULL,
created_at timestamp with time zone NOT NULL,
PRIMARY KEY ( secret ),
UNIQUE ( owner_id )
);
CREATE TABLE repair_queue (
stream_id bytea NOT NULL,
position bigint NOT NULL,
attempted_at timestamp with time zone,
updated_at timestamp with time zone NOT NULL DEFAULT current_timestamp,
inserted_at timestamp with time zone NOT NULL DEFAULT current_timestamp,
segment_health double precision NOT NULL DEFAULT 1,
PRIMARY KEY ( stream_id, position )
);
CREATE TABLE reputations (
id bytea NOT NULL,
audit_success_count bigint NOT NULL DEFAULT 0,
total_audit_count bigint NOT NULL DEFAULT 0,
vetted_at timestamp with time zone,
created_at timestamp with time zone NOT NULL DEFAULT current_timestamp,
updated_at timestamp with time zone NOT NULL DEFAULT current_timestamp,
contained boolean NOT NULL DEFAULT false,
disqualified timestamp with time zone,
suspended timestamp with time zone,
unknown_audit_suspended timestamp with time zone,
offline_suspended timestamp with time zone,
under_review timestamp with time zone,
online_score double precision NOT NULL DEFAULT 1,
audit_history bytea NOT NULL,
audit_reputation_alpha double precision NOT NULL DEFAULT 1,
audit_reputation_beta double precision NOT NULL DEFAULT 0,
unknown_audit_reputation_alpha double precision NOT NULL DEFAULT 1,
unknown_audit_reputation_beta double precision NOT NULL DEFAULT 0,
PRIMARY KEY ( id )
);
CREATE TABLE reset_password_tokens (
secret bytea NOT NULL,
owner_id bytea NOT NULL,
created_at timestamp with time zone NOT NULL,
PRIMARY KEY ( secret ),
UNIQUE ( owner_id )
);
CREATE TABLE revocations (
revoked bytea NOT NULL,
api_key_id bytea NOT NULL,
PRIMARY KEY ( revoked )
);
CREATE TABLE segment_pending_audits (
node_id bytea NOT NULL,
stream_id bytea NOT NULL,
position bigint NOT NULL,
piece_id bytea NOT NULL,
stripe_index bigint NOT NULL,
share_size bigint NOT NULL,
expected_share_hash bytea NOT NULL,
reverify_count bigint NOT NULL,
PRIMARY KEY ( node_id )
);
CREATE TABLE storagenode_bandwidth_rollups (
storagenode_id bytea NOT NULL,
interval_start timestamp with time zone NOT NULL,
interval_seconds integer NOT NULL,
action integer NOT NULL,
allocated bigint DEFAULT 0,
settled bigint NOT NULL,
PRIMARY KEY ( storagenode_id, interval_start, action )
);
CREATE TABLE storagenode_bandwidth_rollup_archives (
storagenode_id bytea NOT NULL,
interval_start timestamp with time zone NOT NULL,
interval_seconds integer NOT NULL,
action integer NOT NULL,
allocated bigint DEFAULT 0,
settled bigint NOT NULL,
PRIMARY KEY ( storagenode_id, interval_start, action )
);
CREATE TABLE storagenode_bandwidth_rollups_phase2 (
storagenode_id bytea NOT NULL,
interval_start timestamp with time zone NOT NULL,
interval_seconds integer NOT NULL,
action integer NOT NULL,
allocated bigint DEFAULT 0,
settled bigint NOT NULL,
PRIMARY KEY ( storagenode_id, interval_start, action )
);
CREATE TABLE storagenode_payments (
id bigserial NOT NULL,
created_at timestamp with time zone NOT NULL,
node_id bytea NOT NULL,
period text NOT NULL,
amount bigint NOT NULL,
receipt text,
notes text,
PRIMARY KEY ( id )
);
CREATE TABLE storagenode_paystubs (
period text NOT NULL,
node_id bytea NOT NULL,
created_at timestamp with time zone NOT NULL,
codes text NOT NULL,
usage_at_rest double precision NOT NULL,
usage_get bigint NOT NULL,
usage_put bigint NOT NULL,
usage_get_repair bigint NOT NULL,
usage_put_repair bigint NOT NULL,
usage_get_audit bigint NOT NULL,
comp_at_rest bigint NOT NULL,
comp_get bigint NOT NULL,
comp_put bigint NOT NULL,
comp_get_repair bigint NOT NULL,
comp_put_repair bigint NOT NULL,
comp_get_audit bigint NOT NULL,
surge_percent bigint NOT NULL,
held bigint NOT NULL,
owed bigint NOT NULL,
disposed bigint NOT NULL,
paid bigint NOT NULL,
distributed bigint NOT NULL,
PRIMARY KEY ( period, node_id )
);
CREATE TABLE storagenode_storage_tallies (
node_id bytea NOT NULL,
interval_end_time timestamp with time zone NOT NULL,
data_total double precision NOT NULL,
PRIMARY KEY ( interval_end_time, node_id )
);
CREATE TABLE stripe_customers (
user_id bytea NOT NULL,
customer_id text NOT NULL,
created_at timestamp with time zone NOT NULL,
PRIMARY KEY ( user_id ),
UNIQUE ( customer_id )
);
CREATE TABLE stripecoinpayments_invoice_project_records (
id bytea NOT NULL,
project_id bytea NOT NULL,
storage double precision NOT NULL,
egress bigint NOT NULL,
objects bigint NOT NULL,
period_start timestamp with time zone NOT NULL,
period_end timestamp with time zone NOT NULL,
state integer NOT NULL,
created_at timestamp with time zone NOT NULL,
PRIMARY KEY ( id ),
UNIQUE ( project_id, period_start, period_end )
);
CREATE TABLE stripecoinpayments_tx_conversion_rates (
tx_id text NOT NULL,
rate bytea NOT NULL,
created_at timestamp with time zone NOT NULL,
PRIMARY KEY ( tx_id )
);
CREATE TABLE users (
id bytea NOT NULL,
email text NOT NULL,
normalized_email text NOT NULL,
full_name text NOT NULL,
short_name text,
password_hash bytea NOT NULL,
status integer NOT NULL,
partner_id bytea,
user_agent bytea,
created_at timestamp with time zone NOT NULL,
project_limit integer NOT NULL DEFAULT 0,
paid_tier boolean NOT NULL DEFAULT false,
position text,
company_name text,
company_size integer,
working_on text,
is_professional boolean NOT NULL DEFAULT false,
employee_count text,
have_sales_contact boolean NOT NULL DEFAULT false,
mfa_enabled boolean NOT NULL DEFAULT false,
mfa_secret_key text,
mfa_recovery_codes text,
PRIMARY KEY ( id )
);
CREATE TABLE value_attributions (
project_id bytea NOT NULL,
bucket_name bytea NOT NULL,
partner_id bytea NOT NULL,
user_agent bytea,
last_updated timestamp with time zone NOT NULL,
PRIMARY KEY ( project_id, bucket_name )
);
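-- Illustrative sketch only (not a query from this commit): while partner_id
-- and user_agent coexist, an attribution lookup can match on either column,
-- mirroring the report query that now receives both a partner ID and a user
-- agent:
--   SELECT project_id, bucket_name, last_updated
--   FROM value_attributions
--   WHERE partner_id = $1 OR user_agent = $2;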
CREATE TABLE api_keys (
id bytea NOT NULL,
project_id bytea NOT NULL REFERENCES projects( id ) ON DELETE CASCADE,
head bytea NOT NULL,
name text NOT NULL,
secret bytea NOT NULL,
partner_id bytea,
user_agent bytea,
created_at timestamp with time zone NOT NULL,
PRIMARY KEY ( id ),
UNIQUE ( head ),
UNIQUE ( name, project_id )
);
CREATE TABLE bucket_metainfos (
id bytea NOT NULL,
project_id bytea NOT NULL REFERENCES projects( id ),
name bytea NOT NULL,
partner_id bytea,
user_agent bytea,
path_cipher integer NOT NULL,
created_at timestamp with time zone NOT NULL,
default_segment_size integer NOT NULL,
default_encryption_cipher_suite integer NOT NULL,
default_encryption_block_size integer NOT NULL,
default_redundancy_algorithm integer NOT NULL,
default_redundancy_share_size integer NOT NULL,
default_redundancy_required_shares integer NOT NULL,
default_redundancy_repair_shares integer NOT NULL,
default_redundancy_optimal_shares integer NOT NULL,
default_redundancy_total_shares integer NOT NULL,
PRIMARY KEY ( id ),
UNIQUE ( project_id, name )
);
CREATE TABLE project_members (
member_id bytea NOT NULL REFERENCES users( id ) ON DELETE CASCADE,
project_id bytea NOT NULL REFERENCES projects( id ) ON DELETE CASCADE,
created_at timestamp with time zone NOT NULL,
PRIMARY KEY ( member_id, project_id )
);
CREATE TABLE stripecoinpayments_apply_balance_intents (
tx_id text NOT NULL REFERENCES coinpayments_transactions( id ) ON DELETE CASCADE,
state integer NOT NULL,
created_at timestamp with time zone NOT NULL,
PRIMARY KEY ( tx_id )
);
CREATE TABLE user_credits (
id serial NOT NULL,
user_id bytea NOT NULL REFERENCES users( id ) ON DELETE CASCADE,
offer_id integer NOT NULL REFERENCES offers( id ),
referred_by bytea REFERENCES users( id ) ON DELETE SET NULL,
type text NOT NULL,
credits_earned_in_cents integer NOT NULL,
credits_used_in_cents integer NOT NULL,
expires_at timestamp with time zone NOT NULL,
created_at timestamp with time zone NOT NULL,
PRIMARY KEY ( id ),
UNIQUE ( id, offer_id )
);
CREATE INDEX accounting_rollups_start_time_index ON accounting_rollups ( start_time ) ;
CREATE INDEX bucket_bandwidth_rollups_project_id_action_interval_index ON bucket_bandwidth_rollups ( project_id, action, interval_start ) ;
CREATE INDEX bucket_bandwidth_rollups_action_interval_project_id_index ON bucket_bandwidth_rollups ( action, interval_start, project_id ) ;
CREATE INDEX bucket_bandwidth_rollups_archive_project_id_action_interval_index ON bucket_bandwidth_rollup_archives ( project_id, action, interval_start ) ;
CREATE INDEX bucket_bandwidth_rollups_archive_action_interval_project_id_index ON bucket_bandwidth_rollup_archives ( action, interval_start, project_id ) ;
CREATE INDEX bucket_storage_tallies_project_id_interval_start_index ON bucket_storage_tallies ( project_id, interval_start ) ;
CREATE INDEX graceful_exit_segment_transfer_nid_dr_qa_fa_lfa_index ON graceful_exit_segment_transfer_queue ( node_id, durability_ratio, queued_at, finished_at, last_failed_at ) ;
CREATE INDEX node_last_ip ON nodes ( last_net ) ;
CREATE INDEX nodes_dis_unk_off_exit_fin_last_success_index ON nodes ( disqualified, unknown_audit_suspended, offline_suspended, exit_finished_at, last_contact_success ) ;
CREATE INDEX nodes_type_last_cont_success_free_disk_ma_mi_patch_vetted_partial_index ON nodes ( type, last_contact_success, free_disk, major, minor, patch, vetted_at ) WHERE nodes.disqualified is NULL AND nodes.unknown_audit_suspended is NULL AND nodes.exit_initiated_at is NULL AND nodes.release = true AND nodes.last_net != '' ;
CREATE INDEX nodes_dis_unk_aud_exit_init_rel_type_last_cont_success_stored_index ON nodes ( disqualified, unknown_audit_suspended, exit_initiated_at, release, type, last_contact_success ) WHERE nodes.disqualified is NULL AND nodes.unknown_audit_suspended is NULL AND nodes.exit_initiated_at is NULL AND nodes.release = true ;
CREATE INDEX repair_queue_updated_at_index ON repair_queue ( updated_at ) ;
CREATE INDEX repair_queue_num_healthy_pieces_attempted_at_index ON repair_queue ( segment_health, attempted_at ) ;
CREATE INDEX storagenode_bandwidth_rollups_interval_start_index ON storagenode_bandwidth_rollups ( interval_start ) ;
CREATE INDEX storagenode_bandwidth_rollup_archives_interval_start_index ON storagenode_bandwidth_rollup_archives ( interval_start ) ;
CREATE INDEX storagenode_payments_node_id_period_index ON storagenode_payments ( node_id, period ) ;
CREATE INDEX storagenode_paystubs_node_id_index ON storagenode_paystubs ( node_id ) ;
CREATE INDEX storagenode_storage_tallies_node_id_index ON storagenode_storage_tallies ( node_id ) ;
CREATE UNIQUE INDEX credits_earned_user_id_offer_id ON user_credits ( id, offer_id ) ;
INSERT INTO "offers" ("id", "name", "description", "award_credit_in_cents", "invitee_credit_in_cents", "expires_at", "created_at", "status", "type", "award_credit_duration_days", "invitee_credit_duration_days") VALUES (1, 'Default referral offer', 'Is active when no other active referral offer', 300, 600, '2119-03-14 08:28:24.636949+00', '2019-07-14 08:28:24.636949+00', 1, 2, 365, 14);
INSERT INTO "offers" ("id", "name", "description", "award_credit_in_cents", "invitee_credit_in_cents", "expires_at", "created_at", "status", "type", "award_credit_duration_days", "invitee_credit_duration_days") VALUES (2, 'Default free credit offer', 'Is active when no active free credit offer', 0, 300, '2119-03-14 08:28:24.636949+00', '2019-07-14 08:28:24.636949+00', 1, 1, NULL, 14);
-- MAIN DATA --
INSERT INTO "accounting_rollups"("node_id", "start_time", "put_total", "get_total", "get_audit_total", "get_repair_total", "put_repair_total", "at_rest_total") VALUES (E'\\367M\\177\\251]t/\\022\\256\\214\\265\\025\\224\\204:\\217\\212\\0102<\\321\\374\\020&\\271Qc\\325\\261\\354\\246\\233'::bytea, '2019-02-09 00:00:00+00', 3000, 6000, 9000, 12000, 0, 15000);
INSERT INTO "accounting_timestamps" VALUES ('LastAtRestTally', '0001-01-01 00:00:00+00');
INSERT INTO "accounting_timestamps" VALUES ('LastRollup', '0001-01-01 00:00:00+00');
INSERT INTO "accounting_timestamps" VALUES ('LastBandwidthTally', '0001-01-01 00:00:00+00');
INSERT INTO "nodes"("id", "address", "last_net", "protocol", "type", "email", "wallet", "free_disk", "piece_count", "major", "minor", "patch", "hash", "timestamp", "release","latency_90", "created_at", "updated_at", "last_contact_success", "last_contact_failure", "contained", "disqualified", "suspended", "exit_success") VALUES (E'\\153\\313\\233\\074\\327\\177\\136\\070\\346\\001', '127.0.0.1:55516', '', 0, 4, '', '', -1, 0, 0, 1, 0, '', 'epoch', false, 0, '2019-02-14 08:07:31.028103+00', '2019-02-14 08:07:31.108963+00', 'epoch', 'epoch', false, NULL, NULL, false);
INSERT INTO "nodes"("id", "address", "last_net", "protocol", "type", "email", "wallet", "free_disk", "piece_count", "major", "minor", "patch", "hash", "timestamp", "release","latency_90","created_at", "updated_at", "last_contact_success", "last_contact_failure", "contained", "disqualified", "suspended","exit_success") VALUES (E'\\006\\223\\250R\\221\\005\\365\\377v>0\\266\\365\\216\\255?\\347\\244\\371?2\\264\\262\\230\\007<\\001\\262\\263\\237\\247n', '127.0.0.1:55518', '', 0, 4, '', '', -1, 0, 0, 1, 0, '', 'epoch', false, 0, '2019-02-14 08:07:31.028103+00', '2019-02-14 08:07:31.108963+00', 'epoch', 'epoch', false, NULL, NULL, false);
INSERT INTO "nodes"("id", "address", "last_net", "protocol", "type", "email", "wallet", "free_disk", "piece_count", "major", "minor", "patch", "hash", "timestamp", "release","latency_90","created_at", "updated_at", "last_contact_success", "last_contact_failure", "contained", "disqualified", "suspended","exit_success") VALUES (E'\\363\\342\\363\\371>+F\\256\\263\\300\\273|\\342N\\347\\014', '127.0.0.1:55517', '', 0, 4, '', '', -1, 0, 0, 1, 0, '', 'epoch', false, 0, '2019-02-14 08:07:31.028103+00', '2019-02-14 08:07:31.108963+00', 'epoch', 'epoch', false, NULL, NULL,false);
INSERT INTO "nodes"("id", "address", "last_net", "protocol", "type", "email", "wallet", "free_disk", "piece_count", "major", "minor", "patch", "hash", "timestamp", "release","latency_90","created_at", "updated_at", "last_contact_success", "last_contact_failure", "contained", "disqualified", "suspended","exit_success") VALUES (E'\\363\\342\\363\\371>+F\\256\\263\\300\\273|\\342N\\347\\015', '127.0.0.1:55519', '', 0, 4, '', '', -1, 0, 0, 1, 0, '', 'epoch', false, 0, '2019-02-14 08:07:31.028103+00', '2019-02-14 08:07:31.108963+00', 'epoch', 'epoch', false, NULL, NULL,false);
INSERT INTO "nodes"("id", "address", "last_net", "protocol", "type", "email", "wallet", "free_disk", "piece_count", "major", "minor", "patch", "hash", "timestamp", "release","latency_90","created_at", "updated_at", "last_contact_success", "last_contact_failure", "contained", "disqualified", "suspended","exit_success", "vetted_at") VALUES (E'\\363\\342\\363\\371>+F\\256\\263\\300\\273|\\342N\\347\\016', '127.0.0.1:55520', '', 0, 4, '', '', -1, 0, 0, 1, 0, '', 'epoch', false, 0, '2019-02-14 08:07:31.028103+00', '2019-02-14 08:07:31.108963+00', 'epoch', 'epoch', false, NULL, NULL, false, '2020-03-18 12:00:00.000000+00');
INSERT INTO "nodes"("id", "address", "last_net", "protocol", "type", "email", "wallet", "free_disk", "piece_count", "major", "minor", "patch", "hash", "timestamp", "release","latency_90","created_at", "updated_at", "last_contact_success", "last_contact_failure", "contained", "disqualified", "suspended","exit_success") VALUES (E'\\154\\313\\233\\074\\327\\177\\136\\070\\346\\001', '127.0.0.1:55516', '', 0, 4, '', '', -1, 0, 0, 1, 0, '', 'epoch', false, 0, '2019-02-14 08:07:31.028103+00', '2019-02-14 08:07:31.108963+00', 'epoch', 'epoch', false, NULL, NULL, false);
INSERT INTO "nodes"("id", "address", "last_net", "last_ip_port", "protocol", "type", "email", "wallet", "free_disk", "piece_count", "major", "minor", "patch", "hash", "timestamp", "release","latency_90", "created_at", "updated_at", "last_contact_success", "last_contact_failure", "contained", "disqualified", "suspended", "exit_success") VALUES (E'\\154\\313\\233\\074\\327\\177\\136\\070\\346\\002', '127.0.0.1:55516', '127.0.0.0', '127.0.0.1:55516', 0, 4, '', '', -1, 0, 0, 1, 0, '', 'epoch', false, 0, '2019-02-14 08:07:31.028103+00', '2019-02-14 08:07:31.108963+00', 'epoch', 'epoch', false, NULL, NULL, false);
INSERT INTO "nodes"("id", "address", "last_net", "protocol", "type", "email", "wallet", "free_disk", "piece_count", "major", "minor", "patch", "hash", "timestamp", "release","latency_90","created_at", "updated_at", "last_contact_success", "last_contact_failure", "contained", "disqualified", "suspended", "exit_success") VALUES (E'\\363\\341\\363\\371>+F\\256\\263\\300\\273|\\342N\\347\\016', '127.0.0.1:55516', '', 0, 4, '', '', -1, 0, 0, 1, 0, '', 'epoch', false, 0, '2019-02-14 08:07:31.028103+00', '2019-02-14 08:07:31.108963+00', 'epoch', 'epoch', false, NULL, NULL, false);
INSERT INTO "nodes"("id", "address", "last_net", "protocol", "type", "email", "wallet", "wallet_features", "free_disk", "piece_count", "major", "minor", "patch", "hash", "timestamp", "release","latency_90","created_at", "updated_at", "last_contact_success", "last_contact_failure", "contained", "disqualified", "suspended", "exit_success") VALUES (E'\\362\\341\\363\\371>+F\\256\\263\\300\\273|\\342N\\347\\016', '127.0.0.1:55516', '', 0, 4, '', '', '', -1, 0, 0, 1, 0, '', 'epoch', false, 0, '2019-02-14 08:07:31.028103+00', '2019-02-14 08:07:31.108963+00', 'epoch', 'epoch', false, NULL, NULL, false);
INSERT INTO "users"("id", "full_name", "short_name", "email", "normalized_email", "password_hash", "status", "partner_id", "created_at", "is_professional", "project_limit", "paid_tier") VALUES (E'\\363\\311\\033w\\222\\303Ci\\265\\343U\\303\\312\\204",'::bytea, 'Noahson', 'William', '1email1@mail.test', '1EMAIL1@MAIL.TEST', E'some_readable_hash'::bytea, 1, NULL, '2019-02-14 08:28:24.614594+00', false, 10, false);
INSERT INTO "users"("id", "full_name", "short_name", "email", "normalized_email", "password_hash", "status", "partner_id", "created_at", "position", "company_name", "working_on", "company_size", "is_professional", "employee_count", "project_limit", "have_sales_contact") VALUES (E'\\363\\311\\033w\\222\\303Ci\\265\\343U\\304\\313\\206\\311",'::bytea, 'Ian', 'Pires', '3email3@mail.test', '3EMAIL3@MAIL.TEST', E'some_readable_hash'::bytea, 2, NULL, '2020-03-18 10:28:24.614594+00', 'engineer', 'storj', 'data storage', 51, true, '1-50', 10, true);
INSERT INTO "users"("id", "full_name", "short_name", "email", "normalized_email", "password_hash", "status", "partner_id", "created_at", "position", "company_name", "working_on", "company_size", "is_professional", "employee_count", "project_limit") VALUES (E'\\363\\311\\033w\\222\\303Ci\\265\\343U\\303\\312\\205\\312",'::bytea, 'Campbell', 'Wright', '4email4@mail.test', '4EMAIL4@MAIL.TEST', E'some_readable_hash'::bytea, 2, NULL, '2020-07-17 10:28:24.614594+00', 'engineer', 'storj', 'data storage', 82, true, '1-50', 10);
INSERT INTO "users"("id", "full_name", "short_name", "email", "normalized_email", "password_hash", "status", "partner_id", "created_at", "position", "company_name", "working_on", "company_size", "is_professional", "project_limit", "paid_tier", "mfa_enabled", "mfa_secret_key", "mfa_recovery_codes") VALUES (E'\\363\\311\\033w\\222\\303Ci\\265\\343U\\303\\312\\205\\311",'::bytea, 'Thierry', 'Berg', '2email2@mail.test', '2EMAIL2@MAIL.TEST', E'some_readable_hash'::bytea, 2, NULL, '2020-05-16 10:28:24.614594+00', 'engineer', 'storj', 'data storage', 55, true, 10, false, false, NULL, NULL);
INSERT INTO "projects"("id", "name", "description", "usage_limit", "bandwidth_limit", "max_buckets", "partner_id", "owner_id", "created_at") VALUES (E'\\022\\217/\\014\\376!K\\023\\276\\031\\311}m\\236\\205\\300'::bytea, 'ProjectName', 'projects description', 5e11, 5e11, NULL, NULL, E'\\363\\311\\033w\\222\\303Ci\\265\\343U\\303\\312\\204",'::bytea, '2019-02-14 08:28:24.254934+00');
INSERT INTO "projects"("id", "name", "description", "usage_limit", "bandwidth_limit", "max_buckets", "partner_id", "owner_id", "created_at") VALUES (E'\\363\\342\\363\\371>+F\\256\\263\\300\\273|\\342N\\347\\014'::bytea, 'projName1', 'Test project 1', 5e11, 5e11, NULL, NULL, E'\\363\\311\\033w\\222\\303Ci\\265\\343U\\303\\312\\204",'::bytea, '2019-02-14 08:28:24.636949+00');
INSERT INTO "project_members"("member_id", "project_id", "created_at") VALUES (E'\\363\\311\\033w\\222\\303Ci\\265\\343U\\303\\312\\204",'::bytea, E'\\363\\342\\363\\371>+F\\256\\263\\300\\273|\\342N\\347\\014'::bytea, '2019-02-14 08:28:24.677953+00');
INSERT INTO "project_members"("member_id", "project_id", "created_at") VALUES (E'\\363\\311\\033w\\222\\303Ci\\265\\343U\\303\\312\\204",'::bytea, E'\\022\\217/\\014\\376!K\\023\\276\\031\\311}m\\236\\205\\300'::bytea, '2019-02-13 08:28:24.677953+00');
INSERT INTO "registration_tokens" ("secret", "owner_id", "project_limit", "created_at") VALUES (E'\\070\\127\\144\\013\\332\\344\\102\\376\\306\\056\\303\\130\\106\\132\\321\\276\\321\\274\\170\\264\\054\\333\\221\\116\\154\\221\\335\\070\\220\\146\\344\\216'::bytea, null, 1, '2019-02-14 08:28:24.677953+00');
INSERT INTO "storagenode_bandwidth_rollups" ("storagenode_id", "interval_start", "interval_seconds", "action", "allocated", "settled") VALUES (E'\\006\\223\\250R\\221\\005\\365\\377v>0\\266\\365\\216\\255?\\347\\244\\371?2\\264\\262\\230\\007<\\001\\262\\263\\237\\247n', '2019-03-06 08:00:00.000000' AT TIME ZONE current_setting('TIMEZONE'), 3600, 1, 1024, 2024);
INSERT INTO "storagenode_storage_tallies" VALUES (E'\\3510\\323\\225"~\\036<\\342\\330m\\0253Jhr\\246\\233K\\246#\\2303\\351\\256\\275j\\212UM\\362\\207', '2019-02-14 08:16:57.812849+00', 1000);
INSERT INTO "bucket_bandwidth_rollups" ("bucket_name", "project_id", "interval_start", "interval_seconds", "action", "inline", "allocated", "settled") VALUES (E'testbucket'::bytea, E'\\363\\342\\363\\371>+F\\256\\263\\300\\273|\\342N\\347\\014'::bytea,'2019-03-06 08:00:00.000000' AT TIME ZONE current_setting('TIMEZONE'), 3600, 1, 1024, 2024, 3024);
INSERT INTO "bucket_storage_tallies" ("bucket_name", "project_id", "interval_start", "inline", "remote", "remote_segments_count", "inline_segments_count", "object_count", "metadata_size") VALUES (E'testbucket'::bytea, E'\\363\\342\\363\\371>+F\\256\\263\\300\\273|\\342N\\347\\014'::bytea,'2019-03-06 08:00:00.000000' AT TIME ZONE current_setting('TIMEZONE'), 4024, 5024, 0, 0, 0, 0);
INSERT INTO "bucket_bandwidth_rollups" ("bucket_name", "project_id", "interval_start", "interval_seconds", "action", "inline", "allocated", "settled") VALUES (E'testbucket'::bytea, E'\\170\\160\\157\\370\\274\\366\\113\\364\\272\\235\\301\\243\\321\\102\\321\\136'::bytea,'2019-03-06 08:00:00.000000' AT TIME ZONE current_setting('TIMEZONE'), 3600, 1, 1024, 2024, 3024);
INSERT INTO "bucket_storage_tallies" ("bucket_name", "project_id", "interval_start", "inline", "remote", "remote_segments_count", "inline_segments_count", "object_count", "metadata_size") VALUES (E'testbucket'::bytea, E'\\170\\160\\157\\370\\274\\366\\113\\364\\272\\235\\301\\243\\321\\102\\321\\136'::bytea,'2019-03-06 08:00:00.000000' AT TIME ZONE current_setting('TIMEZONE'), 4024, 5024, 0, 0, 0, 0);
INSERT INTO "reset_password_tokens" ("secret", "owner_id", "created_at") VALUES (E'\\070\\127\\144\\013\\332\\344\\102\\376\\306\\056\\303\\130\\106\\132\\321\\276\\321\\274\\170\\264\\054\\333\\221\\116\\154\\221\\335\\070\\220\\146\\344\\216'::bytea, E'\\363\\311\\033w\\222\\303Ci\\265\\343U\\303\\312\\204",'::bytea, '2019-05-08 08:28:24.677953+00');
INSERT INTO "api_keys" ("id", "project_id", "head", "name", "secret", "partner_id", "created_at") VALUES (E'\\334/\\302;\\225\\355O\\323\\276f\\247\\354/6\\241\\033'::bytea, E'\\022\\217/\\014\\376!K\\023\\276\\031\\311}m\\236\\205\\300'::bytea, E'\\111\\142\\147\\304\\132\\375\\070\\163\\270\\160\\251\\370\\126\\063\\351\\037\\257\\071\\143\\375\\351\\320\\253\\232\\220\\260\\075\\173\\306\\307\\115\\136'::bytea, 'key 2', E'\\254\\011\\315\\333\\273\\365\\001\\071\\024\\154\\253\\332\\301\\216\\361\\074\\221\\367\\251\\231\\274\\333\\300\\367\\001\\272\\327\\111\\315\\123\\042\\016'::bytea, NULL, '2019-02-14 08:28:24.267934+00');
INSERT INTO "value_attributions" ("project_id", "bucket_name", "partner_id", "user_agent", "last_updated") VALUES (E'\\363\\311\\033w\\222\\303Ci\\265\\343U\\303\\312\\204",'::bytea, E''::bytea, E'\\363\\342\\363\\371>+F\\256\\263\\300\\273|\\342N\\347\\014'::bytea, NULL, '2019-02-14 08:07:31.028103+00');
INSERT INTO "user_credits" ("id", "user_id", "offer_id", "referred_by", "credits_earned_in_cents", "credits_used_in_cents", "type", "expires_at", "created_at") VALUES (1, E'\\363\\311\\033w\\222\\303Ci\\265\\343U\\303\\312\\204",'::bytea, 1, E'\\363\\311\\033w\\222\\303Ci\\265\\343U\\303\\312\\204",'::bytea, 200, 0, 'invalid', '2019-10-01 08:28:24.267934+00', '2019-06-01 08:28:24.267934+00');
INSERT INTO "bucket_metainfos" ("id", "project_id", "name", "partner_id", "created_at", "path_cipher", "default_segment_size", "default_encryption_cipher_suite", "default_encryption_block_size", "default_redundancy_algorithm", "default_redundancy_share_size", "default_redundancy_required_shares", "default_redundancy_repair_shares", "default_redundancy_optimal_shares", "default_redundancy_total_shares") VALUES (E'\\334/\\302;\\225\\355O\\323\\276f\\247\\354/6\\241\\033'::bytea, E'\\022\\217/\\014\\376!K\\023\\276\\031\\311}m\\236\\205\\300'::bytea, E'testbucketuniquename'::bytea, NULL, '2019-06-14 08:28:24.677953+00', 1, 65536, 1, 8192, 1, 4096, 4, 6, 8, 10);
INSERT INTO "peer_identities" VALUES (E'\\334/\\302;\\225\\355O\\323\\276f\\247\\354/6\\241\\033'::bytea, E'\\363\\342\\363\\371>+F\\256\\263\\300\\273|\\342N\\347\\014'::bytea, E'\\363\\311\\033w\\222\\303Ci\\265\\343U\\303\\312\\204",'::bytea, '2019-02-14 08:07:31.335028+00');
INSERT INTO "graceful_exit_progress" ("node_id", "bytes_transferred", "pieces_transferred", "pieces_failed", "updated_at", "uses_segment_transfer_queue") VALUES (E'\\363\\342\\363\\371>+F\\256\\263\\300\\273|\\342N\\347\\016', 1000000000000000, 0, 0, '2019-09-12 10:07:31.028103+00', false);
INSERT INTO "stripe_customers" ("user_id", "customer_id", "created_at") VALUES (E'\\363\\311\\033w\\222\\303Ci\\265\\343U\\303\\312\\204",'::bytea, 'stripe_id', '2019-06-01 08:28:24.267934+00');
INSERT INTO "stripecoinpayments_invoice_project_records"("id", "project_id", "storage", "egress", "objects", "period_start", "period_end", "state", "created_at") VALUES (E'\\022\\217/\\014\\376!K\\023\\276\\031\\311}m\\236\\205\\300'::bytea, E'\\021\\217/\\014\\376!K\\023\\276\\031\\311}m\\236\\205\\300'::bytea, 0, 0, 0, '2019-06-01 08:28:24.267934+00', '2019-06-01 08:28:24.267934+00', 0, '2019-06-01 08:28:24.267934+00');
INSERT INTO "stripecoinpayments_tx_conversion_rates" ("tx_id", "rate", "created_at") VALUES ('tx_id', E'\\363\\311\\033w\\222\\303Ci,'::bytea, '2019-06-01 08:28:24.267934+00');
INSERT INTO "coinpayments_transactions" ("id", "user_id", "address", "amount", "received", "status", "key", "timeout", "created_at") VALUES ('tx_id', E'\\363\\311\\033w\\222\\303Ci\\265\\343U\\303\\312\\204",'::bytea, 'address', E'\\363\\311\\033w'::bytea, E'\\363\\311\\033w'::bytea, 1, 'key', 60, '2019-06-01 08:28:24.267934+00');
INSERT INTO "storagenode_bandwidth_rollups" ("storagenode_id", "interval_start", "interval_seconds", "action", "settled") VALUES (E'\\006\\223\\250R\\221\\005\\365\\377v>0\\266\\365\\216\\255?\\347\\244\\371?2\\264\\262\\230\\007<\\001\\262\\263\\237\\247n', '2020-01-11 08:00:00.000000' AT TIME ZONE current_setting('TIMEZONE'), 3600, 1, 2024);
INSERT INTO "coupons" ("id", "user_id", "amount", "description", "type", "status", "duration", "billing_periods", "created_at") VALUES (E'\\362\\342\\363\\371>+F\\256\\263\\300\\273|\\342N\\347\\014'::bytea, E'\\363\\311\\033w\\222\\303Ci\\265\\343U\\303\\312\\204",'::bytea, 50, 'description', 0, 0, 2, 2, '2019-06-01 08:28:24.267934+00');
INSERT INTO "coupons" ("id", "user_id", "amount", "description", "type", "status", "duration", "billing_periods", "created_at") VALUES (E'\\362\\342\\363\\371>+F\\256\\263\\300\\273|\\342N\\347\\012'::bytea, E'\\363\\311\\033w\\222\\303Ci\\265\\343U\\303\\312\\204",'::bytea, 50, 'description', 0, 0, 2, 2, '2019-06-01 08:28:24.267934+00');
INSERT INTO "coupons" ("id", "user_id", "amount", "description", "type", "status", "duration", "billing_periods", "created_at") VALUES (E'\\362\\342\\363\\371>+F\\256\\263\\300\\273|\\342N\\347\\015'::bytea, E'\\363\\311\\033w\\222\\303Ci\\265\\343U\\303\\312\\204",'::bytea, 50, 'description', 0, 0, 2, 2, '2019-06-01 08:28:24.267934+00');
INSERT INTO "coupon_usages" ("coupon_id", "amount", "status", "period") VALUES (E'\\362\\342\\363\\371>+F\\256\\263\\300\\273|\\342N\\347\\014'::bytea, 22, 0, '2019-06-01 09:28:24.267934+00');
INSERT INTO "coupon_codes" ("id", "name", "amount", "description", "type", "billing_periods", "created_at") VALUES (E'\\362\\342\\363\\371>+F\\256\\263\\300\\273|\\342N\\347\\014'::bytea, 'STORJ50', 50, '$50 for your first 5 months', 0, NULL, '2019-06-01 08:28:24.267934+00');
INSERT INTO "coupon_codes" ("id", "name", "amount", "description", "type", "billing_periods", "created_at") VALUES (E'\\362\\342\\363\\371>+F\\256\\263\\300\\273|\\342N\\347\\015'::bytea, 'STORJ75', 75, '$75 for your first 5 months', 0, 2, '2019-06-01 08:28:24.267934+00');
INSERT INTO "stripecoinpayments_apply_balance_intents" ("tx_id", "state", "created_at") VALUES ('tx_id', 0, '2019-06-01 08:28:24.267934+00');
INSERT INTO "projects"("id", "name", "description", "usage_limit", "bandwidth_limit", "max_buckets", "rate_limit", "partner_id", "owner_id", "created_at") VALUES (E'\\363\\342\\363\\371>+F\\256\\263\\300\\273|\\342N\\347\\347'::bytea, 'projName1', 'Test project 1', 5e11, 5e11, NULL, 2000000, NULL, E'\\363\\311\\033w\\222\\303Ci\\265\\343U\\303\\312\\204",'::bytea, '2020-01-15 08:28:24.636949+00');
INSERT INTO "project_bandwidth_rollups"("project_id", "interval_month", egress_allocated) VALUES (E'\\363\\342\\363\\371>+F\\256\\263\\300\\273|\\342N\\347\\347'::bytea, '2020-04-01', 10000);
INSERT INTO "project_bandwidth_daily_rollups"("project_id", "interval_day", egress_allocated, egress_settled, egress_dead) VALUES (E'\\363\\342\\363\\371>+F\\256\\263\\300\\273|\\342N\\347\\347'::bytea, '2021-04-22', 10000, 5000, 0);
INSERT INTO "projects"("id", "name", "description", "usage_limit", "bandwidth_limit", "max_buckets","rate_limit", "partner_id", "owner_id", "created_at") VALUES (E'\\363\\342\\363\\371>+F\\256\\263\\300\\273|\\342N\\347\\345'::bytea, 'egress101', 'High Bandwidth Project', 5e11, 5e11, NULL, 2000000, NULL, E'\\363\\311\\033w\\222\\303Ci\\265\\343U\\303\\312\\204",'::bytea, '2020-05-15 08:46:24.000000+00');
INSERT INTO "storagenode_paystubs"("period", "node_id", "created_at", "codes", "usage_at_rest", "usage_get", "usage_put", "usage_get_repair", "usage_put_repair", "usage_get_audit", "comp_at_rest", "comp_get", "comp_put", "comp_get_repair", "comp_put_repair", "comp_get_audit", "surge_percent", "held", "owed", "disposed", "paid", "distributed") VALUES ('2020-01', '\xf2a3b4c4dfdf7221310382fd5db5aa73e1d227d6df09734ec4e5305000000000', '2020-04-07T20:14:21.479141Z', '', 1327959864508416, 294054066688, 159031363328, 226751, 0, 836608, 2861984, 5881081, 0, 226751, 0, 8, 300, 0, 26909472, 0, 26909472, 0);
INSERT INTO "nodes"("id", "address", "last_net", "protocol", "type", "email", "wallet", "free_disk", "piece_count", "major", "minor", "patch", "hash", "timestamp", "release","latency_90","created_at", "updated_at", "last_contact_success", "last_contact_failure", "contained", "disqualified", "suspended", "exit_success", "unknown_audit_suspended", "offline_suspended", "under_review") VALUES (E'\\153\\313\\233\\074\\327\\255\\136\\070\\346\\001', '127.0.0.1:55516', '', 0, 4, '', '', -1, 0, 0, 1, 0, '', 'epoch', false, 0, '2019-02-14 08:07:31.028103+00', '2019-02-14 08:07:31.108963+00', 'epoch', 'epoch', false, NULL, NULL, false, '2019-02-14 08:07:31.108963+00', '2019-02-14 08:07:31.108963+00', '2019-02-14 08:07:31.108963+00');
INSERT INTO "node_api_versions"("id", "api_version", "created_at", "updated_at") VALUES (E'\\153\\313\\233\\074\\327\\177\\136\\070\\346\\001', 1, '2019-02-14 08:07:31.028103+00', '2019-02-14 08:07:31.108963+00');
INSERT INTO "node_api_versions"("id", "api_version", "created_at", "updated_at") VALUES (E'\\006\\223\\250R\\221\\005\\365\\377v>0\\266\\365\\216\\255?\\347\\244\\371?2\\264\\262\\230\\007<\\001\\262\\263\\237\\247n', 2, '2019-02-14 08:07:31.028103+00', '2019-02-14 08:07:31.108963+00');
INSERT INTO "node_api_versions"("id", "api_version", "created_at", "updated_at") VALUES (E'\\363\\342\\363\\371>+F\\256\\263\\300\\273|\\342N\\347\\014', 3, '2019-02-14 08:07:31.028103+00', '2019-02-14 08:07:31.108963+00');
INSERT INTO "projects"("id", "name", "description", "usage_limit", "bandwidth_limit", "rate_limit", "partner_id", "owner_id", "created_at", "max_buckets") VALUES (E'300\\273|\\342N\\347\\347\\363\\342\\363\\371>+F\\256\\263'::bytea, 'egress102', 'High Bandwidth Project 2', 5e11, 5e11, 2000000, NULL, E'265\\343U\\303\\312\\312\\363\\311\\033w\\222\\303Ci",'::bytea, '2020-05-15 08:46:24.000000+00', 1000);
INSERT INTO "projects"("id", "name", "description", "usage_limit", "bandwidth_limit", "rate_limit", "partner_id", "owner_id", "created_at", "max_buckets") VALUES (E'300\\273|\\342N\\347\\347\\363\\342\\363\\371>+F\\255\\244'::bytea, 'egress103', 'High Bandwidth Project 3', 5e11, 5e11, 2000000, NULL, E'265\\343U\\303\\312\\312\\363\\311\\033w\\222\\303Ci",'::bytea, '2020-05-15 08:46:24.000000+00', 1000);
INSERT INTO "projects"("id", "name", "description", "usage_limit", "bandwidth_limit", "rate_limit", "partner_id", "owner_id", "created_at", "max_buckets") VALUES (E'300\\273|\\342N\\347\\347\\363\\342\\363\\371>+F\\253\\231'::bytea, 'Limit Test 1', 'This project is above the default', 50000000001, 50000000001, 2000000, NULL, E'265\\343U\\303\\312\\312\\363\\311\\033w\\222\\303Ci",'::bytea, '2020-10-14 10:10:10.000000+00', 101);
INSERT INTO "projects"("id", "name", "description", "usage_limit", "bandwidth_limit", "rate_limit", "partner_id", "owner_id", "created_at", "max_buckets") VALUES (E'300\\273|\\342N\\347\\347\\363\\342\\363\\371>+F\\252\\230'::bytea, 'Limit Test 2', 'This project is below the default', 5e11, 5e11, 2000000, NULL, E'265\\343U\\303\\312\\312\\363\\311\\033w\\222\\303Ci",'::bytea, '2020-10-14 10:10:11.000000+00', NULL);
INSERT INTO "storagenode_bandwidth_rollups_phase2" ("storagenode_id", "interval_start", "interval_seconds", "action", "allocated", "settled") VALUES (E'\\006\\223\\250R\\221\\005\\365\\377v>0\\266\\365\\216\\255?\\347\\244\\371?2\\264\\262\\230\\007<\\001\\262\\263\\237\\247n', '2019-03-06 08:00:00.000000' AT TIME ZONE current_setting('TIMEZONE'), 3600, 1, 1024, 2024);
INSERT INTO "storagenode_bandwidth_rollup_archives" ("storagenode_id", "interval_start", "interval_seconds", "action", "allocated", "settled") VALUES (E'\\006\\223\\250R\\221\\005\\365\\377v>0\\266\\365\\216\\255?\\347\\244\\371?2\\264\\262\\230\\007<\\001\\262\\263\\237\\247n', '2019-03-06 08:00:00.000000' AT TIME ZONE current_setting('TIMEZONE'), 3600, 1, 1024, 2024);
INSERT INTO "bucket_bandwidth_rollup_archives" ("bucket_name", "project_id", "interval_start", "interval_seconds", "action", "inline", "allocated", "settled") VALUES (E'testbucket'::bytea, E'\\170\\160\\157\\370\\274\\366\\113\\364\\272\\235\\301\\243\\321\\102\\321\\136'::bytea,'2019-03-06 08:00:00.000000' AT TIME ZONE current_setting('TIMEZONE'), 3600, 1, 1024, 2024, 3024);
INSERT INTO "storagenode_paystubs"("period", "node_id", "created_at", "codes", "usage_at_rest", "usage_get", "usage_put", "usage_get_repair", "usage_put_repair", "usage_get_audit", "comp_at_rest", "comp_get", "comp_put", "comp_get_repair", "comp_put_repair", "comp_get_audit", "surge_percent", "held", "owed", "disposed", "paid", "distributed") VALUES ('2020-12', '\x1111111111111111111111111111111111111111111111111111111111111111', '2020-04-07T20:14:21.479141Z', '', 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 117);
INSERT INTO "storagenode_payments"("id", "created_at", "period", "node_id", "amount") VALUES (1, '2020-04-07T20:14:21.479141Z', '2020-12', '\x1111111111111111111111111111111111111111111111111111111111111111', 117);
INSERT INTO "reputations"("id", "audit_success_count", "total_audit_count", "created_at", "updated_at", "contained", "disqualified", "suspended", "audit_reputation_alpha", "audit_reputation_beta", "unknown_audit_reputation_alpha", "unknown_audit_reputation_beta", "online_score", "audit_history") VALUES (E'\\153\\313\\233\\074\\327\\177\\136\\070\\346\\001', 0, 5, '2019-02-14 08:07:31.028103+00', '2019-02-14 08:07:31.108963+00', false, NULL, NULL, 50, 0, 1, 0, 1, '\x0a23736f2f6d616e792f69636f6e69632f70617468732f746f2f63686f6f73652f66726f6d120a0102030405060708090a');
INSERT INTO "graceful_exit_segment_transfer_queue" ("node_id", "stream_id", "position", "piece_num", "durability_ratio", "queued_at", "requested_at", "last_failed_at", "last_failed_code", "failed_count", "finished_at", "order_limit_send_count") VALUES (E'\\363\\342\\363\\371>+F\\256\\263\\300\\273|\\342N\\347\\016', E'\\363\\311\\033w\\222\\303Ci\\265\\343U\\303\\312\\204",'::bytea, 10 , 8, 1.0, '2019-09-12 10:07:31.028103+00', '2019-09-12 10:07:32.028103+00', null, null, 0, '2019-09-12 10:07:33.028103+00', 0);
INSERT INTO "segment_pending_audits" ("node_id", "piece_id", "stripe_index", "share_size", "expected_share_hash", "reverify_count", "stream_id", position) VALUES (E'\\153\\313\\233\\074\\327\\177\\136\\070\\346\\001'::bytea, E'\\363\\311\\033w\\222\\303Ci\\265\\343U\\303\\312\\204",'::bytea, 5, 1024, E'\\070\\127\\144\\013\\332\\344\\102\\376\\306\\056\\303\\130\\106\\132\\321\\276\\321\\274\\170\\264\\054\\333\\221\\116\\154\\221\\335\\070\\220\\146\\344\\216'::bytea, 1, '\x010101', 1);
INSERT INTO "users"("id", "full_name", "short_name", "email", "normalized_email", "password_hash", "status", "partner_id", "created_at", "is_professional", "project_limit", "paid_tier") VALUES (E'\\363\\311\\033w\\222\\303Ci\\266\\342U\\303\\312\\204",'::bytea, 'Noahson', 'William', '100email1@mail.test', '100EMAIL1@MAIL.TEST', E'some_readable_hash'::bytea, 1, NULL, '2019-02-14 08:28:24.614594+00', false, 10, true);
INSERT INTO "repair_queue" ("stream_id", "position", "attempted_at", "segment_health", "updated_at", "inserted_at") VALUES ('\x01', 1, null, 1, '2020-09-01 00:00:00.000000+00', '2021-09-01 00:00:00.000000+00');
INSERT INTO "users"("id", "full_name", "email", "normalized_email", "password_hash", "status", "created_at", "mfa_enabled", "mfa_secret_key", "mfa_recovery_codes") VALUES (E'\\363\\311\\033w\\222\\303Ci\\266\\344U\\303\\312\\204",'::bytea, 'Noahson William', '101email1@mail.test', '101EMAIL1@MAIL.TEST', E'some_readable_hash'::bytea, 1, '2019-02-14 08:28:24.614594+00', true, 'mfa secret key', '["1a2b3c4d","e5f6g7h8"]');
INSERT INTO "projects"("id", "name", "description", "usage_limit", "bandwidth_limit", "rate_limit", "burst_limit", "partner_id", "owner_id", "created_at", "max_buckets") VALUES (E'300\\273|\\342N\\347\\347\\363\\342\\363\\371>+F\\251\\247'::bytea, 'Limit Test 2', 'This project is below the default', 5e11, 5e11, 2000000, 4000000, NULL, E'265\\343U\\303\\312\\312\\363\\311\\033w\\222\\303Ci",'::bytea, '2020-10-14 10:10:11.000000+00', NULL);
-- NEW DATA --
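-- Illustrative sketch only (the commit's actual NEW DATA rows are not shown in
-- this excerpt): a row exercising the new user_agent column would populate it
-- alongside, or instead of, partner_id, e.g.:
--   INSERT INTO "value_attributions" ("project_id", "bucket_name", "partner_id", "user_agent", "last_updated")
--   VALUES (E'\\022\\217/\\014\\376!K\\023\\276\\031\\311}m\\236\\205\\300'::bytea, E'ua-bucket'::bytea,
--       E'\\363\\342\\363\\371>+F\\256\\263\\300\\273|\\342N\\347\\014'::bytea, 'Mozilla/5.0'::bytea, '2021-09-01 00:00:00+00');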

View File

@ -61,6 +61,9 @@ func (users *users) Insert(ctx context.Context, user *console.User) (_ *console.
if !user.PartnerID.IsZero() {
optional.PartnerId = dbx.User_PartnerId(user.PartnerID[:])
}
if user.UserAgent != nil {
optional.UserAgent = dbx.User_UserAgent(user.UserAgent)
}
if user.ProjectLimit != 0 {
optional.ProjectLimit = dbx.User_ProjectLimit(user.ProjectLimit)
}
@ -209,6 +212,10 @@ func userFromDBX(ctx context.Context, user *dbx.User) (_ *console.User, err erro
}
}
if user.UserAgent != nil {
result.UserAgent = user.UserAgent
}
if user.ShortName != nil {
result.ShortName = *user.ShortName
}
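// A minimal round-trip sketch of the pattern above (illustrative only: the
// console.User field and the Insert method are the ones touched by this diff,
// but the surrounding scaffolding, values, and error handling are assumed):
//
//	u, err := users.Insert(ctx, &console.User{
//	    Email:     "ua@mail.test",
//	    UserAgent: []byte("Mozilla/5.0"), // persisted via dbx.User_UserAgent
//	})
//	if err == nil && u.UserAgent != nil {
//	    // userFromDBX copies the raw bytes back onto the console.User.
//	    _ = string(u.UserAgent) // "Mozilla/5.0"
//	}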