satellite/payments: Convert coupon duration to be nullable

* Add a nullable billing_periods column in the coupons table
* Add nullable billing_periods column to the currently unused
coupon_codes table
* Drop the duration column from the coupon_codes table
* Replace duration config type so that the default promotional coupon
can be configured to never expire

Zero downtime migration plan (see the SQL sketch below):
* Add billing_periods column to coupons and coupon_codes tables (this change)
* After one release, remove all references to the old duration column,
replacing with references to billing_periods. At this point, we can also
change the default promotional coupon to never expire and migrate over
values from the old duration column.
* After another release, drop the duration column.
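
A minimal SQL sketch of the three phases, assuming PostgreSQL syntax. Only the
first phase ships with this change (migration version 153); the later statements
only illustrate the plan and are not committed migrations yet:

-- Phase 1 (this change): add the nullable columns. coupon_codes is unused, so
-- its duration column can be dropped immediately.
ALTER TABLE coupons ADD COLUMN billing_periods bigint;
ALTER TABLE coupon_codes ADD COLUMN billing_periods bigint;
ALTER TABLE coupon_codes DROP COLUMN duration;

-- Phase 2 (one release later, sketch only): switch reads/writes to
-- billing_periods and backfill existing rows from the old column.
UPDATE coupons SET billing_periods = duration WHERE billing_periods IS NULL;

-- Phase 3 (another release later, sketch only): drop the old column.
ALTER TABLE coupons DROP COLUMN duration;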

Change-Id: I374e8dc9fab9f81b4a5bc681771955662d4c007a
Moby von Briesen 2021-03-29 19:37:46 -04:00
parent c4293f5e52
commit c334fd090e
26 changed files with 884 additions and 137 deletions

View File

@ -85,7 +85,7 @@ func setupPayments(log *zap.Logger, db satellite.DB) (*stripecoinpayments.Servic
pc.ObjectPrice,
pc.BonusRate,
pc.CouponValue,
pc.CouponDuration,
pc.CouponDuration.IntPointer(),
pc.CouponProjectLimit,
pc.MinCoinPayment,
pc.PaywallProportion)

View File

@ -519,7 +519,10 @@ func (planet *Planet) newSatellite(ctx context.Context, prefix string, index int
ConversionRatesCycleInterval: defaultInterval,
ListingLimit: 100,
},
CouponDuration: 2,
CouponDuration: paymentsconfig.CouponDuration{
Enabled: true,
BillingPeriods: 2,
},
CouponValue: 275,
PaywallProportion: 1,
},

View File

@ -136,7 +136,7 @@ func NewAdmin(log *zap.Logger, full *identity.FullIdentity, db DB,
pc.ObjectPrice,
pc.BonusRate,
pc.CouponValue,
pc.CouponDuration,
pc.CouponDuration.IntPointer(),
pc.CouponProjectLimit,
pc.MinCoinPayment,
pc.PaywallProportion)

View File

@ -62,7 +62,7 @@ func (server *Server) addCoupon(w http.ResponseWriter, r *http.Request) {
coupon, err := server.db.StripeCoinPayments().Coupons().Insert(ctx, payments.Coupon{
UserID: input.UserID,
Amount: input.Amount,
Duration: input.Duration,
Duration: &input.Duration,
Description: input.Description,
})
if err != nil {

View File

@ -56,7 +56,8 @@ func TestAddCoupon(t *testing.T) {
coupon, err := planet.Satellites[0].DB.StripeCoinPayments().Coupons().Get(ctx, output)
require.NoError(t, err)
require.Equal(t, user.ID, coupon.UserID)
require.Equal(t, 2, coupon.Duration)
require.NotNil(t, coupon.Duration)
require.Equal(t, 2, *coupon.Duration)
require.Equal(t, "testcoupon-alice", coupon.Description)
require.Equal(t, int64(3000), coupon.Amount)
})
@ -111,7 +112,8 @@ func TestCouponInfo(t *testing.T) {
err = json.Unmarshal(responseBody, &output)
require.NoError(t, err)
require.Equal(t, id, output.ID)
require.Equal(t, 2, output.Duration)
require.NotNil(t, output.Duration)
require.Equal(t, 2, *output.Duration)
require.Equal(t, int64(3000), output.Amount)
require.Equal(t, "testcoupon-alice", output.Description)
})

View File

@ -532,7 +532,7 @@ func NewAPI(log *zap.Logger, full *identity.FullIdentity, db DB,
pc.ObjectPrice,
pc.BonusRate,
pc.CouponValue,
pc.CouponDuration,
pc.CouponDuration.IntPointer(),
pc.CouponProjectLimit,
pc.MinCoinPayment,
pc.PaywallProportion)

View File

@ -93,7 +93,7 @@ func TestGraphqlMutation(t *testing.T) {
pc.ObjectPrice,
pc.BonusRate,
pc.CouponValue,
pc.CouponDuration,
pc.CouponDuration.IntPointer(),
pc.CouponProjectLimit,
pc.MinCoinPayment,
pc.PaywallProportion)

View File

@ -77,7 +77,7 @@ func TestGraphqlQuery(t *testing.T) {
pc.ObjectPrice,
pc.BonusRate,
pc.CouponValue,
pc.CouponDuration,
pc.CouponDuration.IntPointer(),
pc.CouponProjectLimit,
pc.MinCoinPayment,
pc.PaywallProportion)

View File

@ -379,19 +379,20 @@ func (paymentService PaymentsService) BillingHistory(ctx context.Context) (billi
couponStatus = "Expired"
}
billingHistory = append(billingHistory,
&BillingHistoryItem{
ID: coupon.ID.String(),
Description: coupon.Description,
Amount: coupon.Amount,
Remaining: remaining,
Status: couponStatus,
Link: "",
Start: coupon.Created,
End: coupon.ExpirationDate(),
Type: Coupon,
},
)
billingHistoryItem := &BillingHistoryItem{
ID: coupon.ID.String(),
Description: coupon.Description,
Amount: coupon.Amount,
Remaining: remaining,
Status: couponStatus,
Link: "",
Start: coupon.Created,
Type: Coupon,
}
if coupon.ExpirationDate() != nil {
billingHistoryItem.End = *coupon.ExpirationDate()
}
billingHistory = append(billingHistory, billingHistoryItem)
}
bonuses, err := paymentService.service.accounts.StorjTokens().ListDepositBonuses(ctx, auth.User.ID)
@ -480,10 +481,12 @@ func (paymentService PaymentsService) checkProjectInvoicingStatus(ctx context.Co
// PopulatePromotionalCoupons is used to populate promotional coupons through all active users who already have
// a project, payment method and do not have a promotional coupon yet.
// And updates project limits to selected size.
// This functionality is deprecated and will be removed.
func (paymentService PaymentsService) PopulatePromotionalCoupons(ctx context.Context) (err error) {
defer mon.Task()(&ctx)(&err)
return Error.Wrap(paymentService.service.accounts.Coupons().PopulatePromotionalCoupons(ctx, 2, 5500, memory.TB))
duration := 2
return Error.Wrap(paymentService.service.accounts.Coupons().PopulatePromotionalCoupons(ctx, &duration, 5500, memory.TB))
}
// AddPromotionalCoupon creates new coupon for specified user.

View File

@ -473,7 +473,7 @@ func New(log *zap.Logger, full *identity.FullIdentity, db DB,
pc.ObjectPrice,
pc.BonusRate,
pc.CouponValue,
pc.CouponDuration,
pc.CouponDuration.IntPointer(),
pc.CouponProjectLimit,
pc.MinCoinPayment,
pc.PaywallProportion)

View File

@ -32,7 +32,7 @@ type Coupons interface {
// PopulatePromotionalCoupons is used to populate promotional coupons through all active users who already have
// a project, payment method and do not have a promotional coupon yet.
// And updates project limits to selected size.
PopulatePromotionalCoupons(ctx context.Context, duration int, amount int64, projectLimit memory.Size) error
PopulatePromotionalCoupons(ctx context.Context, duration *int, amount int64, projectLimit memory.Size) error
}
// Coupon is an entity that adds some funds to Accounts balance for some fixed period.
@ -42,7 +42,7 @@ type Coupon struct {
ID uuid.UUID `json:"id"`
UserID uuid.UUID `json:"userId"`
Amount int64 `json:"amount"` // Amount is stored in cents.
Duration int `json:"duration"` // Duration is stored in number ob billing periods.
Duration *int `json:"duration"` // Duration is stored in number of billing periods.
Description string `json:"description"`
Type CouponType `json:"type"`
Status CouponStatus `json:"status"`
@ -54,8 +54,13 @@ type Coupon struct {
// A coupon is valid for Duration number of full months. The month the user
signs up is not counted in the duration. The expiration date is the last
// day of the last valid month.
func (coupon *Coupon) ExpirationDate() time.Time {
return time.Date(coupon.Created.Year(), coupon.Created.Month()+time.Month(coupon.Duration)+1, 0, 0, 0, 0, 0, time.UTC)
func (coupon *Coupon) ExpirationDate() *time.Time {
if coupon.Duration == nil {
return nil
}
expireDate := time.Date(coupon.Created.Year(), coupon.Created.Month()+time.Month(*coupon.Duration)+1, 0, 0, 0, 0, 0, time.UTC)
return &expireDate
}
// CouponType indicates the type of the coupon.

View File

@ -33,9 +33,11 @@ func TestCoupon_ExpirationDate(t *testing.T) {
},
} {
coupon := Coupon{
Duration: tt.duration,
Duration: &tt.duration,
Created: tt.created,
}
require.Equal(t, tt.expires, coupon.ExpirationDate())
expirationDate := coupon.ExpirationDate()
require.NotNil(t, expirationDate)
require.Equal(t, tt.expires, *expirationDate)
}
}
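
A small standalone sketch, with hypothetical dates, of how the day-zero trick in
ExpirationDate produces the last day of the last valid month:

package main

import (
	"fmt"
	"time"
)

func main() {
	// Hypothetical coupon: created March 10, 2021 with a duration of 2 billing periods.
	created := time.Date(2021, time.March, 10, 0, 0, 0, 0, time.UTC)
	duration := 2

	// Same arithmetic as ExpirationDate: month+duration+1 with day 0 normalizes
	// back to the last day of the previous month, so the signup month (March) is
	// not counted and April and May are the two billed periods.
	expires := time.Date(created.Year(), created.Month()+time.Month(duration)+1, 0, 0, 0, 0, 0, time.UTC)

	fmt.Println(expires) // 2021-05-31 00:00:00 +0000 UTC
}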

View File

@ -4,6 +4,8 @@
package paymentsconfig
import (
"strconv"
"storj.io/common/memory"
"storj.io/storj/satellite/payments/stripecoinpayments"
)
@ -12,17 +14,64 @@ import (
type Config struct {
Provider string `help:"payments provider to use" default:""`
StripeCoinPayments stripecoinpayments.Config
StorageTBPrice string `help:"price user should pay for storing TB per month" default:"10"`
EgressTBPrice string `help:"price user should pay for each TB of egress" default:"45"`
ObjectPrice string `help:"price user should pay for each object stored in network per month" default:"0.0000022"`
BonusRate int64 `help:"amount of percents that user will earn as bonus credits by depositing in STORJ tokens" default:"10"`
CouponValue int64 `help:"coupon value in cents" default:"275"`
CouponDuration int64 `help:"duration a new coupon is valid in months/billing cycles" default:"2"`
CouponProjectLimit memory.Size `help:"project limit to which increase to after applying the coupon, 0 B means not changing it from the default" default:"0 B"`
MinCoinPayment int64 `help:"minimum value of coin payments in cents before coupon is applied" default:"1000"`
NodeEgressBandwidthPrice int64 `help:"price node receive for storing TB of egress in cents" default:"2000"`
NodeRepairBandwidthPrice int64 `help:"price node receive for storing TB of repair in cents" default:"1000"`
NodeAuditBandwidthPrice int64 `help:"price node receive for storing TB of audit in cents" default:"1000"`
NodeDiskSpacePrice int64 `help:"price node receive for storing disk space in cents/TB" default:"150"`
PaywallProportion float64 `help:"proportion of users which require a balance to create projects [0-1]" default:"0"`
StorageTBPrice string `help:"price user should pay for storing TB per month" default:"10"`
EgressTBPrice string `help:"price user should pay for each TB of egress" default:"45"`
ObjectPrice string `help:"price user should pay for each object stored in network per month" default:"0.0000022"`
BonusRate int64 `help:"amount of percents that user will earn as bonus credits by depositing in STORJ tokens" default:"10"`
CouponValue int64 `help:"coupon value in cents" default:"275"`
CouponDuration CouponDuration `help:"duration a new coupon is valid in months/billing cycles" default:"2"`
CouponProjectLimit memory.Size `help:"project limit to which increase to after applying the coupon, 0 B means not changing it from the default" default:"0 B"`
MinCoinPayment int64 `help:"minimum value of coin payments in cents before coupon is applied" default:"1000"`
NodeEgressBandwidthPrice int64 `help:"price node receive for storing TB of egress in cents" default:"2000"`
NodeRepairBandwidthPrice int64 `help:"price node receive for storing TB of repair in cents" default:"1000"`
NodeAuditBandwidthPrice int64 `help:"price node receive for storing TB of audit in cents" default:"1000"`
NodeDiskSpacePrice int64 `help:"price node receive for storing disk space in cents/TB" default:"150"`
PaywallProportion float64 `help:"proportion of users which require a balance to create projects [0-1]" default:"0"`
}
// CouponDuration is a configuration struct that keeps details about default
// promotional coupon duration.
//
// Can be used as a flag.
type CouponDuration struct {
Enabled bool
BillingPeriods int64
}
// Type implements pflag.Value.
func (CouponDuration) Type() string { return "paymentsconfig.CouponDuration" }
// String is required for pflag.Value.
func (cd *CouponDuration) String() string {
if !cd.Enabled {
return ""
}
return strconv.FormatInt(cd.BillingPeriods, 10)
}
// Set sets the value from a string.
func (cd *CouponDuration) Set(s string) error {
if s == "" {
cd.Enabled = false
return nil
}
cd.Enabled = true
billingPeriods, err := strconv.ParseInt(s, 10, 64)
if err != nil {
return err
}
cd.BillingPeriods = billingPeriods
return nil
}
// IntPointer returns an int64 pointer representation of the config value.
func (cd *CouponDuration) IntPointer() *int64 {
if !cd.Enabled {
return nil
}
return &cd.BillingPeriods
}
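
A small usage sketch of the new flag type, assuming the import path matches the
paymentsconfig package above: an empty value makes the default promotional coupon
never expire, while a numeric value sets the number of billing periods:

package main

import (
	"fmt"

	"storj.io/storj/satellite/payments/paymentsconfig"
)

func main() {
	var cd paymentsconfig.CouponDuration

	// Empty string: duration disabled, IntPointer reports nil (coupon never expires).
	_ = cd.Set("")
	fmt.Println(cd.IntPointer()) // <nil>

	// Numeric string: coupon expires after that many billing periods.
	_ = cd.Set("2")
	if p := cd.IntPointer(); p != nil {
		fmt.Println(*p) // 2
	}
}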

View File

@ -48,7 +48,7 @@ type CouponsDB interface {
// PopulatePromotionalCoupons is used to populate promotional coupons through all active users who already have a project
// and do not have a promotional coupon yet. And updates project limits to selected size.
PopulatePromotionalCoupons(ctx context.Context, users []uuid.UUID, duration int, amount int64, projectLimit memory.Size) error
PopulatePromotionalCoupons(ctx context.Context, users []uuid.UUID, duration *int, amount int64, projectLimit memory.Size) error
}
// CouponUsage stores amount of money that should be charged from coupon for billing period.
@ -118,7 +118,7 @@ func (coupons *coupons) TotalUsage(ctx context.Context, couponID uuid.UUID) (_ i
// PopulatePromotionalCoupons is used to populate promotional coupons through all active users who already have
// a project, payment method and do not have a promotional coupon yet.
// And updates project limits to selected size.
func (coupons *coupons) PopulatePromotionalCoupons(ctx context.Context, duration int, amount int64, projectLimit memory.Size) (err error) {
func (coupons *coupons) PopulatePromotionalCoupons(ctx context.Context, duration *int, amount int64, projectLimit memory.Size) (err error) {
defer mon.Task()(&ctx, duration, amount, projectLimit)(&err)
const limit = 50
@ -201,5 +201,12 @@ func (coupons *coupons) PopulatePromotionalCoupons(ctx context.Context, duration
func (coupons *coupons) AddPromotionalCoupon(ctx context.Context, userID uuid.UUID) (err error) {
defer mon.Task()(&ctx, userID)(&err)
return Error.Wrap(coupons.service.db.Coupons().PopulatePromotionalCoupons(ctx, []uuid.UUID{userID}, int(coupons.service.CouponDuration), coupons.service.CouponValue, coupons.service.CouponProjectLimit))
// convert *int64 to *int
var couponDuration *int
if coupons.service.CouponDuration != nil {
value := int(*coupons.service.CouponDuration)
couponDuration = &value
}
return Error.Wrap(coupons.service.db.Coupons().PopulatePromotionalCoupons(ctx, []uuid.UUID{userID}, couponDuration, coupons.service.CouponValue, coupons.service.CouponProjectLimit))
}

View File

@ -22,9 +22,10 @@ import (
func TestCouponRepository(t *testing.T) {
satellitedbtest.Run(t, func(ctx *testcontext.Context, t *testing.T, db satellite.DB) {
duration := 2
couponsRepo := db.StripeCoinPayments().Coupons()
coupon := payments.Coupon{
Duration: 2,
Duration: &duration,
Amount: 10,
Status: payments.CouponActive,
Description: "description",
@ -205,12 +206,13 @@ func TestPopulatePromotionalCoupons(t *testing.T) {
})
require.NoError(t, err)
duration := 2
couponID := testrand.UUID()
_, err = couponsRepo.Insert(ctx, payments.Coupon{
ID: couponID,
UserID: user5.ID,
Amount: 5500,
Duration: 2,
Duration: &duration,
Description: "qw",
Type: payments.CouponTypePromotional,
Status: payments.CouponActive,
@ -250,7 +252,8 @@ func TestPopulatePromotionalCoupons(t *testing.T) {
user4.ID,
user5.ID,
}
err := couponsRepo.PopulatePromotionalCoupons(ctx, usersIds, 2, 5500, memory.TB)
duration := 2
err := couponsRepo.PopulatePromotionalCoupons(ctx, usersIds, &duration, 5500, memory.TB)
require.NoError(t, err)
user1Coupons, err := couponsRepo.ListByUserID(ctx, user1.ID)
@ -296,7 +299,8 @@ func TestPopulatePromotionalCoupons(t *testing.T) {
user5.ID,
user6.ID,
}
err := couponsRepo.PopulatePromotionalCoupons(ctx, usersIds, 2, 5500, memory.TB)
duration := 2
err := couponsRepo.PopulatePromotionalCoupons(ctx, usersIds, &duration, 5500, memory.TB)
require.NoError(t, err)
user1Coupons, err := couponsRepo.ListByUserID(ctx, user1.ID)

View File

@ -69,7 +69,7 @@ type Service struct {
BonusRate int64
// Coupon Values
CouponValue int64
CouponDuration int64
CouponDuration *int64
CouponProjectLimit memory.Size
// Minimum CoinPayment to create a coupon
MinCoinPayment int64
@ -87,7 +87,7 @@ type Service struct {
}
// NewService creates a Service instance.
func NewService(log *zap.Logger, stripeClient StripeClient, config Config, db DB, projectsDB console.Projects, usageDB accounting.ProjectAccounting, storageTBPrice, egressTBPrice, objectPrice string, bonusRate, couponValue, couponDuration int64, couponProjectLimit memory.Size, minCoinPayment int64, paywallProportion float64) (*Service, error) {
func NewService(log *zap.Logger, stripeClient StripeClient, config Config, db DB, projectsDB console.Projects, usageDB accounting.ProjectAccounting, storageTBPrice, egressTBPrice, objectPrice string, bonusRate, couponValue int64, couponDuration *int64, couponProjectLimit memory.Size, minCoinPayment int64, paywallProportion float64) (*Service, error) {
coinPaymentsClient := coinpayments.NewClient(
coinpayments.Credentials{
@ -491,7 +491,10 @@ func (service *Service) processCustomers(ctx context.Context, customers []Custom
continue
}
if end.After(coupon.ExpirationDate()) {
expirationDate := coupon.ExpirationDate()
if expirationDate != nil &&
end.After(*expirationDate) {
// this coupon is identified as expired for first time, mark it in the database
if _, err = service.db.Coupons().Update(ctx, coupon.ID, payments.CouponExpired); err != nil {
return 0, 0, err
@ -521,7 +524,7 @@ func (service *Service) processCustomers(ctx context.Context, customers []Custom
leftToCharge -= amountToChargeFromCoupon
}
if amountToChargeFromCoupon < remaining && end.Equal(coupon.ExpirationDate()) {
if amountToChargeFromCoupon < remaining && expirationDate != nil && end.Equal(*expirationDate) {
// the coupon was not fully spent, but this is the last month
// it is valid for, so mark it as expired in database
if _, err = service.db.Coupons().Update(ctx, coupon.ID, payments.CouponExpired); err != nil {

View File

@ -231,13 +231,14 @@ func TestService_InvoiceUserWithManyCoupons(t *testing.T) {
project, err := satellite.AddProject(ctx, user.ID, "testproject")
require.NoError(t, err)
duration := 2
sumOfCoupons := int64(0)
for i := 0; i < 5; i++ {
coupon, err := satellite.API.Payments.Accounts.Coupons().Create(ctx, payments.Coupon{
ID: testrand.UUID(),
UserID: user.ID,
Amount: int64(i + 4),
Duration: 2,
Duration: &duration,
Status: payments.CouponActive,
Type: payments.CouponTypePromotional,
})
@ -346,11 +347,12 @@ func TestService_ApplyCouponsInTheOrder(t *testing.T) {
additionalCoupons := 3
// we will have coupons with duration 5, 4, 3 and 2 from coupon create with AddUser
for i := 0; i < additionalCoupons; i++ {
duration := additionalCoupons - i + 2
_, err = satellite.API.Payments.Accounts.Coupons().Create(ctx, payments.Coupon{
ID: testrand.UUID(),
UserID: user.ID,
Amount: 24,
Duration: additionalCoupons - i + 2,
Duration: &duration,
Status: payments.CouponActive,
Type: payments.CouponTypePromotional,
})
@ -380,7 +382,8 @@ func TestService_ApplyCouponsInTheOrder(t *testing.T) {
require.Equal(t, 2, len(usedCoupons))
// coupons with duration 2 and 3 should be used
for _, coupon := range usedCoupons {
require.Less(t, coupon.Duration, 4)
require.NotNil(t, coupon.Duration)
require.Less(t, *coupon.Duration, 4)
}
activeCoupons, err := satellite.DB.StripeCoinPayments().Coupons().ListByUserIDAndStatus(ctx, user.ID, payments.CouponActive)
@ -388,7 +391,8 @@ func TestService_ApplyCouponsInTheOrder(t *testing.T) {
require.Equal(t, 2, len(activeCoupons))
// coupons with duration 4 and 5 should be NOT used
for _, coupon := range activeCoupons {
require.Greater(t, coupon.Duration, 3)
require.NotNil(t, coupon.Duration)
require.Greater(t, *coupon.Duration, 3)
require.EqualValues(t, 24, coupon.Amount)
}
})
@ -485,7 +489,7 @@ func TestService_CouponStatus(t *testing.T) {
ID: testrand.UUID(),
UserID: user.ID,
Amount: tt.amount,
Duration: tt.duration,
Duration: &tt.duration,
})
require.NoError(t, err, errTag)

View File

@ -42,6 +42,12 @@ func (coupons *coupons) Insert(ctx context.Context, coupon payments.Coupon) (_ p
return payments.Coupon{}, err
}
duration := 0
createFields := dbx.Coupon_Create_Fields{}
if coupon.Duration != nil {
duration = *coupon.Duration
createFields.BillingPeriods = dbx.Coupon_BillingPeriods(int64(duration))
}
cpx, err := coupons.db.Create_Coupon(
ctx,
dbx.Coupon_Id(id[:]),
@ -50,8 +56,8 @@ func (coupons *coupons) Insert(ctx context.Context, coupon payments.Coupon) (_ p
dbx.Coupon_Description(coupon.Description),
dbx.Coupon_Type(int(coupon.Type)),
dbx.Coupon_Status(int(coupon.Status)),
dbx.Coupon_Duration(int64(coupon.Duration)),
dbx.Coupon_Create_Fields{},
dbx.Coupon_Duration(int64(duration)),
createFields,
)
if err != nil {
return payments.Coupon{}, err
@ -132,7 +138,16 @@ func (coupons *coupons) ListByUserIDAndStatus(ctx context.Context, userID uuid.U
sort.Slice(result, func(i, k int) bool {
iDate := result[i].ExpirationDate()
kDate := result[k].ExpirationDate()
return iDate.Before(kDate)
if iDate == nil && kDate == nil {
return false
}
if iDate == nil && kDate != nil {
return false
}
if iDate != nil && kDate == nil {
return true
}
return iDate.Before(*kDate)
})
return result, nil
@ -197,7 +212,8 @@ func fromDBXCoupon(dbxCoupon *dbx.Coupon) (coupon payments.Coupon, err error) {
return payments.Coupon{}, err
}
coupon.Duration = int(dbxCoupon.Duration)
duration := int(dbxCoupon.Duration)
coupon.Duration = &duration
coupon.Description = dbxCoupon.Description
coupon.Amount = dbxCoupon.Amount
coupon.Created = dbxCoupon.CreatedAt
@ -366,7 +382,7 @@ func couponUsageFromDbxSlice(couponUsageDbx *dbx.CouponUsage) (usage stripecoinp
// PopulatePromotionalCoupons is used to populate promotional coupons through all active users who already have a project
// and do not have a promotional coupon yet. And updates project limits to selected size.
func (coupons *coupons) PopulatePromotionalCoupons(ctx context.Context, users []uuid.UUID, duration int, amount int64, projectLimit memory.Size) (err error) {
func (coupons *coupons) PopulatePromotionalCoupons(ctx context.Context, users []uuid.UUID, duration *int, amount int64, projectLimit memory.Size) (err error) {
defer mon.Task()(&ctx, users, duration, amount, projectLimit)(&err)
ids, err := coupons.activeUserWithProjectAndWithoutCoupon(ctx, users)

View File

@ -1092,12 +1092,12 @@ model coupon_code (
key id
unique name
field id blob
field name text
field amount int64
field description text
field type int
field duration int64
field id blob
field name text
field amount int64
field description text
field type int
field billing_periods int64 ( nullable )
field created_at timestamp ( autoinsert )
)
@ -1121,6 +1121,7 @@ model coupon (
field type int
field status int ( updatable )
field duration int64
field billing_periods int64 ( nullable )
field coupon_code_name text ( nullable )
field created_at timestamp ( autoinsert )

View File

@ -395,6 +395,7 @@ CREATE TABLE coupons (
type integer NOT NULL,
status integer NOT NULL,
duration bigint NOT NULL,
billing_periods bigint,
coupon_code_name text,
created_at timestamp with time zone NOT NULL,
PRIMARY KEY ( id )
@ -405,7 +406,7 @@ CREATE TABLE coupon_codes (
amount bigint NOT NULL,
description text NOT NULL,
type integer NOT NULL,
duration bigint NOT NULL,
billing_periods bigint,
created_at timestamp with time zone NOT NULL,
PRIMARY KEY ( id ),
UNIQUE ( name )
@ -947,6 +948,7 @@ CREATE TABLE coupons (
type integer NOT NULL,
status integer NOT NULL,
duration bigint NOT NULL,
billing_periods bigint,
coupon_code_name text,
created_at timestamp with time zone NOT NULL,
PRIMARY KEY ( id )
@ -957,7 +959,7 @@ CREATE TABLE coupon_codes (
amount bigint NOT NULL,
description text NOT NULL,
type integer NOT NULL,
duration bigint NOT NULL,
billing_periods bigint,
created_at timestamp with time zone NOT NULL,
PRIMARY KEY ( id ),
UNIQUE ( name )
@ -2382,6 +2384,7 @@ type Coupon struct {
Type int
Status int
Duration int64
BillingPeriods *int64
CouponCodeName *string
CreatedAt time.Time
}
@ -2389,6 +2392,7 @@ type Coupon struct {
func (Coupon) _Table() string { return "coupons" }
type Coupon_Create_Fields struct {
BillingPeriods Coupon_BillingPeriods_Field
CouponCodeName Coupon_CouponCodeName_Field
}
@ -2529,6 +2533,38 @@ func (f Coupon_Duration_Field) value() interface{} {
func (Coupon_Duration_Field) _Column() string { return "duration" }
type Coupon_BillingPeriods_Field struct {
_set bool
_null bool
_value *int64
}
func Coupon_BillingPeriods(v int64) Coupon_BillingPeriods_Field {
return Coupon_BillingPeriods_Field{_set: true, _value: &v}
}
func Coupon_BillingPeriods_Raw(v *int64) Coupon_BillingPeriods_Field {
if v == nil {
return Coupon_BillingPeriods_Null()
}
return Coupon_BillingPeriods(*v)
}
func Coupon_BillingPeriods_Null() Coupon_BillingPeriods_Field {
return Coupon_BillingPeriods_Field{_set: true, _null: true}
}
func (f Coupon_BillingPeriods_Field) isnull() bool { return !f._set || f._null || f._value == nil }
func (f Coupon_BillingPeriods_Field) value() interface{} {
if !f._set || f._null {
return nil
}
return f._value
}
func (Coupon_BillingPeriods_Field) _Column() string { return "billing_periods" }
type Coupon_CouponCodeName_Field struct {
_set bool
_null bool
@ -2581,17 +2617,21 @@ func (f Coupon_CreatedAt_Field) value() interface{} {
func (Coupon_CreatedAt_Field) _Column() string { return "created_at" }
type CouponCode struct {
Id []byte
Name string
Amount int64
Description string
Type int
Duration int64
CreatedAt time.Time
Id []byte
Name string
Amount int64
Description string
Type int
BillingPeriods *int64
CreatedAt time.Time
}
func (CouponCode) _Table() string { return "coupon_codes" }
type CouponCode_Create_Fields struct {
BillingPeriods CouponCode_BillingPeriods_Field
}
type CouponCode_Update_Fields struct {
}
@ -2690,24 +2730,37 @@ func (f CouponCode_Type_Field) value() interface{} {
func (CouponCode_Type_Field) _Column() string { return "type" }
type CouponCode_Duration_Field struct {
type CouponCode_BillingPeriods_Field struct {
_set bool
_null bool
_value int64
_value *int64
}
func CouponCode_Duration(v int64) CouponCode_Duration_Field {
return CouponCode_Duration_Field{_set: true, _value: v}
func CouponCode_BillingPeriods(v int64) CouponCode_BillingPeriods_Field {
return CouponCode_BillingPeriods_Field{_set: true, _value: &v}
}
func (f CouponCode_Duration_Field) value() interface{} {
func CouponCode_BillingPeriods_Raw(v *int64) CouponCode_BillingPeriods_Field {
if v == nil {
return CouponCode_BillingPeriods_Null()
}
return CouponCode_BillingPeriods(*v)
}
func CouponCode_BillingPeriods_Null() CouponCode_BillingPeriods_Field {
return CouponCode_BillingPeriods_Field{_set: true, _null: true}
}
func (f CouponCode_BillingPeriods_Field) isnull() bool { return !f._set || f._null || f._value == nil }
func (f CouponCode_BillingPeriods_Field) value() interface{} {
if !f._set || f._null {
return nil
}
return f._value
}
func (CouponCode_Duration_Field) _Column() string { return "duration" }
func (CouponCode_BillingPeriods_Field) _Column() string { return "billing_periods" }
type CouponCode_CreatedAt_Field struct {
_set bool
@ -9965,7 +10018,7 @@ func (obj *pgxImpl) Create_CouponCode(ctx context.Context,
coupon_code_amount CouponCode_Amount_Field,
coupon_code_description CouponCode_Description_Field,
coupon_code_type CouponCode_Type_Field,
coupon_code_duration CouponCode_Duration_Field) (
optional CouponCode_Create_Fields) (
coupon_code *CouponCode, err error) {
defer mon.Task()(&ctx)(&err)
@ -9975,19 +10028,19 @@ func (obj *pgxImpl) Create_CouponCode(ctx context.Context,
__amount_val := coupon_code_amount.value()
__description_val := coupon_code_description.value()
__type_val := coupon_code_type.value()
__duration_val := coupon_code_duration.value()
__billing_periods_val := optional.BillingPeriods.value()
__created_at_val := __now
var __embed_stmt = __sqlbundle_Literal("INSERT INTO coupon_codes ( id, name, amount, description, type, duration, created_at ) VALUES ( ?, ?, ?, ?, ?, ?, ? ) RETURNING coupon_codes.id, coupon_codes.name, coupon_codes.amount, coupon_codes.description, coupon_codes.type, coupon_codes.duration, coupon_codes.created_at")
var __embed_stmt = __sqlbundle_Literal("INSERT INTO coupon_codes ( id, name, amount, description, type, billing_periods, created_at ) VALUES ( ?, ?, ?, ?, ?, ?, ? ) RETURNING coupon_codes.id, coupon_codes.name, coupon_codes.amount, coupon_codes.description, coupon_codes.type, coupon_codes.billing_periods, coupon_codes.created_at")
var __values []interface{}
__values = append(__values, __id_val, __name_val, __amount_val, __description_val, __type_val, __duration_val, __created_at_val)
__values = append(__values, __id_val, __name_val, __amount_val, __description_val, __type_val, __billing_periods_val, __created_at_val)
var __stmt = __sqlbundle_Render(obj.dialect, __embed_stmt)
obj.logStmt(__stmt, __values...)
coupon_code = &CouponCode{}
err = obj.queryRowContext(ctx, __stmt, __values...).Scan(&coupon_code.Id, &coupon_code.Name, &coupon_code.Amount, &coupon_code.Description, &coupon_code.Type, &coupon_code.Duration, &coupon_code.CreatedAt)
err = obj.queryRowContext(ctx, __stmt, __values...).Scan(&coupon_code.Id, &coupon_code.Name, &coupon_code.Amount, &coupon_code.Description, &coupon_code.Type, &coupon_code.BillingPeriods, &coupon_code.CreatedAt)
if err != nil {
return nil, obj.makeErr(err)
}
@ -10015,19 +10068,20 @@ func (obj *pgxImpl) Create_Coupon(ctx context.Context,
__type_val := coupon_type.value()
__status_val := coupon_status.value()
__duration_val := coupon_duration.value()
__billing_periods_val := optional.BillingPeriods.value()
__coupon_code_name_val := optional.CouponCodeName.value()
__created_at_val := __now
var __embed_stmt = __sqlbundle_Literal("INSERT INTO coupons ( id, user_id, amount, description, type, status, duration, coupon_code_name, created_at ) VALUES ( ?, ?, ?, ?, ?, ?, ?, ?, ? ) RETURNING coupons.id, coupons.user_id, coupons.amount, coupons.description, coupons.type, coupons.status, coupons.duration, coupons.coupon_code_name, coupons.created_at")
var __embed_stmt = __sqlbundle_Literal("INSERT INTO coupons ( id, user_id, amount, description, type, status, duration, billing_periods, coupon_code_name, created_at ) VALUES ( ?, ?, ?, ?, ?, ?, ?, ?, ?, ? ) RETURNING coupons.id, coupons.user_id, coupons.amount, coupons.description, coupons.type, coupons.status, coupons.duration, coupons.billing_periods, coupons.coupon_code_name, coupons.created_at")
var __values []interface{}
__values = append(__values, __id_val, __user_id_val, __amount_val, __description_val, __type_val, __status_val, __duration_val, __coupon_code_name_val, __created_at_val)
__values = append(__values, __id_val, __user_id_val, __amount_val, __description_val, __type_val, __status_val, __duration_val, __billing_periods_val, __coupon_code_name_val, __created_at_val)
var __stmt = __sqlbundle_Render(obj.dialect, __embed_stmt)
obj.logStmt(__stmt, __values...)
coupon = &Coupon{}
err = obj.queryRowContext(ctx, __stmt, __values...).Scan(&coupon.Id, &coupon.UserId, &coupon.Amount, &coupon.Description, &coupon.Type, &coupon.Status, &coupon.Duration, &coupon.CouponCodeName, &coupon.CreatedAt)
err = obj.queryRowContext(ctx, __stmt, __values...).Scan(&coupon.Id, &coupon.UserId, &coupon.Amount, &coupon.Description, &coupon.Type, &coupon.Status, &coupon.Duration, &coupon.BillingPeriods, &coupon.CouponCodeName, &coupon.CreatedAt)
if err != nil {
return nil, obj.makeErr(err)
}
@ -12344,7 +12398,7 @@ func (obj *pgxImpl) Get_CouponCode_By_Name(ctx context.Context,
coupon_code *CouponCode, err error) {
defer mon.Task()(&ctx)(&err)
var __embed_stmt = __sqlbundle_Literal("SELECT coupon_codes.id, coupon_codes.name, coupon_codes.amount, coupon_codes.description, coupon_codes.type, coupon_codes.duration, coupon_codes.created_at FROM coupon_codes WHERE coupon_codes.name = ?")
var __embed_stmt = __sqlbundle_Literal("SELECT coupon_codes.id, coupon_codes.name, coupon_codes.amount, coupon_codes.description, coupon_codes.type, coupon_codes.billing_periods, coupon_codes.created_at FROM coupon_codes WHERE coupon_codes.name = ?")
var __values []interface{}
__values = append(__values, coupon_code_name.value())
@ -12353,7 +12407,7 @@ func (obj *pgxImpl) Get_CouponCode_By_Name(ctx context.Context,
obj.logStmt(__stmt, __values...)
coupon_code = &CouponCode{}
err = obj.queryRowContext(ctx, __stmt, __values...).Scan(&coupon_code.Id, &coupon_code.Name, &coupon_code.Amount, &coupon_code.Description, &coupon_code.Type, &coupon_code.Duration, &coupon_code.CreatedAt)
err = obj.queryRowContext(ctx, __stmt, __values...).Scan(&coupon_code.Id, &coupon_code.Name, &coupon_code.Amount, &coupon_code.Description, &coupon_code.Type, &coupon_code.BillingPeriods, &coupon_code.CreatedAt)
if err != nil {
return (*CouponCode)(nil), obj.makeErr(err)
}
@ -12366,7 +12420,7 @@ func (obj *pgxImpl) Get_Coupon_By_Id(ctx context.Context,
coupon *Coupon, err error) {
defer mon.Task()(&ctx)(&err)
var __embed_stmt = __sqlbundle_Literal("SELECT coupons.id, coupons.user_id, coupons.amount, coupons.description, coupons.type, coupons.status, coupons.duration, coupons.coupon_code_name, coupons.created_at FROM coupons WHERE coupons.id = ?")
var __embed_stmt = __sqlbundle_Literal("SELECT coupons.id, coupons.user_id, coupons.amount, coupons.description, coupons.type, coupons.status, coupons.duration, coupons.billing_periods, coupons.coupon_code_name, coupons.created_at FROM coupons WHERE coupons.id = ?")
var __values []interface{}
__values = append(__values, coupon_id.value())
@ -12375,7 +12429,7 @@ func (obj *pgxImpl) Get_Coupon_By_Id(ctx context.Context,
obj.logStmt(__stmt, __values...)
coupon = &Coupon{}
err = obj.queryRowContext(ctx, __stmt, __values...).Scan(&coupon.Id, &coupon.UserId, &coupon.Amount, &coupon.Description, &coupon.Type, &coupon.Status, &coupon.Duration, &coupon.CouponCodeName, &coupon.CreatedAt)
err = obj.queryRowContext(ctx, __stmt, __values...).Scan(&coupon.Id, &coupon.UserId, &coupon.Amount, &coupon.Description, &coupon.Type, &coupon.Status, &coupon.Duration, &coupon.BillingPeriods, &coupon.CouponCodeName, &coupon.CreatedAt)
if err != nil {
return (*Coupon)(nil), obj.makeErr(err)
}
@ -12388,7 +12442,7 @@ func (obj *pgxImpl) All_Coupon_By_UserId_OrderBy_Desc_CreatedAt(ctx context.Cont
rows []*Coupon, err error) {
defer mon.Task()(&ctx)(&err)
var __embed_stmt = __sqlbundle_Literal("SELECT coupons.id, coupons.user_id, coupons.amount, coupons.description, coupons.type, coupons.status, coupons.duration, coupons.coupon_code_name, coupons.created_at FROM coupons WHERE coupons.user_id = ? ORDER BY coupons.created_at DESC")
var __embed_stmt = __sqlbundle_Literal("SELECT coupons.id, coupons.user_id, coupons.amount, coupons.description, coupons.type, coupons.status, coupons.duration, coupons.billing_periods, coupons.coupon_code_name, coupons.created_at FROM coupons WHERE coupons.user_id = ? ORDER BY coupons.created_at DESC")
var __values []interface{}
__values = append(__values, coupon_user_id.value())
@ -12406,7 +12460,7 @@ func (obj *pgxImpl) All_Coupon_By_UserId_OrderBy_Desc_CreatedAt(ctx context.Cont
for __rows.Next() {
coupon := &Coupon{}
err = __rows.Scan(&coupon.Id, &coupon.UserId, &coupon.Amount, &coupon.Description, &coupon.Type, &coupon.Status, &coupon.Duration, &coupon.CouponCodeName, &coupon.CreatedAt)
err = __rows.Scan(&coupon.Id, &coupon.UserId, &coupon.Amount, &coupon.Description, &coupon.Type, &coupon.Status, &coupon.Duration, &coupon.BillingPeriods, &coupon.CouponCodeName, &coupon.CreatedAt)
if err != nil {
return nil, err
}
@ -12434,7 +12488,7 @@ func (obj *pgxImpl) All_Coupon_By_UserId_And_Status_OrderBy_Desc_CreatedAt(ctx c
rows []*Coupon, err error) {
defer mon.Task()(&ctx)(&err)
var __embed_stmt = __sqlbundle_Literal("SELECT coupons.id, coupons.user_id, coupons.amount, coupons.description, coupons.type, coupons.status, coupons.duration, coupons.coupon_code_name, coupons.created_at FROM coupons WHERE coupons.user_id = ? AND coupons.status = ? ORDER BY coupons.created_at DESC")
var __embed_stmt = __sqlbundle_Literal("SELECT coupons.id, coupons.user_id, coupons.amount, coupons.description, coupons.type, coupons.status, coupons.duration, coupons.billing_periods, coupons.coupon_code_name, coupons.created_at FROM coupons WHERE coupons.user_id = ? AND coupons.status = ? ORDER BY coupons.created_at DESC")
var __values []interface{}
__values = append(__values, coupon_user_id.value(), coupon_status.value())
@ -12452,7 +12506,7 @@ func (obj *pgxImpl) All_Coupon_By_UserId_And_Status_OrderBy_Desc_CreatedAt(ctx c
for __rows.Next() {
coupon := &Coupon{}
err = __rows.Scan(&coupon.Id, &coupon.UserId, &coupon.Amount, &coupon.Description, &coupon.Type, &coupon.Status, &coupon.Duration, &coupon.CouponCodeName, &coupon.CreatedAt)
err = __rows.Scan(&coupon.Id, &coupon.UserId, &coupon.Amount, &coupon.Description, &coupon.Type, &coupon.Status, &coupon.Duration, &coupon.BillingPeriods, &coupon.CouponCodeName, &coupon.CreatedAt)
if err != nil {
return nil, err
}
@ -12479,7 +12533,7 @@ func (obj *pgxImpl) All_Coupon_By_Status_OrderBy_Desc_CreatedAt(ctx context.Cont
rows []*Coupon, err error) {
defer mon.Task()(&ctx)(&err)
var __embed_stmt = __sqlbundle_Literal("SELECT coupons.id, coupons.user_id, coupons.amount, coupons.description, coupons.type, coupons.status, coupons.duration, coupons.coupon_code_name, coupons.created_at FROM coupons WHERE coupons.status = ? ORDER BY coupons.created_at DESC")
var __embed_stmt = __sqlbundle_Literal("SELECT coupons.id, coupons.user_id, coupons.amount, coupons.description, coupons.type, coupons.status, coupons.duration, coupons.billing_periods, coupons.coupon_code_name, coupons.created_at FROM coupons WHERE coupons.status = ? ORDER BY coupons.created_at DESC")
var __values []interface{}
__values = append(__values, coupon_status.value())
@ -12497,7 +12551,7 @@ func (obj *pgxImpl) All_Coupon_By_Status_OrderBy_Desc_CreatedAt(ctx context.Cont
for __rows.Next() {
coupon := &Coupon{}
err = __rows.Scan(&coupon.Id, &coupon.UserId, &coupon.Amount, &coupon.Description, &coupon.Type, &coupon.Status, &coupon.Duration, &coupon.CouponCodeName, &coupon.CreatedAt)
err = __rows.Scan(&coupon.Id, &coupon.UserId, &coupon.Amount, &coupon.Description, &coupon.Type, &coupon.Status, &coupon.Duration, &coupon.BillingPeriods, &coupon.CouponCodeName, &coupon.CreatedAt)
if err != nil {
return nil, err
}
@ -12526,7 +12580,7 @@ func (obj *pgxImpl) Limited_Coupon_By_CreatedAt_LessOrEqual_And_Status_OrderBy_D
rows []*Coupon, err error) {
defer mon.Task()(&ctx)(&err)
var __embed_stmt = __sqlbundle_Literal("SELECT coupons.id, coupons.user_id, coupons.amount, coupons.description, coupons.type, coupons.status, coupons.duration, coupons.coupon_code_name, coupons.created_at FROM coupons WHERE coupons.created_at <= ? AND coupons.status = ? ORDER BY coupons.created_at DESC LIMIT ? OFFSET ?")
var __embed_stmt = __sqlbundle_Literal("SELECT coupons.id, coupons.user_id, coupons.amount, coupons.description, coupons.type, coupons.status, coupons.duration, coupons.billing_periods, coupons.coupon_code_name, coupons.created_at FROM coupons WHERE coupons.created_at <= ? AND coupons.status = ? ORDER BY coupons.created_at DESC LIMIT ? OFFSET ?")
var __values []interface{}
__values = append(__values, coupon_created_at_less_or_equal.value(), coupon_status.value())
@ -12546,7 +12600,7 @@ func (obj *pgxImpl) Limited_Coupon_By_CreatedAt_LessOrEqual_And_Status_OrderBy_D
for __rows.Next() {
coupon := &Coupon{}
err = __rows.Scan(&coupon.Id, &coupon.UserId, &coupon.Amount, &coupon.Description, &coupon.Type, &coupon.Status, &coupon.Duration, &coupon.CouponCodeName, &coupon.CreatedAt)
err = __rows.Scan(&coupon.Id, &coupon.UserId, &coupon.Amount, &coupon.Description, &coupon.Type, &coupon.Status, &coupon.Duration, &coupon.BillingPeriods, &coupon.CouponCodeName, &coupon.CreatedAt)
if err != nil {
return nil, err
}
@ -13768,7 +13822,7 @@ func (obj *pgxImpl) Update_Coupon_By_Id(ctx context.Context,
defer mon.Task()(&ctx)(&err)
var __sets = &__sqlbundle_Hole{}
var __embed_stmt = __sqlbundle_Literals{Join: "", SQLs: []__sqlbundle_SQL{__sqlbundle_Literal("UPDATE coupons SET "), __sets, __sqlbundle_Literal(" WHERE coupons.id = ? RETURNING coupons.id, coupons.user_id, coupons.amount, coupons.description, coupons.type, coupons.status, coupons.duration, coupons.coupon_code_name, coupons.created_at")}}
var __embed_stmt = __sqlbundle_Literals{Join: "", SQLs: []__sqlbundle_SQL{__sqlbundle_Literal("UPDATE coupons SET "), __sets, __sqlbundle_Literal(" WHERE coupons.id = ? RETURNING coupons.id, coupons.user_id, coupons.amount, coupons.description, coupons.type, coupons.status, coupons.duration, coupons.billing_periods, coupons.coupon_code_name, coupons.created_at")}}
__sets_sql := __sqlbundle_Literals{Join: ", "}
var __values []interface{}
@ -13792,7 +13846,7 @@ func (obj *pgxImpl) Update_Coupon_By_Id(ctx context.Context,
obj.logStmt(__stmt, __values...)
coupon = &Coupon{}
err = obj.queryRowContext(ctx, __stmt, __values...).Scan(&coupon.Id, &coupon.UserId, &coupon.Amount, &coupon.Description, &coupon.Type, &coupon.Status, &coupon.Duration, &coupon.CouponCodeName, &coupon.CreatedAt)
err = obj.queryRowContext(ctx, __stmt, __values...).Scan(&coupon.Id, &coupon.UserId, &coupon.Amount, &coupon.Description, &coupon.Type, &coupon.Status, &coupon.Duration, &coupon.BillingPeriods, &coupon.CouponCodeName, &coupon.CreatedAt)
if err == sql.ErrNoRows {
return nil, nil
}
@ -15458,7 +15512,7 @@ func (obj *pgxcockroachImpl) Create_CouponCode(ctx context.Context,
coupon_code_amount CouponCode_Amount_Field,
coupon_code_description CouponCode_Description_Field,
coupon_code_type CouponCode_Type_Field,
coupon_code_duration CouponCode_Duration_Field) (
optional CouponCode_Create_Fields) (
coupon_code *CouponCode, err error) {
defer mon.Task()(&ctx)(&err)
@ -15468,19 +15522,19 @@ func (obj *pgxcockroachImpl) Create_CouponCode(ctx context.Context,
__amount_val := coupon_code_amount.value()
__description_val := coupon_code_description.value()
__type_val := coupon_code_type.value()
__duration_val := coupon_code_duration.value()
__billing_periods_val := optional.BillingPeriods.value()
__created_at_val := __now
var __embed_stmt = __sqlbundle_Literal("INSERT INTO coupon_codes ( id, name, amount, description, type, duration, created_at ) VALUES ( ?, ?, ?, ?, ?, ?, ? ) RETURNING coupon_codes.id, coupon_codes.name, coupon_codes.amount, coupon_codes.description, coupon_codes.type, coupon_codes.duration, coupon_codes.created_at")
var __embed_stmt = __sqlbundle_Literal("INSERT INTO coupon_codes ( id, name, amount, description, type, billing_periods, created_at ) VALUES ( ?, ?, ?, ?, ?, ?, ? ) RETURNING coupon_codes.id, coupon_codes.name, coupon_codes.amount, coupon_codes.description, coupon_codes.type, coupon_codes.billing_periods, coupon_codes.created_at")
var __values []interface{}
__values = append(__values, __id_val, __name_val, __amount_val, __description_val, __type_val, __duration_val, __created_at_val)
__values = append(__values, __id_val, __name_val, __amount_val, __description_val, __type_val, __billing_periods_val, __created_at_val)
var __stmt = __sqlbundle_Render(obj.dialect, __embed_stmt)
obj.logStmt(__stmt, __values...)
coupon_code = &CouponCode{}
err = obj.queryRowContext(ctx, __stmt, __values...).Scan(&coupon_code.Id, &coupon_code.Name, &coupon_code.Amount, &coupon_code.Description, &coupon_code.Type, &coupon_code.Duration, &coupon_code.CreatedAt)
err = obj.queryRowContext(ctx, __stmt, __values...).Scan(&coupon_code.Id, &coupon_code.Name, &coupon_code.Amount, &coupon_code.Description, &coupon_code.Type, &coupon_code.BillingPeriods, &coupon_code.CreatedAt)
if err != nil {
return nil, obj.makeErr(err)
}
@ -15508,19 +15562,20 @@ func (obj *pgxcockroachImpl) Create_Coupon(ctx context.Context,
__type_val := coupon_type.value()
__status_val := coupon_status.value()
__duration_val := coupon_duration.value()
__billing_periods_val := optional.BillingPeriods.value()
__coupon_code_name_val := optional.CouponCodeName.value()
__created_at_val := __now
var __embed_stmt = __sqlbundle_Literal("INSERT INTO coupons ( id, user_id, amount, description, type, status, duration, coupon_code_name, created_at ) VALUES ( ?, ?, ?, ?, ?, ?, ?, ?, ? ) RETURNING coupons.id, coupons.user_id, coupons.amount, coupons.description, coupons.type, coupons.status, coupons.duration, coupons.coupon_code_name, coupons.created_at")
var __embed_stmt = __sqlbundle_Literal("INSERT INTO coupons ( id, user_id, amount, description, type, status, duration, billing_periods, coupon_code_name, created_at ) VALUES ( ?, ?, ?, ?, ?, ?, ?, ?, ?, ? ) RETURNING coupons.id, coupons.user_id, coupons.amount, coupons.description, coupons.type, coupons.status, coupons.duration, coupons.billing_periods, coupons.coupon_code_name, coupons.created_at")
var __values []interface{}
__values = append(__values, __id_val, __user_id_val, __amount_val, __description_val, __type_val, __status_val, __duration_val, __coupon_code_name_val, __created_at_val)
__values = append(__values, __id_val, __user_id_val, __amount_val, __description_val, __type_val, __status_val, __duration_val, __billing_periods_val, __coupon_code_name_val, __created_at_val)
var __stmt = __sqlbundle_Render(obj.dialect, __embed_stmt)
obj.logStmt(__stmt, __values...)
coupon = &Coupon{}
err = obj.queryRowContext(ctx, __stmt, __values...).Scan(&coupon.Id, &coupon.UserId, &coupon.Amount, &coupon.Description, &coupon.Type, &coupon.Status, &coupon.Duration, &coupon.CouponCodeName, &coupon.CreatedAt)
err = obj.queryRowContext(ctx, __stmt, __values...).Scan(&coupon.Id, &coupon.UserId, &coupon.Amount, &coupon.Description, &coupon.Type, &coupon.Status, &coupon.Duration, &coupon.BillingPeriods, &coupon.CouponCodeName, &coupon.CreatedAt)
if err != nil {
return nil, obj.makeErr(err)
}
@ -17837,7 +17892,7 @@ func (obj *pgxcockroachImpl) Get_CouponCode_By_Name(ctx context.Context,
coupon_code *CouponCode, err error) {
defer mon.Task()(&ctx)(&err)
var __embed_stmt = __sqlbundle_Literal("SELECT coupon_codes.id, coupon_codes.name, coupon_codes.amount, coupon_codes.description, coupon_codes.type, coupon_codes.duration, coupon_codes.created_at FROM coupon_codes WHERE coupon_codes.name = ?")
var __embed_stmt = __sqlbundle_Literal("SELECT coupon_codes.id, coupon_codes.name, coupon_codes.amount, coupon_codes.description, coupon_codes.type, coupon_codes.billing_periods, coupon_codes.created_at FROM coupon_codes WHERE coupon_codes.name = ?")
var __values []interface{}
__values = append(__values, coupon_code_name.value())
@ -17846,7 +17901,7 @@ func (obj *pgxcockroachImpl) Get_CouponCode_By_Name(ctx context.Context,
obj.logStmt(__stmt, __values...)
coupon_code = &CouponCode{}
err = obj.queryRowContext(ctx, __stmt, __values...).Scan(&coupon_code.Id, &coupon_code.Name, &coupon_code.Amount, &coupon_code.Description, &coupon_code.Type, &coupon_code.Duration, &coupon_code.CreatedAt)
err = obj.queryRowContext(ctx, __stmt, __values...).Scan(&coupon_code.Id, &coupon_code.Name, &coupon_code.Amount, &coupon_code.Description, &coupon_code.Type, &coupon_code.BillingPeriods, &coupon_code.CreatedAt)
if err != nil {
return (*CouponCode)(nil), obj.makeErr(err)
}
@ -17859,7 +17914,7 @@ func (obj *pgxcockroachImpl) Get_Coupon_By_Id(ctx context.Context,
coupon *Coupon, err error) {
defer mon.Task()(&ctx)(&err)
var __embed_stmt = __sqlbundle_Literal("SELECT coupons.id, coupons.user_id, coupons.amount, coupons.description, coupons.type, coupons.status, coupons.duration, coupons.coupon_code_name, coupons.created_at FROM coupons WHERE coupons.id = ?")
var __embed_stmt = __sqlbundle_Literal("SELECT coupons.id, coupons.user_id, coupons.amount, coupons.description, coupons.type, coupons.status, coupons.duration, coupons.billing_periods, coupons.coupon_code_name, coupons.created_at FROM coupons WHERE coupons.id = ?")
var __values []interface{}
__values = append(__values, coupon_id.value())
@ -17868,7 +17923,7 @@ func (obj *pgxcockroachImpl) Get_Coupon_By_Id(ctx context.Context,
obj.logStmt(__stmt, __values...)
coupon = &Coupon{}
err = obj.queryRowContext(ctx, __stmt, __values...).Scan(&coupon.Id, &coupon.UserId, &coupon.Amount, &coupon.Description, &coupon.Type, &coupon.Status, &coupon.Duration, &coupon.CouponCodeName, &coupon.CreatedAt)
err = obj.queryRowContext(ctx, __stmt, __values...).Scan(&coupon.Id, &coupon.UserId, &coupon.Amount, &coupon.Description, &coupon.Type, &coupon.Status, &coupon.Duration, &coupon.BillingPeriods, &coupon.CouponCodeName, &coupon.CreatedAt)
if err != nil {
return (*Coupon)(nil), obj.makeErr(err)
}
@ -17881,7 +17936,7 @@ func (obj *pgxcockroachImpl) All_Coupon_By_UserId_OrderBy_Desc_CreatedAt(ctx con
rows []*Coupon, err error) {
defer mon.Task()(&ctx)(&err)
var __embed_stmt = __sqlbundle_Literal("SELECT coupons.id, coupons.user_id, coupons.amount, coupons.description, coupons.type, coupons.status, coupons.duration, coupons.coupon_code_name, coupons.created_at FROM coupons WHERE coupons.user_id = ? ORDER BY coupons.created_at DESC")
var __embed_stmt = __sqlbundle_Literal("SELECT coupons.id, coupons.user_id, coupons.amount, coupons.description, coupons.type, coupons.status, coupons.duration, coupons.billing_periods, coupons.coupon_code_name, coupons.created_at FROM coupons WHERE coupons.user_id = ? ORDER BY coupons.created_at DESC")
var __values []interface{}
__values = append(__values, coupon_user_id.value())
@ -17899,7 +17954,7 @@ func (obj *pgxcockroachImpl) All_Coupon_By_UserId_OrderBy_Desc_CreatedAt(ctx con
for __rows.Next() {
coupon := &Coupon{}
err = __rows.Scan(&coupon.Id, &coupon.UserId, &coupon.Amount, &coupon.Description, &coupon.Type, &coupon.Status, &coupon.Duration, &coupon.CouponCodeName, &coupon.CreatedAt)
err = __rows.Scan(&coupon.Id, &coupon.UserId, &coupon.Amount, &coupon.Description, &coupon.Type, &coupon.Status, &coupon.Duration, &coupon.BillingPeriods, &coupon.CouponCodeName, &coupon.CreatedAt)
if err != nil {
return nil, err
}
@ -17927,7 +17982,7 @@ func (obj *pgxcockroachImpl) All_Coupon_By_UserId_And_Status_OrderBy_Desc_Create
rows []*Coupon, err error) {
defer mon.Task()(&ctx)(&err)
var __embed_stmt = __sqlbundle_Literal("SELECT coupons.id, coupons.user_id, coupons.amount, coupons.description, coupons.type, coupons.status, coupons.duration, coupons.coupon_code_name, coupons.created_at FROM coupons WHERE coupons.user_id = ? AND coupons.status = ? ORDER BY coupons.created_at DESC")
var __embed_stmt = __sqlbundle_Literal("SELECT coupons.id, coupons.user_id, coupons.amount, coupons.description, coupons.type, coupons.status, coupons.duration, coupons.billing_periods, coupons.coupon_code_name, coupons.created_at FROM coupons WHERE coupons.user_id = ? AND coupons.status = ? ORDER BY coupons.created_at DESC")
var __values []interface{}
__values = append(__values, coupon_user_id.value(), coupon_status.value())
@ -17945,7 +18000,7 @@ func (obj *pgxcockroachImpl) All_Coupon_By_UserId_And_Status_OrderBy_Desc_Create
for __rows.Next() {
coupon := &Coupon{}
err = __rows.Scan(&coupon.Id, &coupon.UserId, &coupon.Amount, &coupon.Description, &coupon.Type, &coupon.Status, &coupon.Duration, &coupon.CouponCodeName, &coupon.CreatedAt)
err = __rows.Scan(&coupon.Id, &coupon.UserId, &coupon.Amount, &coupon.Description, &coupon.Type, &coupon.Status, &coupon.Duration, &coupon.BillingPeriods, &coupon.CouponCodeName, &coupon.CreatedAt)
if err != nil {
return nil, err
}
@ -17972,7 +18027,7 @@ func (obj *pgxcockroachImpl) All_Coupon_By_Status_OrderBy_Desc_CreatedAt(ctx con
rows []*Coupon, err error) {
defer mon.Task()(&ctx)(&err)
var __embed_stmt = __sqlbundle_Literal("SELECT coupons.id, coupons.user_id, coupons.amount, coupons.description, coupons.type, coupons.status, coupons.duration, coupons.coupon_code_name, coupons.created_at FROM coupons WHERE coupons.status = ? ORDER BY coupons.created_at DESC")
var __embed_stmt = __sqlbundle_Literal("SELECT coupons.id, coupons.user_id, coupons.amount, coupons.description, coupons.type, coupons.status, coupons.duration, coupons.billing_periods, coupons.coupon_code_name, coupons.created_at FROM coupons WHERE coupons.status = ? ORDER BY coupons.created_at DESC")
var __values []interface{}
__values = append(__values, coupon_status.value())
@ -17990,7 +18045,7 @@ func (obj *pgxcockroachImpl) All_Coupon_By_Status_OrderBy_Desc_CreatedAt(ctx con
for __rows.Next() {
coupon := &Coupon{}
err = __rows.Scan(&coupon.Id, &coupon.UserId, &coupon.Amount, &coupon.Description, &coupon.Type, &coupon.Status, &coupon.Duration, &coupon.CouponCodeName, &coupon.CreatedAt)
err = __rows.Scan(&coupon.Id, &coupon.UserId, &coupon.Amount, &coupon.Description, &coupon.Type, &coupon.Status, &coupon.Duration, &coupon.BillingPeriods, &coupon.CouponCodeName, &coupon.CreatedAt)
if err != nil {
return nil, err
}
@ -18019,7 +18074,7 @@ func (obj *pgxcockroachImpl) Limited_Coupon_By_CreatedAt_LessOrEqual_And_Status_
rows []*Coupon, err error) {
defer mon.Task()(&ctx)(&err)
var __embed_stmt = __sqlbundle_Literal("SELECT coupons.id, coupons.user_id, coupons.amount, coupons.description, coupons.type, coupons.status, coupons.duration, coupons.coupon_code_name, coupons.created_at FROM coupons WHERE coupons.created_at <= ? AND coupons.status = ? ORDER BY coupons.created_at DESC LIMIT ? OFFSET ?")
var __embed_stmt = __sqlbundle_Literal("SELECT coupons.id, coupons.user_id, coupons.amount, coupons.description, coupons.type, coupons.status, coupons.duration, coupons.billing_periods, coupons.coupon_code_name, coupons.created_at FROM coupons WHERE coupons.created_at <= ? AND coupons.status = ? ORDER BY coupons.created_at DESC LIMIT ? OFFSET ?")
var __values []interface{}
__values = append(__values, coupon_created_at_less_or_equal.value(), coupon_status.value())
@ -18039,7 +18094,7 @@ func (obj *pgxcockroachImpl) Limited_Coupon_By_CreatedAt_LessOrEqual_And_Status_
for __rows.Next() {
coupon := &Coupon{}
err = __rows.Scan(&coupon.Id, &coupon.UserId, &coupon.Amount, &coupon.Description, &coupon.Type, &coupon.Status, &coupon.Duration, &coupon.CouponCodeName, &coupon.CreatedAt)
err = __rows.Scan(&coupon.Id, &coupon.UserId, &coupon.Amount, &coupon.Description, &coupon.Type, &coupon.Status, &coupon.Duration, &coupon.BillingPeriods, &coupon.CouponCodeName, &coupon.CreatedAt)
if err != nil {
return nil, err
}
@ -19261,7 +19316,7 @@ func (obj *pgxcockroachImpl) Update_Coupon_By_Id(ctx context.Context,
defer mon.Task()(&ctx)(&err)
var __sets = &__sqlbundle_Hole{}
var __embed_stmt = __sqlbundle_Literals{Join: "", SQLs: []__sqlbundle_SQL{__sqlbundle_Literal("UPDATE coupons SET "), __sets, __sqlbundle_Literal(" WHERE coupons.id = ? RETURNING coupons.id, coupons.user_id, coupons.amount, coupons.description, coupons.type, coupons.status, coupons.duration, coupons.coupon_code_name, coupons.created_at")}}
var __embed_stmt = __sqlbundle_Literals{Join: "", SQLs: []__sqlbundle_SQL{__sqlbundle_Literal("UPDATE coupons SET "), __sets, __sqlbundle_Literal(" WHERE coupons.id = ? RETURNING coupons.id, coupons.user_id, coupons.amount, coupons.description, coupons.type, coupons.status, coupons.duration, coupons.billing_periods, coupons.coupon_code_name, coupons.created_at")}}
__sets_sql := __sqlbundle_Literals{Join: ", "}
var __values []interface{}
@ -19285,7 +19340,7 @@ func (obj *pgxcockroachImpl) Update_Coupon_By_Id(ctx context.Context,
obj.logStmt(__stmt, __values...)
coupon = &Coupon{}
err = obj.queryRowContext(ctx, __stmt, __values...).Scan(&coupon.Id, &coupon.UserId, &coupon.Amount, &coupon.Description, &coupon.Type, &coupon.Status, &coupon.Duration, &coupon.CouponCodeName, &coupon.CreatedAt)
err = obj.queryRowContext(ctx, __stmt, __values...).Scan(&coupon.Id, &coupon.UserId, &coupon.Amount, &coupon.Description, &coupon.Type, &coupon.Status, &coupon.Duration, &coupon.BillingPeriods, &coupon.CouponCodeName, &coupon.CreatedAt)
if err == sql.ErrNoRows {
return nil, nil
}
@ -20599,13 +20654,13 @@ func (rx *Rx) Create_CouponCode(ctx context.Context,
coupon_code_amount CouponCode_Amount_Field,
coupon_code_description CouponCode_Description_Field,
coupon_code_type CouponCode_Type_Field,
coupon_code_duration CouponCode_Duration_Field) (
optional CouponCode_Create_Fields) (
coupon_code *CouponCode, err error) {
var tx *Tx
if tx, err = rx.getTx(ctx); err != nil {
return
}
return tx.Create_CouponCode(ctx, coupon_code_id, coupon_code_name, coupon_code_amount, coupon_code_description, coupon_code_type, coupon_code_duration)
return tx.Create_CouponCode(ctx, coupon_code_id, coupon_code_name, coupon_code_amount, coupon_code_description, coupon_code_type, optional)
}
@ -21861,7 +21916,7 @@ type Methods interface {
coupon_code_amount CouponCode_Amount_Field,
coupon_code_description CouponCode_Description_Field,
coupon_code_type CouponCode_Type_Field,
coupon_code_duration CouponCode_Duration_Field) (
optional CouponCode_Create_Fields) (
coupon_code *CouponCode, err error)
Create_CouponUsage(ctx context.Context,

View File

@ -75,6 +75,7 @@ CREATE TABLE coupons (
type integer NOT NULL,
status integer NOT NULL,
duration bigint NOT NULL,
billing_periods bigint,
coupon_code_name text,
created_at timestamp with time zone NOT NULL,
PRIMARY KEY ( id )
@ -85,7 +86,7 @@ CREATE TABLE coupon_codes (
amount bigint NOT NULL,
description text NOT NULL,
type integer NOT NULL,
duration bigint NOT NULL,
billing_periods bigint,
created_at timestamp with time zone NOT NULL,
PRIMARY KEY ( id ),
UNIQUE ( name )

View File

@ -75,6 +75,7 @@ CREATE TABLE coupons (
type integer NOT NULL,
status integer NOT NULL,
duration bigint NOT NULL,
billing_periods bigint,
coupon_code_name text,
created_at timestamp with time zone NOT NULL,
PRIMARY KEY ( id )
@ -85,7 +86,7 @@ CREATE TABLE coupon_codes (
amount bigint NOT NULL,
description text NOT NULL,
type integer NOT NULL,
duration bigint NOT NULL,
billing_periods bigint,
created_at timestamp with time zone NOT NULL,
PRIMARY KEY ( id ),
UNIQUE ( name )

View File

@ -1320,6 +1320,16 @@ func (db *satelliteDB) PostgresMigration() *migrate.Migration {
`DROP INDEX IF EXISTS nodes_dis_unk_exit_fin_last_success_index;`,
},
},
{
DB: &db.migrationDB,
Description: "add nullable coupons.duration_new, migrate coupon_codes.duration to be nullable",
Version: 153,
Action: migrate.SQL{
`ALTER TABLE coupons ADD COLUMN billing_periods bigint;`,
`ALTER TABLE coupon_codes ADD COLUMN billing_periods bigint;`,
`ALTER TABLE coupon_codes DROP COLUMN duration;`,
},
},
// NB: after updating testdata in `testdata`, run
// `go generate` to update `migratez.go`.
},
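Because the billing_periods columns added in version 153 are nullable, the value surfaces in Go as a pointer, and a NULL (nil) value is what lets a coupon carry no fixed expiration. The following is only a minimal, self-contained sketch of that convention; the struct and helper are illustrative stand-ins, not the generated dbx model.

package main

import "fmt"

// coupon is a simplified stand-in for the generated model: the nullable
// billing_periods column maps to a pointer field, so nil means "no limit".
type coupon struct {
	BillingPeriods *int64
}

// remainingPeriods reports how many billing periods are left after the given
// number have elapsed; ok is false when the coupon never expires.
func remainingPeriods(c coupon, elapsed int64) (left int64, ok bool) {
	if c.BillingPeriods == nil {
		return 0, false // NULL billing_periods: no expiration
	}
	left = *c.BillingPeriods - elapsed
	if left < 0 {
		left = 0
	}
	return left, true
}

func main() {
	two := int64(2)
	fmt.Println(remainingPeriods(coupon{BillingPeriods: &two}, 1)) // 1 true
	fmt.Println(remainingPeriods(coupon{}, 1))                     // 0 false
}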

View File

@ -13,7 +13,7 @@ func (db *satelliteDB) testMigration() *migrate.Migration {
{
DB: &db.migrationDB,
Description: "Testing setup",
Version: 152,
Version: 153,
Action: migrate.SQL{`-- AUTOGENERATED BY storj.io/dbx
-- DO NOT EDIT
CREATE TABLE accounting_rollups (
@ -91,6 +91,7 @@ CREATE TABLE coupons (
type integer NOT NULL,
status integer NOT NULL,
duration bigint NOT NULL,
billing_periods bigint,
coupon_code_name text,
created_at timestamp with time zone NOT NULL,
PRIMARY KEY ( id )
@ -101,7 +102,7 @@ CREATE TABLE coupon_codes (
amount bigint NOT NULL,
description text NOT NULL,
type integer NOT NULL,
duration bigint NOT NULL,
billing_periods bigint,
created_at timestamp with time zone NOT NULL,
PRIMARY KEY ( id ),
UNIQUE ( name )

View File

@ -0,0 +1,580 @@
-- AUTOGENERATED BY storj.io/dbx
-- DO NOT EDIT
CREATE TABLE accounting_rollups (
node_id bytea NOT NULL,
start_time timestamp with time zone NOT NULL,
put_total bigint NOT NULL,
get_total bigint NOT NULL,
get_audit_total bigint NOT NULL,
get_repair_total bigint NOT NULL,
put_repair_total bigint NOT NULL,
at_rest_total double precision NOT NULL,
PRIMARY KEY ( node_id, start_time )
);
CREATE TABLE accounting_timestamps (
name text NOT NULL,
value timestamp with time zone NOT NULL,
PRIMARY KEY ( name )
);
CREATE TABLE audit_histories (
node_id bytea NOT NULL,
history bytea NOT NULL,
PRIMARY KEY ( node_id )
);
CREATE TABLE bucket_bandwidth_rollups (
bucket_name bytea NOT NULL,
project_id bytea NOT NULL,
interval_start timestamp with time zone NOT NULL,
interval_seconds integer NOT NULL,
action integer NOT NULL,
inline bigint NOT NULL,
allocated bigint NOT NULL,
settled bigint NOT NULL,
PRIMARY KEY ( bucket_name, project_id, interval_start, action )
);
CREATE TABLE bucket_bandwidth_rollup_archives (
bucket_name bytea NOT NULL,
project_id bytea NOT NULL,
interval_start timestamp with time zone NOT NULL,
interval_seconds integer NOT NULL,
action integer NOT NULL,
inline bigint NOT NULL,
allocated bigint NOT NULL,
settled bigint NOT NULL,
PRIMARY KEY ( bucket_name, project_id, interval_start, action )
);
CREATE TABLE bucket_storage_tallies (
bucket_name bytea NOT NULL,
project_id bytea NOT NULL,
interval_start timestamp with time zone NOT NULL,
inline bigint NOT NULL,
remote bigint NOT NULL,
remote_segments_count integer NOT NULL,
inline_segments_count integer NOT NULL,
object_count integer NOT NULL,
metadata_size bigint NOT NULL,
PRIMARY KEY ( bucket_name, project_id, interval_start )
);
CREATE TABLE coinpayments_transactions (
id text NOT NULL,
user_id bytea NOT NULL,
address text NOT NULL,
amount bytea NOT NULL,
received bytea NOT NULL,
status integer NOT NULL,
key text NOT NULL,
timeout integer NOT NULL,
created_at timestamp with time zone NOT NULL,
PRIMARY KEY ( id )
);
CREATE TABLE coupons (
id bytea NOT NULL,
user_id bytea NOT NULL,
amount bigint NOT NULL,
description text NOT NULL,
type integer NOT NULL,
status integer NOT NULL,
duration bigint NOT NULL,
billing_periods bigint,
coupon_code_name text,
created_at timestamp with time zone NOT NULL,
PRIMARY KEY ( id )
);
CREATE TABLE coupon_codes (
id bytea NOT NULL,
name text NOT NULL,
amount bigint NOT NULL,
description text NOT NULL,
type integer NOT NULL,
billing_periods bigint,
created_at timestamp with time zone NOT NULL,
PRIMARY KEY ( id ),
UNIQUE ( name )
);
CREATE TABLE coupon_usages (
coupon_id bytea NOT NULL,
amount bigint NOT NULL,
status integer NOT NULL,
period timestamp with time zone NOT NULL,
PRIMARY KEY ( coupon_id, period )
);
CREATE TABLE graceful_exit_progress (
node_id bytea NOT NULL,
bytes_transferred bigint NOT NULL,
pieces_transferred bigint NOT NULL DEFAULT 0,
pieces_failed bigint NOT NULL DEFAULT 0,
updated_at timestamp with time zone NOT NULL,
PRIMARY KEY ( node_id )
);
CREATE TABLE graceful_exit_transfer_queue (
node_id bytea NOT NULL,
path bytea NOT NULL,
piece_num integer NOT NULL,
root_piece_id bytea,
durability_ratio double precision NOT NULL,
queued_at timestamp with time zone NOT NULL,
requested_at timestamp with time zone,
last_failed_at timestamp with time zone,
last_failed_code integer,
failed_count integer,
finished_at timestamp with time zone,
order_limit_send_count integer NOT NULL DEFAULT 0,
PRIMARY KEY ( node_id, path, piece_num )
);
CREATE TABLE injuredsegments (
path bytea NOT NULL,
data bytea NOT NULL,
attempted timestamp with time zone,
updated_at timestamp with time zone NOT NULL DEFAULT current_timestamp,
segment_health double precision NOT NULL DEFAULT 1,
PRIMARY KEY ( path )
);
CREATE TABLE irreparabledbs (
segmentpath bytea NOT NULL,
segmentdetail bytea NOT NULL,
pieces_lost_count bigint NOT NULL,
seg_damaged_unix_sec bigint NOT NULL,
repair_attempt_count bigint NOT NULL,
PRIMARY KEY ( segmentpath )
);
CREATE TABLE nodes (
id bytea NOT NULL,
address text NOT NULL DEFAULT '',
last_net text NOT NULL,
last_ip_port text,
protocol integer NOT NULL DEFAULT 0,
type integer NOT NULL DEFAULT 0,
email text NOT NULL,
wallet text NOT NULL,
wallet_features text NOT NULL DEFAULT '',
free_disk bigint NOT NULL DEFAULT -1,
piece_count bigint NOT NULL DEFAULT 0,
major bigint NOT NULL DEFAULT 0,
minor bigint NOT NULL DEFAULT 0,
patch bigint NOT NULL DEFAULT 0,
hash text NOT NULL DEFAULT '',
timestamp timestamp with time zone NOT NULL DEFAULT '0001-01-01 00:00:00+00',
release boolean NOT NULL DEFAULT false,
latency_90 bigint NOT NULL DEFAULT 0,
audit_success_count bigint NOT NULL DEFAULT 0,
total_audit_count bigint NOT NULL DEFAULT 0,
vetted_at timestamp with time zone,
uptime_success_count bigint NOT NULL DEFAULT 0,
total_uptime_count bigint NOT NULL DEFAULT 0,
created_at timestamp with time zone NOT NULL DEFAULT current_timestamp,
updated_at timestamp with time zone NOT NULL DEFAULT current_timestamp,
last_contact_success timestamp with time zone NOT NULL DEFAULT 'epoch',
last_contact_failure timestamp with time zone NOT NULL DEFAULT 'epoch',
contained boolean NOT NULL DEFAULT false,
disqualified timestamp with time zone,
suspended timestamp with time zone,
unknown_audit_suspended timestamp with time zone,
offline_suspended timestamp with time zone,
under_review timestamp with time zone,
online_score double precision NOT NULL DEFAULT 1,
audit_reputation_alpha double precision NOT NULL DEFAULT 1,
audit_reputation_beta double precision NOT NULL DEFAULT 0,
unknown_audit_reputation_alpha double precision NOT NULL DEFAULT 1,
unknown_audit_reputation_beta double precision NOT NULL DEFAULT 0,
exit_initiated_at timestamp with time zone,
exit_loop_completed_at timestamp with time zone,
exit_finished_at timestamp with time zone,
exit_success boolean NOT NULL DEFAULT false,
PRIMARY KEY ( id )
);
CREATE TABLE node_api_versions (
id bytea NOT NULL,
api_version integer NOT NULL,
created_at timestamp with time zone NOT NULL,
updated_at timestamp with time zone NOT NULL,
PRIMARY KEY ( id )
);
CREATE TABLE offers (
id serial NOT NULL,
name text NOT NULL,
description text NOT NULL,
award_credit_in_cents integer NOT NULL DEFAULT 0,
invitee_credit_in_cents integer NOT NULL DEFAULT 0,
award_credit_duration_days integer,
invitee_credit_duration_days integer,
redeemable_cap integer,
expires_at timestamp with time zone NOT NULL,
created_at timestamp with time zone NOT NULL,
status integer NOT NULL,
type integer NOT NULL,
PRIMARY KEY ( id )
);
CREATE TABLE peer_identities (
node_id bytea NOT NULL,
leaf_serial_number bytea NOT NULL,
chain bytea NOT NULL,
updated_at timestamp with time zone NOT NULL,
PRIMARY KEY ( node_id )
);
CREATE TABLE pending_audits (
node_id bytea NOT NULL,
piece_id bytea NOT NULL,
stripe_index bigint NOT NULL,
share_size bigint NOT NULL,
expected_share_hash bytea NOT NULL,
reverify_count bigint NOT NULL,
path bytea NOT NULL,
PRIMARY KEY ( node_id )
);
CREATE TABLE projects (
id bytea NOT NULL,
name text NOT NULL,
description text NOT NULL,
usage_limit bigint,
bandwidth_limit bigint,
rate_limit integer,
max_buckets integer,
partner_id bytea,
owner_id bytea NOT NULL,
created_at timestamp with time zone NOT NULL,
PRIMARY KEY ( id )
);
CREATE TABLE project_bandwidth_rollups (
project_id bytea NOT NULL,
interval_month date NOT NULL,
egress_allocated bigint NOT NULL,
PRIMARY KEY ( project_id, interval_month )
);
CREATE TABLE registration_tokens (
secret bytea NOT NULL,
owner_id bytea,
project_limit integer NOT NULL,
created_at timestamp with time zone NOT NULL,
PRIMARY KEY ( secret ),
UNIQUE ( owner_id )
);
CREATE TABLE reset_password_tokens (
secret bytea NOT NULL,
owner_id bytea NOT NULL,
created_at timestamp with time zone NOT NULL,
PRIMARY KEY ( secret ),
UNIQUE ( owner_id )
);
CREATE TABLE revocations (
revoked bytea NOT NULL,
api_key_id bytea NOT NULL,
PRIMARY KEY ( revoked )
);
CREATE TABLE storagenode_bandwidth_rollups (
storagenode_id bytea NOT NULL,
interval_start timestamp with time zone NOT NULL,
interval_seconds integer NOT NULL,
action integer NOT NULL,
allocated bigint DEFAULT 0,
settled bigint NOT NULL,
PRIMARY KEY ( storagenode_id, interval_start, action )
);
CREATE TABLE storagenode_bandwidth_rollup_archives (
storagenode_id bytea NOT NULL,
interval_start timestamp with time zone NOT NULL,
interval_seconds integer NOT NULL,
action integer NOT NULL,
allocated bigint DEFAULT 0,
settled bigint NOT NULL,
PRIMARY KEY ( storagenode_id, interval_start, action )
);
CREATE TABLE storagenode_bandwidth_rollups_phase2 (
storagenode_id bytea NOT NULL,
interval_start timestamp with time zone NOT NULL,
interval_seconds integer NOT NULL,
action integer NOT NULL,
allocated bigint DEFAULT 0,
settled bigint NOT NULL,
PRIMARY KEY ( storagenode_id, interval_start, action )
);
CREATE TABLE storagenode_payments (
id bigserial NOT NULL,
created_at timestamp with time zone NOT NULL,
node_id bytea NOT NULL,
period text NOT NULL,
amount bigint NOT NULL,
receipt text,
notes text,
PRIMARY KEY ( id )
);
CREATE TABLE storagenode_paystubs (
period text NOT NULL,
node_id bytea NOT NULL,
created_at timestamp with time zone NOT NULL,
codes text NOT NULL,
usage_at_rest double precision NOT NULL,
usage_get bigint NOT NULL,
usage_put bigint NOT NULL,
usage_get_repair bigint NOT NULL,
usage_put_repair bigint NOT NULL,
usage_get_audit bigint NOT NULL,
comp_at_rest bigint NOT NULL,
comp_get bigint NOT NULL,
comp_put bigint NOT NULL,
comp_get_repair bigint NOT NULL,
comp_put_repair bigint NOT NULL,
comp_get_audit bigint NOT NULL,
surge_percent bigint NOT NULL,
held bigint NOT NULL,
owed bigint NOT NULL,
disposed bigint NOT NULL,
paid bigint NOT NULL,
distributed bigint NOT NULL,
PRIMARY KEY ( period, node_id )
);
CREATE TABLE storagenode_storage_tallies (
node_id bytea NOT NULL,
interval_end_time timestamp with time zone NOT NULL,
data_total double precision NOT NULL,
PRIMARY KEY ( interval_end_time, node_id )
);
CREATE TABLE stripe_customers (
user_id bytea NOT NULL,
customer_id text NOT NULL,
created_at timestamp with time zone NOT NULL,
PRIMARY KEY ( user_id ),
UNIQUE ( customer_id )
);
CREATE TABLE stripecoinpayments_invoice_project_records (
id bytea NOT NULL,
project_id bytea NOT NULL,
storage double precision NOT NULL,
egress bigint NOT NULL,
objects bigint NOT NULL,
period_start timestamp with time zone NOT NULL,
period_end timestamp with time zone NOT NULL,
state integer NOT NULL,
created_at timestamp with time zone NOT NULL,
PRIMARY KEY ( id ),
UNIQUE ( project_id, period_start, period_end )
);
CREATE TABLE stripecoinpayments_tx_conversion_rates (
tx_id text NOT NULL,
rate bytea NOT NULL,
created_at timestamp with time zone NOT NULL,
PRIMARY KEY ( tx_id )
);
CREATE TABLE users (
id bytea NOT NULL,
email text NOT NULL,
normalized_email text NOT NULL,
full_name text NOT NULL,
short_name text,
password_hash bytea NOT NULL,
status integer NOT NULL,
partner_id bytea,
created_at timestamp with time zone NOT NULL,
project_limit integer NOT NULL DEFAULT 0,
position text,
company_name text,
company_size integer,
working_on text,
is_professional boolean NOT NULL DEFAULT false,
employee_count text,
PRIMARY KEY ( id )
);
CREATE TABLE value_attributions (
project_id bytea NOT NULL,
bucket_name bytea NOT NULL,
partner_id bytea NOT NULL,
last_updated timestamp with time zone NOT NULL,
PRIMARY KEY ( project_id, bucket_name )
);
CREATE TABLE api_keys (
id bytea NOT NULL,
project_id bytea NOT NULL REFERENCES projects( id ) ON DELETE CASCADE,
head bytea NOT NULL,
name text NOT NULL,
secret bytea NOT NULL,
partner_id bytea,
created_at timestamp with time zone NOT NULL,
PRIMARY KEY ( id ),
UNIQUE ( head ),
UNIQUE ( name, project_id )
);
CREATE TABLE bucket_metainfos (
id bytea NOT NULL,
project_id bytea NOT NULL REFERENCES projects( id ),
name bytea NOT NULL,
partner_id bytea,
path_cipher integer NOT NULL,
created_at timestamp with time zone NOT NULL,
default_segment_size integer NOT NULL,
default_encryption_cipher_suite integer NOT NULL,
default_encryption_block_size integer NOT NULL,
default_redundancy_algorithm integer NOT NULL,
default_redundancy_share_size integer NOT NULL,
default_redundancy_required_shares integer NOT NULL,
default_redundancy_repair_shares integer NOT NULL,
default_redundancy_optimal_shares integer NOT NULL,
default_redundancy_total_shares integer NOT NULL,
PRIMARY KEY ( id ),
UNIQUE ( project_id, name )
);
CREATE TABLE project_members (
member_id bytea NOT NULL REFERENCES users( id ) ON DELETE CASCADE,
project_id bytea NOT NULL REFERENCES projects( id ) ON DELETE CASCADE,
created_at timestamp with time zone NOT NULL,
PRIMARY KEY ( member_id, project_id )
);
CREATE TABLE stripecoinpayments_apply_balance_intents (
tx_id text NOT NULL REFERENCES coinpayments_transactions( id ) ON DELETE CASCADE,
state integer NOT NULL,
created_at timestamp with time zone NOT NULL,
PRIMARY KEY ( tx_id )
);
CREATE TABLE user_credits (
id serial NOT NULL,
user_id bytea NOT NULL REFERENCES users( id ) ON DELETE CASCADE,
offer_id integer NOT NULL REFERENCES offers( id ),
referred_by bytea REFERENCES users( id ) ON DELETE SET NULL,
type text NOT NULL,
credits_earned_in_cents integer NOT NULL,
credits_used_in_cents integer NOT NULL,
expires_at timestamp with time zone NOT NULL,
created_at timestamp with time zone NOT NULL,
PRIMARY KEY ( id ),
UNIQUE ( id, offer_id )
);
CREATE INDEX accounting_rollups_start_time_index ON accounting_rollups ( start_time );
CREATE INDEX bucket_bandwidth_rollups_project_id_action_interval_index ON bucket_bandwidth_rollups ( project_id, action, interval_start );
CREATE INDEX bucket_bandwidth_rollups_action_interval_project_id_index ON bucket_bandwidth_rollups ( action, interval_start, project_id );
CREATE INDEX bucket_bandwidth_rollups_archive_project_id_action_interval_index ON bucket_bandwidth_rollup_archives ( project_id, action, interval_start );
CREATE INDEX bucket_bandwidth_rollups_archive_action_interval_project_id_index ON bucket_bandwidth_rollup_archives ( action, interval_start, project_id );
CREATE INDEX bucket_storage_tallies_project_id_interval_start_index ON bucket_storage_tallies ( project_id, interval_start );
CREATE INDEX graceful_exit_transfer_queue_nid_dr_qa_fa_lfa_index ON graceful_exit_transfer_queue ( node_id, durability_ratio, queued_at, finished_at, last_failed_at );
CREATE INDEX injuredsegments_attempted_index ON injuredsegments ( attempted );
CREATE INDEX injuredsegments_segment_health_index ON injuredsegments ( segment_health );
CREATE INDEX injuredsegments_updated_at_index ON injuredsegments ( updated_at );
CREATE INDEX node_last_ip ON nodes ( last_net );
CREATE INDEX nodes_dis_unk_off_exit_fin_last_success_index ON nodes ( disqualified, unknown_audit_suspended, offline_suspended, exit_finished_at, last_contact_success );
CREATE INDEX storagenode_bandwidth_rollups_interval_start_index ON storagenode_bandwidth_rollups ( interval_start );
CREATE INDEX storagenode_bandwidth_rollup_archives_interval_start_index ON storagenode_bandwidth_rollup_archives ( interval_start );
CREATE INDEX storagenode_payments_node_id_period_index ON storagenode_payments ( node_id, period );
CREATE INDEX storagenode_paystubs_node_id_index ON storagenode_paystubs ( node_id );
CREATE INDEX storagenode_storage_tallies_node_id_index ON storagenode_storage_tallies ( node_id );
CREATE UNIQUE INDEX credits_earned_user_id_offer_id ON user_credits ( id, offer_id );
INSERT INTO "offers" ("id", "name", "description", "award_credit_in_cents", "invitee_credit_in_cents", "expires_at", "created_at", "status", "type", "award_credit_duration_days", "invitee_credit_duration_days") VALUES (1, 'Default referral offer', 'Is active when no other active referral offer', 300, 600, '2119-03-14 08:28:24.636949+00', '2019-07-14 08:28:24.636949+00', 1, 2, 365, 14);
INSERT INTO "offers" ("id", "name", "description", "award_credit_in_cents", "invitee_credit_in_cents", "expires_at", "created_at", "status", "type", "award_credit_duration_days", "invitee_credit_duration_days") VALUES (2, 'Default free credit offer', 'Is active when no active free credit offer', 0, 300, '2119-03-14 08:28:24.636949+00', '2019-07-14 08:28:24.636949+00', 1, 1, NULL, 14);
-- MAIN DATA --
INSERT INTO "accounting_rollups"("node_id", "start_time", "put_total", "get_total", "get_audit_total", "get_repair_total", "put_repair_total", "at_rest_total") VALUES (E'\\367M\\177\\251]t/\\022\\256\\214\\265\\025\\224\\204:\\217\\212\\0102<\\321\\374\\020&\\271Qc\\325\\261\\354\\246\\233'::bytea, '2019-02-09 00:00:00+00', 3000, 6000, 9000, 12000, 0, 15000);
INSERT INTO "accounting_timestamps" VALUES ('LastAtRestTally', '0001-01-01 00:00:00+00');
INSERT INTO "accounting_timestamps" VALUES ('LastRollup', '0001-01-01 00:00:00+00');
INSERT INTO "accounting_timestamps" VALUES ('LastBandwidthTally', '0001-01-01 00:00:00+00');
INSERT INTO "nodes"("id", "address", "last_net", "protocol", "type", "email", "wallet", "free_disk", "piece_count", "major", "minor", "patch", "hash", "timestamp", "release","latency_90", "audit_success_count", "total_audit_count", "uptime_success_count", "total_uptime_count", "created_at", "updated_at", "last_contact_success", "last_contact_failure", "contained", "disqualified", "suspended", "audit_reputation_alpha", "audit_reputation_beta", "unknown_audit_reputation_alpha", "unknown_audit_reputation_beta", "exit_success", "online_score") VALUES (E'\\153\\313\\233\\074\\327\\177\\136\\070\\346\\001', '127.0.0.1:55516', '', 0, 4, '', '', -1, 0, 0, 1, 0, '', 'epoch', false, 0, 0, 5, 0, 5, '2019-02-14 08:07:31.028103+00', '2019-02-14 08:07:31.108963+00', 'epoch', 'epoch', false, NULL, NULL, 50, 0, 1, 0, false, 1);
INSERT INTO "nodes"("id", "address", "last_net", "protocol", "type", "email", "wallet", "free_disk", "piece_count", "major", "minor", "patch", "hash", "timestamp", "release","latency_90", "audit_success_count", "total_audit_count", "uptime_success_count", "total_uptime_count", "created_at", "updated_at", "last_contact_success", "last_contact_failure", "contained", "disqualified", "suspended", "audit_reputation_alpha", "audit_reputation_beta", "unknown_audit_reputation_alpha", "unknown_audit_reputation_beta", "exit_success", "online_score") VALUES (E'\\006\\223\\250R\\221\\005\\365\\377v>0\\266\\365\\216\\255?\\347\\244\\371?2\\264\\262\\230\\007<\\001\\262\\263\\237\\247n', '127.0.0.1:55518', '', 0, 4, '', '', -1, 0, 0, 1, 0, '', 'epoch', false, 0, 0, 0, 3, 3, '2019-02-14 08:07:31.028103+00', '2019-02-14 08:07:31.108963+00', 'epoch', 'epoch', false, NULL, NULL, 50, 0, 1, 0, false, 1);
INSERT INTO "nodes"("id", "address", "last_net", "protocol", "type", "email", "wallet", "free_disk", "piece_count", "major", "minor", "patch", "hash", "timestamp", "release","latency_90", "audit_success_count", "total_audit_count", "uptime_success_count", "total_uptime_count", "created_at", "updated_at", "last_contact_success", "last_contact_failure", "contained", "disqualified", "suspended", "audit_reputation_alpha", "audit_reputation_beta", "unknown_audit_reputation_alpha", "unknown_audit_reputation_beta", "exit_success", "online_score") VALUES (E'\\363\\342\\363\\371>+F\\256\\263\\300\\273|\\342N\\347\\014', '127.0.0.1:55517', '', 0, 4, '', '', -1, 0, 0, 1, 0, '', 'epoch', false, 0, 0, 0, 0, 0, '2019-02-14 08:07:31.028103+00', '2019-02-14 08:07:31.108963+00', 'epoch', 'epoch', false, NULL, NULL, 50, 0, 1, 0, false, 1);
INSERT INTO "nodes"("id", "address", "last_net", "protocol", "type", "email", "wallet", "free_disk", "piece_count", "major", "minor", "patch", "hash", "timestamp", "release","latency_90", "audit_success_count", "total_audit_count", "uptime_success_count", "total_uptime_count", "created_at", "updated_at", "last_contact_success", "last_contact_failure", "contained", "disqualified", "suspended", "audit_reputation_alpha", "audit_reputation_beta", "unknown_audit_reputation_alpha", "unknown_audit_reputation_beta", "exit_success", "online_score") VALUES (E'\\363\\342\\363\\371>+F\\256\\263\\300\\273|\\342N\\347\\015', '127.0.0.1:55519', '', 0, 4, '', '', -1, 0, 0, 1, 0, '', 'epoch', false, 0, 1, 2, 1, 2, '2019-02-14 08:07:31.028103+00', '2019-02-14 08:07:31.108963+00', 'epoch', 'epoch', false, NULL, NULL, 50, 0, 1, 0, false, 1);
INSERT INTO "nodes"("id", "address", "last_net", "protocol", "type", "email", "wallet", "free_disk", "piece_count", "major", "minor", "patch", "hash", "timestamp", "release","latency_90", "audit_success_count", "total_audit_count", "uptime_success_count", "total_uptime_count", "created_at", "updated_at", "last_contact_success", "last_contact_failure", "contained", "disqualified", "suspended", "audit_reputation_alpha", "audit_reputation_beta", "unknown_audit_reputation_alpha", "unknown_audit_reputation_beta", "exit_success", "vetted_at", "online_score") VALUES (E'\\363\\342\\363\\371>+F\\256\\263\\300\\273|\\342N\\347\\016', '127.0.0.1:55520', '', 0, 4, '', '', -1, 0, 0, 1, 0, '', 'epoch', false, 0, 300, 400, 300, 400, '2019-02-14 08:07:31.028103+00', '2019-02-14 08:07:31.108963+00', 'epoch', 'epoch', false, NULL, NULL, 300, 0, 1, 0, false, '2020-03-18 12:00:00.000000+00', 1);
INSERT INTO "nodes"("id", "address", "last_net", "protocol", "type", "email", "wallet", "free_disk", "piece_count", "major", "minor", "patch", "hash", "timestamp", "release","latency_90", "audit_success_count", "total_audit_count", "uptime_success_count", "total_uptime_count", "created_at", "updated_at", "last_contact_success", "last_contact_failure", "contained", "disqualified", "suspended", "audit_reputation_alpha", "audit_reputation_beta", "unknown_audit_reputation_alpha", "unknown_audit_reputation_beta", "exit_success", "online_score") VALUES (E'\\154\\313\\233\\074\\327\\177\\136\\070\\346\\001', '127.0.0.1:55516', '', 0, 4, '', '', -1, 0, 0, 1, 0, '', 'epoch', false, 0, 0, 5, 0, 5, '2019-02-14 08:07:31.028103+00', '2019-02-14 08:07:31.108963+00', 'epoch', 'epoch', false, NULL, NULL, 50, 0, 75, 25, false, 1);
INSERT INTO "nodes"("id", "address", "last_net", "last_ip_port", "protocol", "type", "email", "wallet", "free_disk", "piece_count", "major", "minor", "patch", "hash", "timestamp", "release","latency_90", "audit_success_count", "total_audit_count", "uptime_success_count", "total_uptime_count", "created_at", "updated_at", "last_contact_success", "last_contact_failure", "contained", "disqualified", "suspended", "audit_reputation_alpha", "audit_reputation_beta", "unknown_audit_reputation_alpha", "unknown_audit_reputation_beta", "exit_success", "online_score") VALUES (E'\\154\\313\\233\\074\\327\\177\\136\\070\\346\\002', '127.0.0.1:55516', '127.0.0.0', '127.0.0.1:55516', 0, 4, '', '', -1, 0, 0, 1, 0, '', 'epoch', false, 0, 0, 5, 0, 5, '2019-02-14 08:07:31.028103+00', '2019-02-14 08:07:31.108963+00', 'epoch', 'epoch', false, NULL, NULL, 50, 0, 75, 25, false, 1);
INSERT INTO "nodes"("id", "address", "last_net", "protocol", "type", "email", "wallet", "free_disk", "piece_count", "major", "minor", "patch", "hash", "timestamp", "release","latency_90", "audit_success_count", "total_audit_count", "created_at", "updated_at", "last_contact_success", "last_contact_failure", "contained", "disqualified", "suspended", "audit_reputation_alpha", "audit_reputation_beta", "unknown_audit_reputation_alpha", "unknown_audit_reputation_beta", "exit_success", "online_score") VALUES (E'\\363\\341\\363\\371>+F\\256\\263\\300\\273|\\342N\\347\\016', '127.0.0.1:55516', '', 0, 4, '', '', -1, 0, 0, 1, 0, '', 'epoch', false, 0, 0, 5, '2019-02-14 08:07:31.028103+00', '2019-02-14 08:07:31.108963+00', 'epoch', 'epoch', false, NULL, NULL, 50, 0, 1, 0, false, 1);
INSERT INTO "nodes"("id", "address", "last_net", "protocol", "type", "email", "wallet", "wallet_features", "free_disk", "piece_count", "major", "minor", "patch", "hash", "timestamp", "release","latency_90", "audit_success_count", "total_audit_count", "created_at", "updated_at", "last_contact_success", "last_contact_failure", "contained", "disqualified", "suspended", "audit_reputation_alpha", "audit_reputation_beta", "unknown_audit_reputation_alpha", "unknown_audit_reputation_beta", "exit_success", "online_score") VALUES (E'\\362\\341\\363\\371>+F\\256\\263\\300\\273|\\342N\\347\\016', '127.0.0.1:55516', '', 0, 4, '', '', '', -1, 0, 0, 1, 0, '', 'epoch', false, 0, 0, 5, '2019-02-14 08:07:31.028103+00', '2019-02-14 08:07:31.108963+00', 'epoch', 'epoch', false, NULL, NULL, 50, 0, 1, 0, false, 1);
INSERT INTO "users"("id", "full_name", "short_name", "email", "normalized_email", "password_hash", "status", "partner_id", "created_at", "is_professional", "project_limit") VALUES (E'\\363\\311\\033w\\222\\303Ci\\265\\343U\\303\\312\\204",'::bytea, 'Noahson', 'William', '1email1@mail.test', '1EMAIL1@MAIL.TEST', E'some_readable_hash'::bytea, 1, NULL, '2019-02-14 08:28:24.614594+00', false, 10);
INSERT INTO "projects"("id", "name", "description", "usage_limit", "bandwidth_limit", "max_buckets", "partner_id", "owner_id", "created_at") VALUES (E'\\022\\217/\\014\\376!K\\023\\276\\031\\311}m\\236\\205\\300'::bytea, 'ProjectName', 'projects description', 5e11, 5e11, NULL, NULL, E'\\363\\311\\033w\\222\\303Ci\\265\\343U\\303\\312\\204",'::bytea, '2019-02-14 08:28:24.254934+00');
INSERT INTO "projects"("id", "name", "description", "usage_limit", "bandwidth_limit", "max_buckets", "partner_id", "owner_id", "created_at") VALUES (E'\\363\\342\\363\\371>+F\\256\\263\\300\\273|\\342N\\347\\014'::bytea, 'projName1', 'Test project 1', 5e11, 5e11, NULL, NULL, E'\\363\\311\\033w\\222\\303Ci\\265\\343U\\303\\312\\204",'::bytea, '2019-02-14 08:28:24.636949+00');
INSERT INTO "project_members"("member_id", "project_id", "created_at") VALUES (E'\\363\\311\\033w\\222\\303Ci\\265\\343U\\303\\312\\204",'::bytea, E'\\363\\342\\363\\371>+F\\256\\263\\300\\273|\\342N\\347\\014'::bytea, '2019-02-14 08:28:24.677953+00');
INSERT INTO "project_members"("member_id", "project_id", "created_at") VALUES (E'\\363\\311\\033w\\222\\303Ci\\265\\343U\\303\\312\\204",'::bytea, E'\\022\\217/\\014\\376!K\\023\\276\\031\\311}m\\236\\205\\300'::bytea, '2019-02-13 08:28:24.677953+00');
INSERT INTO "irreparabledbs" ("segmentpath", "segmentdetail", "pieces_lost_count", "seg_damaged_unix_sec", "repair_attempt_count") VALUES ('\x49616d5365676d656e746b6579696e666f30', '\x49616d5365676d656e7464657461696c696e666f30', 10, 1550159554, 10);
INSERT INTO "registration_tokens" ("secret", "owner_id", "project_limit", "created_at") VALUES (E'\\070\\127\\144\\013\\332\\344\\102\\376\\306\\056\\303\\130\\106\\132\\321\\276\\321\\274\\170\\264\\054\\333\\221\\116\\154\\221\\335\\070\\220\\146\\344\\216'::bytea, null, 1, '2019-02-14 08:28:24.677953+00');
INSERT INTO "storagenode_bandwidth_rollups" ("storagenode_id", "interval_start", "interval_seconds", "action", "allocated", "settled") VALUES (E'\\006\\223\\250R\\221\\005\\365\\377v>0\\266\\365\\216\\255?\\347\\244\\371?2\\264\\262\\230\\007<\\001\\262\\263\\237\\247n', '2019-03-06 08:00:00.000000' AT TIME ZONE current_setting('TIMEZONE'), 3600, 1, 1024, 2024);
INSERT INTO "storagenode_storage_tallies" VALUES (E'\\3510\\323\\225"~\\036<\\342\\330m\\0253Jhr\\246\\233K\\246#\\2303\\351\\256\\275j\\212UM\\362\\207', '2019-02-14 08:16:57.812849+00', 1000);
INSERT INTO "bucket_bandwidth_rollups" ("bucket_name", "project_id", "interval_start", "interval_seconds", "action", "inline", "allocated", "settled") VALUES (E'testbucket'::bytea, E'\\363\\342\\363\\371>+F\\256\\263\\300\\273|\\342N\\347\\014'::bytea,'2019-03-06 08:00:00.000000' AT TIME ZONE current_setting('TIMEZONE'), 3600, 1, 1024, 2024, 3024);
INSERT INTO "bucket_storage_tallies" ("bucket_name", "project_id", "interval_start", "inline", "remote", "remote_segments_count", "inline_segments_count", "object_count", "metadata_size") VALUES (E'testbucket'::bytea, E'\\363\\342\\363\\371>+F\\256\\263\\300\\273|\\342N\\347\\014'::bytea,'2019-03-06 08:00:00.000000' AT TIME ZONE current_setting('TIMEZONE'), 4024, 5024, 0, 0, 0, 0);
INSERT INTO "bucket_bandwidth_rollups" ("bucket_name", "project_id", "interval_start", "interval_seconds", "action", "inline", "allocated", "settled") VALUES (E'testbucket'::bytea, E'\\170\\160\\157\\370\\274\\366\\113\\364\\272\\235\\301\\243\\321\\102\\321\\136'::bytea,'2019-03-06 08:00:00.000000' AT TIME ZONE current_setting('TIMEZONE'), 3600, 1, 1024, 2024, 3024);
INSERT INTO "bucket_storage_tallies" ("bucket_name", "project_id", "interval_start", "inline", "remote", "remote_segments_count", "inline_segments_count", "object_count", "metadata_size") VALUES (E'testbucket'::bytea, E'\\170\\160\\157\\370\\274\\366\\113\\364\\272\\235\\301\\243\\321\\102\\321\\136'::bytea,'2019-03-06 08:00:00.000000' AT TIME ZONE current_setting('TIMEZONE'), 4024, 5024, 0, 0, 0, 0);
INSERT INTO "reset_password_tokens" ("secret", "owner_id", "created_at") VALUES (E'\\070\\127\\144\\013\\332\\344\\102\\376\\306\\056\\303\\130\\106\\132\\321\\276\\321\\274\\170\\264\\054\\333\\221\\116\\154\\221\\335\\070\\220\\146\\344\\216'::bytea, E'\\363\\311\\033w\\222\\303Ci\\265\\343U\\303\\312\\204",'::bytea, '2019-05-08 08:28:24.677953+00');
INSERT INTO "api_keys" ("id", "project_id", "head", "name", "secret", "partner_id", "created_at") VALUES (E'\\334/\\302;\\225\\355O\\323\\276f\\247\\354/6\\241\\033'::bytea, E'\\022\\217/\\014\\376!K\\023\\276\\031\\311}m\\236\\205\\300'::bytea, E'\\111\\142\\147\\304\\132\\375\\070\\163\\270\\160\\251\\370\\126\\063\\351\\037\\257\\071\\143\\375\\351\\320\\253\\232\\220\\260\\075\\173\\306\\307\\115\\136'::bytea, 'key 2', E'\\254\\011\\315\\333\\273\\365\\001\\071\\024\\154\\253\\332\\301\\216\\361\\074\\221\\367\\251\\231\\274\\333\\300\\367\\001\\272\\327\\111\\315\\123\\042\\016'::bytea, NULL, '2019-02-14 08:28:24.267934+00');
INSERT INTO "value_attributions" ("project_id", "bucket_name", "partner_id", "last_updated") VALUES (E'\\363\\311\\033w\\222\\303Ci\\265\\343U\\303\\312\\204",'::bytea, E''::bytea, E'\\363\\342\\363\\371>+F\\256\\263\\300\\273|\\342N\\347\\014'::bytea,'2019-02-14 08:07:31.028103+00');
INSERT INTO "user_credits" ("id", "user_id", "offer_id", "referred_by", "credits_earned_in_cents", "credits_used_in_cents", "type", "expires_at", "created_at") VALUES (1, E'\\363\\311\\033w\\222\\303Ci\\265\\343U\\303\\312\\204",'::bytea, 1, E'\\363\\311\\033w\\222\\303Ci\\265\\343U\\303\\312\\204",'::bytea, 200, 0, 'invalid', '2019-10-01 08:28:24.267934+00', '2019-06-01 08:28:24.267934+00');
INSERT INTO "bucket_metainfos" ("id", "project_id", "name", "partner_id", "created_at", "path_cipher", "default_segment_size", "default_encryption_cipher_suite", "default_encryption_block_size", "default_redundancy_algorithm", "default_redundancy_share_size", "default_redundancy_required_shares", "default_redundancy_repair_shares", "default_redundancy_optimal_shares", "default_redundancy_total_shares") VALUES (E'\\334/\\302;\\225\\355O\\323\\276f\\247\\354/6\\241\\033'::bytea, E'\\022\\217/\\014\\376!K\\023\\276\\031\\311}m\\236\\205\\300'::bytea, E'testbucketuniquename'::bytea, NULL, '2019-06-14 08:28:24.677953+00', 1, 65536, 1, 8192, 1, 4096, 4, 6, 8, 10);
INSERT INTO "pending_audits" ("node_id", "piece_id", "stripe_index", "share_size", "expected_share_hash", "reverify_count", "path") VALUES (E'\\153\\313\\233\\074\\327\\177\\136\\070\\346\\001'::bytea, E'\\363\\311\\033w\\222\\303Ci\\265\\343U\\303\\312\\204",'::bytea, 5, 1024, E'\\070\\127\\144\\013\\332\\344\\102\\376\\306\\056\\303\\130\\106\\132\\321\\276\\321\\274\\170\\264\\054\\333\\221\\116\\154\\221\\335\\070\\220\\146\\344\\216'::bytea, 1, 'not null');
INSERT INTO "peer_identities" VALUES (E'\\334/\\302;\\225\\355O\\323\\276f\\247\\354/6\\241\\033'::bytea, E'\\363\\342\\363\\371>+F\\256\\263\\300\\273|\\342N\\347\\014'::bytea, E'\\363\\311\\033w\\222\\303Ci\\265\\343U\\303\\312\\204",'::bytea, '2019-02-14 08:07:31.335028+00');
INSERT INTO "graceful_exit_progress" ("node_id", "bytes_transferred", "pieces_transferred", "pieces_failed", "updated_at") VALUES (E'\\363\\342\\363\\371>+F\\256\\263\\300\\273|\\342N\\347\\016', 1000000000000000, 0, 0, '2019-09-12 10:07:31.028103+00');
INSERT INTO "graceful_exit_transfer_queue" ("node_id", "path", "piece_num", "durability_ratio", "queued_at", "requested_at", "last_failed_at", "last_failed_code", "failed_count", "finished_at", "order_limit_send_count") VALUES (E'\\363\\342\\363\\371>+F\\256\\263\\300\\273|\\342N\\347\\016', E'f8419768-5baa-4901-b3ba-62808013ec45/s0/test3/\\240\\243\\223n \\334~b}\\2624)\\250m\\201\\202\\235\\276\\361\\3304\\323\\352\\311\\361\\353;\\326\\311', 8, 1.0, '2019-09-12 10:07:31.028103+00', '2019-09-12 10:07:32.028103+00', null, null, 0, '2019-09-12 10:07:33.028103+00', 0);
INSERT INTO "graceful_exit_transfer_queue" ("node_id", "path", "piece_num", "durability_ratio", "queued_at", "requested_at", "last_failed_at", "last_failed_code", "failed_count", "finished_at", "order_limit_send_count") VALUES (E'\\363\\342\\363\\371>+F\\256\\263\\300\\273|\\342N\\347\\016', E'f8419768-5baa-4901-b3ba-62808013ec45/s0/test3/\\240\\243\\223n \\334~b}\\2624)\\250m\\201\\202\\235\\276\\361\\3304\\323\\352\\311\\361\\353;\\326\\312', 8, 1.0, '2019-09-12 10:07:31.028103+00', '2019-09-12 10:07:32.028103+00', null, null, 0, '2019-09-12 10:07:33.028103+00', 0);
INSERT INTO "stripe_customers" ("user_id", "customer_id", "created_at") VALUES (E'\\363\\311\\033w\\222\\303Ci\\265\\343U\\303\\312\\204",'::bytea, 'stripe_id', '2019-06-01 08:28:24.267934+00');
INSERT INTO "graceful_exit_transfer_queue" ("node_id", "path", "piece_num", "durability_ratio", "queued_at", "requested_at", "last_failed_at", "last_failed_code", "failed_count", "finished_at", "order_limit_send_count") VALUES (E'\\363\\342\\363\\371>+F\\256\\263\\300\\273|\\342N\\347\\016', E'f8419768-5baa-4901-b3ba-62808013ec45/s0/test3/\\240\\243\\223n \\334~b}\\2624)\\250m\\201\\202\\235\\276\\361\\3304\\323\\352\\311\\361\\353;\\326\\311', 9, 1.0, '2019-09-12 10:07:31.028103+00', '2019-09-12 10:07:32.028103+00', null, null, 0, '2019-09-12 10:07:33.028103+00', 0);
INSERT INTO "graceful_exit_transfer_queue" ("node_id", "path", "piece_num", "durability_ratio", "queued_at", "requested_at", "last_failed_at", "last_failed_code", "failed_count", "finished_at", "order_limit_send_count") VALUES (E'\\363\\342\\363\\371>+F\\256\\263\\300\\273|\\342N\\347\\016', E'f8419768-5baa-4901-b3ba-62808013ec45/s0/test3/\\240\\243\\223n \\334~b}\\2624)\\250m\\201\\202\\235\\276\\361\\3304\\323\\352\\311\\361\\353;\\326\\312', 9, 1.0, '2019-09-12 10:07:31.028103+00', '2019-09-12 10:07:32.028103+00', null, null, 0, '2019-09-12 10:07:33.028103+00', 0);
INSERT INTO "stripecoinpayments_invoice_project_records"("id", "project_id", "storage", "egress", "objects", "period_start", "period_end", "state", "created_at") VALUES (E'\\022\\217/\\014\\376!K\\023\\276\\031\\311}m\\236\\205\\300'::bytea, E'\\021\\217/\\014\\376!K\\023\\276\\031\\311}m\\236\\205\\300'::bytea, 0, 0, 0, '2019-06-01 08:28:24.267934+00', '2019-06-01 08:28:24.267934+00', 0, '2019-06-01 08:28:24.267934+00');
INSERT INTO "graceful_exit_transfer_queue" ("node_id", "path", "piece_num", "root_piece_id", "durability_ratio", "queued_at", "requested_at", "last_failed_at", "last_failed_code", "failed_count", "finished_at", "order_limit_send_count") VALUES (E'\\363\\342\\363\\371>+F\\256\\263\\300\\273|\\342N\\347\\016', E'f8419768-5baa-4901-b3ba-62808013ec45/s0/test3/\\240\\243\\223n \\334~b}\\2624)\\250m\\201\\202\\235\\276\\361\\3304\\323\\352\\311\\361\\353;\\326\\311', 10, E'\\363\\311\\033w\\222\\303Ci\\265\\343U\\303\\312\\204",'::bytea, 1.0, '2019-09-12 10:07:31.028103+00', '2019-09-12 10:07:32.028103+00', null, null, 0, '2019-09-12 10:07:33.028103+00', 0);
INSERT INTO "stripecoinpayments_tx_conversion_rates" ("tx_id", "rate", "created_at") VALUES ('tx_id', E'\\363\\311\\033w\\222\\303Ci,'::bytea, '2019-06-01 08:28:24.267934+00');
INSERT INTO "coinpayments_transactions" ("id", "user_id", "address", "amount", "received", "status", "key", "timeout", "created_at") VALUES ('tx_id', E'\\363\\311\\033w\\222\\303Ci\\265\\343U\\303\\312\\204",'::bytea, 'address', E'\\363\\311\\033w'::bytea, E'\\363\\311\\033w'::bytea, 1, 'key', 60, '2019-06-01 08:28:24.267934+00');
INSERT INTO "storagenode_bandwidth_rollups" ("storagenode_id", "interval_start", "interval_seconds", "action", "settled") VALUES (E'\\006\\223\\250R\\221\\005\\365\\377v>0\\266\\365\\216\\255?\\347\\244\\371?2\\264\\262\\230\\007<\\001\\262\\263\\237\\247n', '2020-01-11 08:00:00.000000' AT TIME ZONE current_setting('TIMEZONE'), 3600, 1, 2024);
INSERT INTO "coupons" ("id", "user_id", "amount", "description", "type", "status", "duration", "billing_periods", "created_at") VALUES (E'\\362\\342\\363\\371>+F\\256\\263\\300\\273|\\342N\\347\\014'::bytea, E'\\363\\311\\033w\\222\\303Ci\\265\\343U\\303\\312\\204",'::bytea, 50, 'description', 0, 0, 2, NULL, '2019-06-01 08:28:24.267934+00');
INSERT INTO "coupon_usages" ("coupon_id", "amount", "status", "period") VALUES (E'\\362\\342\\363\\371>+F\\256\\263\\300\\273|\\342N\\347\\014'::bytea, 22, 0, '2019-06-01 09:28:24.267934+00');
INSERT INTO "stripecoinpayments_apply_balance_intents" ("tx_id", "state", "created_at") VALUES ('tx_id', 0, '2019-06-01 08:28:24.267934+00');
INSERT INTO "projects"("id", "name", "description", "usage_limit", "bandwidth_limit", "max_buckets", "rate_limit", "partner_id", "owner_id", "created_at") VALUES (E'\\363\\342\\363\\371>+F\\256\\263\\300\\273|\\342N\\347\\347'::bytea, 'projName1', 'Test project 1', 5e11, 5e11, NULL, 2000000, NULL, E'\\363\\311\\033w\\222\\303Ci\\265\\343U\\303\\312\\204",'::bytea, '2020-01-15 08:28:24.636949+00');
INSERT INTO "injuredsegments" ("path", "data", "segment_health", "updated_at") VALUES ('0', '\x0a0130120100', 1.0, '2020-09-01 00:00:00.000000+00');
INSERT INTO "injuredsegments" ("path", "data", "segment_health", "updated_at") VALUES ('here''s/a/great/path', '\x0a136865726527732f612f67726561742f70617468120a0102030405060708090a', 1.0, '2020-09-01 00:00:00.000000+00');
INSERT INTO "injuredsegments" ("path", "data", "segment_health", "updated_at") VALUES ('yet/another/cool/path', '\x0a157965742f616e6f746865722f636f6f6c2f70617468120a0102030405060708090a', 1.0, '2020-09-01 00:00:00.000000+00');
INSERT INTO "injuredsegments" ("path", "data", "segment_health", "updated_at") VALUES ('/this/is/a/new/path', '\x0a23736f2f6d616e792f69636f6e69632f70617468732f746f2f63686f6f73652f66726f6d120a0102030405060708090a', 1.0, '2020-09-01 00:00:00.000000+00');
INSERT INTO "injuredsegments" ("path", "data", "segment_health", "updated_at") VALUES ('/some/path/1/23/4', '\x0a23736f2f6d618e792f69636f6e69632f70617468732f746f2f63686f6f73652f66726f6d120a0102030405060708090a', 0.2, '2020-09-01 00:00:00.000000+00');
INSERT INTO "project_bandwidth_rollups"("project_id", "interval_month", egress_allocated) VALUES (E'\\363\\342\\363\\371>+F\\256\\263\\300\\273|\\342N\\347\\347'::bytea, '2020-04-01', 10000);
INSERT INTO "projects"("id", "name", "description", "usage_limit", "bandwidth_limit", "max_buckets","rate_limit", "partner_id", "owner_id", "created_at") VALUES (E'\\363\\342\\363\\371>+F\\256\\263\\300\\273|\\342N\\347\\345'::bytea, 'egress101', 'High Bandwidth Project', 5e11, 5e11, NULL, 2000000, NULL, E'\\363\\311\\033w\\222\\303Ci\\265\\343U\\303\\312\\204",'::bytea, '2020-05-15 08:46:24.000000+00');
INSERT INTO "storagenode_paystubs"("period", "node_id", "created_at", "codes", "usage_at_rest", "usage_get", "usage_put", "usage_get_repair", "usage_put_repair", "usage_get_audit", "comp_at_rest", "comp_get", "comp_put", "comp_get_repair", "comp_put_repair", "comp_get_audit", "surge_percent", "held", "owed", "disposed", "paid", "distributed") VALUES ('2020-01', '\xf2a3b4c4dfdf7221310382fd5db5aa73e1d227d6df09734ec4e5305000000000', '2020-04-07T20:14:21.479141Z', '', 1327959864508416, 294054066688, 159031363328, 226751, 0, 836608, 2861984, 5881081, 0, 226751, 0, 8, 300, 0, 26909472, 0, 26909472, 0);
INSERT INTO "nodes"("id", "address", "last_net", "protocol", "type", "email", "wallet", "free_disk", "piece_count", "major", "minor", "patch", "hash", "timestamp", "release","latency_90", "audit_success_count", "total_audit_count", "uptime_success_count", "total_uptime_count", "created_at", "updated_at", "last_contact_success", "last_contact_failure", "contained", "disqualified", "suspended", "audit_reputation_alpha", "audit_reputation_beta", "unknown_audit_reputation_alpha", "unknown_audit_reputation_beta", "exit_success", "unknown_audit_suspended", "offline_suspended", "under_review") VALUES (E'\\153\\313\\233\\074\\327\\255\\136\\070\\346\\001', '127.0.0.1:55516', '', 0, 4, '', '', -1, 0, 0, 1, 0, '', 'epoch', false, 0, 0, 5, 0, 5, '2019-02-14 08:07:31.028103+00', '2019-02-14 08:07:31.108963+00', 'epoch', 'epoch', false, NULL, NULL, 50, 0, 1, 0, false, '2019-02-14 08:07:31.108963+00', '2019-02-14 08:07:31.108963+00', '2019-02-14 08:07:31.108963+00');
INSERT INTO "audit_histories" ("node_id", "history") VALUES (E'\\153\\313\\233\\074\\327\\177\\136\\070\\346\\001', '\x0a23736f2f6d616e792f69636f6e69632f70617468732f746f2f63686f6f73652f66726f6d120a0102030405060708090a');
INSERT INTO "node_api_versions"("id", "api_version", "created_at", "updated_at") VALUES (E'\\153\\313\\233\\074\\327\\177\\136\\070\\346\\001', 1, '2019-02-14 08:07:31.028103+00', '2019-02-14 08:07:31.108963+00');
INSERT INTO "node_api_versions"("id", "api_version", "created_at", "updated_at") VALUES (E'\\006\\223\\250R\\221\\005\\365\\377v>0\\266\\365\\216\\255?\\347\\244\\371?2\\264\\262\\230\\007<\\001\\262\\263\\237\\247n', 2, '2019-02-14 08:07:31.028103+00', '2019-02-14 08:07:31.108963+00');
INSERT INTO "node_api_versions"("id", "api_version", "created_at", "updated_at") VALUES (E'\\363\\342\\363\\371>+F\\256\\263\\300\\273|\\342N\\347\\014', 3, '2019-02-14 08:07:31.028103+00', '2019-02-14 08:07:31.108963+00');
INSERT INTO "projects"("id", "name", "description", "usage_limit", "bandwidth_limit", "rate_limit", "partner_id", "owner_id", "created_at", "max_buckets") VALUES (E'300\\273|\\342N\\347\\347\\363\\342\\363\\371>+F\\256\\263'::bytea, 'egress102', 'High Bandwidth Project 2', 5e11, 5e11, 2000000, NULL, E'265\\343U\\303\\312\\312\\363\\311\\033w\\222\\303Ci",'::bytea, '2020-05-15 08:46:24.000000+00', 1000);
INSERT INTO "projects"("id", "name", "description", "usage_limit", "bandwidth_limit", "rate_limit", "partner_id", "owner_id", "created_at", "max_buckets") VALUES (E'300\\273|\\342N\\347\\347\\363\\342\\363\\371>+F\\255\\244'::bytea, 'egress103', 'High Bandwidth Project 3', 5e11, 5e11, 2000000, NULL, E'265\\343U\\303\\312\\312\\363\\311\\033w\\222\\303Ci",'::bytea, '2020-05-15 08:46:24.000000+00', 1000);
INSERT INTO "projects"("id", "name", "description", "usage_limit", "bandwidth_limit", "rate_limit", "partner_id", "owner_id", "created_at", "max_buckets") VALUES (E'300\\273|\\342N\\347\\347\\363\\342\\363\\371>+F\\253\\231'::bytea, 'Limit Test 1', 'This project is above the default', 50000000001, 50000000001, 2000000, NULL, E'265\\343U\\303\\312\\312\\363\\311\\033w\\222\\303Ci",'::bytea, '2020-10-14 10:10:10.000000+00', 101);
INSERT INTO "projects"("id", "name", "description", "usage_limit", "bandwidth_limit", "rate_limit", "partner_id", "owner_id", "created_at", "max_buckets") VALUES (E'300\\273|\\342N\\347\\347\\363\\342\\363\\371>+F\\252\\230'::bytea, 'Limit Test 2', 'This project is below the default', 5e11, 5e11, 2000000, NULL, E'265\\343U\\303\\312\\312\\363\\311\\033w\\222\\303Ci",'::bytea, '2020-10-14 10:10:11.000000+00', NULL);
INSERT INTO "storagenode_bandwidth_rollups_phase2" ("storagenode_id", "interval_start", "interval_seconds", "action", "allocated", "settled") VALUES (E'\\006\\223\\250R\\221\\005\\365\\377v>0\\266\\365\\216\\255?\\347\\244\\371?2\\264\\262\\230\\007<\\001\\262\\263\\237\\247n', '2019-03-06 08:00:00.000000' AT TIME ZONE current_setting('TIMEZONE'), 3600, 1, 1024, 2024);
INSERT INTO "users"("id", "full_name", "short_name", "email", "normalized_email", "password_hash", "status", "partner_id", "created_at", "position", "company_name", "working_on", "company_size", "is_professional", "project_limit") VALUES (E'\\363\\311\\033w\\222\\303Ci\\265\\343U\\303\\312\\205\\311",'::bytea, 'Thierry', 'Berg', '2email2@mail.test', '2EMAIL2@MAIL.TEST', E'some_readable_hash'::bytea, 2, NULL, '2020-05-16 10:28:24.614594+00', 'engineer', 'storj', 'data storage', 55, true, 10);
INSERT INTO "storagenode_bandwidth_rollup_archives" ("storagenode_id", "interval_start", "interval_seconds", "action", "allocated", "settled") VALUES (E'\\006\\223\\250R\\221\\005\\365\\377v>0\\266\\365\\216\\255?\\347\\244\\371?2\\264\\262\\230\\007<\\001\\262\\263\\237\\247n', '2019-03-06 08:00:00.000000' AT TIME ZONE current_setting('TIMEZONE'), 3600, 1, 1024, 2024);
INSERT INTO "bucket_bandwidth_rollup_archives" ("bucket_name", "project_id", "interval_start", "interval_seconds", "action", "inline", "allocated", "settled") VALUES (E'testbucket'::bytea, E'\\170\\160\\157\\370\\274\\366\\113\\364\\272\\235\\301\\243\\321\\102\\321\\136'::bytea,'2019-03-06 08:00:00.000000' AT TIME ZONE current_setting('TIMEZONE'), 3600, 1, 1024, 2024, 3024);
INSERT INTO "storagenode_paystubs"("period", "node_id", "created_at", "codes", "usage_at_rest", "usage_get", "usage_put", "usage_get_repair", "usage_put_repair", "usage_get_audit", "comp_at_rest", "comp_get", "comp_put", "comp_get_repair", "comp_put_repair", "comp_get_audit", "surge_percent", "held", "owed", "disposed", "paid", "distributed") VALUES ('2020-12', '\x1111111111111111111111111111111111111111111111111111111111111111', '2020-04-07T20:14:21.479141Z', '', 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 117);
INSERT INTO "storagenode_payments"("id", "created_at", "period", "node_id", "amount") VALUES (1, '2020-04-07T20:14:21.479141Z', '2020-12', '\x1111111111111111111111111111111111111111111111111111111111111111', 117);
INSERT INTO "users"("id", "full_name", "short_name", "email", "normalized_email", "password_hash", "status", "partner_id", "created_at", "position", "company_name", "working_on", "company_size", "is_professional", "employee_count", "project_limit") VALUES (E'\\363\\311\\033w\\222\\303Ci\\265\\343U\\303\\312\\205\\312",'::bytea, 'Campbell', 'Wright', '4email4@mail.test', '4EMAIL4@MAIL.TEST', E'some_readable_hash'::bytea, 2, NULL, '2020-07-17 10:28:24.614594+00', 'engineer', 'storj', 'data storage', 82, true, '1-50', 10);
INSERT INTO "coupon_codes" ("id", "name", "amount", "description", "type", "billing_periods", "created_at") VALUES (E'\\362\\342\\363\\371>+F\\256\\263\\300\\273|\\342N\\347\\014'::bytea, 'STORJ50', 50, '$50 for your first 5 months', 0, NULL, '2019-06-01 08:28:24.267934+00');
-- NEW DATA --
INSERT INTO "coupon_codes" ("id", "name", "amount", "description", "type", "billing_periods", "created_at") VALUES (E'\\362\\342\\363\\371>+F\\256\\263\\300\\273|\\342N\\347\\015'::bytea, 'STORJ75', 75, '$75 for your first 5 months', 0, 2, '2019-06-01 08:28:24.267934+00');
INSERT INTO "coupons" ("id", "user_id", "amount", "description", "type", "status", "duration", "billing_periods", "created_at") VALUES (E'\\362\\342\\363\\371>+F\\256\\263\\300\\273|\\342N\\347\\015'::bytea, E'\\363\\311\\033w\\222\\303Ci\\265\\343U\\303\\312\\204",'::bytea, 50, 'description', 0, 0, 2, 2, '2019-06-01 08:28:24.267934+00');

View File

@ -512,7 +512,7 @@ identity.key-path: /root/.local/share/storj/identity/satellite/identity.key
# payments.bonus-rate: 10
# duration a new coupon is valid, in billing periods (months)
# payments.coupon-duration: 2
# payments.coupon-duration: "2"
# project limit to which increase to after applying the coupon, 0 B means not changing it from the default
# payments.coupon-project-limit: 0 B
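The quoted default above suggests coupon-duration now round-trips through a string-backed config value, so that leaving it unset can mean the promotional coupon never expires. The sketch below shows one way such a flag value can behave; the type and its methods are assumptions made for illustration, not the satellite's actual config type.

package main

import (
	"flag"
	"fmt"
	"strconv"
)

// couponDuration is a hypothetical string-backed config value: an empty
// string means the promotional coupon never expires, otherwise it holds
// the number of billing periods the coupon stays valid.
type couponDuration struct {
	set     bool
	periods int
}

func (d *couponDuration) String() string {
	if !d.set {
		return ""
	}
	return strconv.Itoa(d.periods)
}

func (d *couponDuration) Set(s string) error {
	if s == "" {
		*d = couponDuration{}
		return nil
	}
	n, err := strconv.Atoi(s)
	if err != nil {
		return fmt.Errorf("coupon-duration: %w", err)
	}
	*d = couponDuration{set: true, periods: n}
	return nil
}

// Periods returns nil when no duration is configured, matching the pointer
// convention used for the nullable billing_periods column.
func (d *couponDuration) Periods() *int {
	if !d.set {
		return nil
	}
	p := d.periods
	return &p
}

func main() {
	var dur couponDuration
	flag.Var(&dur, "coupon-duration", "duration a new coupon is valid in billing periods; empty means it never expires")
	flag.Parse()
	fmt.Printf("configured=%q expires=%v\n", dur.String(), dur.Periods() != nil)
}

Running this sketch with -coupon-duration 2 yields a non-nil period count, while omitting the flag leaves the value unset and the coupon unlimited.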