satellite/payments: coupons, coupon usage, invoice generation with the pricing model applied

Change-Id: Ic5d5a2fc116388647efe46896cfccc2038c77537
This commit is contained in:
crawter 2020-01-07 12:41:19 +02:00 committed by Yehor Butko
parent ee87846f0b
commit a57ce18f58
30 changed files with 2092 additions and 341 deletions

View File

@ -95,6 +95,16 @@ var (
Short: "Creates stripe invoice line items for not consumed project records",
RunE: createInvoiceItems,
}
prepareInvoiceCouponsCmd = &cobra.Command{
Use: "prepare-invoice-coupons",
Short: "Creates coupon usage for all satellite projects",
RunE: prepareInvoiceCoupons,
}
createInvoiceCouponsCmd = &cobra.Command{
Use: "create-invoice-coupons",
Short: "Creates stripe invoice line items for not consumed coupons",
RunE: createInvoiceCoupons,
}
createInvoicesCmd = &cobra.Command{
Use: "create-invoices",
Short: "Creates stripe invoices for all stripe customers known to satellite",
@ -447,6 +457,7 @@ func sortSegments(segments []*pb.IrreparableSegment) map[string][]*pb.Irreparabl
}
func prepareInvoiceRecords(cmd *cobra.Command, args []string) error {
ctx, _ := process.Ctx(cmd)
i, err := NewInspector(*Addr, *IdentityPath)
if err != nil {
return ErrInspectorDial.Wrap(err)
@ -459,7 +470,7 @@ func prepareInvoiceRecords(cmd *cobra.Command, args []string) error {
return ErrArgs.New("invalid period specified: %v", err)
}
_, err = i.paymentsClient.PrepareInvoiceRecords(context.Background(),
_, err = i.paymentsClient.PrepareInvoiceRecords(ctx,
&pb.PrepareInvoiceRecordsRequest{
Period: period,
},
@ -473,6 +484,7 @@ func prepareInvoiceRecords(cmd *cobra.Command, args []string) error {
}
func createInvoiceItems(cmd *cobra.Command, args []string) error {
ctx, _ := process.Ctx(cmd)
i, err := NewInspector(*Addr, *IdentityPath)
if err != nil {
return ErrInspectorDial.Wrap(err)
@ -480,7 +492,7 @@ func createInvoiceItems(cmd *cobra.Command, args []string) error {
defer func() { err = errs.Combine(err, i.Close()) }()
_, err = i.paymentsClient.ApplyInvoiceRecords(context.Background(), &pb.ApplyInvoiceRecordsRequest{})
_, err = i.paymentsClient.ApplyInvoiceRecords(ctx, &pb.ApplyInvoiceRecordsRequest{})
if err != nil {
return err
}
@ -489,6 +501,44 @@ func createInvoiceItems(cmd *cobra.Command, args []string) error {
return nil
}
func prepareInvoiceCoupons(cmd *cobra.Command, args []string) error {
ctx, _ := process.Ctx(cmd)
i, err := NewInspector(*Addr, *IdentityPath)
if err != nil {
return ErrInspectorDial.Wrap(err)
}
defer func() { err = errs.Combine(err, i.Close()) }()
_, err = i.paymentsClient.PrepareInvoiceCoupons(ctx,
&pb.PrepareInvoiceCouponsRequest{},
)
if err != nil {
return err
}
fmt.Println("successfully created invoice coupons")
return nil
}
func createInvoiceCoupons(cmd *cobra.Command, args []string) error {
ctx, _ := process.Ctx(cmd)
i, err := NewInspector(*Addr, *IdentityPath)
if err != nil {
return ErrInspectorDial.Wrap(err)
}
defer func() { err = errs.Combine(err, i.Close()) }()
_, err = i.paymentsClient.ApplyInvoiceCoupons(ctx, &pb.ApplyInvoiceCouponsRequest{})
if err != nil {
return err
}
fmt.Println("successfully created invoice coupon line items")
return nil
}
func createInvoices(cmd *cobra.Command, args []string) error {
i, err := NewInspector(*Addr, *IdentityPath)
if err != nil {
@ -542,6 +592,8 @@ func init() {
paymentsCmd.AddCommand(prepareInvoiceRecordsCmd)
paymentsCmd.AddCommand(createInvoiceItemsCmd)
paymentsCmd.AddCommand(prepareInvoiceCouponsCmd)
paymentsCmd.AddCommand(createInvoiceCouponsCmd)
paymentsCmd.AddCommand(createInvoicesCmd)
objectHealthCmd.Flags().StringVar(&CSVPath, "csv-path", "stdout", "csv path where command output is written")
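The new commands are registered after the project-record steps, mirroring the order they would typically be run for a billing period: prepare-invoice-records, create-invoice-items, prepare-invoice-coupons, create-invoice-coupons and finally create-invoices. As a hedged sketch (the helper name is hypothetical and not part of this commit), the two coupon steps could be driven together with the same Inspector plumbing used above:

func runInvoiceCouponSteps(cmd *cobra.Command) (err error) {
    ctx, _ := process.Ctx(cmd)
    i, err := NewInspector(*Addr, *IdentityPath)
    if err != nil {
        return ErrInspectorDial.Wrap(err)
    }
    defer func() { err = errs.Combine(err, i.Close()) }()
    // prepare-invoice-coupons: record coupon usage for the billing period.
    if _, err = i.paymentsClient.PrepareInvoiceCoupons(ctx, &pb.PrepareInvoiceCouponsRequest{}); err != nil {
        return err
    }
    // create-invoice-coupons: turn that usage into (negative) stripe invoice line items.
    _, err = i.paymentsClient.ApplyInvoiceCoupons(ctx, &pb.ApplyInvoiceCouponsRequest{})
    return err
}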

go.sum
View File

@ -599,4 +599,4 @@ storj.io/drpc v0.0.7-0.20191115031725-2171c57838d2/go.mod h1:/ascUDbzNAv0A3Jj7wU
storj.io/uplink v0.0.0-20200108132132-c2c5e0d46c1a h1:w/588H+U5IfTXCHA2GTFVLzpUbworS0DtoB4sR9h/8M=
storj.io/uplink v0.0.0-20200108132132-c2c5e0d46c1a/go.mod h1:3498FK1ewiOxrVTbPwGJmE/kwIWA3q9ULtAU/WAreys=
storj.io/uplink v0.0.0-20200109100422-69086b6ee4a8 h1:WG1rX2uc815ZkUz1xrebuZA+JWFBF9Y2n64gvVKZFko=
storj.io/uplink v0.0.0-20200109100422-69086b6ee4a8/go.mod h1:3498FK1ewiOxrVTbPwGJmE/kwIWA3q9ULtAU/WAreys=

View File

@ -173,6 +173,13 @@ func (usage *Service) GetProjectBandwidthLimit(ctx context.Context, projectID uu
return limit, nil
}
// UpdateProjectLimits sets new values for the project's bandwidth and storage limits.
func (usage *Service) UpdateProjectLimits(ctx context.Context, projectID uuid.UUID, limit memory.Size) (err error) {
defer mon.Task()(&ctx, projectID)(&err)
return ErrProjectUsage.Wrap(usage.projectAccountingDB.UpdateProjectUsageLimit(ctx, projectID, limit))
}
// AddProjectStorageUsage lets the live accounting know that the given
// project has just added inlineSpaceUsed bytes of inline space usage
// and remoteSpaceUsed bytes of remote space usage.

View File

@ -30,4 +30,6 @@ const (
Transaction BillingHistoryItemType = 1
// Charge is a credit card charge billing item.
Charge BillingHistoryItemType = 2
// Coupon is an entity that adds funds to the account's balance for a fixed number of billing periods.
Coupon BillingHistoryItemType = 3
)

View File

@ -468,7 +468,7 @@ func (server *Server) projectUsageLimitsHandler(w http.ResponseWriter, r *http.R
jsonError.Error = err.Error()
if err := json.NewEncoder(w).Encode(err); err != nil {
if err := json.NewEncoder(w).Encode(jsonError); err != nil {
server.log.Error("error encoding project usage limits error", zap.Error(err))
}
}

View File

@ -17,6 +17,7 @@ import (
"gopkg.in/spacemonkeygo/monkit.v2"
"storj.io/common/macaroon"
"storj.io/common/memory"
"storj.io/storj/pkg/auth"
"storj.io/storj/satellite/accounting"
"storj.io/storj/satellite/console/consoleauth"
@ -149,10 +150,20 @@ func (payments PaymentsService) AddCreditCard(ctx context.Context, creditCardTok
auth, err := GetAuth(ctx)
if err != nil {
return err
return Error.Wrap(err)
}
return payments.service.accounts.CreditCards().Add(ctx, auth.User.ID, creditCardToken)
err = payments.service.accounts.CreditCards().Add(ctx, auth.User.ID, creditCardToken)
if err != nil {
return Error.Wrap(err)
}
err = payments.AddPromotionCoupon(ctx, auth.User.ID)
if err != nil {
payments.service.log.Error(fmt.Sprintf("cannot add promotional coupon to user %s", auth.User.Email), zap.Error(err))
}
return nil
}
// MakeCreditCardDefault makes a credit card default payment method.
@ -267,6 +278,26 @@ func (payments PaymentsService) BillingHistory(ctx context.Context) (billingHist
})
}
coupons, err := payments.service.accounts.Coupons(ctx, auth.User.ID)
if err != nil {
return nil, err
}
for _, coupon := range coupons {
billingHistory = append(billingHistory,
&BillingHistoryItem{
ID: coupon.ID.String(),
// TODO: update the description in the future, when there are more coupon types.
Description: fmt.Sprintf("Promotional credits (limited time - %d billing periods)", coupon.Duration),
Amount: coupon.Amount,
Status: "Added to balance",
Link: "",
Start: coupon.Created,
Type: Coupon,
},
)
}
sort.SliceStable(billingHistory,
func(i, j int) bool {
return billingHistory[i].Start.After(billingHistory[j].Start)
@ -289,6 +320,40 @@ func (payments PaymentsService) TokenDeposit(ctx context.Context, amount int64)
return tx, errs.Wrap(err)
}
// AddPromotionCoupon creates new coupon for specified user.
func (payments PaymentsService) AddPromotionCoupon(ctx context.Context, userID uuid.UUID) (err error) {
defer mon.Task()(&ctx)(&err)
projects, err := payments.service.store.Projects().GetByUserID(ctx, userID)
if err != nil {
return Error.Wrap(err)
}
creditCards, err := payments.service.accounts.CreditCards().List(ctx, userID)
if err != nil {
return Error.Wrap(err)
}
if len(creditCards) == 0 {
return Error.Wrap(errs.New("user don't have credit cards"))
}
coupons, err := payments.service.accounts.Coupons(ctx, userID)
if err != nil {
return Error.Wrap(err)
}
if len(coupons) > 0 {
return Error.Wrap(errs.New("user already have a coupon"))
}
err = payments.service.accounts.AddCoupon(ctx, userID, projects[0].ID, 50, 2, "promotional coupon", 0)
if err != nil {
return Error.Wrap(err)
}
// TODO: delete coupon if limits can't be updated?
return Error.Wrap(payments.service.projectUsage.UpdateProjectLimits(ctx, projects[0].ID, memory.TB))
}
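The two guard clauses above encode the promotion eligibility rule. A tiny illustrative helper (not part of this commit; it assumes the payments.CreditCard and payments.Coupon types already imported by this package):

// promoEligible sketches the rule enforced by AddPromotionCoupon: the user must
// already have a credit card and must not have received a coupon before.
func promoEligible(creditCards []payments.CreditCard, coupons []payments.Coupon) bool {
    return len(creditCards) > 0 && len(coupons) == 0
}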
// CreateUser gets password hash value and creates new inactive User
func (s *Service) CreateUser(ctx context.Context, user CreateUser, tokenSecret RegistrationSecret, refUserID string) (u *User, err error) {
defer mon.Task()(&ctx)(&err)
@ -800,6 +865,11 @@ func (s *Service) CreateProject(ctx context.Context, projectInfo ProjectInfo) (p
return nil, err
}
err = s.Payments().AddPromotionCoupon(ctx, auth.User.ID)
if err != nil {
s.log.Error(fmt.Sprintf("cannot add promotional coupon to user %s", auth.User.Email), zap.Error(err))
}
return p, nil
}

View File

@ -335,8 +335,6 @@ func New(log *zap.Logger, full *identity.FullIdentity, db DB, pointerDB metainfo
service,
pc.StripeCoinPayments.TransactionUpdateInterval,
pc.StripeCoinPayments.AccountBalanceUpdateInterval,
// TODO: uncomment when coupons will be finished.
//pc.StripeCoinPayments.CouponUsageCycleInterval,
)
}
}

View File

@ -5,7 +5,6 @@ package payments
import (
"context"
"github.com/skyrings/skyring-common/tools/uuid"
"github.com/zeebo/errs"
)
@ -22,6 +21,9 @@ type Accounts interface {
// Balance returns an integer amount in cents that represents the current balance of payment account.
Balance(ctx context.Context, userID uuid.UUID) (int64, error)
// AddCoupon creates new coupon for specified user and project.
AddCoupon(ctx context.Context, userID, projectID uuid.UUID, amount int64, duration int, description string, couponType CouponType) (err error)
// ProjectCharges returns how much money current user will be charged for each project.
ProjectCharges(ctx context.Context, userID uuid.UUID) ([]ProjectCharge, error)

View File

@ -13,16 +13,37 @@ import (
// Coupon is attached to the project.
// At the end of the period, the entire remaining coupon amount will be returned from the account balance.
type Coupon struct {
ID uuid.UUID `json:"id"`
UserID uuid.UUID `json:"userId"`
ProjectID uuid.UUID `json:"projectId"`
Amount int64 `json:"amount"` // Amount is stored in cents.
Duration time.Duration `json:"duration"` // Duration is stored in days.
Description string `json:"description"`
Status CouponStatus `json:"status"`
Created time.Time `json:"created"`
ID uuid.UUID `json:"id"`
UserID uuid.UUID `json:"userId"`
ProjectID uuid.UUID `json:"projectId"`
Amount int64 `json:"amount"` // Amount is stored in cents.
Duration int `json:"duration"` // Duration is stored in the number of billing periods.
Description string `json:"description"`
Type CouponType `json:"type"`
Status CouponStatus `json:"status"`
Created time.Time `json:"created"`
}
// IsExpired checks whether the coupon has passed its rollup period.
func (coupon *Coupon) IsExpired() bool {
expirationDate := time.Date(coupon.Created.Year(), coupon.Created.Month(), 0, 0, 0, 0, 0, coupon.Created.Location())
expirationDate = expirationDate.AddDate(0, coupon.Duration, 0)
now := time.Now().UTC()
now = time.Date(now.Year(), now.Month(), 0, 0, 0, 0, 0, coupon.Created.Location())
return expirationDate.Before(now)
}
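A quick illustration of the rollup-period logic (illustrative only, not part of this commit; assumes fmt and time are imported). Note that time.Date with day 0 normalizes to the last day of the previous month, so both sides of the comparison are anchored to month boundaries:

coupon := Coupon{Created: time.Now().UTC(), Duration: 2}
fmt.Println(coupon.IsExpired()) // false: the expiration boundary lies two months past the creation month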
// CouponType indicates the type of the coupon.
type CouponType int
const (
// CouponTypePromotional defines that this coupon is a promotional coupon.
// A promotional coupon is added only once, after the user adds a payment method or deposits $50 in STORJ tokens.
CouponTypePromotional CouponType = 0
)
// CouponStatus indicates the state of the coupon.
type CouponStatus int

View File

@ -75,6 +75,13 @@ func (accounts *accounts) Balance(ctx context.Context, userID uuid.UUID) (_ int6
return 0, nil
}
// AddCoupon creates new coupon for specified user and project.
func (accounts *accounts) AddCoupon(ctx context.Context, userID, projectID uuid.UUID, amount int64, duration int, description string, couponType payments.CouponType) (err error) {
defer mon.Task()(&ctx, userID, projectID, amount, duration, couponType)(&err)
return nil
}
// ProjectCharges returns how much money current user will be charged for each project.
func (accounts *accounts) ProjectCharges(ctx context.Context, userID uuid.UUID) (charges []payments.ProjectCharge, err error) {
defer mon.Task()(&ctx, userID)(&err)

View File

@ -71,17 +71,39 @@ func (accounts *accounts) Balance(ctx context.Context, userID uuid.UUID) (_ int6
}
// add the remaining amount of all active coupons to the balance.
coupons, err := accounts.service.db.Coupons().ListByUserID(ctx, userID)
coupons, err := accounts.service.db.Coupons().ListByUserIDAndStatus(ctx, userID, payments.CouponActive)
if err != nil {
return 0, Error.Wrap(err)
}
var couponAmount int64 = 0
var couponsAmount int64 = 0
for _, coupon := range coupons {
couponAmount += coupon.Amount
alreadyUsed, err := accounts.service.db.Coupons().TotalUsage(ctx, coupon.ID)
if err != nil {
return 0, Error.Wrap(err)
}
couponsAmount += coupon.Amount - alreadyUsed
}
return c.Balance + couponAmount, nil
return -c.Balance + couponsAmount, nil
}
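A worked example of the balance arithmetic above, with made-up values in cents (illustrative only): the stripe customer balance is negated (in stripe a negative customer balance represents credit) and the unspent remainder of each active coupon is added.

stripeBalance := int64(-500)                          // 5 USD of credit held on the stripe customer
couponAmount, alreadyUsed := int64(5000), int64(1200) // one active coupon, partially used
balance := -stripeBalance + (couponAmount - alreadyUsed)
// balance == 500 + 3800 == 4300 cents of spendable balance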
// AddCoupon attaches a coupon for payment account.
func (accounts *accounts) AddCoupon(ctx context.Context, userID, projectID uuid.UUID, amount int64, duration int, description string, couponType payments.CouponType) (err error) {
defer mon.Task()(&ctx, userID, amount, duration, description, couponType)(&err)
coupon := payments.Coupon{
UserID: userID,
Status: payments.CouponActive,
ProjectID: projectID,
Amount: amount,
Description: description,
Duration: duration,
Type: couponType,
}
return Error.Wrap(accounts.service.db.Coupons().Insert(ctx, coupon))
}
// ProjectCharges returns how much money current user will be charged for each project.

View File

@ -31,13 +31,11 @@ type Chore struct {
// NewChore creates new clearing loop chore.
// TODO: uncomment new interval when coupons will be finished.
func NewChore(log *zap.Logger, service *Service, txInterval, accBalanceInterval /* couponUsageInterval */ time.Duration) *Chore {
func NewChore(log *zap.Logger, service *Service, txInterval, accBalanceInterval time.Duration) *Chore {
return &Chore{
log: log,
service: service,
TransactionCycle: *sync2.NewCycle(txInterval),
// TODO: uncomment when coupons will be finished.
//CouponUsageCycle: *sync2.NewCycle(couponUsageInterval),
log: log,
service: service,
TransactionCycle: *sync2.NewCycle(txInterval),
AccountBalanceCycle: *sync2.NewCycle(accBalanceInterval),
}
}
@ -70,18 +68,6 @@ func (chore *Chore) Run(ctx context.Context) (err error) {
return nil
},
)
// TODO: uncomment when coupons will be finished.
//chore.CouponUsageCycle.Start(ctx, &group,
// func(ctx context.Context) error {
// chore.log.Info("running coupon usage cycle")
//
// if err := chore.service.updateCouponUsageLoop(ctx); err != nil {
// chore.log.Error("coupon usage cycle failed", zap.Error(ErrChore.Wrap(err)))
// }
//
// return nil
// },
//)
return ErrChore.Wrap(group.Wait())
}

View File

@ -20,25 +20,54 @@ type CouponsDB interface {
Insert(ctx context.Context, coupon payments.Coupon) error
// Update updates coupon in database.
Update(ctx context.Context, couponID uuid.UUID, status payments.CouponStatus) error
// Get returns coupon by ID.
Get(ctx context.Context, couponID uuid.UUID) (payments.Coupon, error)
// List returns all coupons with specified status.
List(ctx context.Context, status payments.CouponStatus) (_ []payments.Coupon, err error)
// List returns all coupons with specified status.
List(ctx context.Context, status payments.CouponStatus) ([]payments.Coupon, error)
// ListByUserID returns all coupons of specified user.
ListByUserID(ctx context.Context, userID uuid.UUID) ([]payments.Coupon, error)
// ListByUserIDAndStatus returns all coupons of specified user and status.
ListByUserIDAndStatus(ctx context.Context, userID uuid.UUID, status payments.CouponStatus) ([]payments.Coupon, error)
// ListByProjectID returns all active coupons for specified project.
ListByProjectID(ctx context.Context, projectID uuid.UUID) ([]payments.Coupon, error)
// ListPaged returns a paginated list of coupons with specified status.
ListPaged(ctx context.Context, offset int64, limit int, before time.Time, status payments.CouponStatus) (payments.CouponsPage, error)
// AddUsage creates new coupon usage record in database.
AddUsage(ctx context.Context, usage CouponUsage) error
// TotalUsage gets sum of all usage records for specified coupon.
TotalUsage(ctx context.Context, couponID uuid.UUID) (_ int64, err error)
TotalUsage(ctx context.Context, couponID uuid.UUID) (int64, error)
// GetLatest returns the period of the latest coupon charge.
GetLatest(ctx context.Context, couponID uuid.UUID) (time.Time, error)
// ListUnapplied returns coupon usage page with unapplied coupon usages.
ListUnapplied(ctx context.Context, offset int64, limit int, before time.Time) (CouponUsagePage, error)
// ApplyUsage applies coupon usage and updates its status.
ApplyUsage(ctx context.Context, couponID uuid.UUID, period time.Time) error
}
// CouponUsage stores amount of money that should be charged from coupon for some period.
// CouponUsage stores the amount of money that should be charged from the coupon for a billing period.
type CouponUsage struct {
ID uuid.UUID
CouponID uuid.UUID
Amount int64
End time.Time
Status CouponUsageStatus
Period time.Time
}
// CouponUsageStatus indicates the state of the coupon usage.
type CouponUsageStatus int
const (
// CouponUsageStatusUnapplied is a default coupon usage state.
CouponUsageStatusUnapplied CouponUsageStatus = 0
// CouponUsageStatusApplied status indicates that coupon usage was used.
CouponUsageStatusApplied CouponUsageStatus = 1
)
// CouponUsagePage holds coupon usages, indicates whether more data is available,
// and provides the next offset.
type CouponUsagePage struct {
Usages []CouponUsage
Next bool
NextOffset int64
}
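A minimal paging sketch over ListUnapplied using the Next/NextOffset fields of CouponUsagePage (illustrative only, written as if it lived in this package; the helper name is hypothetical):

// collectUnappliedUsages walks all unapplied coupon usages recorded before the
// given cutoff, limit records per page.
func collectUnappliedUsages(ctx context.Context, db CouponsDB, before time.Time, limit int) ([]CouponUsage, error) {
    var all []CouponUsage
    var offset int64
    for {
        page, err := db.ListUnapplied(ctx, offset, limit, before)
        if err != nil {
            return nil, err
        }
        all = append(all, page.Usages...)
        if !page.Next {
            return all, nil
        }
        offset = page.NextOffset
    }
}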

View File

@ -24,7 +24,7 @@ func TestCouponRepository(t *testing.T) {
couponsRepo := db.StripeCoinPayments().Coupons()
coupon := payments.Coupon{
Duration: time.Hour * 24,
Duration: 2,
Amount: 10,
Status: payments.CouponActive,
Description: "description",
@ -70,7 +70,7 @@ func TestCouponRepository(t *testing.T) {
err := couponsRepo.AddUsage(ctx, stripecoinpayments.CouponUsage{
CouponID: coupon.ID,
Amount: 1,
End: now,
Period: now,
})
assert.NoError(t, err)
date, err := couponsRepo.GetLatest(ctx, coupon.ID)

View File

@ -8,6 +8,7 @@ import (
"github.com/skyrings/skyring-common/tools/uuid"
"github.com/stripe/stripe-go"
"github.com/zeebo/errs"
"storj.io/storj/satellite/payments"
)
@ -127,6 +128,21 @@ func (creditCards *creditCards) MakeDefault(ctx context.Context, userID uuid.UUI
func (creditCards *creditCards) Remove(ctx context.Context, userID uuid.UUID, cardID string) (err error) {
defer mon.Task()(&ctx, cardID)(&err)
customerID, err := creditCards.service.db.Customers().GetCustomerID(ctx, userID)
if err != nil {
return payments.ErrAccountNotSetup.Wrap(err)
}
customer, err := creditCards.service.stripeClient.Customers.Get(customerID, nil)
if err != nil {
return Error.Wrap(err)
}
if customer.InvoiceSettings != nil &&
customer.InvoiceSettings.DefaultPaymentMethod != nil &&
customer.InvoiceSettings.DefaultPaymentMethod.ID == cardID {
return Error.Wrap(errs.New("can not detach default payment method."))
}
_, err = creditCards.service.stripeClient.PaymentMethods.Detach(cardID, nil)
return Error.Wrap(err)

View File

@ -55,7 +55,12 @@ func (endpoint *Endpoint) PrepareInvoiceCoupons(ctx context.Context, req *pb.Pre
func (endpoint *Endpoint) ApplyInvoiceCoupons(ctx context.Context, req *pb.ApplyInvoiceCouponsRequest) (_ *pb.ApplyInvoiceCouponsResponse, err error) {
defer mon.Task()(&ctx)(&err)
return &pb.ApplyInvoiceCouponsResponse{}, rpcstatus.Error(rpcstatus.Unimplemented, "not implemented")
err = endpoint.service.InvoiceApplyCoupons(ctx)
if err != nil {
return nil, rpcstatus.Error(rpcstatus.Internal, err.Error())
}
return &pb.ApplyInvoiceCouponsResponse{}, nil
}
// CreateInvoices creates invoice for all user accounts on the satellite.

View File

@ -17,10 +17,12 @@ var ErrProjectRecordExists = Error.New("invoice project record already exists")
//
// architecture: Database
type ProjectRecordsDB interface {
// Create creates new invoice project record in the DB.
Create(ctx context.Context, records []CreateProjectRecord, start, end time.Time) error
// Create creates new invoice project record with coupon usages in the DB.
Create(ctx context.Context, records []CreateProjectRecord, couponUsages []CouponUsage, start, end time.Time) error
// Check checks if invoice project record for specified project and billing period exists.
Check(ctx context.Context, projectID uuid.UUID, start, end time.Time) error
// Get returns record for specified project and billing period.
Get(ctx context.Context, projectID uuid.UUID, start, end time.Time) (*ProjectRecord, error)
// Consume consumes invoice project record.
Consume(ctx context.Context, id uuid.UUID) error
// ListUnapplied returns project records page with unapplied project records.
@ -33,7 +35,7 @@ type CreateProjectRecord struct {
ProjectID uuid.UUID
Storage float64
Egress int64
Objects int64
Objects float64
}
// ProjectRecord holds project usage particular for billing period.
@ -42,7 +44,7 @@ type ProjectRecord struct {
ProjectID uuid.UUID
Storage float64
Egress int64
Objects int64
Objects float64
PeriodStart time.Time
PeriodEnd time.Time
}

View File

@ -42,6 +42,7 @@ func TestProjectRecords(t *testing.T) {
Objects: 3,
},
},
[]stripecoinpayments.CouponUsage{},
start, end,
)
require.NoError(t, err)
@ -92,12 +93,12 @@ func TestProjectRecordsList(t *testing.T) {
ProjectID: *projID,
Storage: float64(i) + 1,
Egress: int64(i) + 2,
Objects: int64(i) + 3,
Objects: float64(i) + 3,
},
)
}
err := projectRecordsDB.Create(ctx, createProjectRecords, start, end)
err := projectRecordsDB.Create(ctx, createProjectRecords, []stripecoinpayments.CouponUsage{}, start, end)
require.NoError(t, err)
page, err := projectRecordsDB.ListUnapplied(ctx, 0, recordsLen, time.Now())

View File

@ -17,6 +17,7 @@ import (
"go.uber.org/zap"
"gopkg.in/spacemonkeygo/monkit.v2"
"storj.io/common/memory"
"storj.io/storj/satellite/accounting"
"storj.io/storj/satellite/console"
"storj.io/storj/satellite/payments"
@ -41,8 +42,6 @@ type Config struct {
TransactionUpdateInterval time.Duration `help:"amount of time we wait before running next transaction update loop" devDefault:"1m" releaseDefault:"30m"`
AccountBalanceUpdateInterval time.Duration `help:"amount of time we wait before running next account balance update loop" devDefault:"3m" releaseDefault:"1h30m"`
ConversionRatesCycleInterval time.Duration `help:"amount of time we wait before running next conversion rates update loop" devDefault:"1m" releaseDefault:"10m"`
// TODO: uncomment when coupons will be finished.
// CouponUsageCycleInterval time.Duration `help:"amount of time we wait before running next coupon usage update loop" devDefault:"1d" releaseDefault:"1d"`
}
// Service is an implementation for payment service via Stripe and Coinpayments.
@ -105,7 +104,7 @@ func (service *Service) Accounts() payments.Accounts {
}
// AddCoupon attaches a coupon for payment account.
func (service *Service) AddCoupon(ctx context.Context, userID, projectID uuid.UUID, amount int64, duration time.Duration, description string) (err error) {
func (service *Service) AddCoupon(ctx context.Context, userID, projectID uuid.UUID, amount int64, duration int, description string, couponType payments.CouponType) (err error) {
defer mon.Task()(&ctx, userID, amount, duration)(&err)
coupon := payments.Coupon{
@ -113,6 +112,7 @@ func (service *Service) AddCoupon(ctx context.Context, userID, projectID uuid.UU
Status: payments.CouponActive,
ProjectID: projectID,
Amount: amount,
Type: couponType,
Description: description,
Duration: duration,
}
@ -120,114 +120,6 @@ func (service *Service) AddCoupon(ctx context.Context, userID, projectID uuid.UU
return Error.Wrap(service.db.Coupons().Insert(ctx, coupon))
}
// TODO: uncomment when coupons will be ready.
// updateCouponUsageLoop updates all daily coupon usage in a loop.
//func (service *Service) updateCouponUsageLoop(ctx context.Context) (err error) {
// defer mon.Task()(&ctx)(&err)
//
// const limit = 25
// before := time.Now()
//
// // takes first coupon page
// couponPage, err := service.db.Coupons().ListPaged(ctx, 0, limit, before, payments.CouponActive)
// if err != nil {
// return Error.Wrap(err)
// }
//
// // iterates through all coupons, takes daily project usage and create new coupon usage
// err = service.createDailyCouponUsage(ctx, couponPage.Coupons)
// if err != nil {
// return Error.Wrap(err)
// }
//
// // iterates by rest pages
// for couponPage.Next {
// if err = ctx.Err(); err != nil {
// return Error.Wrap(err)
// }
//
// couponPage, err = service.db.Coupons().ListPaged(ctx, couponPage.NextOffset, limit, before, payments.CouponActive)
// if err != nil {
// return Error.Wrap(err)
// }
//
// // iterates through all coupons, takes daily project usage and create new coupon usage
// err = service.createDailyCouponUsage(ctx, couponPage.Coupons)
// if err != nil {
// return Error.Wrap(err)
// }
// }
//
// return nil
//}
// createDailyCouponUsage iterates through all coupons, takes daily project usage and create new coupon usage.
// TODO: it will works only for 1 coupon per project. Need rework in future.
//func (service *Service) createDailyCouponUsage(ctx context.Context, coupons []payments.Coupon) (err error) {
// defer mon.Task()(&ctx)(&err)
//
// for _, coupon := range coupons {
// // check if coupon expired
// if coupon.Created.Add(coupon.Duration).After(time.Now().UTC()) {
// if err = service.db.Coupons().Update(ctx, coupon.ID, payments.CouponExpired); err != nil {
// return err
// }
//
// continue
// }
//
// since, err := service.db.Coupons().GetLatest(ctx, coupon.ID)
// if err != nil {
// if !ErrNoCouponUsages.Has(err) {
// return err
// }
//
// since = coupon.Created
// }
//
// start, end := date.DayBoundary(since)
// usage, err := service.usageDB.GetProjectTotal(ctx, coupon.ProjectID, start, end)
// if err != nil {
// return err
// }
//
// // TODO: reuse this code fragment.
// egressPrice := usage.Egress * service.EgressPrice / int64(memory.TB)
// objectCountPrice := int64(usage.ObjectCount * float64(service.PerObjectPrice))
// storageGbHrsPrice := int64(usage.Storage*float64(service.TBhPrice)) / int64(memory.TB)
//
// currentUsageAmount := egressPrice + objectCountPrice + storageGbHrsPrice
//
// // TODO: we should add caching for TotalUsage call
// alreadyChargedAmount, err := service.db.Coupons().TotalUsage(ctx, coupon.ID)
// if err != nil {
// return err
// }
// remaining := coupon.Amount - alreadyChargedAmount
//
// // check if coupon is used
// if currentUsageAmount >= remaining {
// if err = service.db.Coupons().Update(ctx, coupon.ID, payments.CouponUsed); err != nil {
// return err
// }
//
// currentUsageAmount = remaining
// }
//
// couponUsage := CouponUsage{
// End: time.Now().UTC(),
// Amount: currentUsageAmount,
// CouponID: coupon.ID,
// }
//
// if err = service.db.Coupons().AddUsage(ctx, couponUsage); err != nil {
// return err
// }
// }
//
// return nil
//}
// updateTransactionsLoop updates all pending transactions in a loop.
func (service *Service) updateTransactionsLoop(ctx context.Context) (err error) {
defer mon.Task()(&ctx)(&err)
@ -468,6 +360,7 @@ func (service *Service) createProjectRecords(ctx context.Context, projects []con
defer mon.Task()(&ctx)(&err)
var records []CreateProjectRecord
var usages []CouponUsage
for _, project := range projects {
if err = ctx.Err(); err != nil {
return err
@ -481,18 +374,62 @@ func (service *Service) createProjectRecords(ctx context.Context, projects []con
return err
}
usage, err := service.usageDB.GetProjectTotal(ctx, project.ID, start, end)
if err != nil {
return err
}
// TODO: account for usage data.
records = append(records,
CreateProjectRecord{
ProjectID: project.ID,
Storage: 0,
Egress: 0,
Objects: 0,
Storage: usage.Storage,
Egress: usage.Egress,
Objects: usage.ObjectCount,
},
)
coupons, err := service.db.Coupons().ListByProjectID(ctx, project.ID)
if err != nil {
return err
}
egressPrice := usage.Egress * service.EgressPrice / int64(memory.TB)
objectCountPrice := int64(usage.ObjectCount * float64(service.PerObjectPrice))
storageGbHrsPrice := int64(usage.Storage*float64(service.TBhPrice)) / int64(memory.TB)
currentUsagePrice := egressPrice + objectCountPrice + storageGbHrsPrice
// TODO: only for 1 coupon per project
for _, coupon := range coupons {
if coupon.IsExpired() {
if err = service.db.Coupons().Update(ctx, coupon.ID, payments.CouponExpired); err != nil {
return err
}
continue
}
alreadyChargedAmount, err := service.db.Coupons().TotalUsage(ctx, coupon.ID)
if err != nil {
return err
}
remaining := coupon.Amount - alreadyChargedAmount
if currentUsagePrice >= remaining {
currentUsagePrice = remaining
}
usages = append(usages, CouponUsage{
Period: start,
Amount: currentUsagePrice,
Status: CouponUsageStatusUnapplied,
CouponID: coupon.ID,
})
}
}
return service.db.ProjectRecords().Create(ctx, records, start, end)
return service.db.ProjectRecords().Create(ctx, records, usages, start, end)
}
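A worked example of the pricing arithmetic used above, with made-up prices (the real EgressPrice, PerObjectPrice and TBhPrice come from the service configuration): 2 TB of egress at 4500 cents per TB, 10000 objects at 1 cent each and the equivalent of one TB of storage at 1500 cents per TB, capped by what is left on the coupon.

// Illustrative numbers only.
egress := 2 * int64(memory.TB)
egressPrice := egress * 4500 / int64(memory.TB)                                  // 9000 cents
objectCountPrice := int64(10000 * float64(1))                                    // 10000 cents
storageGbHrsPrice := int64(float64(memory.TB)*float64(1500)) / int64(memory.TB)  // 1500 cents
currentUsagePrice := egressPrice + objectCountPrice + storageGbHrsPrice          // 20500 cents
remaining := int64(5000) // coupon.Amount - alreadyChargedAmount, made up
if currentUsagePrice >= remaining {
    currentUsagePrice = remaining // a coupon usage never exceeds what is left on the coupon
}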
// InvoiceApplyProjectRecords iterates through unapplied invoice project records and creates invoice line items
@ -501,7 +438,7 @@ func (service *Service) InvoiceApplyProjectRecords(ctx context.Context) (err err
defer mon.Task()(&ctx)(&err)
const limit = 25
before := time.Now()
before := time.Now().UTC()
recordsPage, err := service.db.ProjectRecords().ListUnapplied(ctx, 0, limit, before)
if err != nil {
@ -569,9 +506,15 @@ func (service *Service) createInvoiceItems(ctx context.Context, cusID, projName
return err
}
// TODO: add and apply pricing.
// TODO: reuse this code fragment.
egressPrice := record.Egress * service.EgressPrice / int64(memory.TB)
objectCountPrice := int64(record.Objects * float64(service.PerObjectPrice))
storageGbHrsPrice := int64(record.Storage*float64(service.TBhPrice)) / int64(memory.TB)
currentUsageAmount := egressPrice + objectCountPrice + storageGbHrsPrice
projectItem := &stripe.InvoiceItemParams{
Amount: stripe.Int64(0),
Amount: stripe.Int64(currentUsageAmount),
Currency: stripe.String(string(stripe.CurrencyUSD)),
Customer: stripe.String(cusID),
Description: stripe.String(fmt.Sprintf("project %s", projName)),
@ -587,39 +530,34 @@ func (service *Service) createInvoiceItems(ctx context.Context, cusID, projName
return err
}
// InvoiceApplyCoupons iterates through all active coupons.
// TODO: current implementation could possibly charge some coupons twice
// TODO: in case when this method failed and we call it again.
// TODO: we should mark coupons as 'already charged for current billing period' somehow to prevent multiple charge
// TODO: during invoices generation.
// InvoiceApplyCoupons iterates through unapplied project coupons and creates invoice line items
// for stripe customer.
func (service *Service) InvoiceApplyCoupons(ctx context.Context) (err error) {
defer mon.Task()(&ctx)(&err)
const limit = 25
before := time.Now()
before := time.Now().UTC()
// takes first coupon page
couponPage, err := service.db.Coupons().ListPaged(ctx, 0, limit, before, payments.CouponActive)
usagePage, err := service.db.Coupons().ListUnapplied(ctx, 0, limit, before)
if err != nil {
return Error.Wrap(err)
}
if err = service.applyCouponsDiscount(ctx, couponPage.Coupons); err != nil {
if err = service.applyCoupons(ctx, usagePage.Usages); err != nil {
return Error.Wrap(err)
}
// iterates by rest pages
for couponPage.Next {
for usagePage.Next {
if err = ctx.Err(); err != nil {
return Error.Wrap(err)
}
couponPage, err = service.db.Coupons().ListPaged(ctx, couponPage.NextOffset, limit, before, payments.CouponActive)
usagePage, err = service.db.Coupons().ListUnapplied(ctx, usagePage.NextOffset, limit, before)
if err != nil {
return Error.Wrap(err)
}
if err = service.applyCouponsDiscount(ctx, couponPage.Coupons); err != nil {
if err = service.applyCoupons(ctx, usagePage.Usages); err != nil {
return Error.Wrap(err)
}
}
@ -627,37 +565,30 @@ func (service *Service) InvoiceApplyCoupons(ctx context.Context) (err error) {
return nil
}
// applyCouponsDiscount iterates through all coupons, gets total usage for this coupon and creates Invoice coupon item.
func (service *Service) applyCouponsDiscount(ctx context.Context, coupons []payments.Coupon) (err error) {
// applyCoupons applies concrete coupon usage as invoice line item.
func (service *Service) applyCoupons(ctx context.Context, usages []CouponUsage) (err error) {
defer mon.Task()(&ctx)(&err)
for _, coupon := range coupons {
for _, usage := range usages {
if err = ctx.Err(); err != nil {
return err
}
coupon, err := service.db.Coupons().Get(ctx, usage.CouponID)
if err != nil {
return err
}
customerID, err := service.db.Customers().GetCustomerID(ctx, coupon.UserID)
if err != nil {
if err != ErrNoCustomer {
return err
if err == ErrNoCustomer {
continue
}
service.log.Error(
fmt.Sprintf("Could not apply coupon for user %s", coupon.UserID.String()),
zap.Error(Error.Wrap(err)),
)
continue
}
amountToCharge, err := service.db.Coupons().TotalUsage(ctx, coupon.ID)
if err != nil {
return err
}
intervalEnd, err := service.db.Coupons().GetLatest(ctx, coupon.ID)
if err != nil {
return err
}
err = service.createInvoiceCouponItem(ctx, customerID, coupon, amountToCharge, intervalEnd)
if err != nil {
if err = service.createInvoiceCouponItems(ctx, coupon, usage, customerID); err != nil {
return err
}
}
@ -665,18 +596,34 @@ func (service *Service) applyCouponsDiscount(ctx context.Context, coupons []paym
return nil
}
// createInvoiceCouponItem creates new Invoice item for specified coupon.
func (service *Service) createInvoiceCouponItem(ctx context.Context, customerID string, coupon payments.Coupon, amountToCharge int64, intervalEnd time.Time) (err error) {
// createInvoiceCouponItems applies a coupon usage and creates an invoice line item for the stripe customer.
func (service *Service) createInvoiceCouponItems(ctx context.Context, coupon payments.Coupon, usage CouponUsage, customerID string) (err error) {
defer mon.Task()(&ctx, customerID, coupon)(&err)
err = service.db.Coupons().ApplyUsage(ctx, usage.CouponID, usage.Period)
if err != nil {
return err
}
totalUsage, err := service.db.Coupons().TotalUsage(ctx, coupon.ID)
if err != nil {
return err
}
if totalUsage == coupon.Amount {
err = service.db.Coupons().Update(ctx, coupon.ID, payments.CouponUsed)
if err != nil {
return err
}
}
projectItem := &stripe.InvoiceItemParams{
Amount: stripe.Int64(-amountToCharge),
Amount: stripe.Int64(-usage.Amount),
Currency: stripe.String(string(stripe.CurrencyUSD)),
Customer: stripe.String(customerID),
Description: stripe.String(fmt.Sprintf("Discount from coupon: %s", coupon.Description)),
Period: &stripe.InvoiceItemPeriodParams{
End: stripe.Int64(intervalEnd.Unix()),
Start: stripe.Int64(coupon.Created.Unix()),
End: stripe.Int64(usage.Period.AddDate(0, 1, 0).Unix()),
Start: stripe.Int64(usage.Period.Unix()),
},
}
@ -685,7 +632,6 @@ func (service *Service) createInvoiceCouponItem(ctx context.Context, customerID
_, err = service.stripeClient.InvoiceItems.New(projectItem)
// TODO: do smth with coupon
return err
}
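To make the stripe invoice item fields above concrete (illustrative values only): a usage recorded for the January 2020 billing period becomes a negative line item covering exactly one month.

period := time.Date(2020, time.January, 1, 0, 0, 0, 0, time.UTC) // usage.Period for the January 2020 billing period
start, end := period, period.AddDate(0, 1, 0)                    // invoice item period: 2020-01-01 through 2020-02-01
amount := -int64(1200)                                           // minus usage.Amount, i.e. a 12 USD discount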

View File

@ -44,6 +44,7 @@ func (coupons *coupons) Insert(ctx context.Context, coupon payments.Coupon) (err
dbx.Coupon_UserId(coupon.UserID[:]),
dbx.Coupon_Amount(coupon.Amount),
dbx.Coupon_Description(coupon.Description),
dbx.Coupon_Type(int(coupon.Type)),
dbx.Coupon_Status(int(coupon.Status)),
dbx.Coupon_Duration(int64(coupon.Duration)),
)
@ -66,6 +67,18 @@ func (coupons *coupons) Update(ctx context.Context, couponID uuid.UUID, status p
return err
}
// Get returns coupon by ID.
func (coupons *coupons) Get(ctx context.Context, couponID uuid.UUID) (_ payments.Coupon, err error) {
defer mon.Task()(&ctx, couponID)(&err)
dbxCoupon, err := coupons.db.Get_Coupon_By_Id(ctx, dbx.Coupon_Id(couponID[:]))
if err != nil {
return payments.Coupon{}, err
}
return fromDBXCoupon(dbxCoupon)
}
// ListByUserID returns all coupons of specified user.
func (coupons *coupons) ListByUserID(ctx context.Context, userID uuid.UUID) (_ []payments.Coupon, err error) {
defer mon.Task()(&ctx, userID)(&err)
@ -81,6 +94,22 @@ func (coupons *coupons) ListByUserID(ctx context.Context, userID uuid.UUID) (_ [
return couponsFromDbxSlice(dbxCoupons)
}
// ListByUserIDAndStatus returns all coupons of specified user and status.
func (coupons *coupons) ListByUserIDAndStatus(ctx context.Context, userID uuid.UUID, status payments.CouponStatus) (_ []payments.Coupon, err error) {
defer mon.Task()(&ctx, userID)(&err)
dbxCoupons, err := coupons.db.All_Coupon_By_UserId_And_Status_OrderBy_Desc_CreatedAt(
ctx,
dbx.Coupon_UserId(userID[:]),
dbx.Coupon_Status(int(status)),
)
if err != nil {
return nil, err
}
return couponsFromDbxSlice(dbxCoupons)
}
// List returns all coupons with specified status.
func (coupons *coupons) List(ctx context.Context, status payments.CouponStatus) (_ []payments.Coupon, err error) {
defer mon.Task()(&ctx, status)(&err)
@ -128,6 +157,21 @@ func (coupons *coupons) ListPaged(ctx context.Context, offset int64, limit int,
return page, nil
}
// ListByProjectID returns all active coupons for specified project.
func (coupons *coupons) ListByProjectID(ctx context.Context, projectID uuid.UUID) (_ []payments.Coupon, err error) {
defer mon.Task()(&ctx, projectID)(&err)
dbxCoupons, err := coupons.db.All_Coupon_By_ProjectId_And_Status_Equal_Number_OrderBy_Desc_CreatedAt(
ctx,
dbx.Coupon_ProjectId(projectID[:]),
)
if err != nil {
return nil, err
}
return couponsFromDbxSlice(dbxCoupons)
}
// fromDBXCoupon converts *dbx.Coupon to *payments.Coupon.
func fromDBXCoupon(dbxCoupon *dbx.Coupon) (coupon payments.Coupon, err error) {
coupon.UserID, err = dbutil.BytesToUUID(dbxCoupon.UserId)
@ -145,7 +189,7 @@ func fromDBXCoupon(dbxCoupon *dbx.Coupon) (coupon payments.Coupon, err error) {
return payments.Coupon{}, err
}
coupon.Duration = time.Duration(dbxCoupon.Duration)
coupon.Duration = int(dbxCoupon.Duration)
coupon.Description = dbxCoupon.Description
coupon.Amount = dbxCoupon.Amount
coupon.Created = dbxCoupon.CreatedAt
@ -158,17 +202,12 @@ func fromDBXCoupon(dbxCoupon *dbx.Coupon) (coupon payments.Coupon, err error) {
func (coupons *coupons) AddUsage(ctx context.Context, usage stripecoinpayments.CouponUsage) (err error) {
defer mon.Task()(&ctx, usage)(&err)
id, err := uuid.New()
if err != nil {
return err
}
_, err = coupons.db.Create_CouponUsage(
ctx,
dbx.CouponUsage_Id(id[:]),
dbx.CouponUsage_CouponId(usage.CouponID[:]),
dbx.CouponUsage_Amount(usage.Amount),
dbx.CouponUsage_IntervalEnd(usage.End),
dbx.CouponUsage_Status(int(usage.Status)),
dbx.CouponUsage_Period(usage.Period),
)
return err
@ -192,15 +231,33 @@ func (coupons *coupons) TotalUsage(ctx context.Context, couponID uuid.UUID) (_ i
return amount, err
}
// TotalUsageForPeriod gets the sum of all usage records for the specified coupon and period.
func (coupons *coupons) TotalUsageForPeriod(ctx context.Context, couponID uuid.UUID, period time.Time) (_ int64, err error) {
defer mon.Task()(&ctx, couponID)(&err)
query := coupons.db.Rebind(
`SELECT COALESCE(SUM(amount), 0)
FROM coupon_usages
WHERE coupon_id = ? AND period = ?;`,
)
amountRow := coupons.db.QueryRowContext(ctx, query, couponID[:], period)
var amount int64
err = amountRow.Scan(&amount)
return amount, err
}
// GetLatest returns the period of the latest coupon charge.
func (coupons *coupons) GetLatest(ctx context.Context, couponID uuid.UUID) (_ time.Time, err error) {
defer mon.Task()(&ctx, couponID)(&err)
query := coupons.db.Rebind(
`SELECT interval_end
`SELECT period
FROM coupon_usages
WHERE coupon_id = ?
ORDER BY interval_end DESC
ORDER BY period DESC
LIMIT 1;`,
)
@ -212,7 +269,58 @@ func (coupons *coupons) GetLatest(ctx context.Context, couponID uuid.UUID) (_ ti
return created, stripecoinpayments.ErrNoCouponUsages.Wrap(err)
}
return created.UTC(), err
return created, err
}
// ListUnapplied returns coupon usage page with unapplied coupon usages.
func (coupons *coupons) ListUnapplied(ctx context.Context, offset int64, limit int, before time.Time) (_ stripecoinpayments.CouponUsagePage, err error) {
defer mon.Task()(&ctx, offset, limit, before)(&err)
var page stripecoinpayments.CouponUsagePage
dbxRecords, err := coupons.db.Limited_CouponUsage_By_Period_LessOrEqual_And_Status_Equal_Number_OrderBy_Desc_Period(
ctx,
dbx.CouponUsage_Period(before),
limit+1,
offset,
)
if err != nil {
return stripecoinpayments.CouponUsagePage{}, err
}
if len(dbxRecords) == limit+1 {
page.Next = true
page.NextOffset = offset + int64(limit) + 1
dbxRecords = dbxRecords[:len(dbxRecords)-1]
}
for _, dbxRecord := range dbxRecords {
record, err := couponUsageFromDbxSlice(dbxRecord)
if err != nil {
return stripecoinpayments.CouponUsagePage{}, err
}
page.Usages = append(page.Usages, record)
}
return page, nil
}
// ApplyUsage applies coupon usage and updates its status.
func (coupons *coupons) ApplyUsage(ctx context.Context, couponID uuid.UUID, period time.Time) (err error) {
defer mon.Task()(&ctx, couponID, period)(&err)
_, err = coupons.db.Update_CouponUsage_By_CouponId_And_Period(
ctx,
dbx.CouponUsage_CouponId(couponID[:]),
dbx.CouponUsage_Period(period),
dbx.CouponUsage_Update_Fields{
Status: dbx.CouponUsage_Status(int(stripecoinpayments.CouponUsageStatusApplied)),
},
)
return err
}
// couponsFromDbxSlice is used for creating []payments.Coupon entities from autogenerated []dbx.Coupon struct.
@ -233,3 +341,17 @@ func couponsFromDbxSlice(couponsDbx []*dbx.Coupon) (_ []payments.Coupon, err err
return coupons, errs.Combine(errors...)
}
// couponUsageFromDbxSlice is used for creating stripecoinpayments.CouponUsage entity from autogenerated dbx.CouponUsage struct.
func couponUsageFromDbxSlice(couponUsageDbx *dbx.CouponUsage) (usage stripecoinpayments.CouponUsage, err error) {
usage.Status = stripecoinpayments.CouponUsageStatus(couponUsageDbx.Status)
usage.Period = couponUsageDbx.Period
usage.Amount = couponUsageDbx.Amount
usage.CouponID, err = dbutil.BytesToUUID(couponUsageDbx.CouponId)
if err != nil {
return stripecoinpayments.CouponUsage{}, err
}
return usage, err
}

View File

@ -1019,13 +1019,13 @@ read one (
model coupon (
key id
unique project_id
field id blob
field project_id blob
field user_id blob
field amount int64
field description text
field type int
field status int ( updatable )
field duration int64
@ -1039,11 +1039,27 @@ update coupon (
delete coupon (
where coupon.id = ?
)
read one (
select coupon
where coupon.id = ?
)
read all (
select coupon
where coupon.project_id = ?
where coupon.status = 0
orderby desc coupon.created_at
)
read all (
select coupon
where coupon.user_id = ?
orderby desc coupon.created_at
)
read all (
select coupon
where coupon.user_id = ?
where coupon.status = ?
orderby desc coupon.created_at
)
read all (
select coupon
where coupon.status = ?
@ -1056,11 +1072,21 @@ read limitoffset (
orderby desc coupon.created_at
)
model coupon_usage (
key id
key coupon_id period
field id blob
field coupon_id blob
field amount int64
field interval_end timestamp
field coupon_id blob
field amount int64
field status int ( updatable )
field period timestamp
)
create coupon_usage ()
read limitoffset (
select coupon_usage
where coupon_usage.period <= ?
where coupon_usage.status = 0
orderby desc coupon_usage.period
)
update coupon_usage (
where coupon_usage.coupon_id = ?
where coupon_usage.period = ?
)

View File

@ -58,18 +58,18 @@ CREATE TABLE coupons (
user_id bytea NOT NULL,
amount bigint NOT NULL,
description text NOT NULL,
type integer NOT NULL,
status integer NOT NULL,
duration bigint NOT NULL,
created_at timestamp with time zone NOT NULL,
PRIMARY KEY ( id ),
UNIQUE ( project_id )
PRIMARY KEY ( id )
);
CREATE TABLE coupon_usages (
id bytea NOT NULL,
coupon_id bytea NOT NULL,
amount bigint NOT NULL,
interval_end timestamp with time zone NOT NULL,
PRIMARY KEY ( id )
status integer NOT NULL,
period timestamp with time zone NOT NULL,
PRIMARY KEY ( coupon_id, period )
);
CREATE TABLE graceful_exit_progress (
node_id bytea NOT NULL,

View File

@ -328,18 +328,18 @@ CREATE TABLE coupons (
user_id bytea NOT NULL,
amount bigint NOT NULL,
description text NOT NULL,
type integer NOT NULL,
status integer NOT NULL,
duration bigint NOT NULL,
created_at timestamp with time zone NOT NULL,
PRIMARY KEY ( id ),
UNIQUE ( project_id )
PRIMARY KEY ( id )
);
CREATE TABLE coupon_usages (
id bytea NOT NULL,
coupon_id bytea NOT NULL,
amount bigint NOT NULL,
interval_end timestamp with time zone NOT NULL,
PRIMARY KEY ( id )
status integer NOT NULL,
period timestamp with time zone NOT NULL,
PRIMARY KEY ( coupon_id, period )
);
CREATE TABLE graceful_exit_progress (
node_id bytea NOT NULL,
@ -746,18 +746,18 @@ CREATE TABLE coupons (
user_id bytea NOT NULL,
amount bigint NOT NULL,
description text NOT NULL,
type integer NOT NULL,
status integer NOT NULL,
duration bigint NOT NULL,
created_at timestamp with time zone NOT NULL,
PRIMARY KEY ( id ),
UNIQUE ( project_id )
PRIMARY KEY ( id )
);
CREATE TABLE coupon_usages (
id bytea NOT NULL,
coupon_id bytea NOT NULL,
amount bigint NOT NULL,
interval_end timestamp with time zone NOT NULL,
PRIMARY KEY ( id )
status integer NOT NULL,
period timestamp with time zone NOT NULL,
PRIMARY KEY ( coupon_id, period )
);
CREATE TABLE graceful_exit_progress (
node_id bytea NOT NULL,
@ -1899,6 +1899,7 @@ type Coupon struct {
UserId []byte
Amount int64
Description string
Type int
Status int
Duration int64
CreatedAt time.Time
@ -2005,6 +2006,25 @@ func (f Coupon_Description_Field) value() interface{} {
func (Coupon_Description_Field) _Column() string { return "description" }
type Coupon_Type_Field struct {
_set bool
_null bool
_value int
}
func Coupon_Type(v int) Coupon_Type_Field {
return Coupon_Type_Field{_set: true, _value: v}
}
func (f Coupon_Type_Field) value() interface{} {
if !f._set || f._null {
return nil
}
return f._value
}
func (Coupon_Type_Field) _Column() string { return "type" }
type Coupon_Status_Field struct {
_set bool
_null bool
@ -2063,36 +2083,18 @@ func (f Coupon_CreatedAt_Field) value() interface{} {
func (Coupon_CreatedAt_Field) _Column() string { return "created_at" }
type CouponUsage struct {
Id []byte
CouponId []byte
Amount int64
IntervalEnd time.Time
CouponId []byte
Amount int64
Status int
Period time.Time
}
func (CouponUsage) _Table() string { return "coupon_usages" }
type CouponUsage_Update_Fields struct {
Status CouponUsage_Status_Field
}
type CouponUsage_Id_Field struct {
_set bool
_null bool
_value []byte
}
func CouponUsage_Id(v []byte) CouponUsage_Id_Field {
return CouponUsage_Id_Field{_set: true, _value: v}
}
func (f CouponUsage_Id_Field) value() interface{} {
if !f._set || f._null {
return nil
}
return f._value
}
func (CouponUsage_Id_Field) _Column() string { return "id" }
type CouponUsage_CouponId_Field struct {
_set bool
_null bool
@ -2131,24 +2133,43 @@ func (f CouponUsage_Amount_Field) value() interface{} {
func (CouponUsage_Amount_Field) _Column() string { return "amount" }
type CouponUsage_IntervalEnd_Field struct {
type CouponUsage_Status_Field struct {
_set bool
_null bool
_value time.Time
_value int
}
func CouponUsage_IntervalEnd(v time.Time) CouponUsage_IntervalEnd_Field {
return CouponUsage_IntervalEnd_Field{_set: true, _value: v}
func CouponUsage_Status(v int) CouponUsage_Status_Field {
return CouponUsage_Status_Field{_set: true, _value: v}
}
func (f CouponUsage_IntervalEnd_Field) value() interface{} {
func (f CouponUsage_Status_Field) value() interface{} {
if !f._set || f._null {
return nil
}
return f._value
}
func (CouponUsage_IntervalEnd_Field) _Column() string { return "interval_end" }
func (CouponUsage_Status_Field) _Column() string { return "status" }
type CouponUsage_Period_Field struct {
_set bool
_null bool
_value time.Time
}
func CouponUsage_Period(v time.Time) CouponUsage_Period_Field {
return CouponUsage_Period_Field{_set: true, _value: v}
}
func (f CouponUsage_Period_Field) value() interface{} {
if !f._set || f._null {
return nil
}
return f._value
}
func (CouponUsage_Period_Field) _Column() string { return "period" }
type GracefulExitProgress struct {
NodeId []byte
@ -7814,6 +7835,7 @@ func (obj *postgresImpl) Create_Coupon(ctx context.Context,
coupon_user_id Coupon_UserId_Field,
coupon_amount Coupon_Amount_Field,
coupon_description Coupon_Description_Field,
coupon_type Coupon_Type_Field,
coupon_status Coupon_Status_Field,
coupon_duration Coupon_Duration_Field) (
coupon *Coupon, err error) {
@ -7825,17 +7847,18 @@ func (obj *postgresImpl) Create_Coupon(ctx context.Context,
__user_id_val := coupon_user_id.value()
__amount_val := coupon_amount.value()
__description_val := coupon_description.value()
__type_val := coupon_type.value()
__status_val := coupon_status.value()
__duration_val := coupon_duration.value()
__created_at_val := __now
var __embed_stmt = __sqlbundle_Literal("INSERT INTO coupons ( id, project_id, user_id, amount, description, status, duration, created_at ) VALUES ( ?, ?, ?, ?, ?, ?, ?, ? ) RETURNING coupons.id, coupons.project_id, coupons.user_id, coupons.amount, coupons.description, coupons.status, coupons.duration, coupons.created_at")
var __embed_stmt = __sqlbundle_Literal("INSERT INTO coupons ( id, project_id, user_id, amount, description, type, status, duration, created_at ) VALUES ( ?, ?, ?, ?, ?, ?, ?, ?, ? ) RETURNING coupons.id, coupons.project_id, coupons.user_id, coupons.amount, coupons.description, coupons.type, coupons.status, coupons.duration, coupons.created_at")
var __stmt = __sqlbundle_Render(obj.dialect, __embed_stmt)
obj.logStmt(__stmt, __id_val, __project_id_val, __user_id_val, __amount_val, __description_val, __status_val, __duration_val, __created_at_val)
obj.logStmt(__stmt, __id_val, __project_id_val, __user_id_val, __amount_val, __description_val, __type_val, __status_val, __duration_val, __created_at_val)
coupon = &Coupon{}
err = obj.driver.QueryRow(__stmt, __id_val, __project_id_val, __user_id_val, __amount_val, __description_val, __status_val, __duration_val, __created_at_val).Scan(&coupon.Id, &coupon.ProjectId, &coupon.UserId, &coupon.Amount, &coupon.Description, &coupon.Status, &coupon.Duration, &coupon.CreatedAt)
err = obj.driver.QueryRow(__stmt, __id_val, __project_id_val, __user_id_val, __amount_val, __description_val, __type_val, __status_val, __duration_val, __created_at_val).Scan(&coupon.Id, &coupon.ProjectId, &coupon.UserId, &coupon.Amount, &coupon.Description, &coupon.Type, &coupon.Status, &coupon.Duration, &coupon.CreatedAt)
if err != nil {
return nil, obj.makeErr(err)
}
@ -7844,24 +7867,24 @@ func (obj *postgresImpl) Create_Coupon(ctx context.Context,
}
func (obj *postgresImpl) Create_CouponUsage(ctx context.Context,
coupon_usage_id CouponUsage_Id_Field,
coupon_usage_coupon_id CouponUsage_CouponId_Field,
coupon_usage_amount CouponUsage_Amount_Field,
coupon_usage_interval_end CouponUsage_IntervalEnd_Field) (
coupon_usage_status CouponUsage_Status_Field,
coupon_usage_period CouponUsage_Period_Field) (
coupon_usage *CouponUsage, err error) {
defer mon.Task()(&ctx)(&err)
__id_val := coupon_usage_id.value()
__coupon_id_val := coupon_usage_coupon_id.value()
__amount_val := coupon_usage_amount.value()
__interval_end_val := coupon_usage_interval_end.value()
__status_val := coupon_usage_status.value()
__period_val := coupon_usage_period.value()
var __embed_stmt = __sqlbundle_Literal("INSERT INTO coupon_usages ( id, coupon_id, amount, interval_end ) VALUES ( ?, ?, ?, ? ) RETURNING coupon_usages.id, coupon_usages.coupon_id, coupon_usages.amount, coupon_usages.interval_end")
var __embed_stmt = __sqlbundle_Literal("INSERT INTO coupon_usages ( coupon_id, amount, status, period ) VALUES ( ?, ?, ?, ? ) RETURNING coupon_usages.coupon_id, coupon_usages.amount, coupon_usages.status, coupon_usages.period")
var __stmt = __sqlbundle_Render(obj.dialect, __embed_stmt)
obj.logStmt(__stmt, __id_val, __coupon_id_val, __amount_val, __interval_end_val)
obj.logStmt(__stmt, __coupon_id_val, __amount_val, __status_val, __period_val)
coupon_usage = &CouponUsage{}
err = obj.driver.QueryRow(__stmt, __id_val, __coupon_id_val, __amount_val, __interval_end_val).Scan(&coupon_usage.Id, &coupon_usage.CouponId, &coupon_usage.Amount, &coupon_usage.IntervalEnd)
err = obj.driver.QueryRow(__stmt, __coupon_id_val, __amount_val, __status_val, __period_val).Scan(&coupon_usage.CouponId, &coupon_usage.Amount, &coupon_usage.Status, &coupon_usage.Period)
if err != nil {
return nil, obj.makeErr(err)
}
@ -9801,12 +9824,68 @@ func (obj *postgresImpl) Get_StripecoinpaymentsTxConversionRate_By_TxId(ctx cont
}
func (obj *postgresImpl) Get_Coupon_By_Id(ctx context.Context,
coupon_id Coupon_Id_Field) (
coupon *Coupon, err error) {
defer mon.Task()(&ctx)(&err)
var __embed_stmt = __sqlbundle_Literal("SELECT coupons.id, coupons.project_id, coupons.user_id, coupons.amount, coupons.description, coupons.type, coupons.status, coupons.duration, coupons.created_at FROM coupons WHERE coupons.id = ?")
var __values []interface{}
__values = append(__values, coupon_id.value())
var __stmt = __sqlbundle_Render(obj.dialect, __embed_stmt)
obj.logStmt(__stmt, __values...)
coupon = &Coupon{}
err = obj.driver.QueryRow(__stmt, __values...).Scan(&coupon.Id, &coupon.ProjectId, &coupon.UserId, &coupon.Amount, &coupon.Description, &coupon.Type, &coupon.Status, &coupon.Duration, &coupon.CreatedAt)
if err != nil {
return (*Coupon)(nil), obj.makeErr(err)
}
return coupon, nil
}
func (obj *postgresImpl) All_Coupon_By_ProjectId_And_Status_Equal_Number_OrderBy_Desc_CreatedAt(ctx context.Context,
coupon_project_id Coupon_ProjectId_Field) (
rows []*Coupon, err error) {
defer mon.Task()(&ctx)(&err)
var __embed_stmt = __sqlbundle_Literal("SELECT coupons.id, coupons.project_id, coupons.user_id, coupons.amount, coupons.description, coupons.type, coupons.status, coupons.duration, coupons.created_at FROM coupons WHERE coupons.project_id = ? AND coupons.status = 0 ORDER BY coupons.created_at DESC")
var __values []interface{}
__values = append(__values, coupon_project_id.value())
var __stmt = __sqlbundle_Render(obj.dialect, __embed_stmt)
obj.logStmt(__stmt, __values...)
__rows, err := obj.driver.Query(__stmt, __values...)
if err != nil {
return nil, obj.makeErr(err)
}
defer __rows.Close()
for __rows.Next() {
coupon := &Coupon{}
err = __rows.Scan(&coupon.Id, &coupon.ProjectId, &coupon.UserId, &coupon.Amount, &coupon.Description, &coupon.Type, &coupon.Status, &coupon.Duration, &coupon.CreatedAt)
if err != nil {
return nil, obj.makeErr(err)
}
rows = append(rows, coupon)
}
if err := __rows.Err(); err != nil {
return nil, obj.makeErr(err)
}
return rows, nil
}
func (obj *postgresImpl) All_Coupon_By_UserId_OrderBy_Desc_CreatedAt(ctx context.Context,
coupon_user_id Coupon_UserId_Field) (
rows []*Coupon, err error) {
defer mon.Task()(&ctx)(&err)
var __embed_stmt = __sqlbundle_Literal("SELECT coupons.id, coupons.project_id, coupons.user_id, coupons.amount, coupons.description, coupons.status, coupons.duration, coupons.created_at FROM coupons WHERE coupons.user_id = ? ORDER BY coupons.created_at DESC")
var __embed_stmt = __sqlbundle_Literal("SELECT coupons.id, coupons.project_id, coupons.user_id, coupons.amount, coupons.description, coupons.type, coupons.status, coupons.duration, coupons.created_at FROM coupons WHERE coupons.user_id = ? ORDER BY coupons.created_at DESC")
var __values []interface{}
__values = append(__values, coupon_user_id.value())
@ -9822,7 +9901,42 @@ func (obj *postgresImpl) All_Coupon_By_UserId_OrderBy_Desc_CreatedAt(ctx context
for __rows.Next() {
coupon := &Coupon{}
err = __rows.Scan(&coupon.Id, &coupon.ProjectId, &coupon.UserId, &coupon.Amount, &coupon.Description, &coupon.Status, &coupon.Duration, &coupon.CreatedAt)
err = __rows.Scan(&coupon.Id, &coupon.ProjectId, &coupon.UserId, &coupon.Amount, &coupon.Description, &coupon.Type, &coupon.Status, &coupon.Duration, &coupon.CreatedAt)
if err != nil {
return nil, obj.makeErr(err)
}
rows = append(rows, coupon)
}
if err := __rows.Err(); err != nil {
return nil, obj.makeErr(err)
}
return rows, nil
}
func (obj *postgresImpl) All_Coupon_By_UserId_And_Status_OrderBy_Desc_CreatedAt(ctx context.Context,
coupon_user_id Coupon_UserId_Field,
coupon_status Coupon_Status_Field) (
rows []*Coupon, err error) {
defer mon.Task()(&ctx)(&err)
var __embed_stmt = __sqlbundle_Literal("SELECT coupons.id, coupons.project_id, coupons.user_id, coupons.amount, coupons.description, coupons.type, coupons.status, coupons.duration, coupons.created_at FROM coupons WHERE coupons.user_id = ? AND coupons.status = ? ORDER BY coupons.created_at DESC")
var __values []interface{}
__values = append(__values, coupon_user_id.value(), coupon_status.value())
var __stmt = __sqlbundle_Render(obj.dialect, __embed_stmt)
obj.logStmt(__stmt, __values...)
__rows, err := obj.driver.Query(__stmt, __values...)
if err != nil {
return nil, obj.makeErr(err)
}
defer __rows.Close()
for __rows.Next() {
coupon := &Coupon{}
err = __rows.Scan(&coupon.Id, &coupon.ProjectId, &coupon.UserId, &coupon.Amount, &coupon.Description, &coupon.Type, &coupon.Status, &coupon.Duration, &coupon.CreatedAt)
if err != nil {
return nil, obj.makeErr(err)
}
@ -9840,7 +9954,7 @@ func (obj *postgresImpl) All_Coupon_By_Status_OrderBy_Desc_CreatedAt(ctx context
rows []*Coupon, err error) {
defer mon.Task()(&ctx)(&err)
var __embed_stmt = __sqlbundle_Literal("SELECT coupons.id, coupons.project_id, coupons.user_id, coupons.amount, coupons.description, coupons.status, coupons.duration, coupons.created_at FROM coupons WHERE coupons.status = ? ORDER BY coupons.created_at DESC")
var __embed_stmt = __sqlbundle_Literal("SELECT coupons.id, coupons.project_id, coupons.user_id, coupons.amount, coupons.description, coupons.type, coupons.status, coupons.duration, coupons.created_at FROM coupons WHERE coupons.status = ? ORDER BY coupons.created_at DESC")
var __values []interface{}
__values = append(__values, coupon_status.value())
@ -9856,7 +9970,7 @@ func (obj *postgresImpl) All_Coupon_By_Status_OrderBy_Desc_CreatedAt(ctx context
for __rows.Next() {
coupon := &Coupon{}
err = __rows.Scan(&coupon.Id, &coupon.ProjectId, &coupon.UserId, &coupon.Amount, &coupon.Description, &coupon.Status, &coupon.Duration, &coupon.CreatedAt)
err = __rows.Scan(&coupon.Id, &coupon.ProjectId, &coupon.UserId, &coupon.Amount, &coupon.Description, &coupon.Type, &coupon.Status, &coupon.Duration, &coupon.CreatedAt)
if err != nil {
return nil, obj.makeErr(err)
}
@ -9876,7 +9990,7 @@ func (obj *postgresImpl) Limited_Coupon_By_CreatedAt_LessOrEqual_And_Status_Orde
rows []*Coupon, err error) {
defer mon.Task()(&ctx)(&err)
var __embed_stmt = __sqlbundle_Literal("SELECT coupons.id, coupons.project_id, coupons.user_id, coupons.amount, coupons.description, coupons.status, coupons.duration, coupons.created_at FROM coupons WHERE coupons.created_at <= ? AND coupons.status = ? ORDER BY coupons.created_at DESC LIMIT ? OFFSET ?")
var __embed_stmt = __sqlbundle_Literal("SELECT coupons.id, coupons.project_id, coupons.user_id, coupons.amount, coupons.description, coupons.type, coupons.status, coupons.duration, coupons.created_at FROM coupons WHERE coupons.created_at <= ? AND coupons.status = ? ORDER BY coupons.created_at DESC LIMIT ? OFFSET ?")
var __values []interface{}
__values = append(__values, coupon_created_at_less_or_equal.value(), coupon_status.value())
@ -9894,7 +10008,7 @@ func (obj *postgresImpl) Limited_Coupon_By_CreatedAt_LessOrEqual_And_Status_Orde
for __rows.Next() {
coupon := &Coupon{}
err = __rows.Scan(&coupon.Id, &coupon.ProjectId, &coupon.UserId, &coupon.Amount, &coupon.Description, &coupon.Status, &coupon.Duration, &coupon.CreatedAt)
err = __rows.Scan(&coupon.Id, &coupon.ProjectId, &coupon.UserId, &coupon.Amount, &coupon.Description, &coupon.Type, &coupon.Status, &coupon.Duration, &coupon.CreatedAt)
if err != nil {
return nil, obj.makeErr(err)
}
@ -9907,6 +10021,43 @@ func (obj *postgresImpl) Limited_Coupon_By_CreatedAt_LessOrEqual_And_Status_Orde
}
func (obj *postgresImpl) Limited_CouponUsage_By_Period_LessOrEqual_And_Status_Equal_Number_OrderBy_Desc_Period(ctx context.Context,
coupon_usage_period_less_or_equal CouponUsage_Period_Field,
limit int, offset int64) (
rows []*CouponUsage, err error) {
defer mon.Task()(&ctx)(&err)
var __embed_stmt = __sqlbundle_Literal("SELECT coupon_usages.coupon_id, coupon_usages.amount, coupon_usages.status, coupon_usages.period FROM coupon_usages WHERE coupon_usages.period <= ? AND coupon_usages.status = 0 ORDER BY coupon_usages.period DESC LIMIT ? OFFSET ?")
var __values []interface{}
__values = append(__values, coupon_usage_period_less_or_equal.value())
__values = append(__values, limit, offset)
var __stmt = __sqlbundle_Render(obj.dialect, __embed_stmt)
obj.logStmt(__stmt, __values...)
__rows, err := obj.driver.Query(__stmt, __values...)
if err != nil {
return nil, obj.makeErr(err)
}
defer __rows.Close()
for __rows.Next() {
coupon_usage := &CouponUsage{}
err = __rows.Scan(&coupon_usage.CouponId, &coupon_usage.Amount, &coupon_usage.Status, &coupon_usage.Period)
if err != nil {
return nil, obj.makeErr(err)
}
rows = append(rows, coupon_usage)
}
if err := __rows.Err(); err != nil {
return nil, obj.makeErr(err)
}
return rows, nil
}
func (obj *postgresImpl) Update_PendingAudits_By_NodeId(ctx context.Context,
pending_audits_node_id PendingAudits_NodeId_Field,
update PendingAudits_Update_Fields) (
@ -11081,7 +11232,7 @@ func (obj *postgresImpl) Update_Coupon_By_Id(ctx context.Context,
defer mon.Task()(&ctx)(&err)
var __sets = &__sqlbundle_Hole{}
var __embed_stmt = __sqlbundle_Literals{Join: "", SQLs: []__sqlbundle_SQL{__sqlbundle_Literal("UPDATE coupons SET "), __sets, __sqlbundle_Literal(" WHERE coupons.id = ? RETURNING coupons.id, coupons.project_id, coupons.user_id, coupons.amount, coupons.description, coupons.status, coupons.duration, coupons.created_at")}}
var __embed_stmt = __sqlbundle_Literals{Join: "", SQLs: []__sqlbundle_SQL{__sqlbundle_Literal("UPDATE coupons SET "), __sets, __sqlbundle_Literal(" WHERE coupons.id = ? RETURNING coupons.id, coupons.project_id, coupons.user_id, coupons.amount, coupons.description, coupons.type, coupons.status, coupons.duration, coupons.created_at")}}
__sets_sql := __sqlbundle_Literals{Join: ", "}
var __values []interface{}
@ -11105,7 +11256,7 @@ func (obj *postgresImpl) Update_Coupon_By_Id(ctx context.Context,
obj.logStmt(__stmt, __values...)
coupon = &Coupon{}
err = obj.driver.QueryRow(__stmt, __values...).Scan(&coupon.Id, &coupon.ProjectId, &coupon.UserId, &coupon.Amount, &coupon.Description, &coupon.Status, &coupon.Duration, &coupon.CreatedAt)
err = obj.driver.QueryRow(__stmt, __values...).Scan(&coupon.Id, &coupon.ProjectId, &coupon.UserId, &coupon.Amount, &coupon.Description, &coupon.Type, &coupon.Status, &coupon.Duration, &coupon.CreatedAt)
if err == sql.ErrNoRows {
return nil, nil
}
@ -11115,6 +11266,48 @@ func (obj *postgresImpl) Update_Coupon_By_Id(ctx context.Context,
return coupon, nil
}
func (obj *postgresImpl) Update_CouponUsage_By_CouponId_And_Period(ctx context.Context,
coupon_usage_coupon_id CouponUsage_CouponId_Field,
coupon_usage_period CouponUsage_Period_Field,
update CouponUsage_Update_Fields) (
coupon_usage *CouponUsage, err error) {
defer mon.Task()(&ctx)(&err)
var __sets = &__sqlbundle_Hole{}
var __embed_stmt = __sqlbundle_Literals{Join: "", SQLs: []__sqlbundle_SQL{__sqlbundle_Literal("UPDATE coupon_usages SET "), __sets, __sqlbundle_Literal(" WHERE coupon_usages.coupon_id = ? AND coupon_usages.period = ? RETURNING coupon_usages.coupon_id, coupon_usages.amount, coupon_usages.status, coupon_usages.period")}}
__sets_sql := __sqlbundle_Literals{Join: ", "}
var __values []interface{}
var __args []interface{}
if update.Status._set {
__values = append(__values, update.Status.value())
__sets_sql.SQLs = append(__sets_sql.SQLs, __sqlbundle_Literal("status = ?"))
}
if len(__sets_sql.SQLs) == 0 {
return nil, emptyUpdate()
}
__args = append(__args, coupon_usage_coupon_id.value(), coupon_usage_period.value())
__values = append(__values, __args...)
__sets.SQL = __sets_sql
var __stmt = __sqlbundle_Render(obj.dialect, __embed_stmt)
obj.logStmt(__stmt, __values...)
coupon_usage = &CouponUsage{}
err = obj.driver.QueryRow(__stmt, __values...).Scan(&coupon_usage.CouponId, &coupon_usage.Amount, &coupon_usage.Status, &coupon_usage.Period)
if err == sql.ErrNoRows {
return nil, nil
}
if err != nil {
return nil, obj.makeErr(err)
}
return coupon_usage, nil
}
func (obj *postgresImpl) Delete_ValueAttribution_By_ProjectId_And_BucketName(ctx context.Context,
value_attribution_project_id ValueAttribution_ProjectId_Field,
value_attribution_bucket_name ValueAttribution_BucketName_Field) (
@ -12940,6 +13133,7 @@ func (obj *cockroachImpl) Create_Coupon(ctx context.Context,
coupon_user_id Coupon_UserId_Field,
coupon_amount Coupon_Amount_Field,
coupon_description Coupon_Description_Field,
coupon_type Coupon_Type_Field,
coupon_status Coupon_Status_Field,
coupon_duration Coupon_Duration_Field) (
coupon *Coupon, err error) {
@ -12951,17 +13145,18 @@ func (obj *cockroachImpl) Create_Coupon(ctx context.Context,
__user_id_val := coupon_user_id.value()
__amount_val := coupon_amount.value()
__description_val := coupon_description.value()
__type_val := coupon_type.value()
__status_val := coupon_status.value()
__duration_val := coupon_duration.value()
__created_at_val := __now
var __embed_stmt = __sqlbundle_Literal("INSERT INTO coupons ( id, project_id, user_id, amount, description, status, duration, created_at ) VALUES ( ?, ?, ?, ?, ?, ?, ?, ? ) RETURNING coupons.id, coupons.project_id, coupons.user_id, coupons.amount, coupons.description, coupons.status, coupons.duration, coupons.created_at")
var __embed_stmt = __sqlbundle_Literal("INSERT INTO coupons ( id, project_id, user_id, amount, description, type, status, duration, created_at ) VALUES ( ?, ?, ?, ?, ?, ?, ?, ?, ? ) RETURNING coupons.id, coupons.project_id, coupons.user_id, coupons.amount, coupons.description, coupons.type, coupons.status, coupons.duration, coupons.created_at")
var __stmt = __sqlbundle_Render(obj.dialect, __embed_stmt)
obj.logStmt(__stmt, __id_val, __project_id_val, __user_id_val, __amount_val, __description_val, __status_val, __duration_val, __created_at_val)
obj.logStmt(__stmt, __id_val, __project_id_val, __user_id_val, __amount_val, __description_val, __type_val, __status_val, __duration_val, __created_at_val)
coupon = &Coupon{}
err = obj.driver.QueryRow(__stmt, __id_val, __project_id_val, __user_id_val, __amount_val, __description_val, __status_val, __duration_val, __created_at_val).Scan(&coupon.Id, &coupon.ProjectId, &coupon.UserId, &coupon.Amount, &coupon.Description, &coupon.Status, &coupon.Duration, &coupon.CreatedAt)
err = obj.driver.QueryRow(__stmt, __id_val, __project_id_val, __user_id_val, __amount_val, __description_val, __type_val, __status_val, __duration_val, __created_at_val).Scan(&coupon.Id, &coupon.ProjectId, &coupon.UserId, &coupon.Amount, &coupon.Description, &coupon.Type, &coupon.Status, &coupon.Duration, &coupon.CreatedAt)
if err != nil {
return nil, obj.makeErr(err)
}
@ -12970,24 +13165,24 @@ func (obj *cockroachImpl) Create_Coupon(ctx context.Context,
}
func (obj *cockroachImpl) Create_CouponUsage(ctx context.Context,
coupon_usage_id CouponUsage_Id_Field,
coupon_usage_coupon_id CouponUsage_CouponId_Field,
coupon_usage_amount CouponUsage_Amount_Field,
coupon_usage_interval_end CouponUsage_IntervalEnd_Field) (
coupon_usage_status CouponUsage_Status_Field,
coupon_usage_period CouponUsage_Period_Field) (
coupon_usage *CouponUsage, err error) {
defer mon.Task()(&ctx)(&err)
__id_val := coupon_usage_id.value()
__coupon_id_val := coupon_usage_coupon_id.value()
__amount_val := coupon_usage_amount.value()
__interval_end_val := coupon_usage_interval_end.value()
__status_val := coupon_usage_status.value()
__period_val := coupon_usage_period.value()
var __embed_stmt = __sqlbundle_Literal("INSERT INTO coupon_usages ( id, coupon_id, amount, interval_end ) VALUES ( ?, ?, ?, ? ) RETURNING coupon_usages.id, coupon_usages.coupon_id, coupon_usages.amount, coupon_usages.interval_end")
var __embed_stmt = __sqlbundle_Literal("INSERT INTO coupon_usages ( coupon_id, amount, status, period ) VALUES ( ?, ?, ?, ? ) RETURNING coupon_usages.coupon_id, coupon_usages.amount, coupon_usages.status, coupon_usages.period")
var __stmt = __sqlbundle_Render(obj.dialect, __embed_stmt)
obj.logStmt(__stmt, __id_val, __coupon_id_val, __amount_val, __interval_end_val)
obj.logStmt(__stmt, __coupon_id_val, __amount_val, __status_val, __period_val)
coupon_usage = &CouponUsage{}
err = obj.driver.QueryRow(__stmt, __id_val, __coupon_id_val, __amount_val, __interval_end_val).Scan(&coupon_usage.Id, &coupon_usage.CouponId, &coupon_usage.Amount, &coupon_usage.IntervalEnd)
err = obj.driver.QueryRow(__stmt, __coupon_id_val, __amount_val, __status_val, __period_val).Scan(&coupon_usage.CouponId, &coupon_usage.Amount, &coupon_usage.Status, &coupon_usage.Period)
if err != nil {
return nil, obj.makeErr(err)
}
@ -14927,12 +15122,68 @@ func (obj *cockroachImpl) Get_StripecoinpaymentsTxConversionRate_By_TxId(ctx con
}
func (obj *cockroachImpl) Get_Coupon_By_Id(ctx context.Context,
coupon_id Coupon_Id_Field) (
coupon *Coupon, err error) {
defer mon.Task()(&ctx)(&err)
var __embed_stmt = __sqlbundle_Literal("SELECT coupons.id, coupons.project_id, coupons.user_id, coupons.amount, coupons.description, coupons.type, coupons.status, coupons.duration, coupons.created_at FROM coupons WHERE coupons.id = ?")
var __values []interface{}
__values = append(__values, coupon_id.value())
var __stmt = __sqlbundle_Render(obj.dialect, __embed_stmt)
obj.logStmt(__stmt, __values...)
coupon = &Coupon{}
err = obj.driver.QueryRow(__stmt, __values...).Scan(&coupon.Id, &coupon.ProjectId, &coupon.UserId, &coupon.Amount, &coupon.Description, &coupon.Type, &coupon.Status, &coupon.Duration, &coupon.CreatedAt)
if err != nil {
return (*Coupon)(nil), obj.makeErr(err)
}
return coupon, nil
}
func (obj *cockroachImpl) All_Coupon_By_ProjectId_And_Status_Equal_Number_OrderBy_Desc_CreatedAt(ctx context.Context,
coupon_project_id Coupon_ProjectId_Field) (
rows []*Coupon, err error) {
defer mon.Task()(&ctx)(&err)
var __embed_stmt = __sqlbundle_Literal("SELECT coupons.id, coupons.project_id, coupons.user_id, coupons.amount, coupons.description, coupons.type, coupons.status, coupons.duration, coupons.created_at FROM coupons WHERE coupons.project_id = ? AND coupons.status = 0 ORDER BY coupons.created_at DESC")
var __values []interface{}
__values = append(__values, coupon_project_id.value())
var __stmt = __sqlbundle_Render(obj.dialect, __embed_stmt)
obj.logStmt(__stmt, __values...)
__rows, err := obj.driver.Query(__stmt, __values...)
if err != nil {
return nil, obj.makeErr(err)
}
defer __rows.Close()
for __rows.Next() {
coupon := &Coupon{}
err = __rows.Scan(&coupon.Id, &coupon.ProjectId, &coupon.UserId, &coupon.Amount, &coupon.Description, &coupon.Type, &coupon.Status, &coupon.Duration, &coupon.CreatedAt)
if err != nil {
return nil, obj.makeErr(err)
}
rows = append(rows, coupon)
}
if err := __rows.Err(); err != nil {
return nil, obj.makeErr(err)
}
return rows, nil
}
func (obj *cockroachImpl) All_Coupon_By_UserId_OrderBy_Desc_CreatedAt(ctx context.Context,
coupon_user_id Coupon_UserId_Field) (
rows []*Coupon, err error) {
defer mon.Task()(&ctx)(&err)
var __embed_stmt = __sqlbundle_Literal("SELECT coupons.id, coupons.project_id, coupons.user_id, coupons.amount, coupons.description, coupons.status, coupons.duration, coupons.created_at FROM coupons WHERE coupons.user_id = ? ORDER BY coupons.created_at DESC")
var __embed_stmt = __sqlbundle_Literal("SELECT coupons.id, coupons.project_id, coupons.user_id, coupons.amount, coupons.description, coupons.type, coupons.status, coupons.duration, coupons.created_at FROM coupons WHERE coupons.user_id = ? ORDER BY coupons.created_at DESC")
var __values []interface{}
__values = append(__values, coupon_user_id.value())
@ -14948,7 +15199,42 @@ func (obj *cockroachImpl) All_Coupon_By_UserId_OrderBy_Desc_CreatedAt(ctx contex
for __rows.Next() {
coupon := &Coupon{}
err = __rows.Scan(&coupon.Id, &coupon.ProjectId, &coupon.UserId, &coupon.Amount, &coupon.Description, &coupon.Status, &coupon.Duration, &coupon.CreatedAt)
err = __rows.Scan(&coupon.Id, &coupon.ProjectId, &coupon.UserId, &coupon.Amount, &coupon.Description, &coupon.Type, &coupon.Status, &coupon.Duration, &coupon.CreatedAt)
if err != nil {
return nil, obj.makeErr(err)
}
rows = append(rows, coupon)
}
if err := __rows.Err(); err != nil {
return nil, obj.makeErr(err)
}
return rows, nil
}
func (obj *cockroachImpl) All_Coupon_By_UserId_And_Status_OrderBy_Desc_CreatedAt(ctx context.Context,
coupon_user_id Coupon_UserId_Field,
coupon_status Coupon_Status_Field) (
rows []*Coupon, err error) {
defer mon.Task()(&ctx)(&err)
var __embed_stmt = __sqlbundle_Literal("SELECT coupons.id, coupons.project_id, coupons.user_id, coupons.amount, coupons.description, coupons.type, coupons.status, coupons.duration, coupons.created_at FROM coupons WHERE coupons.user_id = ? AND coupons.status = ? ORDER BY coupons.created_at DESC")
var __values []interface{}
__values = append(__values, coupon_user_id.value(), coupon_status.value())
var __stmt = __sqlbundle_Render(obj.dialect, __embed_stmt)
obj.logStmt(__stmt, __values...)
__rows, err := obj.driver.Query(__stmt, __values...)
if err != nil {
return nil, obj.makeErr(err)
}
defer __rows.Close()
for __rows.Next() {
coupon := &Coupon{}
err = __rows.Scan(&coupon.Id, &coupon.ProjectId, &coupon.UserId, &coupon.Amount, &coupon.Description, &coupon.Type, &coupon.Status, &coupon.Duration, &coupon.CreatedAt)
if err != nil {
return nil, obj.makeErr(err)
}
@ -14966,7 +15252,7 @@ func (obj *cockroachImpl) All_Coupon_By_Status_OrderBy_Desc_CreatedAt(ctx contex
rows []*Coupon, err error) {
defer mon.Task()(&ctx)(&err)
var __embed_stmt = __sqlbundle_Literal("SELECT coupons.id, coupons.project_id, coupons.user_id, coupons.amount, coupons.description, coupons.status, coupons.duration, coupons.created_at FROM coupons WHERE coupons.status = ? ORDER BY coupons.created_at DESC")
var __embed_stmt = __sqlbundle_Literal("SELECT coupons.id, coupons.project_id, coupons.user_id, coupons.amount, coupons.description, coupons.type, coupons.status, coupons.duration, coupons.created_at FROM coupons WHERE coupons.status = ? ORDER BY coupons.created_at DESC")
var __values []interface{}
__values = append(__values, coupon_status.value())
@ -14982,7 +15268,7 @@ func (obj *cockroachImpl) All_Coupon_By_Status_OrderBy_Desc_CreatedAt(ctx contex
for __rows.Next() {
coupon := &Coupon{}
err = __rows.Scan(&coupon.Id, &coupon.ProjectId, &coupon.UserId, &coupon.Amount, &coupon.Description, &coupon.Status, &coupon.Duration, &coupon.CreatedAt)
err = __rows.Scan(&coupon.Id, &coupon.ProjectId, &coupon.UserId, &coupon.Amount, &coupon.Description, &coupon.Type, &coupon.Status, &coupon.Duration, &coupon.CreatedAt)
if err != nil {
return nil, obj.makeErr(err)
}
@ -15002,7 +15288,7 @@ func (obj *cockroachImpl) Limited_Coupon_By_CreatedAt_LessOrEqual_And_Status_Ord
rows []*Coupon, err error) {
defer mon.Task()(&ctx)(&err)
var __embed_stmt = __sqlbundle_Literal("SELECT coupons.id, coupons.project_id, coupons.user_id, coupons.amount, coupons.description, coupons.status, coupons.duration, coupons.created_at FROM coupons WHERE coupons.created_at <= ? AND coupons.status = ? ORDER BY coupons.created_at DESC LIMIT ? OFFSET ?")
var __embed_stmt = __sqlbundle_Literal("SELECT coupons.id, coupons.project_id, coupons.user_id, coupons.amount, coupons.description, coupons.type, coupons.status, coupons.duration, coupons.created_at FROM coupons WHERE coupons.created_at <= ? AND coupons.status = ? ORDER BY coupons.created_at DESC LIMIT ? OFFSET ?")
var __values []interface{}
__values = append(__values, coupon_created_at_less_or_equal.value(), coupon_status.value())
@ -15020,7 +15306,7 @@ func (obj *cockroachImpl) Limited_Coupon_By_CreatedAt_LessOrEqual_And_Status_Ord
for __rows.Next() {
coupon := &Coupon{}
err = __rows.Scan(&coupon.Id, &coupon.ProjectId, &coupon.UserId, &coupon.Amount, &coupon.Description, &coupon.Status, &coupon.Duration, &coupon.CreatedAt)
err = __rows.Scan(&coupon.Id, &coupon.ProjectId, &coupon.UserId, &coupon.Amount, &coupon.Description, &coupon.Type, &coupon.Status, &coupon.Duration, &coupon.CreatedAt)
if err != nil {
return nil, obj.makeErr(err)
}
@ -15033,6 +15319,43 @@ func (obj *cockroachImpl) Limited_Coupon_By_CreatedAt_LessOrEqual_And_Status_Ord
}
func (obj *cockroachImpl) Limited_CouponUsage_By_Period_LessOrEqual_And_Status_Equal_Number_OrderBy_Desc_Period(ctx context.Context,
coupon_usage_period_less_or_equal CouponUsage_Period_Field,
limit int, offset int64) (
rows []*CouponUsage, err error) {
defer mon.Task()(&ctx)(&err)
var __embed_stmt = __sqlbundle_Literal("SELECT coupon_usages.coupon_id, coupon_usages.amount, coupon_usages.status, coupon_usages.period FROM coupon_usages WHERE coupon_usages.period <= ? AND coupon_usages.status = 0 ORDER BY coupon_usages.period DESC LIMIT ? OFFSET ?")
var __values []interface{}
__values = append(__values, coupon_usage_period_less_or_equal.value())
__values = append(__values, limit, offset)
var __stmt = __sqlbundle_Render(obj.dialect, __embed_stmt)
obj.logStmt(__stmt, __values...)
__rows, err := obj.driver.Query(__stmt, __values...)
if err != nil {
return nil, obj.makeErr(err)
}
defer __rows.Close()
for __rows.Next() {
coupon_usage := &CouponUsage{}
err = __rows.Scan(&coupon_usage.CouponId, &coupon_usage.Amount, &coupon_usage.Status, &coupon_usage.Period)
if err != nil {
return nil, obj.makeErr(err)
}
rows = append(rows, coupon_usage)
}
if err := __rows.Err(); err != nil {
return nil, obj.makeErr(err)
}
return rows, nil
}
func (obj *cockroachImpl) Update_PendingAudits_By_NodeId(ctx context.Context,
pending_audits_node_id PendingAudits_NodeId_Field,
update PendingAudits_Update_Fields) (
@ -16207,7 +16530,7 @@ func (obj *cockroachImpl) Update_Coupon_By_Id(ctx context.Context,
defer mon.Task()(&ctx)(&err)
var __sets = &__sqlbundle_Hole{}
var __embed_stmt = __sqlbundle_Literals{Join: "", SQLs: []__sqlbundle_SQL{__sqlbundle_Literal("UPDATE coupons SET "), __sets, __sqlbundle_Literal(" WHERE coupons.id = ? RETURNING coupons.id, coupons.project_id, coupons.user_id, coupons.amount, coupons.description, coupons.status, coupons.duration, coupons.created_at")}}
var __embed_stmt = __sqlbundle_Literals{Join: "", SQLs: []__sqlbundle_SQL{__sqlbundle_Literal("UPDATE coupons SET "), __sets, __sqlbundle_Literal(" WHERE coupons.id = ? RETURNING coupons.id, coupons.project_id, coupons.user_id, coupons.amount, coupons.description, coupons.type, coupons.status, coupons.duration, coupons.created_at")}}
__sets_sql := __sqlbundle_Literals{Join: ", "}
var __values []interface{}
@ -16231,7 +16554,7 @@ func (obj *cockroachImpl) Update_Coupon_By_Id(ctx context.Context,
obj.logStmt(__stmt, __values...)
coupon = &Coupon{}
err = obj.driver.QueryRow(__stmt, __values...).Scan(&coupon.Id, &coupon.ProjectId, &coupon.UserId, &coupon.Amount, &coupon.Description, &coupon.Status, &coupon.Duration, &coupon.CreatedAt)
err = obj.driver.QueryRow(__stmt, __values...).Scan(&coupon.Id, &coupon.ProjectId, &coupon.UserId, &coupon.Amount, &coupon.Description, &coupon.Type, &coupon.Status, &coupon.Duration, &coupon.CreatedAt)
if err == sql.ErrNoRows {
return nil, nil
}
@ -16241,6 +16564,48 @@ func (obj *cockroachImpl) Update_Coupon_By_Id(ctx context.Context,
return coupon, nil
}
func (obj *cockroachImpl) Update_CouponUsage_By_CouponId_And_Period(ctx context.Context,
coupon_usage_coupon_id CouponUsage_CouponId_Field,
coupon_usage_period CouponUsage_Period_Field,
update CouponUsage_Update_Fields) (
coupon_usage *CouponUsage, err error) {
defer mon.Task()(&ctx)(&err)
var __sets = &__sqlbundle_Hole{}
var __embed_stmt = __sqlbundle_Literals{Join: "", SQLs: []__sqlbundle_SQL{__sqlbundle_Literal("UPDATE coupon_usages SET "), __sets, __sqlbundle_Literal(" WHERE coupon_usages.coupon_id = ? AND coupon_usages.period = ? RETURNING coupon_usages.coupon_id, coupon_usages.amount, coupon_usages.status, coupon_usages.period")}}
__sets_sql := __sqlbundle_Literals{Join: ", "}
var __values []interface{}
var __args []interface{}
if update.Status._set {
__values = append(__values, update.Status.value())
__sets_sql.SQLs = append(__sets_sql.SQLs, __sqlbundle_Literal("status = ?"))
}
if len(__sets_sql.SQLs) == 0 {
return nil, emptyUpdate()
}
__args = append(__args, coupon_usage_coupon_id.value(), coupon_usage_period.value())
__values = append(__values, __args...)
__sets.SQL = __sets_sql
var __stmt = __sqlbundle_Render(obj.dialect, __embed_stmt)
obj.logStmt(__stmt, __values...)
coupon_usage = &CouponUsage{}
err = obj.driver.QueryRow(__stmt, __values...).Scan(&coupon_usage.CouponId, &coupon_usage.Amount, &coupon_usage.Status, &coupon_usage.Period)
if err == sql.ErrNoRows {
return nil, nil
}
if err != nil {
return nil, obj.makeErr(err)
}
return coupon_usage, nil
}
func (obj *cockroachImpl) Delete_ValueAttribution_By_ProjectId_And_BucketName(ctx context.Context,
value_attribution_project_id ValueAttribution_ProjectId_Field,
value_attribution_bucket_name ValueAttribution_BucketName_Field) (
@ -17239,6 +17604,16 @@ func (rx *Rx) All_CoinpaymentsTransaction_By_UserId_OrderBy_Desc_CreatedAt(ctx c
return tx.All_CoinpaymentsTransaction_By_UserId_OrderBy_Desc_CreatedAt(ctx, coinpayments_transaction_user_id)
}
func (rx *Rx) All_Coupon_By_ProjectId_And_Status_Equal_Number_OrderBy_Desc_CreatedAt(ctx context.Context,
coupon_project_id Coupon_ProjectId_Field) (
rows []*Coupon, err error) {
var tx *Tx
if tx, err = rx.getTx(ctx); err != nil {
return
}
return tx.All_Coupon_By_ProjectId_And_Status_Equal_Number_OrderBy_Desc_CreatedAt(ctx, coupon_project_id)
}
func (rx *Rx) All_Coupon_By_Status_OrderBy_Desc_CreatedAt(ctx context.Context,
coupon_status Coupon_Status_Field) (
rows []*Coupon, err error) {
@ -17249,6 +17624,17 @@ func (rx *Rx) All_Coupon_By_Status_OrderBy_Desc_CreatedAt(ctx context.Context,
return tx.All_Coupon_By_Status_OrderBy_Desc_CreatedAt(ctx, coupon_status)
}
func (rx *Rx) All_Coupon_By_UserId_And_Status_OrderBy_Desc_CreatedAt(ctx context.Context,
coupon_user_id Coupon_UserId_Field,
coupon_status Coupon_Status_Field) (
rows []*Coupon, err error) {
var tx *Tx
if tx, err = rx.getTx(ctx); err != nil {
return
}
return tx.All_Coupon_By_UserId_And_Status_OrderBy_Desc_CreatedAt(ctx, coupon_user_id, coupon_status)
}
func (rx *Rx) All_Coupon_By_UserId_OrderBy_Desc_CreatedAt(ctx context.Context,
coupon_user_id Coupon_UserId_Field) (
rows []*Coupon, err error) {
@ -17663,6 +18049,7 @@ func (rx *Rx) Create_Coupon(ctx context.Context,
coupon_user_id Coupon_UserId_Field,
coupon_amount Coupon_Amount_Field,
coupon_description Coupon_Description_Field,
coupon_type Coupon_Type_Field,
coupon_status Coupon_Status_Field,
coupon_duration Coupon_Duration_Field) (
coupon *Coupon, err error) {
@ -17670,21 +18057,21 @@ func (rx *Rx) Create_Coupon(ctx context.Context,
if tx, err = rx.getTx(ctx); err != nil {
return
}
return tx.Create_Coupon(ctx, coupon_id, coupon_project_id, coupon_user_id, coupon_amount, coupon_description, coupon_status, coupon_duration)
return tx.Create_Coupon(ctx, coupon_id, coupon_project_id, coupon_user_id, coupon_amount, coupon_description, coupon_type, coupon_status, coupon_duration)
}
func (rx *Rx) Create_CouponUsage(ctx context.Context,
coupon_usage_id CouponUsage_Id_Field,
coupon_usage_coupon_id CouponUsage_CouponId_Field,
coupon_usage_amount CouponUsage_Amount_Field,
coupon_usage_interval_end CouponUsage_IntervalEnd_Field) (
coupon_usage_status CouponUsage_Status_Field,
coupon_usage_period CouponUsage_Period_Field) (
coupon_usage *CouponUsage, err error) {
var tx *Tx
if tx, err = rx.getTx(ctx); err != nil {
return
}
return tx.Create_CouponUsage(ctx, coupon_usage_id, coupon_usage_coupon_id, coupon_usage_amount, coupon_usage_interval_end)
return tx.Create_CouponUsage(ctx, coupon_usage_coupon_id, coupon_usage_amount, coupon_usage_status, coupon_usage_period)
}
@ -18218,6 +18605,16 @@ func (rx *Rx) Get_BucketMetainfo_By_ProjectId_And_Name(ctx context.Context,
return tx.Get_BucketMetainfo_By_ProjectId_And_Name(ctx, bucket_metainfo_project_id, bucket_metainfo_name)
}
func (rx *Rx) Get_Coupon_By_Id(ctx context.Context,
coupon_id Coupon_Id_Field) (
coupon *Coupon, err error) {
var tx *Tx
if tx, err = rx.getTx(ctx); err != nil {
return
}
return tx.Get_Coupon_By_Id(ctx, coupon_id)
}
func (rx *Rx) Get_GracefulExitProgress_By_NodeId(ctx context.Context,
graceful_exit_progress_node_id GracefulExitProgress_NodeId_Field) (
graceful_exit_progress *GracefulExitProgress, err error) {
@ -18480,6 +18877,17 @@ func (rx *Rx) Limited_CoinpaymentsTransaction_By_CreatedAt_LessOrEqual_And_Statu
return tx.Limited_CoinpaymentsTransaction_By_CreatedAt_LessOrEqual_And_Status_OrderBy_Desc_CreatedAt(ctx, coinpayments_transaction_created_at_less_or_equal, coinpayments_transaction_status, limit, offset)
}
func (rx *Rx) Limited_CouponUsage_By_Period_LessOrEqual_And_Status_Equal_Number_OrderBy_Desc_Period(ctx context.Context,
coupon_usage_period_less_or_equal CouponUsage_Period_Field,
limit int, offset int64) (
rows []*CouponUsage, err error) {
var tx *Tx
if tx, err = rx.getTx(ctx); err != nil {
return
}
return tx.Limited_CouponUsage_By_Period_LessOrEqual_And_Status_Equal_Number_OrderBy_Desc_Period(ctx, coupon_usage_period_less_or_equal, limit, offset)
}
func (rx *Rx) Limited_Coupon_By_CreatedAt_LessOrEqual_And_Status_OrderBy_Desc_CreatedAt(ctx context.Context,
coupon_created_at_less_or_equal Coupon_CreatedAt_Field,
coupon_status Coupon_Status_Field,
@ -18693,6 +19101,18 @@ func (rx *Rx) Update_CoinpaymentsTransaction_By_Id(ctx context.Context,
return tx.Update_CoinpaymentsTransaction_By_Id(ctx, coinpayments_transaction_id, update)
}
func (rx *Rx) Update_CouponUsage_By_CouponId_And_Period(ctx context.Context,
coupon_usage_coupon_id CouponUsage_CouponId_Field,
coupon_usage_period CouponUsage_Period_Field,
update CouponUsage_Update_Fields) (
coupon_usage *CouponUsage, err error) {
var tx *Tx
if tx, err = rx.getTx(ctx); err != nil {
return
}
return tx.Update_CouponUsage_By_CouponId_And_Period(ctx, coupon_usage_coupon_id, coupon_usage_period, update)
}
func (rx *Rx) Update_Coupon_By_Id(ctx context.Context,
coupon_id Coupon_Id_Field,
update Coupon_Update_Fields) (
@ -18804,10 +19224,19 @@ type Methods interface {
coinpayments_transaction_user_id CoinpaymentsTransaction_UserId_Field) (
rows []*CoinpaymentsTransaction, err error)
All_Coupon_By_ProjectId_And_Status_Equal_Number_OrderBy_Desc_CreatedAt(ctx context.Context,
coupon_project_id Coupon_ProjectId_Field) (
rows []*Coupon, err error)
All_Coupon_By_Status_OrderBy_Desc_CreatedAt(ctx context.Context,
coupon_status Coupon_Status_Field) (
rows []*Coupon, err error)
All_Coupon_By_UserId_And_Status_OrderBy_Desc_CreatedAt(ctx context.Context,
coupon_user_id Coupon_UserId_Field,
coupon_status Coupon_Status_Field) (
rows []*Coupon, err error)
All_Coupon_By_UserId_OrderBy_Desc_CreatedAt(ctx context.Context,
coupon_user_id Coupon_UserId_Field) (
rows []*Coupon, err error)
@ -19022,15 +19451,16 @@ type Methods interface {
coupon_user_id Coupon_UserId_Field,
coupon_amount Coupon_Amount_Field,
coupon_description Coupon_Description_Field,
coupon_type Coupon_Type_Field,
coupon_status Coupon_Status_Field,
coupon_duration Coupon_Duration_Field) (
coupon *Coupon, err error)
Create_CouponUsage(ctx context.Context,
coupon_usage_id CouponUsage_Id_Field,
coupon_usage_coupon_id CouponUsage_CouponId_Field,
coupon_usage_amount CouponUsage_Amount_Field,
coupon_usage_interval_end CouponUsage_IntervalEnd_Field) (
coupon_usage_status CouponUsage_Status_Field,
coupon_usage_period CouponUsage_Period_Field) (
coupon_usage *CouponUsage, err error)
Create_NodesOfflineTime(ctx context.Context,
@ -19275,6 +19705,10 @@ type Methods interface {
bucket_metainfo_name BucketMetainfo_Name_Field) (
bucket_metainfo *BucketMetainfo, err error)
Get_Coupon_By_Id(ctx context.Context,
coupon_id Coupon_Id_Field) (
coupon *Coupon, err error)
Get_GracefulExitProgress_By_NodeId(ctx context.Context,
graceful_exit_progress_node_id GracefulExitProgress_NodeId_Field) (
graceful_exit_progress *GracefulExitProgress, err error)
@ -19387,6 +19821,11 @@ type Methods interface {
limit int, offset int64) (
rows []*CoinpaymentsTransaction, err error)
Limited_CouponUsage_By_Period_LessOrEqual_And_Status_Equal_Number_OrderBy_Desc_Period(ctx context.Context,
coupon_usage_period_less_or_equal CouponUsage_Period_Field,
limit int, offset int64) (
rows []*CouponUsage, err error)
Limited_Coupon_By_CreatedAt_LessOrEqual_And_Status_OrderBy_Desc_CreatedAt(ctx context.Context,
coupon_created_at_less_or_equal Coupon_CreatedAt_Field,
coupon_status Coupon_Status_Field,
@ -19486,6 +19925,12 @@ type Methods interface {
update CoinpaymentsTransaction_Update_Fields) (
coinpayments_transaction *CoinpaymentsTransaction, err error)
Update_CouponUsage_By_CouponId_And_Period(ctx context.Context,
coupon_usage_coupon_id CouponUsage_CouponId_Field,
coupon_usage_period CouponUsage_Period_Field,
update CouponUsage_Update_Fields) (
coupon_usage *CouponUsage, err error)
Update_Coupon_By_Id(ctx context.Context,
coupon_id Coupon_Id_Field,
update Coupon_Update_Fields) (

View File

@ -58,18 +58,18 @@ CREATE TABLE coupons (
user_id bytea NOT NULL,
amount bigint NOT NULL,
description text NOT NULL,
type integer NOT NULL,
status integer NOT NULL,
duration bigint NOT NULL,
created_at timestamp with time zone NOT NULL,
PRIMARY KEY ( id ),
UNIQUE ( project_id )
PRIMARY KEY ( id )
);
CREATE TABLE coupon_usages (
id bytea NOT NULL,
coupon_id bytea NOT NULL,
amount bigint NOT NULL,
interval_end timestamp with time zone NOT NULL,
PRIMARY KEY ( id )
status integer NOT NULL,
period timestamp with time zone NOT NULL,
PRIMARY KEY ( coupon_id, period )
);
CREATE TABLE graceful_exit_progress (
node_id bytea NOT NULL,

View File

@ -42,7 +42,7 @@ type invoiceProjectRecords struct {
}
// Create creates new invoice project records and their coupon usages in the DB.
func (db *invoiceProjectRecords) Create(ctx context.Context, records []stripecoinpayments.CreateProjectRecord, start, end time.Time) (err error) {
func (db *invoiceProjectRecords) Create(ctx context.Context, records []stripecoinpayments.CreateProjectRecord, couponUsages []stripecoinpayments.CouponUsage, start, end time.Time) (err error) {
defer mon.Task()(&ctx)(&err)
return db.db.WithTx(ctx, func(ctx context.Context, tx *dbx.Tx) error {
@ -57,7 +57,7 @@ func (db *invoiceProjectRecords) Create(ctx context.Context, records []stripecoi
dbx.StripecoinpaymentsInvoiceProjectRecord_ProjectId(record.ProjectID[:]),
dbx.StripecoinpaymentsInvoiceProjectRecord_Storage(record.Storage),
dbx.StripecoinpaymentsInvoiceProjectRecord_Egress(record.Egress),
dbx.StripecoinpaymentsInvoiceProjectRecord_Objects(record.Objects),
dbx.StripecoinpaymentsInvoiceProjectRecord_Objects(int64(record.Objects)),
dbx.StripecoinpaymentsInvoiceProjectRecord_PeriodStart(start),
dbx.StripecoinpaymentsInvoiceProjectRecord_PeriodEnd(end),
dbx.StripecoinpaymentsInvoiceProjectRecord_State(invoiceProjectRecordStateUnapplied.Int()),
@ -67,6 +67,18 @@ func (db *invoiceProjectRecords) Create(ctx context.Context, records []stripecoi
}
}
for _, couponUsage := range couponUsages {
_, err = db.db.Create_CouponUsage(
ctx,
dbx.CouponUsage_CouponId(couponUsage.CouponID[:]),
dbx.CouponUsage_Amount(couponUsage.Amount),
dbx.CouponUsage_Status(int(couponUsage.Status)),
dbx.CouponUsage_Period(couponUsage.Period),
)
if err != nil {
return err
}
}
return nil
})
}
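For orientation only, a hedged sketch of how the updated Create signature might be called when preparing a billing period follows; projectRecordsDB, records, couponID, start and end are placeholders, the literal amount is illustrative, and the CouponUsage field names are taken from the conversion code above rather than from any caller in this change.
// Hedged usage sketch (not part of this commit): store the project records and
// the coupon usages computed for one billing period in a single Create call.
usages := []stripecoinpayments.CouponUsage{
	{
		CouponID: couponID, // placeholder: ID of the coupon charged this period
		Amount:   500,      // consumed portion of the coupon value, in cents (illustrative)
		Status:   0,        // 0 is the status the generated "not consumed" queries above select on (assumption)
		Period:   start,    // billing period start reused as the usage period
	},
}
if err := projectRecordsDB.Create(ctx, records, usages, start, end); err != nil {
	return err
}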
@ -92,6 +104,23 @@ func (db *invoiceProjectRecords) Check(ctx context.Context, projectID uuid.UUID,
return stripecoinpayments.ErrProjectRecordExists
}
// Get returns the invoice project record for the specified project and billing period.
func (db *invoiceProjectRecords) Get(ctx context.Context, projectID uuid.UUID, start, end time.Time) (record *stripecoinpayments.ProjectRecord, err error) {
defer mon.Task()(&ctx)(&err)
dbxRecord, err := db.db.Get_StripecoinpaymentsInvoiceProjectRecord_By_ProjectId_And_PeriodStart_And_PeriodEnd(ctx,
dbx.StripecoinpaymentsInvoiceProjectRecord_ProjectId(projectID[:]),
dbx.StripecoinpaymentsInvoiceProjectRecord_PeriodStart(start),
dbx.StripecoinpaymentsInvoiceProjectRecord_PeriodEnd(end),
)
if err != nil {
return nil, err
}
return fromDBXInvoiceProjectRecord(dbxRecord)
}
// Consume consumes the invoice project record.
func (db *invoiceProjectRecords) Consume(ctx context.Context, id uuid.UUID) (err error) {
defer mon.Task()(&ctx)(&err)
@ -157,7 +186,7 @@ func fromDBXInvoiceProjectRecord(dbxRecord *dbx.StripecoinpaymentsInvoiceProject
ProjectID: projectID,
Storage: dbxRecord.Storage,
Egress: dbxRecord.Egress,
Objects: dbxRecord.Objects,
Objects: float64(dbxRecord.Objects),
PeriodStart: dbxRecord.PeriodStart,
PeriodEnd: dbxRecord.PeriodEnd,
}, nil

View File

@ -560,6 +560,41 @@ func (db *satelliteDB) PostgresMigration() *migrate.Migration {
`ALTER TABLE storagenode_bandwidth_rollups ALTER COLUMN allocated SET DEFAULT 0;`,
},
},
{
DB: db.DB,
Description: "Drop coupon related tables",
Version: 75,
Action: migrate.SQL{
`DROP TABLE coupon_usages;`,
`DROP TABLE coupons;`,
},
},
{
DB: db.DB,
Description: "Update coupon related tables",
Version: 76,
Action: migrate.SQL{
`CREATE TABLE coupons (
id bytea NOT NULL,
project_id bytea NOT NULL,
user_id bytea NOT NULL,
amount bigint NOT NULL,
description text NOT NULL,
type integer NOT NULL,
status integer NOT NULL,
duration bigint NOT NULL,
created_at timestamp with time zone NOT NULL,
PRIMARY KEY ( id )
);`,
`CREATE TABLE coupon_usages (
coupon_id bytea NOT NULL,
amount bigint NOT NULL,
status integer NOT NULL,
period timestamp with time zone NOT NULL,
PRIMARY KEY ( coupon_id, period )
);`,
},
},
},
}
}

View File

@ -189,16 +189,16 @@ CREATE TABLE storagenode_storage_tallies
PRIMARY KEY (id)
);
CREATE TABLE users (
id bytea NOT NULL,
email text NOT NULL,
normalized_email text NOT NULL,
full_name text NOT NULL,
short_name text,
password_hash bytea NOT NULL,
status integer NOT NULL,
partner_id bytea,
created_at timestamp with time zone NOT NULL,
PRIMARY KEY ( id )
id bytea NOT NULL,
email text NOT NULL,
normalized_email text NOT NULL,
full_name text NOT NULL,
short_name text,
password_hash bytea NOT NULL,
status integer NOT NULL,
partner_id bytea,
created_at timestamp with time zone NOT NULL,
PRIMARY KEY ( id )
);
CREATE TABLE value_attributions
(
@ -374,10 +374,10 @@ CREATE TABLE coupon_usages
PRIMARY KEY ( id )
);
CREATE TABLE nodes_offline_times (
node_id bytea NOT NULL,
tracked_at timestamp with time zone NOT NULL,
seconds integer NOT NULL,
PRIMARY KEY ( node_id, tracked_at )
node_id bytea NOT NULL,
tracked_at timestamp with time zone NOT NULL,
seconds integer NOT NULL,
PRIMARY KEY ( node_id, tracked_at )
);
CREATE INDEX bucket_name_project_id_interval_start_interval_seconds ON bucket_bandwidth_rollups ( bucket_name, project_id, interval_start, interval_seconds );
@ -470,4 +470,4 @@ INSERT INTO "nodes_offline_times" ("node_id", "tracked_at", "seconds") VALUES (E
INSERT INTO "nodes_offline_times" ("node_id", "tracked_at", "seconds") VALUES (E'\\153\\313\\233\\074\\327\\177\\136\\070\\346\\001'::bytea, '2017-06-01 09:28:24.267934+00', 100);
INSERT INTO "nodes_offline_times" ("node_id", "tracked_at", "seconds") VALUES (E'\\006\\223\\250R\\221\\005\\365\\377v>0\\266\\365\\216\\255?\\347\\244\\371?2\\264\\262\\230\\007<\\001\\262\\263\\237\\247n'::bytea, '2019-06-01 09:28:24.267934+00', 3600);
-- NEW DATA --
INSERT INTO "storagenode_bandwidth_rollups" ("storagenode_id", "interval_start", "interval_seconds", "action", "settled") VALUES (E'\\006\\223\\250R\\221\\005\\365\\377v>0\\266\\365\\216\\255?\\347\\244\\371?2\\264\\262\\230\\007<\\001\\262\\263\\237\\247n', '2020-01-11 08:00:00.000000+00', 3600, 1, 2024);
INSERT INTO "storagenode_bandwidth_rollups" ("storagenode_id", "interval_start", "interval_seconds", "action", "settled") VALUES (E'\\006\\223\\250R\\221\\005\\365\\377v>0\\266\\365\\216\\255?\\347\\244\\371?2\\264\\262\\230\\007<\\001\\262\\263\\237\\247n', '2020-01-11 08:00:00.000000+00', 3600, 1, 2024);

View File

@ -0,0 +1,453 @@
-- AUTOGENERATED BY gopkg.in/spacemonkeygo/dbx.v1
-- DO NOT EDIT
CREATE TABLE accounting_rollups
(
id bigserial NOT NULL,
node_id bytea NOT NULL,
start_time timestamp with time zone NOT NULL,
put_total bigint NOT NULL,
get_total bigint NOT NULL,
get_audit_total bigint NOT NULL,
get_repair_total bigint NOT NULL,
put_repair_total bigint NOT NULL,
at_rest_total double precision NOT NULL,
PRIMARY KEY (id)
);
CREATE TABLE accounting_timestamps
(
name text NOT NULL,
value timestamp with time zone NOT NULL,
PRIMARY KEY (name)
);
CREATE TABLE bucket_bandwidth_rollups
(
bucket_name bytea NOT NULL,
project_id bytea NOT NULL,
interval_start timestamp NOT NULL,
interval_seconds integer NOT NULL,
action integer NOT NULL,
inline bigint NOT NULL,
allocated bigint NOT NULL,
settled bigint NOT NULL,
PRIMARY KEY (bucket_name, project_id, interval_start, action)
);
CREATE TABLE bucket_storage_tallies
(
bucket_name bytea NOT NULL,
project_id bytea NOT NULL,
interval_start timestamp NOT NULL,
inline bigint NOT NULL,
remote bigint NOT NULL,
remote_segments_count integer NOT NULL,
inline_segments_count integer NOT NULL,
object_count integer NOT NULL,
metadata_size bigint NOT NULL,
PRIMARY KEY (bucket_name, project_id, interval_start)
);
CREATE TABLE injuredsegments
(
path bytea NOT NULL,
data bytea NOT NULL,
attempted timestamp,
PRIMARY KEY (path)
);
CREATE TABLE irreparabledbs
(
segmentpath bytea NOT NULL,
segmentdetail bytea NOT NULL,
pieces_lost_count bigint NOT NULL,
seg_damaged_unix_sec bigint NOT NULL,
repair_attempt_count bigint NOT NULL,
PRIMARY KEY (segmentpath)
);
CREATE TABLE nodes
(
id bytea NOT NULL,
address text NOT NULL,
last_net text NOT NULL,
protocol integer NOT NULL,
type integer NOT NULL,
email text NOT NULL,
wallet text NOT NULL,
free_bandwidth bigint NOT NULL,
free_disk bigint NOT NULL,
piece_count bigint NOT NULL,
major bigint NOT NULL,
minor bigint NOT NULL,
patch bigint NOT NULL,
hash text NOT NULL,
timestamp timestamp with time zone NOT NULL,
release boolean NOT NULL,
latency_90 bigint NOT NULL,
audit_success_count bigint NOT NULL,
total_audit_count bigint NOT NULL,
uptime_success_count bigint NOT NULL,
total_uptime_count bigint NOT NULL,
created_at timestamp with time zone NOT NULL,
updated_at timestamp with time zone NOT NULL,
last_contact_success timestamp with time zone NOT NULL,
last_contact_failure timestamp with time zone NOT NULL,
contained boolean NOT NULL,
disqualified timestamp with time zone,
audit_reputation_alpha double precision NOT NULL,
audit_reputation_beta double precision NOT NULL,
uptime_reputation_alpha double precision NOT NULL,
uptime_reputation_beta double precision NOT NULL,
exit_initiated_at timestamp,
exit_loop_completed_at timestamp,
exit_finished_at timestamp,
exit_success boolean NOT NULL,
PRIMARY KEY (id)
);
CREATE TABLE offers
(
id serial NOT NULL,
name text NOT NULL,
description text NOT NULL,
award_credit_in_cents integer NOT NULL,
invitee_credit_in_cents integer NOT NULL,
award_credit_duration_days integer,
invitee_credit_duration_days integer,
redeemable_cap integer,
expires_at timestamp with time zone NOT NULL,
created_at timestamp with time zone NOT NULL,
status integer NOT NULL,
type integer NOT NULL,
PRIMARY KEY (id)
);
CREATE TABLE peer_identities
(
node_id bytea NOT NULL,
leaf_serial_number bytea NOT NULL,
chain bytea NOT NULL,
updated_at timestamp with time zone NOT NULL,
PRIMARY KEY (node_id)
);
CREATE TABLE pending_audits
(
node_id bytea NOT NULL,
piece_id bytea NOT NULL,
stripe_index bigint NOT NULL,
share_size bigint NOT NULL,
expected_share_hash bytea NOT NULL,
reverify_count bigint NOT NULL,
path bytea NOT NULL,
PRIMARY KEY (node_id)
);
CREATE TABLE projects
(
id bytea NOT NULL,
name text NOT NULL,
description text NOT NULL,
usage_limit bigint NOT NULL,
partner_id bytea,
owner_id bytea NOT NULL,
created_at timestamp with time zone NOT NULL,
PRIMARY KEY (id)
);
CREATE TABLE registration_tokens
(
secret bytea NOT NULL,
owner_id bytea,
project_limit integer NOT NULL,
created_at timestamp with time zone NOT NULL,
PRIMARY KEY (secret),
UNIQUE (owner_id)
);
CREATE TABLE reset_password_tokens
(
secret bytea NOT NULL,
owner_id bytea NOT NULL,
created_at timestamp with time zone NOT NULL,
PRIMARY KEY (secret),
UNIQUE (owner_id)
);
CREATE TABLE serial_numbers
(
id serial NOT NULL,
serial_number bytea NOT NULL,
bucket_id bytea NOT NULL,
expires_at timestamp NOT NULL,
PRIMARY KEY (id)
);
CREATE TABLE storagenode_bandwidth_rollups
(
storagenode_id bytea NOT NULL,
interval_start timestamp NOT NULL,
interval_seconds integer NOT NULL,
action integer NOT NULL,
allocated bigint DEFAULT 0,
settled bigint NOT NULL,
PRIMARY KEY (storagenode_id, interval_start, action)
);
CREATE TABLE storagenode_storage_tallies
(
id bigserial NOT NULL,
node_id bytea NOT NULL,
interval_end_time timestamp with time zone NOT NULL,
data_total double precision NOT NULL,
PRIMARY KEY (id)
);
CREATE TABLE users (
id bytea NOT NULL,
email text NOT NULL,
normalized_email text NOT NULL,
full_name text NOT NULL,
short_name text,
password_hash bytea NOT NULL,
status integer NOT NULL,
partner_id bytea,
created_at timestamp with time zone NOT NULL,
PRIMARY KEY ( id )
);
CREATE TABLE value_attributions
(
project_id bytea NOT NULL,
bucket_name bytea NOT NULL,
partner_id bytea NOT NULL,
last_updated timestamp NOT NULL,
PRIMARY KEY (project_id, bucket_name)
);
CREATE TABLE api_keys
(
id bytea NOT NULL,
project_id bytea NOT NULL REFERENCES projects (id) ON DELETE CASCADE,
head bytea NOT NULL,
name text NOT NULL,
secret bytea NOT NULL,
partner_id bytea,
created_at timestamp with time zone NOT NULL,
PRIMARY KEY (id),
UNIQUE (head),
UNIQUE (name, project_id)
);
CREATE TABLE bucket_metainfos
(
id bytea NOT NULL,
project_id bytea NOT NULL REFERENCES projects (id),
name bytea NOT NULL,
partner_id bytea,
path_cipher integer NOT NULL,
created_at timestamp with time zone NOT NULL,
default_segment_size integer NOT NULL,
default_encryption_cipher_suite integer NOT NULL,
default_encryption_block_size integer NOT NULL,
default_redundancy_algorithm integer NOT NULL,
default_redundancy_share_size integer NOT NULL,
default_redundancy_required_shares integer NOT NULL,
default_redundancy_repair_shares integer NOT NULL,
default_redundancy_optimal_shares integer NOT NULL,
default_redundancy_total_shares integer NOT NULL,
PRIMARY KEY (id),
UNIQUE (name, project_id)
);
CREATE TABLE project_invoice_stamps
(
project_id bytea NOT NULL REFERENCES projects (id) ON DELETE CASCADE,
invoice_id bytea NOT NULL,
start_date timestamp with time zone NOT NULL,
end_date timestamp with time zone NOT NULL,
created_at timestamp with time zone NOT NULL,
PRIMARY KEY (project_id, start_date, end_date),
UNIQUE (invoice_id)
);
CREATE TABLE project_members
(
member_id bytea NOT NULL REFERENCES users (id) ON DELETE CASCADE,
project_id bytea NOT NULL REFERENCES projects (id) ON DELETE CASCADE,
created_at timestamp with time zone NOT NULL,
PRIMARY KEY (member_id, project_id)
);
CREATE TABLE used_serials
(
serial_number_id integer NOT NULL REFERENCES serial_numbers (id) ON DELETE CASCADE,
storage_node_id bytea NOT NULL,
PRIMARY KEY (serial_number_id, storage_node_id)
);
CREATE TABLE user_credits
(
id serial NOT NULL,
user_id bytea NOT NULL REFERENCES users (id) ON DELETE CASCADE,
offer_id integer NOT NULL REFERENCES offers (id),
referred_by bytea REFERENCES users (id) ON DELETE SET NULL,
type text NOT NULL,
credits_earned_in_cents integer NOT NULL,
credits_used_in_cents integer NOT NULL,
expires_at timestamp with time zone NOT NULL,
created_at timestamp with time zone NOT NULL,
PRIMARY KEY (id),
UNIQUE( id, offer_id )
);
CREATE TABLE graceful_exit_progress
(
node_id bytea NOT NULL,
bytes_transferred bigint NOT NULL,
pieces_transferred bigint NOT NULL,
pieces_failed bigint NOT NULL,
updated_at timestamp NOT NULL,
PRIMARY KEY ( node_id )
);
CREATE TABLE graceful_exit_transfer_queue
(
node_id bytea NOT NULL,
path bytea NOT NULL,
piece_num integer NOT NULL,
root_piece_id bytea,
durability_ratio double precision NOT NULL,
queued_at timestamp NOT NULL,
requested_at timestamp,
last_failed_at timestamp,
last_failed_code integer,
failed_count integer,
finished_at timestamp,
order_limit_send_count integer NOT NULL,
PRIMARY KEY ( node_id, path, piece_num )
);
CREATE TABLE stripe_customers
(
user_id bytea NOT NULL,
customer_id text NOT NULL,
created_at timestamp with time zone NOT NULL,
PRIMARY KEY ( user_id ),
UNIQUE ( customer_id )
);
CREATE TABLE coinpayments_transactions
(
id text NOT NULL,
user_id bytea NOT NULL,
address text NOT NULL,
amount bytea NOT NULL,
received bytea NOT NULL,
status integer NOT NULL,
key text NOT NULL,
timeout integer NOT NULL,
created_at timestamp with time zone NOT NULL,
PRIMARY KEY ( id )
);
CREATE TABLE stripecoinpayments_apply_balance_intents
(
tx_id text NOT NULL REFERENCES coinpayments_transactions( id ) ON DELETE CASCADE,
state integer NOT NULL,
created_at timestamp with time zone NOT NULL,
PRIMARY KEY ( tx_id )
);
CREATE TABLE stripecoinpayments_invoice_project_records
(
id bytea NOT NULL,
project_id bytea NOT NULL,
storage double precision NOT NULL,
egress bigint NOT NULL,
objects bigint NOT NULL,
period_start timestamp with time zone NOT NULL,
period_end timestamp with time zone NOT NULL,
state integer NOT NULL,
created_at timestamp with time zone NOT NULL,
PRIMARY KEY ( id ),
UNIQUE ( project_id, period_start, period_end )
);
CREATE TABLE stripecoinpayments_tx_conversion_rates
(
tx_id text NOT NULL,
rate bytea NOT NULL,
created_at timestamp with time zone NOT NULL,
PRIMARY KEY ( tx_id )
);
CREATE TABLE nodes_offline_times
(
node_id bytea NOT NULL,
tracked_at timestamp with time zone NOT NULL,
seconds integer NOT NULL,
PRIMARY KEY ( node_id, tracked_at )
);
CREATE INDEX bucket_name_project_id_interval_start_interval_seconds ON bucket_bandwidth_rollups ( bucket_name, project_id, interval_start, interval_seconds );
CREATE INDEX node_last_ip ON nodes ( last_net );
CREATE UNIQUE INDEX serial_number ON serial_numbers ( serial_number );
CREATE INDEX serial_numbers_expires_at_index ON serial_numbers ( expires_at );
CREATE INDEX storagenode_id_interval_start_interval_seconds ON storagenode_bandwidth_rollups ( storagenode_id, interval_start, interval_seconds );
CREATE UNIQUE INDEX credits_earned_user_id_offer_id ON user_credits (id, offer_id);
CREATE INDEX nodes_offline_times_node_id_index ON nodes_offline_times ( node_id );
INSERT INTO "accounting_rollups"("id", "node_id", "start_time", "put_total", "get_total", "get_audit_total", "get_repair_total", "put_repair_total", "at_rest_total") VALUES (1, E'\\367M\\177\\251]t/\\022\\256\\214\\265\\025\\224\\204:\\217\\212\\0102<\\321\\374\\020&\\271Qc\\325\\261\\354\\246\\233'::bytea, '2019-02-09 00:00:00+00', 1000, 2000, 3000, 4000, 0, 5000);
INSERT INTO "accounting_timestamps" VALUES ('LastAtRestTally', '0001-01-01 00:00:00+00');
INSERT INTO "accounting_timestamps" VALUES ('LastRollup', '0001-01-01 00:00:00+00');
INSERT INTO "accounting_timestamps" VALUES ('LastBandwidthTally', '0001-01-01 00:00:00+00');
INSERT INTO "nodes"("id", "address", "last_net", "protocol", "type", "email", "wallet", "free_bandwidth", "free_disk", "piece_count", "major", "minor", "patch", "hash", "timestamp", "release","latency_90", "audit_success_count", "total_audit_count", "uptime_success_count", "total_uptime_count", "created_at", "updated_at", "last_contact_success", "last_contact_failure", "contained", "disqualified", "audit_reputation_alpha", "audit_reputation_beta", "uptime_reputation_alpha", "uptime_reputation_beta", "exit_success") VALUES (E'\\153\\313\\233\\074\\327\\177\\136\\070\\346\\001', '127.0.0.1:55516', '', 0, 4, '', '', -1, -1, 0, 0, 1, 0, '', 'epoch', false, 0, 0, 5, 0, 5, '2019-02-14 08:07:31.028103+00', '2019-02-14 08:07:31.108963+00', 'epoch', 'epoch', false, NULL, 50, 0, 100, 5, false);
INSERT INTO "nodes"("id", "address", "last_net", "protocol", "type", "email", "wallet", "free_bandwidth", "free_disk", "piece_count", "major", "minor", "patch", "hash", "timestamp", "release","latency_90", "audit_success_count", "total_audit_count", "uptime_success_count", "total_uptime_count", "created_at", "updated_at", "last_contact_success", "last_contact_failure", "contained", "disqualified", "audit_reputation_alpha", "audit_reputation_beta", "uptime_reputation_alpha", "uptime_reputation_beta", "exit_success") VALUES (E'\\006\\223\\250R\\221\\005\\365\\377v>0\\266\\365\\216\\255?\\347\\244\\371?2\\264\\262\\230\\007<\\001\\262\\263\\237\\247n', '127.0.0.1:55518', '', 0, 4, '', '', -1, -1, 0, 0, 1, 0, '', 'epoch', false, 0, 0, 0, 3, 3, '2019-02-14 08:07:31.028103+00', '2019-02-14 08:07:31.108963+00', 'epoch', 'epoch', false, NULL, 50, 0, 100, 0, false);
INSERT INTO "nodes"("id", "address", "last_net", "protocol", "type", "email", "wallet", "free_bandwidth", "free_disk", "piece_count", "major", "minor", "patch", "hash", "timestamp", "release","latency_90", "audit_success_count", "total_audit_count", "uptime_success_count", "total_uptime_count", "created_at", "updated_at", "last_contact_success", "last_contact_failure", "contained", "disqualified", "audit_reputation_alpha", "audit_reputation_beta", "uptime_reputation_alpha", "uptime_reputation_beta", "exit_success") VALUES (E'\\363\\342\\363\\371>+F\\256\\263\\300\\273|\\342N\\347\\014', '127.0.0.1:55517', '', 0, 4, '', '', -1, -1, 0, 0, 1, 0, '', 'epoch', false, 0, 0, 0, 0, 0, '2019-02-14 08:07:31.028103+00', '2019-02-14 08:07:31.108963+00', 'epoch', 'epoch', false, NULL, 50, 0, 100, 0, false);
INSERT INTO "nodes"("id", "address", "last_net", "protocol", "type", "email", "wallet", "free_bandwidth", "free_disk", "piece_count", "major", "minor", "patch", "hash", "timestamp", "release","latency_90", "audit_success_count", "total_audit_count", "uptime_success_count", "total_uptime_count", "created_at", "updated_at", "last_contact_success", "last_contact_failure", "contained", "disqualified", "audit_reputation_alpha", "audit_reputation_beta", "uptime_reputation_alpha", "uptime_reputation_beta", "exit_success") VALUES (E'\\363\\342\\363\\371>+F\\256\\263\\300\\273|\\342N\\347\\015', '127.0.0.1:55519', '', 0, 4, '', '', -1, -1, 0, 0, 1, 0, '', 'epoch', false, 0, 1, 2, 1, 2, '2019-02-14 08:07:31.028103+00', '2019-02-14 08:07:31.108963+00', 'epoch', 'epoch', false, NULL, 50, 0, 100, 1, false);
INSERT INTO "nodes"("id", "address", "last_net", "protocol", "type", "email", "wallet", "free_bandwidth", "free_disk", "piece_count", "major", "minor", "patch", "hash", "timestamp", "release","latency_90", "audit_success_count", "total_audit_count", "uptime_success_count", "total_uptime_count", "created_at", "updated_at", "last_contact_success", "last_contact_failure", "contained", "disqualified", "audit_reputation_alpha", "audit_reputation_beta", "uptime_reputation_alpha", "uptime_reputation_beta", "exit_success") VALUES (E'\\363\\342\\363\\371>+F\\256\\263\\300\\273|\\342N\\347\\016', '127.0.0.1:55520', '', 0, 4, '', '', -1, -1, 0, 0, 1, 0, '', 'epoch', false, 0, 300, 400, 300, 400, '2019-02-14 08:07:31.028103+00', '2019-02-14 08:07:31.108963+00', 'epoch', 'epoch', false, NULL, 300, 0, 300, 100, false);
INSERT INTO "users"("id", "full_name", "short_name", "email", "normalized_email", "password_hash", "status", "partner_id", "created_at") VALUES (E'\\363\\311\\033w\\222\\303Ci\\265\\343U\\303\\312\\204",'::bytea, 'Noahson', 'William', '1email1@mail.test', '1EMAIL1@MAIL.TEST', E'some_readable_hash'::bytea, 1, NULL, '2019-02-14 08:28:24.614594+00');
INSERT INTO "projects"("id", "name", "description", "usage_limit", "partner_id", "owner_id", "created_at") VALUES (E'\\022\\217/\\014\\376!K\\023\\276\\031\\311}m\\236\\205\\300'::bytea, 'ProjectName', 'projects description', 0, NULL, E'\\363\\311\\033w\\222\\303Ci\\265\\343U\\303\\312\\204",'::bytea, '2019-02-14 08:28:24.254934+00');
INSERT INTO "projects"("id", "name", "description", "usage_limit", "partner_id", "owner_id", "created_at") VALUES (E'\\363\\342\\363\\371>+F\\256\\263\\300\\273|\\342N\\347\\014'::bytea, 'projName1', 'Test project 1', 0, NULL, E'\\363\\311\\033w\\222\\303Ci\\265\\343U\\303\\312\\204",'::bytea, '2019-02-14 08:28:24.636949+00');
INSERT INTO "project_members"("member_id", "project_id", "created_at") VALUES (E'\\363\\311\\033w\\222\\303Ci\\265\\343U\\303\\312\\204",'::bytea, E'\\363\\342\\363\\371>+F\\256\\263\\300\\273|\\342N\\347\\014'::bytea, '2019-02-14 08:28:24.677953+00');
INSERT INTO "project_members"("member_id", "project_id", "created_at") VALUES (E'\\363\\311\\033w\\222\\303Ci\\265\\343U\\303\\312\\204",'::bytea, E'\\022\\217/\\014\\376!K\\023\\276\\031\\311}m\\236\\205\\300'::bytea, '2019-02-13 08:28:24.677953+00');
INSERT INTO "irreparabledbs" ("segmentpath", "segmentdetail", "pieces_lost_count", "seg_damaged_unix_sec", "repair_attempt_count") VALUES ('\x49616d5365676d656e746b6579696e666f30', '\x49616d5365676d656e7464657461696c696e666f30', 10, 1550159554, 10);
INSERT INTO "injuredsegments" ("path", "data") VALUES ('0', '\x0a0130120100');
INSERT INTO "injuredsegments" ("path", "data") VALUES ('here''s/a/great/path', '\x0a136865726527732f612f67726561742f70617468120a0102030405060708090a');
INSERT INTO "injuredsegments" ("path", "data") VALUES ('yet/another/cool/path', '\x0a157965742f616e6f746865722f636f6f6c2f70617468120a0102030405060708090a');
INSERT INTO "injuredsegments" ("path", "data") VALUES ('so/many/iconic/paths/to/choose/from', '\x0a23736f2f6d616e792f69636f6e69632f70617468732f746f2f63686f6f73652f66726f6d120a0102030405060708090a');
INSERT INTO "registration_tokens" ("secret", "owner_id", "project_limit", "created_at") VALUES (E'\\070\\127\\144\\013\\332\\344\\102\\376\\306\\056\\303\\130\\106\\132\\321\\276\\321\\274\\170\\264\\054\\333\\221\\116\\154\\221\\335\\070\\220\\146\\344\\216'::bytea, null, 1, '2019-02-14 08:28:24.677953+00');
INSERT INTO "serial_numbers" ("id", "serial_number", "bucket_id", "expires_at") VALUES (1, E'0123456701234567'::bytea, E'\\363\\342\\363\\371>+F\\256\\263\\300\\273|\\342N\\347\\014/testbucket'::bytea, '2019-03-06 08:28:24.677953+00');
INSERT INTO "used_serials" ("serial_number_id", "storage_node_id") VALUES (1, E'\\006\\223\\250R\\221\\005\\365\\377v>0\\266\\365\\216\\255?\\347\\244\\371?2\\264\\262\\230\\007<\\001\\262\\263\\237\\247n');
INSERT INTO "storagenode_bandwidth_rollups" ("storagenode_id", "interval_start", "interval_seconds", "action", "allocated", "settled") VALUES (E'\\006\\223\\250R\\221\\005\\365\\377v>0\\266\\365\\216\\255?\\347\\244\\371?2\\264\\262\\230\\007<\\001\\262\\263\\237\\247n', '2019-03-06 08:00:00.000000+00', 3600, 1, 1024, 2024);
INSERT INTO "storagenode_storage_tallies" VALUES (1, E'\\3510\\323\\225"~\\036<\\342\\330m\\0253Jhr\\246\\233K\\246#\\2303\\351\\256\\275j\\212UM\\362\\207', '2019-02-14 08:16:57.812849+00', 1000);
INSERT INTO "bucket_bandwidth_rollups" ("bucket_name", "project_id", "interval_start", "interval_seconds", "action", "inline", "allocated", "settled") VALUES (E'testbucket'::bytea, E'\\363\\342\\363\\371>+F\\256\\263\\300\\273|\\342N\\347\\014'::bytea,'2019-03-06 08:00:00.000000+00', 3600, 1, 1024, 2024, 3024);
INSERT INTO "bucket_storage_tallies" ("bucket_name", "project_id", "interval_start", "inline", "remote", "remote_segments_count", "inline_segments_count", "object_count", "metadata_size") VALUES (E'testbucket'::bytea, E'\\363\\342\\363\\371>+F\\256\\263\\300\\273|\\342N\\347\\014'::bytea,'2019-03-06 08:00:00.000000+00', 4024, 5024, 0, 0, 0, 0);
INSERT INTO "bucket_bandwidth_rollups" ("bucket_name", "project_id", "interval_start", "interval_seconds", "action", "inline", "allocated", "settled") VALUES (E'testbucket'::bytea, E'\\170\\160\\157\\370\\274\\366\\113\\364\\272\\235\\301\\243\\321\\102\\321\\136'::bytea,'2019-03-06 08:00:00.000000+00', 3600, 1, 1024, 2024, 3024);
INSERT INTO "bucket_storage_tallies" ("bucket_name", "project_id", "interval_start", "inline", "remote", "remote_segments_count", "inline_segments_count", "object_count", "metadata_size") VALUES (E'testbucket'::bytea, E'\\170\\160\\157\\370\\274\\366\\113\\364\\272\\235\\301\\243\\321\\102\\321\\136'::bytea,'2019-03-06 08:00:00.000000+00', 4024, 5024, 0, 0, 0, 0);
INSERT INTO "reset_password_tokens" ("secret", "owner_id", "created_at") VALUES (E'\\070\\127\\144\\013\\332\\344\\102\\376\\306\\056\\303\\130\\106\\132\\321\\276\\321\\274\\170\\264\\054\\333\\221\\116\\154\\221\\335\\070\\220\\146\\344\\216'::bytea, E'\\363\\311\\033w\\222\\303Ci\\265\\343U\\303\\312\\204",'::bytea, '2019-05-08 08:28:24.677953+00');
INSERT INTO "offers" ("id", "name", "description", "award_credit_in_cents", "invitee_credit_in_cents", "expires_at", "created_at", "status", "type", "award_credit_duration_days", "invitee_credit_duration_days") VALUES (1, 'Default referral offer', 'Is active when no other active referral offer', 300, 600, '2119-03-14 08:28:24.636949+00', '2019-07-14 08:28:24.636949+00', 1, 2, 365, 14);
INSERT INTO "offers" ("id", "name", "description", "award_credit_in_cents", "invitee_credit_in_cents", "expires_at", "created_at", "status", "type", "award_credit_duration_days", "invitee_credit_duration_days") VALUES (2, 'Default free credit offer', 'Is active when no active free credit offer', 0, 300, '2119-03-14 08:28:24.636949+00', '2019-07-14 08:28:24.636949+00', 1, 1, NULL, 14);
INSERT INTO "api_keys" ("id", "project_id", "head", "name", "secret", "partner_id", "created_at") VALUES (E'\\334/\\302;\\225\\355O\\323\\276f\\247\\354/6\\241\\033'::bytea, E'\\022\\217/\\014\\376!K\\023\\276\\031\\311}m\\236\\205\\300'::bytea, E'\\111\\142\\147\\304\\132\\375\\070\\163\\270\\160\\251\\370\\126\\063\\351\\037\\257\\071\\143\\375\\351\\320\\253\\232\\220\\260\\075\\173\\306\\307\\115\\136'::bytea, 'key 2', E'\\254\\011\\315\\333\\273\\365\\001\\071\\024\\154\\253\\332\\301\\216\\361\\074\\221\\367\\251\\231\\274\\333\\300\\367\\001\\272\\327\\111\\315\\123\\042\\016'::bytea, NULL, '2019-02-14 08:28:24.267934+00');
INSERT INTO "project_invoice_stamps" ("project_id", "invoice_id", "start_date", "end_date", "created_at") VALUES (E'\\022\\217/\\014\\376!K\\023\\276\\031\\311}m\\236\\205\\300'::bytea, E'\\363\\311\\033w\\222\\303,'::bytea, '2019-06-01 08:28:24.267934+00', '2019-06-29 08:28:24.267934+00', '2019-06-01 08:28:24.267934+00');
INSERT INTO "value_attributions" ("project_id", "bucket_name", "partner_id", "last_updated") VALUES (E'\\363\\311\\033w\\222\\303Ci\\265\\343U\\303\\312\\204",'::bytea, E''::bytea, E'\\363\\342\\363\\371>+F\\256\\263\\300\\273|\\342N\\347\\014'::bytea,'2019-02-14 08:07:31.028103+00');
INSERT INTO "user_credits" ("id", "user_id", "offer_id", "referred_by", "credits_earned_in_cents", "credits_used_in_cents", "type", "expires_at", "created_at") VALUES (1, E'\\363\\311\\033w\\222\\303Ci\\265\\343U\\303\\312\\204",'::bytea, 1, E'\\363\\311\\033w\\222\\303Ci\\265\\343U\\303\\312\\204",'::bytea, 200, 0, 'invalid', '2019-10-01 08:28:24.267934+00', '2019-06-01 08:28:24.267934+00');
INSERT INTO "bucket_metainfos" ("id", "project_id", "name", "partner_id", "created_at", "path_cipher", "default_segment_size", "default_encryption_cipher_suite", "default_encryption_block_size", "default_redundancy_algorithm", "default_redundancy_share_size", "default_redundancy_required_shares", "default_redundancy_repair_shares", "default_redundancy_optimal_shares", "default_redundancy_total_shares") VALUES (E'\\334/\\302;\\225\\355O\\323\\276f\\247\\354/6\\241\\033'::bytea, E'\\022\\217/\\014\\376!K\\023\\276\\031\\311}m\\236\\205\\300'::bytea, E'testbucketuniquename'::bytea, NULL, '2019-06-14 08:28:24.677953+00', 1, 65536, 1, 8192, 1, 4096, 4, 6, 8, 10);
INSERT INTO "pending_audits" ("node_id", "piece_id", "stripe_index", "share_size", "expected_share_hash", "reverify_count", "path") VALUES (E'\\153\\313\\233\\074\\327\\177\\136\\070\\346\\001'::bytea, E'\\363\\311\\033w\\222\\303Ci\\265\\343U\\303\\312\\204",'::bytea, 5, 1024, E'\\070\\127\\144\\013\\332\\344\\102\\376\\306\\056\\303\\130\\106\\132\\321\\276\\321\\274\\170\\264\\054\\333\\221\\116\\154\\221\\335\\070\\220\\146\\344\\216'::bytea, 1, 'not null');
INSERT INTO "peer_identities" VALUES (E'\\334/\\302;\\225\\355O\\323\\276f\\247\\354/6\\241\\033'::bytea, E'\\363\\342\\363\\371>+F\\256\\263\\300\\273|\\342N\\347\\014'::bytea, E'\\363\\311\\033w\\222\\303Ci\\265\\343U\\303\\312\\204",'::bytea, '2019-02-14 08:07:31.335028+00');
INSERT INTO "graceful_exit_progress" ("node_id", "bytes_transferred", "pieces_transferred", "pieces_failed", "updated_at") VALUES (E'\\363\\342\\363\\371>+F\\256\\263\\300\\273|\\342N\\347\\016', 1000000000000000, 0, 0, '2019-09-12 10:07:31.028103');
INSERT INTO "graceful_exit_transfer_queue" ("node_id", "path", "piece_num", "durability_ratio", "queued_at", "requested_at", "last_failed_at", "last_failed_code", "failed_count", "finished_at", "order_limit_send_count") VALUES (E'\\363\\342\\363\\371>+F\\256\\263\\300\\273|\\342N\\347\\016', E'f8419768-5baa-4901-b3ba-62808013ec45/s0/test3/\\240\\243\\223n \\334~b}\\2624)\\250m\\201\\202\\235\\276\\361\\3304\\323\\352\\311\\361\\353;\\326\\311', 8, 1.0, '2019-09-12 10:07:31.028103', '2019-09-12 10:07:32.028103', null, null, 0, '2019-09-12 10:07:33.028103', 0);
INSERT INTO "graceful_exit_transfer_queue" ("node_id", "path", "piece_num", "durability_ratio", "queued_at", "requested_at", "last_failed_at", "last_failed_code", "failed_count", "finished_at", "order_limit_send_count") VALUES (E'\\363\\342\\363\\371>+F\\256\\263\\300\\273|\\342N\\347\\016', E'f8419768-5baa-4901-b3ba-62808013ec45/s0/test3/\\240\\243\\223n \\334~b}\\2624)\\250m\\201\\202\\235\\276\\361\\3304\\323\\352\\311\\361\\353;\\326\\312', 8, 1.0, '2019-09-12 10:07:31.028103', '2019-09-12 10:07:32.028103', null, null, 0, '2019-09-12 10:07:33.028103', 0);
INSERT INTO "stripe_customers" ("user_id", "customer_id", "created_at") VALUES (E'\\363\\311\\033w\\222\\303Ci\\265\\343U\\303\\312\\204",'::bytea, 'stripe_id', '2019-06-01 08:28:24.267934+00');
INSERT INTO "graceful_exit_transfer_queue" ("node_id", "path", "piece_num", "durability_ratio", "queued_at", "requested_at", "last_failed_at", "last_failed_code", "failed_count", "finished_at", "order_limit_send_count") VALUES (E'\\363\\342\\363\\371>+F\\256\\263\\300\\273|\\342N\\347\\016', E'f8419768-5baa-4901-b3ba-62808013ec45/s0/test3/\\240\\243\\223n \\334~b}\\2624)\\250m\\201\\202\\235\\276\\361\\3304\\323\\352\\311\\361\\353;\\326\\311', 9, 1.0, '2019-09-12 10:07:31.028103', '2019-09-12 10:07:32.028103', null, null, 0, '2019-09-12 10:07:33.028103', 0);
INSERT INTO "graceful_exit_transfer_queue" ("node_id", "path", "piece_num", "durability_ratio", "queued_at", "requested_at", "last_failed_at", "last_failed_code", "failed_count", "finished_at", "order_limit_send_count") VALUES (E'\\363\\342\\363\\371>+F\\256\\263\\300\\273|\\342N\\347\\016', E'f8419768-5baa-4901-b3ba-62808013ec45/s0/test3/\\240\\243\\223n \\334~b}\\2624)\\250m\\201\\202\\235\\276\\361\\3304\\323\\352\\311\\361\\353;\\326\\312', 9, 1.0, '2019-09-12 10:07:31.028103', '2019-09-12 10:07:32.028103', null, null, 0, '2019-09-12 10:07:33.028103', 0);
INSERT INTO "stripecoinpayments_invoice_project_records"("id", "project_id", "storage", "egress", "objects", "period_start", "period_end", "state", "created_at") VALUES (E'\\022\\217/\\014\\376!K\\023\\276\\031\\311}m\\236\\205\\300'::bytea, E'\\021\\217/\\014\\376!K\\023\\276\\031\\311}m\\236\\205\\300'::bytea, 0, 0, 0, '2019-06-01 08:28:24.267934+00', '2019-06-01 08:28:24.267934+00', 0, '2019-06-01 08:28:24.267934+00');
INSERT INTO "graceful_exit_transfer_queue" ("node_id", "path", "piece_num", "root_piece_id", "durability_ratio", "queued_at", "requested_at", "last_failed_at", "last_failed_code", "failed_count", "finished_at", "order_limit_send_count") VALUES (E'\\363\\342\\363\\371>+F\\256\\263\\300\\273|\\342N\\347\\016', E'f8419768-5baa-4901-b3ba-62808013ec45/s0/test3/\\240\\243\\223n \\334~b}\\2624)\\250m\\201\\202\\235\\276\\361\\3304\\323\\352\\311\\361\\353;\\326\\311', 10, E'\\363\\311\\033w\\222\\303Ci\\265\\343U\\303\\312\\204",'::bytea, 1.0, '2019-09-12 10:07:31.028103', '2019-09-12 10:07:32.028103', null, null, 0, '2019-09-12 10:07:33.028103', 0);
INSERT INTO "stripecoinpayments_tx_conversion_rates" ("tx_id", "rate", "created_at") VALUES ('tx_id', E'\\363\\311\\033w\\222\\303Ci,'::bytea, '2019-06-01 08:28:24.267934+00');
INSERT INTO "coinpayments_transactions" ("id", "user_id", "address", "amount", "received", "status", "key", "timeout", "created_at") VALUES ('tx_id', E'\\363\\311\\033w\\222\\303Ci\\265\\343U\\303\\312\\204",'::bytea, 'address', E'\\363\\311\\033w'::bytea, E'\\363\\311\\033w'::bytea, 1, 'key', 60, '2019-06-01 08:28:24.267934+00');
INSERT INTO "stripecoinpayments_apply_balance_intents" ("tx_id", "state", "created_at") VALUES ('tx_id', 0, '2019-06-01 08:28:24.267934+00');
INSERT INTO "nodes_offline_times" ("node_id", "tracked_at", "seconds") VALUES (E'\\153\\313\\233\\074\\327\\177\\136\\070\\346\\001'::bytea, '2019-06-01 09:28:24.267934+00', 3600);
INSERT INTO "nodes_offline_times" ("node_id", "tracked_at", "seconds") VALUES (E'\\153\\313\\233\\074\\327\\177\\136\\070\\346\\001'::bytea, '2017-06-01 09:28:24.267934+00', 100);
INSERT INTO "nodes_offline_times" ("node_id", "tracked_at", "seconds") VALUES (E'\\006\\223\\250R\\221\\005\\365\\377v>0\\266\\365\\216\\255?\\347\\244\\371?2\\264\\262\\230\\007<\\001\\262\\263\\237\\247n'::bytea, '2019-06-01 09:28:24.267934+00', 3600);
INSERT INTO "storagenode_bandwidth_rollups" ("storagenode_id", "interval_start", "interval_seconds", "action", "settled") VALUES (E'\\006\\223\\250R\\221\\005\\365\\377v>0\\266\\365\\216\\255?\\347\\244\\371?2\\264\\262\\230\\007<\\001\\262\\263\\237\\247n', '2020-01-11 08:00:00.000000+00', 3600, 1, 2024);
-- NEW DATA --

View File

@ -0,0 +1,476 @@
-- AUTOGENERATED BY gopkg.in/spacemonkeygo/dbx.v1
-- DO NOT EDIT
CREATE TABLE accounting_rollups
(
id bigserial NOT NULL,
node_id bytea NOT NULL,
start_time timestamp with time zone NOT NULL,
put_total bigint NOT NULL,
get_total bigint NOT NULL,
get_audit_total bigint NOT NULL,
get_repair_total bigint NOT NULL,
put_repair_total bigint NOT NULL,
at_rest_total double precision NOT NULL,
PRIMARY KEY (id)
);
CREATE TABLE accounting_timestamps
(
name text NOT NULL,
value timestamp with time zone NOT NULL,
PRIMARY KEY (name)
);
CREATE TABLE bucket_bandwidth_rollups
(
bucket_name bytea NOT NULL,
project_id bytea NOT NULL,
interval_start timestamp NOT NULL,
interval_seconds integer NOT NULL,
action integer NOT NULL,
inline bigint NOT NULL,
allocated bigint NOT NULL,
settled bigint NOT NULL,
PRIMARY KEY (bucket_name, project_id, interval_start, action)
);
CREATE TABLE bucket_storage_tallies
(
bucket_name bytea NOT NULL,
project_id bytea NOT NULL,
interval_start timestamp NOT NULL,
inline bigint NOT NULL,
remote bigint NOT NULL,
remote_segments_count integer NOT NULL,
inline_segments_count integer NOT NULL,
object_count integer NOT NULL,
metadata_size bigint NOT NULL,
PRIMARY KEY (bucket_name, project_id, interval_start)
);
CREATE TABLE injuredsegments
(
path bytea NOT NULL,
data bytea NOT NULL,
attempted timestamp,
PRIMARY KEY (path)
);
CREATE TABLE irreparabledbs
(
segmentpath bytea NOT NULL,
segmentdetail bytea NOT NULL,
pieces_lost_count bigint NOT NULL,
seg_damaged_unix_sec bigint NOT NULL,
repair_attempt_count bigint NOT NULL,
PRIMARY KEY (segmentpath)
);
CREATE TABLE nodes
(
id bytea NOT NULL,
address text NOT NULL,
last_net text NOT NULL,
protocol integer NOT NULL,
type integer NOT NULL,
email text NOT NULL,
wallet text NOT NULL,
free_bandwidth bigint NOT NULL,
free_disk bigint NOT NULL,
piece_count bigint NOT NULL,
major bigint NOT NULL,
minor bigint NOT NULL,
patch bigint NOT NULL,
hash text NOT NULL,
timestamp timestamp with time zone NOT NULL,
release boolean NOT NULL,
latency_90 bigint NOT NULL,
audit_success_count bigint NOT NULL,
total_audit_count bigint NOT NULL,
uptime_success_count bigint NOT NULL,
total_uptime_count bigint NOT NULL,
created_at timestamp with time zone NOT NULL,
updated_at timestamp with time zone NOT NULL,
last_contact_success timestamp with time zone NOT NULL,
last_contact_failure timestamp with time zone NOT NULL,
contained boolean NOT NULL,
disqualified timestamp with time zone,
audit_reputation_alpha double precision NOT NULL,
audit_reputation_beta double precision NOT NULL,
uptime_reputation_alpha double precision NOT NULL,
uptime_reputation_beta double precision NOT NULL,
exit_initiated_at timestamp,
exit_loop_completed_at timestamp,
exit_finished_at timestamp,
exit_success boolean NOT NULL,
PRIMARY KEY (id)
);
CREATE TABLE offers
(
id serial NOT NULL,
name text NOT NULL,
description text NOT NULL,
award_credit_in_cents integer NOT NULL,
invitee_credit_in_cents integer NOT NULL,
award_credit_duration_days integer,
invitee_credit_duration_days integer,
redeemable_cap integer,
expires_at timestamp with time zone NOT NULL,
created_at timestamp with time zone NOT NULL,
status integer NOT NULL,
type integer NOT NULL,
PRIMARY KEY (id)
);
CREATE TABLE peer_identities
(
node_id bytea NOT NULL,
leaf_serial_number bytea NOT NULL,
chain bytea NOT NULL,
updated_at timestamp with time zone NOT NULL,
PRIMARY KEY (node_id)
);
CREATE TABLE pending_audits
(
node_id bytea NOT NULL,
piece_id bytea NOT NULL,
stripe_index bigint NOT NULL,
share_size bigint NOT NULL,
expected_share_hash bytea NOT NULL,
reverify_count bigint NOT NULL,
path bytea NOT NULL,
PRIMARY KEY (node_id)
);
CREATE TABLE projects
(
id bytea NOT NULL,
name text NOT NULL,
description text NOT NULL,
usage_limit bigint NOT NULL,
partner_id bytea,
owner_id bytea NOT NULL,
created_at timestamp with time zone NOT NULL,
PRIMARY KEY (id)
);
CREATE TABLE registration_tokens
(
secret bytea NOT NULL,
owner_id bytea,
project_limit integer NOT NULL,
created_at timestamp with time zone NOT NULL,
PRIMARY KEY (secret),
UNIQUE (owner_id)
);
CREATE TABLE reset_password_tokens
(
secret bytea NOT NULL,
owner_id bytea NOT NULL,
created_at timestamp with time zone NOT NULL,
PRIMARY KEY (secret),
UNIQUE (owner_id)
);
CREATE TABLE serial_numbers
(
id serial NOT NULL,
serial_number bytea NOT NULL,
bucket_id bytea NOT NULL,
expires_at timestamp NOT NULL,
PRIMARY KEY (id)
);
CREATE TABLE storagenode_bandwidth_rollups
(
storagenode_id bytea NOT NULL,
interval_start timestamp NOT NULL,
interval_seconds integer NOT NULL,
action integer NOT NULL,
allocated bigint DEFAULT 0,
settled bigint NOT NULL,
PRIMARY KEY (storagenode_id, interval_start, action)
);
CREATE TABLE storagenode_storage_tallies
(
id bigserial NOT NULL,
node_id bytea NOT NULL,
interval_end_time timestamp with time zone NOT NULL,
data_total double precision NOT NULL,
PRIMARY KEY (id)
);
CREATE TABLE users (
id bytea NOT NULL,
email text NOT NULL,
normalized_email text NOT NULL,
full_name text NOT NULL,
short_name text,
password_hash bytea NOT NULL,
status integer NOT NULL,
partner_id bytea,
created_at timestamp with time zone NOT NULL,
PRIMARY KEY ( id )
);
CREATE TABLE value_attributions
(
project_id bytea NOT NULL,
bucket_name bytea NOT NULL,
partner_id bytea NOT NULL,
last_updated timestamp NOT NULL,
PRIMARY KEY (project_id, bucket_name)
);
CREATE TABLE api_keys
(
id bytea NOT NULL,
project_id bytea NOT NULL REFERENCES projects (id) ON DELETE CASCADE,
head bytea NOT NULL,
name text NOT NULL,
secret bytea NOT NULL,
partner_id bytea,
created_at timestamp with time zone NOT NULL,
PRIMARY KEY (id),
UNIQUE (head),
UNIQUE (name, project_id)
);
CREATE TABLE bucket_metainfos
(
id bytea NOT NULL,
project_id bytea NOT NULL REFERENCES projects (id),
name bytea NOT NULL,
partner_id bytea,
path_cipher integer NOT NULL,
created_at timestamp with time zone NOT NULL,
default_segment_size integer NOT NULL,
default_encryption_cipher_suite integer NOT NULL,
default_encryption_block_size integer NOT NULL,
default_redundancy_algorithm integer NOT NULL,
default_redundancy_share_size integer NOT NULL,
default_redundancy_required_shares integer NOT NULL,
default_redundancy_repair_shares integer NOT NULL,
default_redundancy_optimal_shares integer NOT NULL,
default_redundancy_total_shares integer NOT NULL,
PRIMARY KEY (id),
UNIQUE (name, project_id)
);
CREATE TABLE project_invoice_stamps
(
project_id bytea NOT NULL REFERENCES projects (id) ON DELETE CASCADE,
invoice_id bytea NOT NULL,
start_date timestamp with time zone NOT NULL,
end_date timestamp with time zone NOT NULL,
created_at timestamp with time zone NOT NULL,
PRIMARY KEY (project_id, start_date, end_date),
UNIQUE (invoice_id)
);
CREATE TABLE project_members
(
member_id bytea NOT NULL REFERENCES users (id) ON DELETE CASCADE,
project_id bytea NOT NULL REFERENCES projects (id) ON DELETE CASCADE,
created_at timestamp with time zone NOT NULL,
PRIMARY KEY (member_id, project_id)
);
CREATE TABLE used_serials
(
serial_number_id integer NOT NULL REFERENCES serial_numbers (id) ON DELETE CASCADE,
storage_node_id bytea NOT NULL,
PRIMARY KEY (serial_number_id, storage_node_id)
);
CREATE TABLE user_credits
(
id serial NOT NULL,
user_id bytea NOT NULL REFERENCES users (id) ON DELETE CASCADE,
offer_id integer NOT NULL REFERENCES offers (id),
referred_by bytea REFERENCES users (id) ON DELETE SET NULL,
type text NOT NULL,
credits_earned_in_cents integer NOT NULL,
credits_used_in_cents integer NOT NULL,
expires_at timestamp with time zone NOT NULL,
created_at timestamp with time zone NOT NULL,
PRIMARY KEY (id),
UNIQUE( id, offer_id )
);
CREATE TABLE graceful_exit_progress
(
node_id bytea NOT NULL,
bytes_transferred bigint NOT NULL,
pieces_transferred bigint NOT NULL,
pieces_failed bigint NOT NULL,
updated_at timestamp NOT NULL,
PRIMARY KEY ( node_id )
);
CREATE TABLE graceful_exit_transfer_queue
(
node_id bytea NOT NULL,
path bytea NOT NULL,
piece_num integer NOT NULL,
root_piece_id bytea,
durability_ratio double precision NOT NULL,
queued_at timestamp NOT NULL,
requested_at timestamp,
last_failed_at timestamp,
last_failed_code integer,
failed_count integer,
finished_at timestamp,
order_limit_send_count integer NOT NULL,
PRIMARY KEY ( node_id, path, piece_num )
);
CREATE TABLE stripe_customers
(
user_id bytea NOT NULL,
customer_id text NOT NULL,
created_at timestamp with time zone NOT NULL,
PRIMARY KEY ( user_id ),
UNIQUE ( customer_id )
);
CREATE TABLE coinpayments_transactions
(
id text NOT NULL,
user_id bytea NOT NULL,
address text NOT NULL,
amount bytea NOT NULL,
received bytea NOT NULL,
status integer NOT NULL,
key text NOT NULL,
timeout integer NOT NULL,
created_at timestamp with time zone NOT NULL,
PRIMARY KEY ( id )
);
CREATE TABLE stripecoinpayments_apply_balance_intents
(
tx_id text NOT NULL REFERENCES coinpayments_transactions( id ) ON DELETE CASCADE,
state integer NOT NULL,
created_at timestamp with time zone NOT NULL,
PRIMARY KEY ( tx_id )
);
CREATE TABLE stripecoinpayments_invoice_project_records
(
id bytea NOT NULL,
project_id bytea NOT NULL,
storage double precision NOT NULL,
egress bigint NOT NULL,
objects bigint NOT NULL,
period_start timestamp with time zone NOT NULL,
period_end timestamp with time zone NOT NULL,
state integer NOT NULL,
created_at timestamp with time zone NOT NULL,
PRIMARY KEY ( id ),
UNIQUE ( project_id, period_start, period_end )
);
CREATE TABLE stripecoinpayments_tx_conversion_rates
(
tx_id text NOT NULL,
rate bytea NOT NULL,
created_at timestamp with time zone NOT NULL,
PRIMARY KEY ( tx_id )
);
CREATE TABLE nodes_offline_times
(
node_id bytea NOT NULL,
tracked_at timestamp with time zone NOT NULL,
seconds integer NOT NULL,
PRIMARY KEY ( node_id, tracked_at )
);
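-- The two tables below appear to be the schema additions for this change; column semantics
-- are inferred rather than documented here: "amount" is presumably a value in cents,
-- "duration" presumably counts billing periods, and each "coupon_usages" row presumably
-- records how much of a coupon was consumed in one period so that invoice generation can
-- emit a matching line item.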
CREATE TABLE coupons
(
id bytea NOT NULL,
project_id bytea NOT NULL,
user_id bytea NOT NULL,
amount bigint NOT NULL,
description text NOT NULL,
type integer NOT NULL,
status integer NOT NULL,
duration bigint NOT NULL,
created_at timestamp with time zone NOT NULL,
PRIMARY KEY ( id )
);
CREATE TABLE coupon_usages
(
coupon_id bytea NOT NULL,
amount bigint NOT NULL,
status integer NOT NULL,
period timestamp with time zone NOT NULL,
PRIMARY KEY ( coupon_id, period )
);
CREATE INDEX bucket_name_project_id_interval_start_interval_seconds ON bucket_bandwidth_rollups ( bucket_name, project_id, interval_start, interval_seconds );
CREATE INDEX node_last_ip ON nodes ( last_net );
CREATE UNIQUE INDEX serial_number ON serial_numbers ( serial_number );
CREATE INDEX serial_numbers_expires_at_index ON serial_numbers ( expires_at );
CREATE INDEX storagenode_id_interval_start_interval_seconds ON storagenode_bandwidth_rollups ( storagenode_id, interval_start, interval_seconds );
CREATE UNIQUE INDEX credits_earned_user_id_offer_id ON user_credits (id, offer_id);
CREATE INDEX nodes_offline_times_node_id_index ON nodes_offline_times ( node_id );
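-- The INSERT statements below carry forward the baseline rows from the previous test data
-- version; only the coupon rows after the NEW DATA marker at the end of the file are new.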
INSERT INTO "accounting_rollups"("id", "node_id", "start_time", "put_total", "get_total", "get_audit_total", "get_repair_total", "put_repair_total", "at_rest_total") VALUES (1, E'\\367M\\177\\251]t/\\022\\256\\214\\265\\025\\224\\204:\\217\\212\\0102<\\321\\374\\020&\\271Qc\\325\\261\\354\\246\\233'::bytea, '2019-02-09 00:00:00+00', 1000, 2000, 3000, 4000, 0, 5000);
INSERT INTO "accounting_timestamps" VALUES ('LastAtRestTally', '0001-01-01 00:00:00+00');
INSERT INTO "accounting_timestamps" VALUES ('LastRollup', '0001-01-01 00:00:00+00');
INSERT INTO "accounting_timestamps" VALUES ('LastBandwidthTally', '0001-01-01 00:00:00+00');
INSERT INTO "nodes"("id", "address", "last_net", "protocol", "type", "email", "wallet", "free_bandwidth", "free_disk", "piece_count", "major", "minor", "patch", "hash", "timestamp", "release","latency_90", "audit_success_count", "total_audit_count", "uptime_success_count", "total_uptime_count", "created_at", "updated_at", "last_contact_success", "last_contact_failure", "contained", "disqualified", "audit_reputation_alpha", "audit_reputation_beta", "uptime_reputation_alpha", "uptime_reputation_beta", "exit_success") VALUES (E'\\153\\313\\233\\074\\327\\177\\136\\070\\346\\001', '127.0.0.1:55516', '', 0, 4, '', '', -1, -1, 0, 0, 1, 0, '', 'epoch', false, 0, 0, 5, 0, 5, '2019-02-14 08:07:31.028103+00', '2019-02-14 08:07:31.108963+00', 'epoch', 'epoch', false, NULL, 50, 0, 100, 5, false);
INSERT INTO "nodes"("id", "address", "last_net", "protocol", "type", "email", "wallet", "free_bandwidth", "free_disk", "piece_count", "major", "minor", "patch", "hash", "timestamp", "release","latency_90", "audit_success_count", "total_audit_count", "uptime_success_count", "total_uptime_count", "created_at", "updated_at", "last_contact_success", "last_contact_failure", "contained", "disqualified", "audit_reputation_alpha", "audit_reputation_beta", "uptime_reputation_alpha", "uptime_reputation_beta", "exit_success") VALUES (E'\\006\\223\\250R\\221\\005\\365\\377v>0\\266\\365\\216\\255?\\347\\244\\371?2\\264\\262\\230\\007<\\001\\262\\263\\237\\247n', '127.0.0.1:55518', '', 0, 4, '', '', -1, -1, 0, 0, 1, 0, '', 'epoch', false, 0, 0, 0, 3, 3, '2019-02-14 08:07:31.028103+00', '2019-02-14 08:07:31.108963+00', 'epoch', 'epoch', false, NULL, 50, 0, 100, 0, false);
INSERT INTO "nodes"("id", "address", "last_net", "protocol", "type", "email", "wallet", "free_bandwidth", "free_disk", "piece_count", "major", "minor", "patch", "hash", "timestamp", "release","latency_90", "audit_success_count", "total_audit_count", "uptime_success_count", "total_uptime_count", "created_at", "updated_at", "last_contact_success", "last_contact_failure", "contained", "disqualified", "audit_reputation_alpha", "audit_reputation_beta", "uptime_reputation_alpha", "uptime_reputation_beta", "exit_success") VALUES (E'\\363\\342\\363\\371>+F\\256\\263\\300\\273|\\342N\\347\\014', '127.0.0.1:55517', '', 0, 4, '', '', -1, -1, 0, 0, 1, 0, '', 'epoch', false, 0, 0, 0, 0, 0, '2019-02-14 08:07:31.028103+00', '2019-02-14 08:07:31.108963+00', 'epoch', 'epoch', false, NULL, 50, 0, 100, 0, false);
INSERT INTO "nodes"("id", "address", "last_net", "protocol", "type", "email", "wallet", "free_bandwidth", "free_disk", "piece_count", "major", "minor", "patch", "hash", "timestamp", "release","latency_90", "audit_success_count", "total_audit_count", "uptime_success_count", "total_uptime_count", "created_at", "updated_at", "last_contact_success", "last_contact_failure", "contained", "disqualified", "audit_reputation_alpha", "audit_reputation_beta", "uptime_reputation_alpha", "uptime_reputation_beta", "exit_success") VALUES (E'\\363\\342\\363\\371>+F\\256\\263\\300\\273|\\342N\\347\\015', '127.0.0.1:55519', '', 0, 4, '', '', -1, -1, 0, 0, 1, 0, '', 'epoch', false, 0, 1, 2, 1, 2, '2019-02-14 08:07:31.028103+00', '2019-02-14 08:07:31.108963+00', 'epoch', 'epoch', false, NULL, 50, 0, 100, 1, false);
INSERT INTO "nodes"("id", "address", "last_net", "protocol", "type", "email", "wallet", "free_bandwidth", "free_disk", "piece_count", "major", "minor", "patch", "hash", "timestamp", "release","latency_90", "audit_success_count", "total_audit_count", "uptime_success_count", "total_uptime_count", "created_at", "updated_at", "last_contact_success", "last_contact_failure", "contained", "disqualified", "audit_reputation_alpha", "audit_reputation_beta", "uptime_reputation_alpha", "uptime_reputation_beta", "exit_success") VALUES (E'\\363\\342\\363\\371>+F\\256\\263\\300\\273|\\342N\\347\\016', '127.0.0.1:55520', '', 0, 4, '', '', -1, -1, 0, 0, 1, 0, '', 'epoch', false, 0, 300, 400, 300, 400, '2019-02-14 08:07:31.028103+00', '2019-02-14 08:07:31.108963+00', 'epoch', 'epoch', false, NULL, 300, 0, 300, 100, false);
INSERT INTO "users"("id", "full_name", "short_name", "email", "normalized_email", "password_hash", "status", "partner_id", "created_at") VALUES (E'\\363\\311\\033w\\222\\303Ci\\265\\343U\\303\\312\\204",'::bytea, 'Noahson', 'William', '1email1@mail.test', '1EMAIL1@MAIL.TEST', E'some_readable_hash'::bytea, 1, NULL, '2019-02-14 08:28:24.614594+00');
INSERT INTO "projects"("id", "name", "description", "usage_limit", "partner_id", "owner_id", "created_at") VALUES (E'\\022\\217/\\014\\376!K\\023\\276\\031\\311}m\\236\\205\\300'::bytea, 'ProjectName', 'projects description', 0, NULL, E'\\363\\311\\033w\\222\\303Ci\\265\\343U\\303\\312\\204",'::bytea, '2019-02-14 08:28:24.254934+00');
INSERT INTO "projects"("id", "name", "description", "usage_limit", "partner_id", "owner_id", "created_at") VALUES (E'\\363\\342\\363\\371>+F\\256\\263\\300\\273|\\342N\\347\\014'::bytea, 'projName1', 'Test project 1', 0, NULL, E'\\363\\311\\033w\\222\\303Ci\\265\\343U\\303\\312\\204",'::bytea, '2019-02-14 08:28:24.636949+00');
INSERT INTO "project_members"("member_id", "project_id", "created_at") VALUES (E'\\363\\311\\033w\\222\\303Ci\\265\\343U\\303\\312\\204",'::bytea, E'\\363\\342\\363\\371>+F\\256\\263\\300\\273|\\342N\\347\\014'::bytea, '2019-02-14 08:28:24.677953+00');
INSERT INTO "project_members"("member_id", "project_id", "created_at") VALUES (E'\\363\\311\\033w\\222\\303Ci\\265\\343U\\303\\312\\204",'::bytea, E'\\022\\217/\\014\\376!K\\023\\276\\031\\311}m\\236\\205\\300'::bytea, '2019-02-13 08:28:24.677953+00');
INSERT INTO "irreparabledbs" ("segmentpath", "segmentdetail", "pieces_lost_count", "seg_damaged_unix_sec", "repair_attempt_count") VALUES ('\x49616d5365676d656e746b6579696e666f30', '\x49616d5365676d656e7464657461696c696e666f30', 10, 1550159554, 10);
INSERT INTO "injuredsegments" ("path", "data") VALUES ('0', '\x0a0130120100');
INSERT INTO "injuredsegments" ("path", "data") VALUES ('here''s/a/great/path', '\x0a136865726527732f612f67726561742f70617468120a0102030405060708090a');
INSERT INTO "injuredsegments" ("path", "data") VALUES ('yet/another/cool/path', '\x0a157965742f616e6f746865722f636f6f6c2f70617468120a0102030405060708090a');
INSERT INTO "injuredsegments" ("path", "data") VALUES ('so/many/iconic/paths/to/choose/from', '\x0a23736f2f6d616e792f69636f6e69632f70617468732f746f2f63686f6f73652f66726f6d120a0102030405060708090a');
INSERT INTO "registration_tokens" ("secret", "owner_id", "project_limit", "created_at") VALUES (E'\\070\\127\\144\\013\\332\\344\\102\\376\\306\\056\\303\\130\\106\\132\\321\\276\\321\\274\\170\\264\\054\\333\\221\\116\\154\\221\\335\\070\\220\\146\\344\\216'::bytea, null, 1, '2019-02-14 08:28:24.677953+00');
INSERT INTO "serial_numbers" ("id", "serial_number", "bucket_id", "expires_at") VALUES (1, E'0123456701234567'::bytea, E'\\363\\342\\363\\371>+F\\256\\263\\300\\273|\\342N\\347\\014/testbucket'::bytea, '2019-03-06 08:28:24.677953+00');
INSERT INTO "used_serials" ("serial_number_id", "storage_node_id") VALUES (1, E'\\006\\223\\250R\\221\\005\\365\\377v>0\\266\\365\\216\\255?\\347\\244\\371?2\\264\\262\\230\\007<\\001\\262\\263\\237\\247n');
INSERT INTO "storagenode_bandwidth_rollups" ("storagenode_id", "interval_start", "interval_seconds", "action", "allocated", "settled") VALUES (E'\\006\\223\\250R\\221\\005\\365\\377v>0\\266\\365\\216\\255?\\347\\244\\371?2\\264\\262\\230\\007<\\001\\262\\263\\237\\247n', '2019-03-06 08:00:00.000000+00', 3600, 1, 1024, 2024);
INSERT INTO "storagenode_storage_tallies" VALUES (1, E'\\3510\\323\\225"~\\036<\\342\\330m\\0253Jhr\\246\\233K\\246#\\2303\\351\\256\\275j\\212UM\\362\\207', '2019-02-14 08:16:57.812849+00', 1000);
INSERT INTO "bucket_bandwidth_rollups" ("bucket_name", "project_id", "interval_start", "interval_seconds", "action", "inline", "allocated", "settled") VALUES (E'testbucket'::bytea, E'\\363\\342\\363\\371>+F\\256\\263\\300\\273|\\342N\\347\\014'::bytea,'2019-03-06 08:00:00.000000+00', 3600, 1, 1024, 2024, 3024);
INSERT INTO "bucket_storage_tallies" ("bucket_name", "project_id", "interval_start", "inline", "remote", "remote_segments_count", "inline_segments_count", "object_count", "metadata_size") VALUES (E'testbucket'::bytea, E'\\363\\342\\363\\371>+F\\256\\263\\300\\273|\\342N\\347\\014'::bytea,'2019-03-06 08:00:00.000000+00', 4024, 5024, 0, 0, 0, 0);
INSERT INTO "bucket_bandwidth_rollups" ("bucket_name", "project_id", "interval_start", "interval_seconds", "action", "inline", "allocated", "settled") VALUES (E'testbucket'::bytea, E'\\170\\160\\157\\370\\274\\366\\113\\364\\272\\235\\301\\243\\321\\102\\321\\136'::bytea,'2019-03-06 08:00:00.000000+00', 3600, 1, 1024, 2024, 3024);
INSERT INTO "bucket_storage_tallies" ("bucket_name", "project_id", "interval_start", "inline", "remote", "remote_segments_count", "inline_segments_count", "object_count", "metadata_size") VALUES (E'testbucket'::bytea, E'\\170\\160\\157\\370\\274\\366\\113\\364\\272\\235\\301\\243\\321\\102\\321\\136'::bytea,'2019-03-06 08:00:00.000000+00', 4024, 5024, 0, 0, 0, 0);
INSERT INTO "reset_password_tokens" ("secret", "owner_id", "created_at") VALUES (E'\\070\\127\\144\\013\\332\\344\\102\\376\\306\\056\\303\\130\\106\\132\\321\\276\\321\\274\\170\\264\\054\\333\\221\\116\\154\\221\\335\\070\\220\\146\\344\\216'::bytea, E'\\363\\311\\033w\\222\\303Ci\\265\\343U\\303\\312\\204",'::bytea, '2019-05-08 08:28:24.677953+00');
INSERT INTO "offers" ("id", "name", "description", "award_credit_in_cents", "invitee_credit_in_cents", "expires_at", "created_at", "status", "type", "award_credit_duration_days", "invitee_credit_duration_days") VALUES (1, 'Default referral offer', 'Is active when no other active referral offer', 300, 600, '2119-03-14 08:28:24.636949+00', '2019-07-14 08:28:24.636949+00', 1, 2, 365, 14);
INSERT INTO "offers" ("id", "name", "description", "award_credit_in_cents", "invitee_credit_in_cents", "expires_at", "created_at", "status", "type", "award_credit_duration_days", "invitee_credit_duration_days") VALUES (2, 'Default free credit offer', 'Is active when no active free credit offer', 0, 300, '2119-03-14 08:28:24.636949+00', '2019-07-14 08:28:24.636949+00', 1, 1, NULL, 14);
INSERT INTO "api_keys" ("id", "project_id", "head", "name", "secret", "partner_id", "created_at") VALUES (E'\\334/\\302;\\225\\355O\\323\\276f\\247\\354/6\\241\\033'::bytea, E'\\022\\217/\\014\\376!K\\023\\276\\031\\311}m\\236\\205\\300'::bytea, E'\\111\\142\\147\\304\\132\\375\\070\\163\\270\\160\\251\\370\\126\\063\\351\\037\\257\\071\\143\\375\\351\\320\\253\\232\\220\\260\\075\\173\\306\\307\\115\\136'::bytea, 'key 2', E'\\254\\011\\315\\333\\273\\365\\001\\071\\024\\154\\253\\332\\301\\216\\361\\074\\221\\367\\251\\231\\274\\333\\300\\367\\001\\272\\327\\111\\315\\123\\042\\016'::bytea, NULL, '2019-02-14 08:28:24.267934+00');
INSERT INTO "project_invoice_stamps" ("project_id", "invoice_id", "start_date", "end_date", "created_at") VALUES (E'\\022\\217/\\014\\376!K\\023\\276\\031\\311}m\\236\\205\\300'::bytea, E'\\363\\311\\033w\\222\\303,'::bytea, '2019-06-01 08:28:24.267934+00', '2019-06-29 08:28:24.267934+00', '2019-06-01 08:28:24.267934+00');
INSERT INTO "value_attributions" ("project_id", "bucket_name", "partner_id", "last_updated") VALUES (E'\\363\\311\\033w\\222\\303Ci\\265\\343U\\303\\312\\204",'::bytea, E''::bytea, E'\\363\\342\\363\\371>+F\\256\\263\\300\\273|\\342N\\347\\014'::bytea,'2019-02-14 08:07:31.028103+00');
INSERT INTO "user_credits" ("id", "user_id", "offer_id", "referred_by", "credits_earned_in_cents", "credits_used_in_cents", "type", "expires_at", "created_at") VALUES (1, E'\\363\\311\\033w\\222\\303Ci\\265\\343U\\303\\312\\204",'::bytea, 1, E'\\363\\311\\033w\\222\\303Ci\\265\\343U\\303\\312\\204",'::bytea, 200, 0, 'invalid', '2019-10-01 08:28:24.267934+00', '2019-06-01 08:28:24.267934+00');
INSERT INTO "bucket_metainfos" ("id", "project_id", "name", "partner_id", "created_at", "path_cipher", "default_segment_size", "default_encryption_cipher_suite", "default_encryption_block_size", "default_redundancy_algorithm", "default_redundancy_share_size", "default_redundancy_required_shares", "default_redundancy_repair_shares", "default_redundancy_optimal_shares", "default_redundancy_total_shares") VALUES (E'\\334/\\302;\\225\\355O\\323\\276f\\247\\354/6\\241\\033'::bytea, E'\\022\\217/\\014\\376!K\\023\\276\\031\\311}m\\236\\205\\300'::bytea, E'testbucketuniquename'::bytea, NULL, '2019-06-14 08:28:24.677953+00', 1, 65536, 1, 8192, 1, 4096, 4, 6, 8, 10);
INSERT INTO "pending_audits" ("node_id", "piece_id", "stripe_index", "share_size", "expected_share_hash", "reverify_count", "path") VALUES (E'\\153\\313\\233\\074\\327\\177\\136\\070\\346\\001'::bytea, E'\\363\\311\\033w\\222\\303Ci\\265\\343U\\303\\312\\204",'::bytea, 5, 1024, E'\\070\\127\\144\\013\\332\\344\\102\\376\\306\\056\\303\\130\\106\\132\\321\\276\\321\\274\\170\\264\\054\\333\\221\\116\\154\\221\\335\\070\\220\\146\\344\\216'::bytea, 1, 'not null');
INSERT INTO "peer_identities" VALUES (E'\\334/\\302;\\225\\355O\\323\\276f\\247\\354/6\\241\\033'::bytea, E'\\363\\342\\363\\371>+F\\256\\263\\300\\273|\\342N\\347\\014'::bytea, E'\\363\\311\\033w\\222\\303Ci\\265\\343U\\303\\312\\204",'::bytea, '2019-02-14 08:07:31.335028+00');
INSERT INTO "graceful_exit_progress" ("node_id", "bytes_transferred", "pieces_transferred", "pieces_failed", "updated_at") VALUES (E'\\363\\342\\363\\371>+F\\256\\263\\300\\273|\\342N\\347\\016', 1000000000000000, 0, 0, '2019-09-12 10:07:31.028103');
INSERT INTO "graceful_exit_transfer_queue" ("node_id", "path", "piece_num", "durability_ratio", "queued_at", "requested_at", "last_failed_at", "last_failed_code", "failed_count", "finished_at", "order_limit_send_count") VALUES (E'\\363\\342\\363\\371>+F\\256\\263\\300\\273|\\342N\\347\\016', E'f8419768-5baa-4901-b3ba-62808013ec45/s0/test3/\\240\\243\\223n \\334~b}\\2624)\\250m\\201\\202\\235\\276\\361\\3304\\323\\352\\311\\361\\353;\\326\\311', 8, 1.0, '2019-09-12 10:07:31.028103', '2019-09-12 10:07:32.028103', null, null, 0, '2019-09-12 10:07:33.028103', 0);
INSERT INTO "graceful_exit_transfer_queue" ("node_id", "path", "piece_num", "durability_ratio", "queued_at", "requested_at", "last_failed_at", "last_failed_code", "failed_count", "finished_at", "order_limit_send_count") VALUES (E'\\363\\342\\363\\371>+F\\256\\263\\300\\273|\\342N\\347\\016', E'f8419768-5baa-4901-b3ba-62808013ec45/s0/test3/\\240\\243\\223n \\334~b}\\2624)\\250m\\201\\202\\235\\276\\361\\3304\\323\\352\\311\\361\\353;\\326\\312', 8, 1.0, '2019-09-12 10:07:31.028103', '2019-09-12 10:07:32.028103', null, null, 0, '2019-09-12 10:07:33.028103', 0);
INSERT INTO "stripe_customers" ("user_id", "customer_id", "created_at") VALUES (E'\\363\\311\\033w\\222\\303Ci\\265\\343U\\303\\312\\204",'::bytea, 'stripe_id', '2019-06-01 08:28:24.267934+00');
INSERT INTO "graceful_exit_transfer_queue" ("node_id", "path", "piece_num", "durability_ratio", "queued_at", "requested_at", "last_failed_at", "last_failed_code", "failed_count", "finished_at", "order_limit_send_count") VALUES (E'\\363\\342\\363\\371>+F\\256\\263\\300\\273|\\342N\\347\\016', E'f8419768-5baa-4901-b3ba-62808013ec45/s0/test3/\\240\\243\\223n \\334~b}\\2624)\\250m\\201\\202\\235\\276\\361\\3304\\323\\352\\311\\361\\353;\\326\\311', 9, 1.0, '2019-09-12 10:07:31.028103', '2019-09-12 10:07:32.028103', null, null, 0, '2019-09-12 10:07:33.028103', 0);
INSERT INTO "graceful_exit_transfer_queue" ("node_id", "path", "piece_num", "durability_ratio", "queued_at", "requested_at", "last_failed_at", "last_failed_code", "failed_count", "finished_at", "order_limit_send_count") VALUES (E'\\363\\342\\363\\371>+F\\256\\263\\300\\273|\\342N\\347\\016', E'f8419768-5baa-4901-b3ba-62808013ec45/s0/test3/\\240\\243\\223n \\334~b}\\2624)\\250m\\201\\202\\235\\276\\361\\3304\\323\\352\\311\\361\\353;\\326\\312', 9, 1.0, '2019-09-12 10:07:31.028103', '2019-09-12 10:07:32.028103', null, null, 0, '2019-09-12 10:07:33.028103', 0);
INSERT INTO "stripecoinpayments_invoice_project_records"("id", "project_id", "storage", "egress", "objects", "period_start", "period_end", "state", "created_at") VALUES (E'\\022\\217/\\014\\376!K\\023\\276\\031\\311}m\\236\\205\\300'::bytea, E'\\021\\217/\\014\\376!K\\023\\276\\031\\311}m\\236\\205\\300'::bytea, 0, 0, 0, '2019-06-01 08:28:24.267934+00', '2019-06-01 08:28:24.267934+00', 0, '2019-06-01 08:28:24.267934+00');
INSERT INTO "graceful_exit_transfer_queue" ("node_id", "path", "piece_num", "root_piece_id", "durability_ratio", "queued_at", "requested_at", "last_failed_at", "last_failed_code", "failed_count", "finished_at", "order_limit_send_count") VALUES (E'\\363\\342\\363\\371>+F\\256\\263\\300\\273|\\342N\\347\\016', E'f8419768-5baa-4901-b3ba-62808013ec45/s0/test3/\\240\\243\\223n \\334~b}\\2624)\\250m\\201\\202\\235\\276\\361\\3304\\323\\352\\311\\361\\353;\\326\\311', 10, E'\\363\\311\\033w\\222\\303Ci\\265\\343U\\303\\312\\204",'::bytea, 1.0, '2019-09-12 10:07:31.028103', '2019-09-12 10:07:32.028103', null, null, 0, '2019-09-12 10:07:33.028103', 0);
INSERT INTO "stripecoinpayments_tx_conversion_rates" ("tx_id", "rate", "created_at") VALUES ('tx_id', E'\\363\\311\\033w\\222\\303Ci,'::bytea, '2019-06-01 08:28:24.267934+00');
INSERT INTO "coinpayments_transactions" ("id", "user_id", "address", "amount", "received", "status", "key", "timeout", "created_at") VALUES ('tx_id', E'\\363\\311\\033w\\222\\303Ci\\265\\343U\\303\\312\\204",'::bytea, 'address', E'\\363\\311\\033w'::bytea, E'\\363\\311\\033w'::bytea, 1, 'key', 60, '2019-06-01 08:28:24.267934+00');
INSERT INTO "stripecoinpayments_apply_balance_intents" ("tx_id", "state", "created_at") VALUES ('tx_id', 0, '2019-06-01 08:28:24.267934+00');
INSERT INTO "nodes_offline_times" ("node_id", "tracked_at", "seconds") VALUES (E'\\153\\313\\233\\074\\327\\177\\136\\070\\346\\001'::bytea, '2019-06-01 09:28:24.267934+00', 3600);
INSERT INTO "nodes_offline_times" ("node_id", "tracked_at", "seconds") VALUES (E'\\153\\313\\233\\074\\327\\177\\136\\070\\346\\001'::bytea, '2017-06-01 09:28:24.267934+00', 100);
INSERT INTO "nodes_offline_times" ("node_id", "tracked_at", "seconds") VALUES (E'\\006\\223\\250R\\221\\005\\365\\377v>0\\266\\365\\216\\255?\\347\\244\\371?2\\264\\262\\230\\007<\\001\\262\\263\\237\\247n'::bytea, '2019-06-01 09:28:24.267934+00', 3600);
INSERT INTO "storagenode_bandwidth_rollups" ("storagenode_id", "interval_start", "interval_seconds", "action", "settled") VALUES (E'\\006\\223\\250R\\221\\005\\365\\377v>0\\266\\365\\216\\255?\\347\\244\\371?2\\264\\262\\230\\007<\\001\\262\\263\\237\\247n', '2020-01-11 08:00:00.000000+00', 3600, 1, 2024);
-- NEW DATA --
INSERT INTO "coupons" ("id", "project_id", "user_id", "amount", "description", "type", "status", "duration", "created_at") VALUES (E'\\362\\342\\363\\371>+F\\256\\263\\300\\273|\\342N\\347\\014'::bytea, E'\\363\\342\\363\\371>+F\\256\\263\\300\\273|\\342N\\347\\014'::bytea, E'\\363\\311\\033w\\222\\303Ci\\265\\343U\\303\\312\\204",'::bytea, 50, 'description', 0, 0, 2, '2019-06-01 08:28:24.267934+00');
INSERT INTO "coupon_usages" ("coupon_id", "amount", "status", "period") VALUES (E'\\362\\342\\363\\371>+F\\256\\263\\300\\273|\\342N\\347\\014'::bytea, 22, 0, '2019-06-01 09:28:24.267934+00');

View File

@ -74,8 +74,7 @@ export function makePaymentsModule(api: PaymentsApi): StoreModule<PaymentsState>
state: new PaymentsState(),
mutations: {
[SET_BALANCE](state: PaymentsState, balance: number): void {
// we need -1 multiplication because negative balance from server is credits
state.balance = balance * -1;
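// the server is now assumed to return the balance with the correct sign,
// so the -1 multiplication is no longer applied here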
state.balance = balance;
},
[SET_CREDIT_CARDS](state: PaymentsState, creditCards: CreditCard[]): void {
state.creditCards = creditCards;