ci: use external repository

Change-Id: If26a005df45f6067240511d603fb4dd613f92b79
Egon Elbre 2019-12-16 17:25:12 +02:00 committed by Stefan Benten
parent 389d1821ea
commit ef8bc88328
18 changed files with 20 additions and 1874 deletions


@@ -1,9 +1,9 @@
pipeline {
agent {
dockerfile {
filename 'Dockerfile.jenkins'
args '-u root:root --cap-add SYS_PTRACE -v "/tmp/gomod":/go/pkg/mod -v "/tmp/npm":/tmp/npm'
docker {
label 'main'
image docker.build("storj-ci", "https://github.com/storj/ci.git").id
args '-u root:root --cap-add SYS_PTRACE -v "/tmp/gomod":/go/pkg/mod -v "/tmp/npm":/tmp/npm'
}
}
options {
@@ -40,18 +40,18 @@ pipeline {
parallel {
stage('Lint') {
steps {
sh 'go run ./scripts/check-copyright.go'
sh 'go run ./scripts/check-large-files.go'
sh 'go run ./scripts/check-imports.go -race ./...'
sh 'go run ./scripts/check-peer-constraints.go -race'
sh 'go run ./scripts/protobuf.go --protoc=$HOME/protoc/bin/protoc lint'
sh 'go run ./scripts/protobuf.go --protoc=$HOME/protoc/bin/protoc check-lock'
sh 'go run ./scripts/atomicalign.go ./...'
sh 'go run ./scripts/check-errs.go ./...'
sh 'check-copyright'
sh 'check-large-files'
sh 'check-imports -race ./...'
sh 'check-peer-constraints -race'
sh 'storj-protobuf --protoc=$HOME/protoc/bin/protoc lint'
sh 'storj-protobuf --protoc=$HOME/protoc/bin/protoc check-lock'
sh 'check-atomic-align ./...'
sh 'check-errs ./...'
sh 'bash ./scripts/check-dbx-version.sh'
sh 'staticcheck ./...'
sh 'golangci-lint -j=2 run'
sh 'go run scripts/check-mod-tidy.go -mod .build/go.mod.orig'
sh 'golangci-lint --config /go/ci/.golangci.yml -j=2 run'
sh 'check-mod-tidy -mod .build/go.mod.orig'
sh 'make check-satellite-config-lock'
sh 'make check-monitoring'
}
@@ -66,9 +66,9 @@ pipeline {
steps {
sh 'cockroach sql --insecure --host=localhost:26257 -e \'create database testcockroach;\''
sh 'psql -U postgres -c \'create database teststorj;\''
sh 'go run scripts/use-ports.go -from 1024 -to 10000 &'
sh 'go test -parallel 4 -p 6 -vet=off $COVERFLAGS -timeout 20m -json -race ./... 2>&1 | tee .build/tests.json | go run ./scripts/xunit.go -out .build/tests.xml'
sh 'go run scripts/check-clean-directory.go'
sh 'use-ports -from 1024 -to 10000 &'
sh 'go test -parallel 4 -p 6 -vet=off $COVERFLAGS -timeout 20m -json -race ./... 2>&1 | tee .build/tests.json | xunit -out .build/tests.xml'
sh 'check-clean-directory'
}
post {
@@ -79,7 +79,7 @@ pipeline {
script {
if(fileExists(".build/coverprofile")){
sh script: 'go run ./scripts/cover-remove-generated.go < .build/coverprofile > .build/clean.coverprofile', returnStatus: true
sh script: 'filter-cover-profile < .build/coverprofile > .build/clean.coverprofile', returnStatus: true
sh script: 'gocov convert .build/clean.coverprofile > .build/cover.json', returnStatus: true
sh script: 'gocov-xml < .build/cover.json > .build/cobertura.xml', returnStatus: true
cobertura coberturaReportFile: '.build/cobertura.xml'


@@ -51,15 +51,10 @@ build-dev-deps: ## Install dependencies for builds
curl -sfL https://install.goreleaser.com/github.com/golangci/golangci-lint.sh | bash -s -- -b ${GOPATH}/bin v1.21.0
.PHONY: lint
lint: check-copyrights ## Analyze and find problems in source code
lint: ## Analyze and find problems in source code
@echo "Running ${@}"
@golangci-lint run
.PHONY: check-copyrights
check-copyrights: ## Check source files for copyright headers
@echo "Running ${@}"
@go run ./scripts/check-copyright.go
.PHONY: goimports-fix
goimports-fix: ## Applies goimports to every go file (excluding vendored files)
goimports -w -local storj.io $$(find . -type f -name '*.go' -not -path "*/vendor/*")
@@ -155,8 +150,8 @@ test-sim-backwards-compatible: ## Test uploading a file with latest release (je
.PHONY: check-monitoring
check-monitoring: ## Check for locked monkit calls that have changed
@echo "Running ${@}"
@go run ./scripts/check-monitoring.go | diff -U0 ./monkit.lock - \
|| (echo "Locked monkit metrics have been changed. Notify #data-science and run \`go generate ./scripts/check-monitoring.go\` to update monkit.lock file." \
@check-monitoring ./... | diff -U0 ./monkit.lock - \
|| (echo "Locked monkit metrics have been changed. Notify #data-science and run \`go run github.com/storj/ci/check-monitoring -out monkit.lock ./...\` to update monkit.lock file." \
&& exit 1)
##@ Build

go.mod

@@ -105,7 +105,6 @@ require (
golang.org/x/net v0.0.0-20190916140828-c8589233b77d // indirect
golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e
golang.org/x/sys v0.0.0-20190916202348-b4ddaad3f8a3
golang.org/x/tools v0.0.0-20190917215024-905c8ffbfa41
google.golang.org/appengine v1.6.0 // indirect
google.golang.org/genproto v0.0.0-20190716160619-c506a9f90610 // indirect
google.golang.org/grpc v1.23.1

go.sum

@@ -479,7 +479,6 @@ golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn
golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
golang.org/x/net v0.0.0-20190424112056-4829fb13d2c6/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
golang.org/x/net v0.0.0-20190522155817-f3200d17e092/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks=
golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20190916140828-c8589233b77d h1:mCMDWKhNO37A7GAhOpHPbIw1cjd0V86kX1/WA9c7FZ8=
golang.org/x/net v0.0.0-20190916140828-c8589233b77d/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be h1:vEDujvNQGv4jgYKudGeI/+DAX4Jffq6hpD55MmoEvKs=
@@ -527,9 +526,6 @@ golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3
golang.org/x/tools v0.0.0-20190312170243-e65039ee4138/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
golang.org/x/tools v0.0.0-20190425222832-ad9eeb80039a/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q=
golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q=
golang.org/x/tools v0.0.0-20190917215024-905c8ffbfa41 h1:b81roplyyD40MvaAPpAaKtN/Aahd9L3t35zoiycwjRI=
golang.org/x/tools v0.0.0-20190917215024-905c8ffbfa41/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
google.golang.org/api v0.3.1/go.mod h1:6wY9I6uQWHQ8EM57III9mq/AjF+i8G65rmVagqKMtkk=
google.golang.org/api v0.3.2/go.mod h1:6wY9I6uQWHQ8EM57III9mq/AjF+i8G65rmVagqKMtkk=
google.golang.org/api v0.4.0/go.mod h1:8k5glujaEP+g9n7WNsDg8QP6cUVNI86fCNMcbazEtwE=


@@ -1,194 +0,0 @@
// Copyright (C) 2019 Storj Labs, Inc.
// See LICENSE for copying information.
// +build ignore
package main
import (
"fmt"
"go/ast"
"go/token"
"go/types"
"log"
"os"
"golang.org/x/tools/go/packages"
)
// holds on to the eventual exit code
var exit int
func main() {
// load up the requested packages
pkgs, err := packages.Load(&packages.Config{
Mode: 0 |
packages.NeedTypes |
packages.NeedTypesInfo |
packages.NeedTypesSizes |
packages.NeedSyntax |
packages.NeedImports |
packages.NeedName,
}, os.Args[1:]...)
if err != nil {
log.Fatal(err)
}
// check all of their atomic alignment
for _, pkg := range pkgs {
for _, arg := range gatherAtomicArguments(pkg) {
checkArgument(pkg, arg)
}
}
// exit with the correct code
os.Exit(exit)
}
// gatherAtomicArguments looks for calls to 64bit atomics and gathers their first
// argument as an ast expression.
func gatherAtomicArguments(pkg *packages.Package) (args []ast.Expr) {
for _, file := range pkg.Syntax {
ast.Inspect(file, func(node ast.Node) bool {
call, ok := node.(*ast.CallExpr)
if !ok {
return true
}
sel, ok := call.Fun.(*ast.SelectorExpr)
if !ok {
return true
}
ident, ok := sel.X.(*ast.Ident)
if !ok {
return true
}
name, ok := pkg.TypesInfo.Uses[ident].(*types.PkgName)
if !ok || name.Imported().Path() != "sync/atomic" {
return true
}
switch sel.Sel.Name {
case "AddInt64", "AddUint64", "LoadInt64", "LoadUint64",
"StoreInt64", "StoreUint64", "SwapInt64", "SwapUint64",
"CompareAndSwapInt64", "CompareAndSwapUint64":
args = append(args, call.Args[0])
}
return true
})
}
return args
}
// checkArgument makes sure that the ast expression is not an address of some field
// access into a struct that is not 64 bit aligned.
func checkArgument(pkg *packages.Package, arg ast.Expr) {
// ensure the expression is an address of expression
unary, ok := arg.(*ast.UnaryExpr)
if !ok || unary.Op != token.AND {
return
}
// gather the fields through the whole selection (&foo.bar.baz)
var fields []*types.Var
var root types.Type
var x = unary.X
for {
sel, ok := x.(*ast.SelectorExpr)
if !ok {
break
}
field, ok := pkg.TypesInfo.Selections[sel].Obj().(*types.Var)
if !ok || !field.IsField() {
return
}
fields = append(fields, field)
root = pkg.TypesInfo.Types[sel.X].Type
x = sel.X
}
if len(fields) == 0 {
return
}
// walk in reverse keeping track of the indices walked through
// this helps deal with embedded fields, etc.
var indices []int
for base := root; len(fields) > 0; fields = fields[:len(fields)-1] {
obj, index, _ := types.LookupFieldOrMethod(base, true, pkg.Types, fields[len(fields)-1].Name())
field, ok := obj.(*types.Var)
if !ok {
return
}
base = field.Type()
indices = append(indices, index...)
}
// dereference the root to start off at the base struct
base, _, ok := deref(root)
if !ok {
return
}
// now walk forward keeping track of offsets and indirections
var offset int64
var sizes = types.SizesFor("gc", "arm")
for _, index := range indices {
// get the next field type and keep track of if it was a pointer. if so
// then we need to reset our offset (it's guaranteed 64 bit aligned).
next, wasPtr, ok := deref(base.Field(index).Type())
if wasPtr {
offset = 0
} else {
offset += sizes.Offsetsof(structFields(base))[index]
}
// if we're no longer at a struct, we're done walking.
if !ok {
break
}
base = next
}
// check if the offset is aligned
if offset&7 == 0 {
return
}
// report an error and update the status code
file := pkg.Fset.File(arg.Pos())
line := file.Line(arg.Pos())
fmt.Fprintf(os.Stderr,
"%s:%d: address of non 64-bit aligned field passed to atomic (offset: %d)\n",
file.Name(), line, offset)
exit = 1
}
// deref takes a type that can be
// 1. an unnamed struct
// 2. a named struct
// 3. a pointer to an unnamed struct
// 4. a pointer to a named struct
// and returns the unnamed struct as well as if it was a pointer.
func deref(in types.Type) (*types.Struct, bool, bool) {
wasPtr := false
if ptr, ok := in.(*types.Pointer); ok {
wasPtr = true
in = ptr.Elem()
}
if named, ok := in.(*types.Named); ok {
in = named.Underlying()
}
out, ok := in.(*types.Struct)
return out, wasPtr, ok
}
// structFields gathers all of the fields of the passed in struct.
func structFields(in *types.Struct) []*types.Var {
out := make([]*types.Var, in.NumFields())
for i := range out {
out[i] = in.Field(i)
}
return out
}
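
For context, a minimal hypothetical sketch (not part of the deleted file above) of the kind of code this checker reports: on 32-bit platforms only the first word of an allocated struct is guaranteed to be 64-bit aligned, so a 64-bit field that follows smaller fields may be misaligned when its address is passed to sync/atomic.

package main

import (
    "fmt"
    "sync/atomic"
)

// counters puts an int64 after a bool; with 32-bit struct layout the hits
// field lands at offset 4, which is what the checker flags.
type counters struct {
    enabled bool
    hits    int64
}

// fixed places the 64-bit field first, so it is always aligned.
type fixed struct {
    hits    int64
    enabled bool
}

func main() {
    var c counters
    atomic.AddInt64(&c.hits, 1) // reported: address of non 64-bit aligned field
    var f fixed
    atomic.AddInt64(&f.hits, 1) // fine
    fmt.Println(c.hits, f.hits)
}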


@@ -1,56 +0,0 @@
// Copyright (C) 2019 Storj Labs, Inc.
// See LICENSE for copying information.
// +build ignore
package main
import (
"fmt"
"os"
"os/exec"
"strings"
)
func main() {
cmd := exec.Command("git", "ls-files", ".", "--others")
out, err := cmd.Output()
if err != nil {
fmt.Println("Checking left-over files failed.")
fmt.Println(err)
os.Exit(1)
}
leftover := strings.Split(strings.TrimSpace(string(out)), "\n")
leftover = ignorePrefix(leftover, ".build")
// there's no easy way to modify npm to use tmp folders
leftover = ignorePrefix(leftover, "web/satellite/node_modules/")
leftover = ignorePrefix(leftover, "web/satellite/coverage/")
leftover = ignorePrefix(leftover, "web/satellite/dist/")
// TODO: shouldn't this be already up to date?
leftover = ignorePrefix(leftover, "web/satellite/package-lock.json")
if len(leftover) != 0 {
fmt.Println("Files left-over after running tests:")
for _, file := range leftover {
fmt.Println(file)
}
os.Exit(1)
}
}
func ignorePrefix(files []string, dir string) []string {
result := files[:0]
for _, file := range files {
if file == "" {
continue
}
if strings.HasPrefix(file, dir) {
continue
}
result = append(result, file)
}
return result
}


@@ -1,83 +0,0 @@
// Copyright (C) 2019 Storj Labs, Inc.
// See LICENSE for copying information.
// +build ignore
package main
import (
"bytes"
"fmt"
"io"
"os"
"path/filepath"
)
var checkFiles = map[string]bool{
".go": true,
".ts": true,
".js": true,
".vue": true,
}
var ignoreFolder = map[string]bool{
".git": true,
"node_modules": true,
"coverage": true,
"dist": true,
}
func main() {
var failed int
err := filepath.Walk(".", func(path string, info os.FileInfo, err error) error {
if err != nil {
fmt.Println(err)
return nil
}
if info.IsDir() && ignoreFolder[info.Name()] {
return filepath.SkipDir
}
if !checkFiles[filepath.Ext(path)] {
return nil
}
file, err := os.Open(path)
if err != nil {
failed++
fmt.Printf("failed to read %v: %v\n", path, err)
return nil
}
defer func() {
if err := file.Close(); err != nil {
fmt.Println(err)
}
}()
var header [256]byte
n, err := file.Read(header[:])
if err != nil && err != io.EOF {
fmt.Printf("failed to read %v: %v\n", path, err)
return nil
}
if bytes.Contains(header[:n], []byte(`AUTOGENERATED`)) ||
bytes.Contains(header[:n], []byte(`Code generated`)) {
return nil
}
if !bytes.Contains(header[:n], []byte(`Copyright `)) {
failed++
fmt.Printf("missing copyright: %v\n", path)
}
return nil
})
if err != nil {
fmt.Println(err)
}
if failed > 0 {
os.Exit(1)
}
}
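
For reference, a hypothetical file header that satisfies the check above; files whose first 256 bytes contain "AUTOGENERATED" or "Code generated" are exempted instead.

// Copyright (C) 2019 Storj Labs, Inc.
// See LICENSE for copying information.

// Package example carries the copyright header within the first 256 bytes,
// so the check accepts the file without flagging it.
package example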


@@ -1,83 +0,0 @@
// Copyright (C) 2019 Storj Labs, Inc.
// See LICENSE for copying information.
// +build ignore
package main
import (
"go/ast"
"go/token"
"golang.org/x/tools/go/analysis"
"golang.org/x/tools/go/analysis/passes/inspect"
"golang.org/x/tools/go/analysis/singlechecker"
"golang.org/x/tools/go/ast/inspector"
"golang.org/x/tools/go/types/typeutil"
)
func main() { singlechecker.Main(Analyzer) }
var Analyzer = &analysis.Analyzer{
Name: "errs",
Doc: "check for proper usage of errs package",
Run: run,
Requires: []*analysis.Analyzer{
inspect.Analyzer,
},
FactTypes: []analysis.Fact{},
}
func run(pass *analysis.Pass) (interface{}, error) {
inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)
nodeFilter := []ast.Node{
(*ast.CallExpr)(nil),
}
inspect.Preorder(nodeFilter, func(n ast.Node) {
call := n.(*ast.CallExpr)
fn := typeutil.StaticCallee(pass.TypesInfo, call)
if fn == nil {
return // not a static call
}
switch fn.FullName() {
case "github.com/zeebo/errs.Combine":
if len(call.Args) == 0 {
pass.Reportf(call.Lparen, "errs.Combine() can be simplified to nil")
}
if len(call.Args) == 1 && call.Ellipsis == token.NoPos {
pass.Reportf(call.Lparen, "errs.Combine(x) can be simplified to x")
}
case "(*github.com/zeebo/errs.Class).New":
if len(call.Args) == 0 {
return
}
// Disallow things like Error.New(err.Error())
switch arg := call.Args[0].(type) {
case *ast.BasicLit: // allow string constants
case *ast.Ident: // allow string variables
default:
// allow "alpha" + "beta" + "gamma"
if IsConcatString(arg) {
return
}
pass.Reportf(call.Lparen, "(*errs.Class).New with non-obvious format string")
}
}
})
return nil, nil
}
func IsConcatString(arg ast.Expr) bool {
switch arg := arg.(type) {
case *ast.BasicLit:
return arg.Kind == token.STRING
case *ast.BinaryExpr:
return arg.Op == token.ADD && IsConcatString(arg.X) && IsConcatString(arg.Y)
default:
return false
}
}
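
A hypothetical snippet (not from the repository) showing the patterns the analyzer above reports, assuming the github.com/zeebo/errs package:

package example

import "github.com/zeebo/errs"

// Error is an illustrative error class; all names here are made up.
var Error = errs.Class("example")

func combine(err1, err2 error) error {
    _ = errs.Combine()              // reported: can be simplified to nil
    _ = errs.Combine(err1)          // reported: can be simplified to err1
    return errs.Combine(err1, err2) // fine
}

func wrap(err error) error {
    // reported: non-obvious format string (err.Error() may contain % verbs)
    return Error.New(err.Error())
}

func wrapOK(err error) error {
    return Error.New("operation failed: %v", err) // fine: constant format string
}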


@@ -1,367 +0,0 @@
// Copyright (C) 2019 Storj Labs, Inc.
// See LICENSE for copying information.
// +build ignore
package main
import (
"bytes"
"flag"
"fmt"
"go/ast"
"go/token"
"io"
"os"
"reflect"
"runtime"
"sort"
"strconv"
"strings"
"golang.org/x/tools/go/packages"
)
/*
check-imports verifies whether imports are divided into three blocks:
std packages
external packages
storj.io packages
*/
var race = flag.Bool("race", false, "load with race tag")
func main() {
flag.Parse()
pkgNames := flag.Args()
if len(pkgNames) == 0 {
pkgNames = []string{"."}
}
var buildFlags []string
if *race {
buildFlags = append(buildFlags, "-race")
}
roots, err := packages.Load(&packages.Config{
Mode: packages.LoadAllSyntax,
Env: os.Environ(),
BuildFlags: buildFlags,
Tests: true,
}, pkgNames...)
if err != nil {
panic(err)
}
fmt.Println("checking import order:")
// load all packages
seen := map[*packages.Package]bool{}
pkgs := []*packages.Package{}
var visit func(*packages.Package)
visit = func(p *packages.Package) {
if seen[p] {
return
}
includeStd(p)
if strings.HasPrefix(p.ID, "storj.io") {
pkgs = append(pkgs, p)
}
seen[p] = true
for _, pkg := range p.Imports {
visit(pkg)
}
}
for _, pkg := range roots {
visit(pkg)
}
// sort the packages
sort.Slice(pkgs, func(i, k int) bool { return pkgs[i].ID < pkgs[k].ID })
var misgrouped, unsorted []Imports
for _, pkg := range pkgs {
pkgmisgrouped, pkgunsorted := verifyPackage(os.Stderr, pkg)
misgrouped = append(misgrouped, pkgmisgrouped...)
unsorted = append(unsorted, pkgunsorted...)
}
exitCode := 0
if len(misgrouped) > 0 {
exitCode = 1
fmt.Fprintln(os.Stderr)
fmt.Fprintln(os.Stderr, "Imports are not in the standard grouping [std storj other]:")
for _, imports := range misgrouped {
fmt.Fprintln(os.Stderr, "\t"+imports.Path, imports.Classes())
}
}
if len(unsorted) > 0 {
exitCode = 1
fmt.Fprintln(os.Stderr)
fmt.Fprintln(os.Stderr, "Imports are not sorted:")
for _, imports := range unsorted {
fmt.Fprintln(os.Stderr, "\t"+imports.Path)
}
}
os.Exit(exitCode)
}
func verifyPackage(stderr io.Writer, pkg *packages.Package) (misgrouped, unsorted []Imports) {
// ignore generated test binaries
if strings.HasSuffix(pkg.ID, ".test") {
return
}
for i, file := range pkg.Syntax {
path := pkg.CompiledGoFiles[i]
imports := LoadImports(pkg.Fset, path, file)
ordered := true
sorted := true
for _, section := range imports.Decls {
if !section.IsGrouped() {
ordered = false
}
if !section.IsSorted() {
sorted = false
}
}
if !ordered || !sorted {
if isGenerated(path) {
fmt.Fprintln(stderr, "(ignoring generated)", path)
continue
}
}
if !ordered {
misgrouped = append(misgrouped, imports)
}
if !sorted {
unsorted = append(unsorted, imports)
}
}
return
}
// Imports defines all imports for a single file.
type Imports struct {
Path string
Generated bool
Decls []ImportDecl
}
// Classes returns all import groupings
func (imports Imports) Classes() [][]Class {
var classes [][]Class
for _, decl := range imports.Decls {
classes = append(classes, decl.Classes())
}
return classes
}
// ImportDecl defines a single import declaration
type ImportDecl []ImportGroup
// allowedGroups lists all valid groupings
var allowedGroups = [][]Class{
{Standard},
{Storj},
{Other},
{Standard, Storj},
{Standard, Other},
{Other, Storj},
{Standard, Other, Storj},
}
// IsGrouped returns whether the grouping is allowed.
func (decls ImportDecl) IsGrouped() bool {
classes := decls.Classes()
for _, allowedGroup := range allowedGroups {
if reflect.DeepEqual(allowedGroup, classes) {
return true
}
}
return false
}
// Classes returns each group class.
func (decl ImportDecl) Classes() []Class {
classes := make([]Class, len(decl))
for i := range classes {
classes[i] = decl[i].Class()
}
return classes
}
// IsSorted returns whether the group is sorted.
func (decls ImportDecl) IsSorted() bool {
for _, decl := range decls {
if !decl.IsSorted() {
return false
}
}
return true
}
// ImportGroup defines a single import statement.
type ImportGroup struct {
Specs []*ast.ImportSpec
Paths []string
}
// IsSorted returns whether the group is sorted.
func (group ImportGroup) IsSorted() bool {
return sort.StringsAreSorted(group.Paths)
}
// Class returns the classification of this import group.
func (group ImportGroup) Class() Class {
var class Class
for _, path := range group.Paths {
class |= ClassifyImport(path)
}
return class
}
// Class defines a bitset of import classification
type Class byte
// Class defines three different groups
const (
// Standard is all go standard packages
Standard Class = 1 << iota
// Storj is imports that start with `storj.io`
Storj
// Other is everything else
Other
)
// ClassifyImport classifies an import path to a class.
func ClassifyImport(pkgPath string) Class {
if strings.HasPrefix(pkgPath, "storj.io/") {
return Storj
}
if stdlib[pkgPath] {
return Standard
}
return Other
}
// String returns contents of the class.
func (class Class) String() string {
var s []string
if class&Standard != 0 {
s = append(s, "std")
}
if class&Storj != 0 {
s = append(s, "storj")
}
if class&Other != 0 {
s = append(s, "other")
}
return strings.Join(s, "|")
}
// LoadImports loads import groups from a given fileset.
func LoadImports(fset *token.FileSet, name string, f *ast.File) Imports {
var imports Imports
imports.Path = name
for _, d := range f.Decls {
d, ok := d.(*ast.GenDecl)
if !ok || d.Tok != token.IMPORT {
// Not an import declaration, so we're done.
// Imports are always first.
break
}
if !d.Lparen.IsValid() {
// Not a block: sorted by default.
continue
}
// identify specs on successive lines
lastGroup := 0
specgroups := [][]ast.Spec{}
for i, s := range d.Specs {
if i > lastGroup && fset.Position(s.Pos()).Line > 1+fset.Position(d.Specs[i-1].End()).Line {
// i begins a new run. End this one.
specgroups = append(specgroups, d.Specs[lastGroup:i])
lastGroup = i
}
}
specgroups = append(specgroups, d.Specs[lastGroup:])
// convert ast.Spec-s groups into import groups
var decl ImportDecl
for _, specgroup := range specgroups {
var group ImportGroup
for _, importSpec := range specgroup {
importSpec := importSpec.(*ast.ImportSpec)
path, err := strconv.Unquote(importSpec.Path.Value)
if err != nil {
panic(err)
}
group.Specs = append(group.Specs, importSpec)
group.Paths = append(group.Paths, path)
}
decl = append(decl, group)
}
imports.Decls = append(imports.Decls, decl)
}
return imports
}
var root = runtime.GOROOT()
var stdlib = map[string]bool{}
func includeStd(p *packages.Package) {
if len(p.GoFiles) == 0 {
stdlib[p.ID] = true
return
}
if strings.HasPrefix(p.GoFiles[0], root) {
stdlib[p.ID] = true
return
}
}
func isGenerated(path string) bool {
file, err := os.Open(path)
if err != nil {
fmt.Fprintf(os.Stderr, "failed to read %v: %v\n", path, err)
return false
}
defer func() {
if err := file.Close(); err != nil {
fmt.Fprintln(os.Stderr, err)
}
}()
var header [256]byte
n, err := file.Read(header[:])
if err != nil && err != io.EOF {
fmt.Fprintf(os.Stderr, "failed to read %v: %v\n", path, err)
return false
}
return bytes.Contains(header[:n], []byte(`AUTOGENERATED`)) ||
bytes.Contains(header[:n], []byte(`Code generated`))
}
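
An illustrative (hypothetical) file showing the import grouping the checker above accepts: standard library first, then external packages, then storj.io packages, with each group sorted.

package example

import (
    "context"
    "fmt"

    "github.com/zeebo/errs"

    "storj.io/storj/private/memory"
)

// describe uses one import from each group so the example compiles as-is.
func describe(ctx context.Context, size memory.Size) error {
    if ctx == nil {
        return errs.New("nil context")
    }
    fmt.Println("size:", size)
    return nil
}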


@@ -1,54 +0,0 @@
// Copyright (C) 2019 Storj Labs, Inc.
// See LICENSE for copying information.
// +build ignore
package main
import (
"fmt"
"os"
"path/filepath"
"storj.io/storj/private/memory"
)
var ignoreFolder = map[string]bool{
".build": true,
".git": true,
"node_modules": true,
"coverage": true,
"dist": true,
}
func main() {
const fileSizeLimit = 650 * memory.KB
var failed int
err := filepath.Walk(".", func(path string, info os.FileInfo, err error) error {
if err != nil {
fmt.Println(err)
return nil
}
if info.IsDir() && ignoreFolder[info.Name()] {
return filepath.SkipDir
}
size := memory.Size(info.Size())
if size > fileSizeLimit {
failed++
fmt.Printf("%v (%v)\n", path, size)
}
return nil
})
if err != nil {
fmt.Println(err)
}
if failed > 0 {
fmt.Printf("some files were over size limit %v\n", fileSizeLimit)
os.Exit(1)
}
}


@@ -1,112 +0,0 @@
// Copyright (C) 2019 Storj Labs, Inc.
// See LICENSE for copying information.
// +build ignore
package main
import (
"bytes"
"flag"
"fmt"
"io/ioutil"
"os"
"os/exec"
"strings"
"github.com/kylelemons/godebug/diff"
)
var modfile = flag.String("mod", "go.mod", "original mod file")
func main() {
flag.Parse()
tempdir, err := ioutil.TempDir("", "check-mod-tidy")
checkf(err, "failed to create a temporary directory: %v\n", err)
defer func() {
if err := os.RemoveAll(tempdir); err != nil {
fmt.Fprintf(os.Stderr, "failed to delete temporary directory: %v\n", err)
}
}()
err = copyDir(".", tempdir)
checkf(err, "failed to copy directory: %v\n", err)
workingDir, err := os.Getwd()
checkf(err, "failed to get working directory: %v\n", err)
err = os.Chdir(tempdir)
checkf(err, "failed to change directory: %v\n", err)
defer os.Chdir(workingDir)
original, err := ioutil.ReadFile(*modfile)
checkf(err, "failed to read %q: %v\n", *modfile, err)
err = ioutil.WriteFile("go.mod", original, 0755)
checkf(err, "failed to write go.mod: %v\n", err)
err = tidy()
checkf(err, "failed to tidy go.mod: %v\n", err)
changed, err := ioutil.ReadFile("go.mod")
checkf(err, "failed to read go.mod: %v\n", err)
if !bytes.Equal(original, changed) {
diff, removed := difflines(string(original), string(changed))
fmt.Fprintln(os.Stderr, "go.mod is not tidy")
fmt.Fprintln(os.Stderr, diff)
if removed {
os.Exit(1)
}
}
}
func tidy() error {
var err error
for repeat := 2; repeat > 0; repeat-- {
cmd := exec.Command("go", "mod", "tidy")
cmd.Stdout, cmd.Stderr = os.Stderr, os.Stderr
err = cmd.Run()
if err != nil {
fmt.Fprintf(os.Stderr, "go mod tidy failed, retrying: %v", err)
continue
}
break
}
return err
}
func copyDir(src, dst string) error {
cmd := exec.Command("cp", "-a", src, dst)
cmd.Stdout, cmd.Stderr = os.Stderr, os.Stderr
return cmd.Run()
}
func checkf(err error, format string, args ...interface{}) {
if err == nil {
return
}
fmt.Fprintf(os.Stderr, format, args...)
os.Exit(1)
}
func difflines(a, b string) (patch string, removed bool) {
alines, blines := strings.Split(a, "\n"), strings.Split(b, "\n")
chunks := diff.DiffChunks(alines, blines)
buf := new(bytes.Buffer)
for _, c := range chunks {
for _, line := range c.Added {
fmt.Fprintf(buf, "+%s\n", line)
}
for _, line := range c.Deleted {
fmt.Fprintf(buf, "-%s\n", line)
removed = true
}
}
return strings.TrimRight(buf.String(), "\n"), removed
}


@@ -1,181 +0,0 @@
// Copyright (C) 2019 Storj Labs, Inc.
// See LICENSE for copying information.
//go:generate go run check-monitoring.go ../monkit.lock
// +build ignore
package main
import (
"fmt"
"go/ast"
"go/token"
"go/types"
"io/ioutil"
"log"
"os"
"sort"
"strings"
"golang.org/x/tools/go/packages"
)
var (
monkitPath = "gopkg.in/spacemonkeygo/monkit.v2"
lockFilePerms = os.FileMode(0644)
)
func main() {
pkgs, err := packages.Load(&packages.Config{
Mode: packages.NeedCompiledGoFiles | packages.NeedSyntax | packages.NeedName | packages.NeedTypes | packages.NeedTypesInfo,
}, "storj.io/storj/...")
if err != nil {
log.Fatalf("error while loading packages: %s", err)
}
var lockedFnNames []string
for _, pkg := range pkgs {
lockedFnNames = append(lockedFnNames, findLockedFnNames(pkg)...)
}
sortedNames := sortAndUnique(lockedFnNames)
outputStr := strings.Join(sortedNames, "\n")
if len(os.Args) == 2 {
file := os.Args[1]
if err := ioutil.WriteFile(file, []byte(outputStr+"\n"), lockFilePerms); err != nil {
log.Fatalf("error while writing to file \"%s\": %s", file, err)
}
} else {
fmt.Println(outputStr)
}
}
func findLockedFnNames(pkg *packages.Package) []string {
var (
lockedTasksPos []token.Pos
lockedTaskFns []*ast.FuncDecl
lockedFnInfos []string
)
// Collect locked comments and what line they are on.
for _, file := range pkg.Syntax {
lockedLines := make(map[int]struct{})
for _, group := range file.Comments {
for _, comment := range group.List {
if comment.Text == "//locked" {
commentLine := pkg.Fset.Position(comment.Pos()).Line
lockedLines[commentLine] = struct{}{}
}
}
}
if len(lockedLines) == 0 {
continue
}
// Find calls to monkit functions we're interested in that are on the
// same line as a "locked" comment and keep track of their position.
// NB: always return true to walk entire node tree.
ast.Inspect(file, func(node ast.Node) bool {
if node == nil {
return true
}
if !isMonkitCall(pkg, node) {
return true
}
// Ensure call line matches a "locked" comment line.
callLine := pkg.Fset.Position(node.End()).Line
if _, ok := lockedLines[callLine]; !ok {
return true
}
// We are already checking to ensure that these type assertions are valid in `isMonkitCall`.
sel := node.(*ast.CallExpr).Fun.(*ast.SelectorExpr)
// Track `mon.Task` calls.
if sel.Sel.Name == "Task" {
lockedTasksPos = append(lockedTasksPos, node.End())
return true
}
// Track other monkit calls that have one string argument (e.g. monkit.FloatVal, etc.)
// and transform them to representative string.
if len(node.(*ast.CallExpr).Args) != 1 {
return true
}
argLiteral, ok := node.(*ast.CallExpr).Args[0].(*ast.BasicLit)
if !ok {
return true
}
if argLiteral.Kind == token.STRING {
lockedFnInfo := pkg.PkgPath + "." + argLiteral.Value + " " + sel.Sel.Name
lockedFnInfos = append(lockedFnInfos, lockedFnInfo)
}
return true
})
// Track all function declarations containing locked `mon.Task` calls.
ast.Inspect(file, func(node ast.Node) bool {
fn, ok := node.(*ast.FuncDecl)
if !ok {
return true
}
for _, locked := range lockedTasksPos {
if fn.Pos() < locked && locked < fn.End() {
lockedTaskFns = append(lockedTaskFns, fn)
}
}
return true
})
}
// Transform the ast.FuncDecls containing locked `mon.Task` calls to representative string.
for _, fn := range lockedTaskFns {
object := pkg.TypesInfo.Defs[fn.Name]
var receiver string
if fn.Recv != nil {
typ := fn.Recv.List[0].Type
if star, ok := typ.(*ast.StarExpr); ok {
receiver = ".*"
typ = star.X
} else {
receiver = "."
}
recvObj := pkg.TypesInfo.Uses[typ.(*ast.Ident)]
receiver += recvObj.Name()
}
lockedFnInfo := object.Pkg().Path() + receiver + "." + object.Name() + " Task"
lockedFnInfos = append(lockedFnInfos, lockedFnInfo)
}
return lockedFnInfos
}
// isMonkitCall returns whether the node is a call to a function in the monkit package.
func isMonkitCall(pkg *packages.Package, in ast.Node) bool {
defer func() { recover() }()
ident := in.(*ast.CallExpr).Fun.(*ast.SelectorExpr).X.(*ast.Ident)
return pkg.TypesInfo.Uses[ident].(*types.Var).Type().(*types.Pointer).Elem().(*types.Named).Obj().Pkg().Path() == monkitPath
}
func sortAndUnique(input []string) (unique []string) {
set := make(map[string]struct{})
for _, item := range input {
if _, ok := set[item]; ok {
continue
} else {
set[item] = struct{}{}
}
}
for item := range set {
unique = append(unique, item)
}
sort.Strings(unique)
return unique
}
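
A hypothetical illustration (not from the repository) of what the script above scans for: monkit calls with a //locked comment on the same line, which become entries in monkit.lock.

package service

import (
    "context"

    monkit "gopkg.in/spacemonkeygo/monkit.v2"
)

var mon = monkit.Package()

// Service is illustrative only.
type Service struct{}

// Close would produce lock entries roughly of the form
// "<package path>.*Service.Close Task" for the Task call and
// "<package path>."segments_repaired" IntVal" for the metric below.
func (s *Service) Close(ctx context.Context) (err error) {
    defer mon.Task()(&ctx)(&err)               //locked
    mon.IntVal("segments_repaired").Observe(1) //locked
    return nil
}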


@@ -1,214 +0,0 @@
// Copyright (C) 2019 Storj Labs, Inc.
// See LICENSE for copying information.
// +build ignore
// check-peer-constraints checks that none of the core packages import peers directly.
package main
import (
"flag"
"fmt"
"os"
"regexp"
"sort"
"strings"
"golang.org/x/tools/go/packages"
)
var IgnorePackages = []string{
// Currently overlay contains NodeDossier which is used in multiple places.
"storj.io/storj/satellite/overlay",
}
var Libraries = []string{
"storj.io/storj/pkg/...",
"storj.io/storj/lib/...",
"storj.io/storj/uplink/...",
"storj.io/storj/mobile/...",
"storj.io/storj/storage/...",
}
var Peers = []string{
"storj.io/storj/satellite/...",
"storj.io/storj/storagenode/...",
"storj.io/storj/versioncontrol/...",
"storj.io/storj/linksharing/...",
"storj.io/storj/certificate/...",
}
var Cmds = []string{
"storj.io/storj/cmd/...",
}
var race = flag.Bool("race", false, "load with race tag")
func main() {
flag.Parse()
var buildFlags []string
if *race {
buildFlags = append(buildFlags, "-race")
}
pkgs, err := packages.Load(&packages.Config{
Mode: packages.LoadImports,
Env: os.Environ(),
BuildFlags: buildFlags,
Tests: false,
}, "storj.io/storj/...")
if err != nil {
fmt.Fprintf(os.Stderr, "failed to load pacakges: %v\n", err)
os.Exit(1)
}
pkgs = flatten(pkgs)
exitcode := 0
// libraries shouldn't depend on peers nor commands
for _, library := range Libraries {
source := match(pkgs, library)
for _, peer := range include(Cmds, Peers) {
destination := match(pkgs, peer)
if links(source, destination) {
fmt.Fprintf(os.Stdout, "%q is importing %q\n", library, peer)
exitcode = 1
}
}
}
// peer code shouldn't depend on command code
for _, peer := range Peers {
source := match(pkgs, peer)
for _, cmd := range Cmds {
destination := match(pkgs, cmd)
if links(source, destination) {
fmt.Fprintf(os.Stdout, "%q is importing %q\n", peer, cmd)
exitcode = 1
}
}
}
// one peer shouldn't depend on another peer
for _, peerA := range Peers {
source := match(pkgs, peerA)
for _, peerB := range Peers {
// ignore subpackages
if strings.HasPrefix(peerA, peerB) || strings.HasPrefix(peerB, peerA) {
continue
}
destination := match(pkgs, peerB)
if links(source, destination) {
fmt.Fprintf(os.Stdout, "%q is importing %q\n", peerA, peerB)
exitcode = 1
}
}
}
os.Exit(exitcode)
}
func include(globlists ...[]string) []string {
var xs []string
for _, globs := range globlists {
xs = append(xs, globs...)
}
return xs
}
func match(pkgs []*packages.Package, globs ...string) []*packages.Package {
for i, glob := range globs {
globs[i] = strings.ReplaceAll(glob, "...", ".*")
}
rx := regexp.MustCompile("^(" + strings.Join(globs, "|") + ")$")
var rs []*packages.Package
for _, pkg := range pkgs {
if rx.MatchString(pkg.PkgPath) {
rs = append(rs, pkg)
}
}
return rs
}
func links(source, destination []*packages.Package) bool {
targets := map[string]bool{}
for _, dst := range destination {
if ignorePkg(dst) {
continue
}
targets[dst.PkgPath] = true
}
links := false
visited := map[string]bool{}
var visit func(pkg *packages.Package, path []*packages.Package)
visit = func(pkg *packages.Package, path []*packages.Package) {
for id, imp := range pkg.Imports {
if _, visited := visited[id]; visited {
continue
}
visited[id] = true
if targets[id] {
links = true
fmt.Printf("# import chain %q\n", pathstr(append(path, pkg, imp)))
}
visit(imp, append(path, pkg))
}
}
for _, pkg := range source {
visit(pkg, nil)
}
return links
}
func ignorePkg(pkg *packages.Package) bool {
for _, ignorePkg := range IgnorePackages {
if strings.HasPrefix(pkg.PkgPath, ignorePkg) {
return true
}
}
return false
}
func flatten(pkgs []*packages.Package) []*packages.Package {
var all []*packages.Package
visited := map[string]bool{}
var visit func(pkg *packages.Package)
visit = func(pkg *packages.Package) {
if _, visited := visited[pkg.PkgPath]; visited {
return
}
visited[pkg.PkgPath] = true
all = append(all, pkg)
for _, imp := range pkg.Imports {
visit(imp)
}
}
for _, pkg := range pkgs {
visit(pkg)
}
sort.Slice(all, func(i, k int) bool {
return all[i].PkgPath < all[k].PkgPath
})
return all
}
func pathstr(path []*packages.Package) string {
ids := []string{}
for _, pkg := range path {
ids = append(ids, pkg.PkgPath)
}
return strings.Join(ids, " > ")
}
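
An illustrative (hypothetical) violation of the constraint this script enforces: a library file, for example under storj.io/storj/pkg/..., importing a peer package under storj.io/storj/satellite/... would be reported along with the offending import chain. The package and import path below are made up for illustration.

// Illustrative only: if this file lived in a library package (say under
// storj.io/storj/pkg/...), the peer import below is what
// check-peer-constraints would flag.
package somelib

import (
    // importing a peer (satellite) package from library code: flagged
    _ "storj.io/storj/satellite/console"
)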


@@ -1,27 +0,0 @@
// Copyright (C) 2019 Storj Labs, Inc.
// See LICENSE for copying information.
// +build ignore
package main
import (
"bufio"
"fmt"
"os"
"strings"
)
func main() {
scanner := bufio.NewScanner(os.Stdin)
for scanner.Scan() {
line := scanner.Text()
if strings.Contains(line, ".pb.") {
continue
}
if strings.Contains(line, ".dbx.") {
continue
}
fmt.Println(line)
}
}
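
A runnable sketch (the sample lines are hypothetical) of the effect of the filter above: coverage lines referring to generated .pb. and .dbx. files are dropped, everything else is passed through.

package main

import (
    "fmt"
    "strings"
)

func main() {
    // Hypothetical cover profile lines; only the first survives the filter.
    lines := []string{
        "storj.io/storj/satellite/metainfo/service.go:10.2,12.3 2 1",
        "storj.io/storj/pkg/pb/metainfo.pb.go:100.1,101.16 1 0",
        "storj.io/storj/satellite/satellitedb/dbx/satellitedb.dbx.go:5.1,6.2 1 0",
    }
    for _, line := range lines {
        if strings.Contains(line, ".pb.") || strings.Contains(line, ".dbx.") {
            continue
        }
        fmt.Println(line)
    }
}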


@@ -1,17 +0,0 @@
// Copyright (C) 2019 Storj Labs, Inc.
// See LICENSE for copying information.
package scripts_test
// this ensures that we download the necessary packages for the tools in scripts folder
// without actually being a binary
import (
"golang.org/x/tools/go/ast/astutil"
"golang.org/x/tools/go/packages"
"golang.org/x/tools/imports"
)
var _ = imports.Process
var _ = packages.NeedName
var _ = astutil.PathEnclosingInterval


@@ -1,68 +0,0 @@
// Copyright (C) 2019 Storj Labs, Inc.
// See LICENSE for copying information.
// +build ignore
package main
import (
"bytes"
"io"
"os"
)
// fail-on-race detects the keywords "DATA RACE" and "panic" in the output
// and exits with an error code if the output contains either.
func main() {
var buffer [8192]byte
problemDetected := false
search := [][]byte{
[]byte("DATA RACE"),
[]byte("panic"),
}
maxsearch := 0
for _, keyword := range search {
if maxsearch < len(keyword) {
maxsearch = len(keyword)
}
}
start := 0
for {
n, readErr := os.Stdin.Read(buffer[start:])
end := start + n
_, writeErr := os.Stdout.Write(buffer[start:end])
if writeErr != nil {
os.Stderr.Write([]byte(writeErr.Error()))
os.Exit(2)
}
for _, keyword := range search {
if bytes.Contains(buffer[:end], keyword) {
problemDetected = true
break
}
}
// copy buffer tail to the beginning of the content
if end > maxsearch {
copy(buffer[:], buffer[end-maxsearch:end])
start = maxsearch
}
if readErr != nil {
break
}
}
_, _ = io.Copy(os.Stdout, os.Stdin)
if problemDetected {
os.Stderr.Write([]byte("\nTest failed due to data race or panic.\n"))
os.Exit(1)
}
}


@@ -1,49 +0,0 @@
// Copyright (C) 2019 Storj Labs, Inc.
// See LICENSE for copying information.
// +build ignore
package main
import (
"flag"
"fmt"
"net"
"os"
"os/signal"
"strconv"
"syscall"
)
var (
fromPort = flag.Int("from", 0, "first port")
toPort = flag.Int("to", 10000, "last port")
)
func main() {
flag.Parse()
var listeners []net.Listener
var unableToStart []int
for port := *fromPort; port < *toPort; port++ {
listener, err := net.Listen("tcp", net.JoinHostPort("127.0.0.1", strconv.Itoa(port)))
if err != nil {
unableToStart = append(unableToStart, port)
continue
}
listeners = append(listeners, listener)
}
fmt.Printf("use-ports: unable to start on %v\n", unableToStart)
fmt.Printf("use-ports: listening on ports %v to %v\n", *fromPort, *toPort)
sigs := make(chan os.Signal, 1)
signal.Notify(sigs, syscall.SIGQUIT)
<-sigs
for _, listener := range listeners {
err := listener.Close()
if err != nil {
fmt.Printf("unable to close: %v\n", err)
}
}
}


@@ -1,339 +0,0 @@
// Copyright (C) 2019 Storj Labs, Inc.
// See LICENSE for copying information.
// +build ignore
package main
import (
"bufio"
"bytes"
"encoding/json"
"encoding/xml"
"flag"
"fmt"
"io"
"os"
"strconv"
"strings"
"unicode"
"github.com/mfridman/tparse/parse"
)
var xunit = flag.String("out", "", "xunit output file")
func main() {
flag.Parse()
if *xunit == "" {
fmt.Fprintf(os.Stderr, "xunit file not specified\n")
os.Exit(1)
}
var buffer bytes.Buffer
stdin := io.TeeReader(os.Stdin, &buffer)
pkgs, err := ProcessWithEcho(stdin)
if err != nil {
if err == parse.ErrNotParseable {
fmt.Fprintf(os.Stderr, "tparse error: no parseable events: call go test with -json flag\n\n")
} else {
fmt.Fprintf(os.Stderr, "tparse error: %v\n\n", err)
}
defer os.Exit(1)
} else {
defer os.Exit(pkgs.ExitCode())
}
output, err := os.Create(*xunit)
if err != nil {
fmt.Fprintf(os.Stderr, "create error: %v\n\n", err)
return
}
defer func() {
if err := output.Close(); err != nil {
fmt.Fprintf(os.Stderr, "close error: %v\n\n", err)
}
}()
_, _ = output.Write([]byte(xml.Header))
encoder := xml.NewEncoder(output)
encoder.Indent("", "\t")
defer encoder.Flush()
encoder.EncodeToken(xml.StartElement{Name: xml.Name{Local: "testsuites"}, Attr: nil})
defer encoder.EncodeToken(xml.EndElement{Name: xml.Name{Local: "testsuites"}})
for _, pkg := range pkgs {
failed := TestsByAction(pkg, parse.ActionFail)
skipped := TestsByAction(pkg, parse.ActionSkip)
passed := TestsByAction(pkg, parse.ActionPass)
skipped = withoutEmptyName(skipped)
all := []*parse.Test{}
all = append(all, failed...)
all = append(all, skipped...)
all = append(all, passed...)
if !pkg.HasPanic && (pkg.NoTests || len(all) == 0) {
continue
}
func() {
encoder.EncodeToken(xml.StartElement{
Name: xml.Name{Local: "testsuite"},
Attr: []xml.Attr{
{xml.Name{Local: "name"}, pkg.Summary.Package},
{xml.Name{Local: "time"}, fmt.Sprintf("%.2f", pkg.Summary.Elapsed)},
{xml.Name{Local: "tests"}, strconv.Itoa(len(all))},
{xml.Name{Local: "failures"}, strconv.Itoa(len(failed))},
{xml.Name{Local: "skips"}, strconv.Itoa(len(skipped))},
},
})
defer encoder.EncodeToken(xml.EndElement{Name: xml.Name{Local: "testsuite"}})
if pkg.HasPanic {
encoder.EncodeToken(xml.StartElement{
Name: xml.Name{Local: "testcase"},
Attr: []xml.Attr{
{xml.Name{Local: "classname"}, pkg.Summary.Package},
{xml.Name{Local: "name"}, "Panic"},
},
})
encoder.EncodeToken(xml.StartElement{Name: xml.Name{Local: "failure"}, Attr: nil})
encoder.EncodeToken(xml.CharData(eventOutput(pkg.PanicEvents)))
encoder.EncodeToken(xml.EndElement{Name: xml.Name{Local: "failure"}})
encoder.EncodeToken(xml.EndElement{Name: xml.Name{Local: "testcase"}})
}
for _, t := range all {
t.SortEvents()
func() {
encoder.EncodeToken(xml.StartElement{
Name: xml.Name{Local: "testcase"},
Attr: []xml.Attr{
{xml.Name{Local: "classname"}, t.Package},
{xml.Name{Local: "name"}, t.Name},
{xml.Name{Local: "time"}, fmt.Sprintf("%.2f", t.Elapsed())},
},
})
defer encoder.EncodeToken(xml.EndElement{Name: xml.Name{Local: "testcase"}})
encoder.EncodeToken(xml.StartElement{xml.Name{Local: "system-out"}, nil})
encoder.EncodeToken(xml.CharData(eventOutput(t.Events)))
encoder.EncodeToken(xml.EndElement{xml.Name{Local: "system-out"}})
switch TestStatus(t) {
case parse.ActionSkip:
encoder.EncodeToken(xml.StartElement{
Name: xml.Name{Local: "skipped"},
Attr: []xml.Attr{
{xml.Name{Local: "message"}, t.Stack()},
},
})
encoder.EncodeToken(xml.EndElement{Name: xml.Name{Local: "skipped"}})
case parse.ActionFail:
encoder.EncodeToken(xml.StartElement{Name: xml.Name{Local: "failure"}, Attr: nil})
encoder.EncodeToken(xml.CharData(t.Stack()))
encoder.EncodeToken(xml.EndElement{Name: xml.Name{Local: "failure"}})
}
}()
}
}()
}
}
func eventOutput(events parse.Events) string {
var out strings.Builder
for _, event := range events {
out.WriteString(event.Output)
}
return out.String()
}
func withoutEmptyName(tests []*parse.Test) []*parse.Test {
out := tests[:0]
for _, test := range tests {
if test.Name != "" {
out = append(out, test)
}
}
return out
}
// Code based on: https://github.com/mfridman/tparse/blob/master/parse/process.go#L27
func ProcessWithEcho(r io.Reader) (parse.Packages, error) {
pkgs := parse.Packages{}
var hasRace bool
var scan bool
var badLines int
scanner := bufio.NewScanner(r)
for scanner.Scan() {
// Scan up to 50 lines for a parseable event; if we get one, expect
// no errors to follow until EOF.
event, err := parse.NewEvent(scanner.Bytes())
if err != nil {
badLines++
if scan || badLines > 50 {
switch err.(type) {
case *json.SyntaxError:
return nil, parse.ErrNotParseable
default:
return nil, err
}
}
continue
}
scan = true
if line := strings.TrimRightFunc(event.Output, unicode.IsSpace); line != "" {
fmt.Fprintln(os.Stdout, line)
}
pkg, ok := pkgs[event.Package]
if !ok {
pkg = parse.NewPackage()
pkgs[event.Package] = pkg
}
if event.IsPanic() {
pkg.HasPanic = true
pkg.Summary.Action = parse.ActionFail
pkg.Summary.Package = event.Package
pkg.Summary.Test = event.Test
}
// Short circuit output when panic is detected.
if pkg.HasPanic {
pkg.PanicEvents = append(pkg.PanicEvents, event)
continue
}
if event.IsRace() {
hasRace = true
}
if event.IsCached() {
pkg.Cached = true
}
if event.NoTestFiles() {
pkg.NoTestFiles = true
// Manually mark [no test files] as "pass", because the go test tool reports the
// package Summary action as "skip".
pkg.Summary.Package = event.Package
pkg.Summary.Action = parse.ActionPass
}
if event.NoTestsWarn() {
// One or more tests within the package contain no tests.
pkg.NoTestSlice = append(pkg.NoTestSlice, event)
}
if event.NoTestsToRun() {
// Only pkgs marked as "pass" will contain a summary line appended with [no tests to run].
// This indicates one or more tests are marked as having no tests to run.
pkg.NoTests = true
pkg.Summary.Package = event.Package
pkg.Summary.Action = parse.ActionPass
}
if event.LastLine() {
pkg.Summary = event
continue
}
cover, ok := event.Cover()
if ok {
pkg.Cover = true
pkg.Coverage = cover
}
// special case for tooling checking
if event.Action == parse.ActionOutput && strings.HasPrefix(event.Output, "FAIL\t") {
event.Action = parse.ActionFail
}
if !Discard(event) {
pkg.AddEvent(event)
}
}
if err := scanner.Err(); err != nil {
return nil, fmt.Errorf("bufio scanner error: %v", err)
}
if !scan {
return nil, parse.ErrNotParseable
}
if hasRace {
return pkgs, parse.ErrRaceDetected
}
return pkgs, nil
}
func Discard(e *parse.Event) bool {
for i := range updates {
if strings.HasPrefix(e.Output, updates[i]) {
return true
}
}
return false
}
var (
updates = []string{
"=== RUN ",
"=== PAUSE ",
"=== CONT ",
}
)
// TestStatus reports the outcome of the test represented as a single Action: pass, fail or skip.
//
// Custom status to check packages properly.
func TestStatus(t *parse.Test) parse.Action {
// sort by time and scan for an action in reverse order.
// The first action we come across (in reverse order) is
// the outcome of the test, which will be one of pass|fail|skip.
t.SortEvents()
for i := len(t.Events) - 1; i >= 0; i-- {
switch t.Events[i].Action {
case parse.ActionPass:
return parse.ActionPass
case parse.ActionSkip:
return parse.ActionSkip
case parse.ActionFail:
return parse.ActionFail
}
}
if t.Name == "" {
return parse.ActionPass
}
return parse.ActionFail
}
// TestsByAction returns all tests that identify as one of the following
// actions: pass, skip or fail.
//
// An empty slice is returned if there are no tests.
func TestsByAction(p *parse.Package, action parse.Action) []*parse.Test {
tests := []*parse.Test{}
for _, t := range p.Tests {
if TestStatus(t) == action {
tests = append(tests, t)
}
}
return tests
}