Add gocyclo to the source build, with a cyclomatic complexity tolerance of 15 and below

master
Harshavardhana 10 years ago
parent c366fdd2d8
commit 58082cd8dc
  1. 7
      Makefile
  2. 85
      buildscripts/verifier.go
  3. 48
      pkg/api/range.go
  4. 59
      pkg/drivers/bucket_policy.go
  5. 48
      pkg/drivers/file/file_filter.go
  6. 44
      pkg/drivers/memory/memory.go
  7. 1
      pkg/storage/donut/erasure.go

@ -14,8 +14,9 @@ getdeps: checkdeps checkgopath
@go get github.com/tools/godep && echo "Installed godep" @go get github.com/tools/godep && echo "Installed godep"
@go get github.com/golang/lint/golint && echo "Installed golint" @go get github.com/golang/lint/golint && echo "Installed golint"
@go get golang.org/x/tools/cmd/vet && echo "Installed vet" @go get golang.org/x/tools/cmd/vet && echo "Installed vet"
@go get github.com/fzipp/gocyclo && echo "Installed gocyclo"
verifiers: getdeps vet fmt lint verifiers: getdeps vet fmt lint cyclo
vet: vet:
@echo "Running $@" @echo "Running $@"
@ -28,6 +29,10 @@ lint:
@echo "Running $@" @echo "Running $@"
@test -z "$$(golint ./... | grep -v Godeps/_workspace/src/ | tee /dev/stderr)" @test -z "$$(golint ./... | grep -v Godeps/_workspace/src/ | tee /dev/stderr)"
cyclo:
@echo "Running $@"
@test -z "$$(gocyclo -over 15 . | grep -v Godeps/_workspace/src/ | tee /dev/stderr)"
build-all: verifiers build-all: verifiers
@echo "Building Libraries" @echo "Building Libraries"
@godep go generate ./... @godep go generate ./...

@ -128,6 +128,50 @@ func doDir(name string) {
} }
} }
// doDecl indexes a single top-level declaration into the package
// accumulator p. GenDecls contribute their value and type names to
// p.decl; exported, receiver-less FuncDecls with no associated comment
// group in cmap are recorded in p.missingcomments. It returns true when
// the caller should skip this decl entirely ('main' or unexported
// functions), false otherwise.
func doDecl(p *goPackage, decl interface{}, cmap ast.CommentMap) bool {
	switch n := decl.(type) {
	case *ast.GenDecl:
		// var, const, types
		for _, spec := range n.Specs {
			switch s := spec.(type) {
			case *ast.ValueSpec:
				// constants and variables.
				for _, name := range s.Names {
					p.decl[name.Name] = n
			}
			case *ast.TypeSpec:
				// type definitions.
				p.decl[s.Name.Name] = n
			}
		}
	case *ast.FuncDecl:
		// if function is 'main', never check
		if n.Name.Name == "main" {
			return true
		}
		// Do not be strict on non-exported functions
		if !ast.IsExported(n.Name.Name) {
			return true
		}
		// Be strict for global functions: an exported function with no
		// comment group attached in the comment map is undocumented.
		if _, ok := cmap[n]; !ok {
			p.missingcomments[n.Name.Name] = n
		}
		// function declarations
		// TODO(remy): do methods
		if n.Recv == nil {
			p.decl[n.Name.Name] = n
		}
	}
	return false
}
func doPackage(fs *token.FileSet, pkg *ast.Package) { func doPackage(fs *token.FileSet, pkg *ast.Package) {
p := &goPackage{ p := &goPackage{
p: pkg, p: pkg,
@ -139,45 +183,8 @@ func doPackage(fs *token.FileSet, pkg *ast.Package) {
for _, file := range pkg.Files { for _, file := range pkg.Files {
cmap := ast.NewCommentMap(fs, file, file.Comments) cmap := ast.NewCommentMap(fs, file, file.Comments)
for _, decl := range file.Decls { for _, decl := range file.Decls {
switch n := decl.(type) { if doDecl(p, decl, cmap) {
case *ast.GenDecl: continue
// var, const, types
for _, spec := range n.Specs {
switch s := spec.(type) {
case *ast.ValueSpec:
// constants and variables.
for _, name := range s.Names {
p.decl[name.Name] = n
}
case *ast.TypeSpec:
// type definitions.
p.decl[s.Name.Name] = n
}
}
case *ast.FuncDecl:
// if function is 'main', never check
if n.Name.Name == "main" {
continue
}
// Do not be strict on non-exported functions
if !ast.IsExported(n.Name.Name) {
continue
}
// Do not be strict for field list functions
// if n.Recv != nil {
// continue
//}
// Be strict for global functions
_, ok := cmap[n]
if ok == false {
p.missingcomments[n.Name.Name] = n
}
// function declarations
// TODO(remy): do methods
if n.Recv == nil {
p.decl[n.Name.Name] = n
}
} }
} }
} }

@ -56,28 +56,7 @@ func newRange(req *http.Request, size int64) (*httpRange, error) {
return r, nil return r, nil
} }
// parseRange parses a Range header string as per RFC 2616. func (r *httpRange) parse(ra string) error {
func (r *httpRange) parseRange(s string) error {
if s == "" {
return errors.New("header not present")
}
if !strings.HasPrefix(s, b) {
return errors.New("invalid range")
}
ras := strings.Split(s[len(b):], ",")
if len(ras) == 0 {
return errors.New("invalid request")
}
// Just pick the first one and ignore the rest, we only support one range per object
if len(ras) > 1 {
return errors.New("multiple ranges specified")
}
ra := strings.TrimSpace(ras[0])
if ra == "" {
return errors.New("invalid range")
}
i := strings.Index(ra, "-") i := strings.Index(ra, "-")
if i < 0 { if i < 0 {
return errors.New("invalid range") return errors.New("invalid range")
@ -117,3 +96,28 @@ func (r *httpRange) parseRange(s string) error {
} }
return nil return nil
} }
// parseRange parses a Range header string as per RFC 2616. It rejects
// empty, malformed, and multi-range headers, then delegates the single
// byte-range spec to parse.
func (r *httpRange) parseRange(s string) error {
	if s == "" {
		return errors.New("header not present")
	}
	if !strings.HasPrefix(s, b) {
		return errors.New("invalid range")
	}
	ranges := strings.Split(s[len(b):], ",")
	switch {
	case len(ranges) == 0:
		return errors.New("invalid request")
	case len(ranges) > 1:
		// Only one range per object is supported; reject the rest.
		return errors.New("multiple ranges specified")
	}
	spec := strings.TrimSpace(ranges[0])
	if spec == "" {
		return errors.New("invalid range")
	}
	return r.parse(spec)
}

@ -137,6 +137,37 @@ func isValidPrincipal(principal string) bool {
return ok return ok
} }
// parseStatement reports whether a single policy statement carries all
// required fields (Sid, Effect, Principal, Action, Resource) with valid
// values. Validation short-circuits on the first failing check.
func parseStatement(statement Statement) bool {
	return len(statement.Sid) != 0 &&
		len(statement.Effect) != 0 &&
		isValidEffect(statement.Effect) &&
		len(statement.Principal.AWS) != 0 &&
		isValidPrincipal(statement.Principal.AWS) &&
		len(statement.Action) != 0 &&
		(isValidAction(statement.Action) || isValidActionS3(statement.Action)) &&
		len(statement.Resource) != 0 &&
		isValidResource(statement.Resource)
}
// Parsepolicy - validate request body is proper JSON and in accordance with policy standards // Parsepolicy - validate request body is proper JSON and in accordance with policy standards
func Parsepolicy(data io.Reader) (BucketPolicy, bool) { func Parsepolicy(data io.Reader) (BucketPolicy, bool) {
var policy BucketPolicy var policy BucketPolicy
@ -157,33 +188,7 @@ func Parsepolicy(data io.Reader) (BucketPolicy, bool) {
} }
for _, statement := range policy.Statement { for _, statement := range policy.Statement {
if len(statement.Sid) == 0 { if !parseStatement(statement) {
goto error
}
if len(statement.Effect) == 0 {
goto error
}
if !isValidEffect(statement.Effect) {
goto error
}
if len(statement.Principal.AWS) == 0 {
goto error
}
if !isValidPrincipal(statement.Principal.AWS) {
goto error
}
if len(statement.Action) == 0 {
goto error
}
if !isValidAction(statement.Action) && !isValidActionS3(statement.Action) {
goto error
}
if len(statement.Resource) == 0 {
goto error
}
if !isValidResource(statement.Resource) {
goto error goto error
} }
} }

@ -23,6 +23,32 @@ import (
"github.com/minio-io/minio/pkg/drivers" "github.com/minio-io/minio/pkg/drivers"
) )
// filterDelimiterPrefix classifies one directory entry against the
// request's prefix/delimiter. When the entry matches the prefix exactly
// or its delimited form equals the file name, its object metadata is
// fetched; otherwise a non-empty delimited name is folded into
// resources.CommonPrefixes. Returns the (possibly zero) metadata, the
// updated resources, and any metadata-lookup error wrapped via EmbedError.
func (file *fileDriver) filterDelimiterPrefix(bucket, name, fname, delimitedName string, resources drivers.BucketResourcesMetadata) (drivers.ObjectMetadata, drivers.BucketResourcesMetadata, error) {
	var metadata drivers.ObjectMetadata
	switch {
	case name == resources.Prefix, delimitedName == fname:
		// Use resources.Prefix to filter out delimited files
		var err error
		metadata, err = file.GetObjectMetadata(bucket, name, resources.Prefix)
		if err != nil {
			return drivers.ObjectMetadata{}, resources, drivers.EmbedError(bucket, "", err)
		}
	case delimitedName != "":
		if delimitedName == resources.Delimiter {
			resources.CommonPrefixes = appendUniq(resources.CommonPrefixes, resources.Prefix+delimitedName)
		} else {
			resources.CommonPrefixes = appendUniq(resources.CommonPrefixes, delimitedName)
		}
	}
	return metadata, resources, nil
}
// TODO handle resources.Marker // TODO handle resources.Marker
func (file *fileDriver) filter(bucket, name string, f os.FileInfo, resources drivers.BucketResourcesMetadata) (drivers.ObjectMetadata, drivers.BucketResourcesMetadata, error) { func (file *fileDriver) filter(bucket, name string, f os.FileInfo, resources drivers.BucketResourcesMetadata) (drivers.ObjectMetadata, drivers.BucketResourcesMetadata, error) {
var err error var err error
@ -34,25 +60,9 @@ func (file *fileDriver) filter(bucket, name string, f os.FileInfo, resources dri
if strings.HasPrefix(name, resources.Prefix) { if strings.HasPrefix(name, resources.Prefix) {
trimmedName := strings.TrimPrefix(name, resources.Prefix) trimmedName := strings.TrimPrefix(name, resources.Prefix)
delimitedName := delimiter(trimmedName, resources.Delimiter) delimitedName := delimiter(trimmedName, resources.Delimiter)
switch true { metadata, resources, err = file.filterDelimiterPrefix(bucket, name, f.Name(), delimitedName, resources)
case name == resources.Prefix: if err != nil {
// Use resources.Prefix to filter out delimited files return drivers.ObjectMetadata{}, resources, err
metadata, err = file.GetObjectMetadata(bucket, name, resources.Prefix)
if err != nil {
return drivers.ObjectMetadata{}, resources, drivers.EmbedError(bucket, "", err)
}
case delimitedName == f.Name():
// Use resources.Prefix to filter out delimited files
metadata, err = file.GetObjectMetadata(bucket, name, resources.Prefix)
if err != nil {
return drivers.ObjectMetadata{}, resources, drivers.EmbedError(bucket, "", err)
}
case delimitedName != "":
if delimitedName == resources.Delimiter {
resources.CommonPrefixes = appendUniq(resources.CommonPrefixes, resources.Prefix+delimitedName)
} else {
resources.CommonPrefixes = appendUniq(resources.CommonPrefixes, delimitedName)
}
} }
} }
// Delimiter present and Prefix is absent // Delimiter present and Prefix is absent

@ -173,6 +173,23 @@ func appendUniq(slice []string, i string) []string {
return append(slice, i) return append(slice, i)
} }
// filterDelimiterPrefix updates the result accumulator for one object
// key when both prefix and delimiter are set: exact-prefix and
// prefix+delimitedName matches are appended (uniquely) to keys, while
// any other non-empty delimited name is folded into
// resources.CommonPrefixes. Returns the updated resources and keys.
func (memory memoryDriver) filterDelimiterPrefix(keys []string, key, delimitedName string, resources drivers.BucketResourcesMetadata) (drivers.BucketResourcesMetadata, []string) {
	switch {
	case key == resources.Prefix:
		keys = appendUniq(keys, key)
	// DelimitedName - requires resources.Prefix as it was trimmed off earlier in the flow
	case key == resources.Prefix+delimitedName:
		keys = appendUniq(keys, key)
	case delimitedName != "":
		if delimitedName == resources.Delimiter {
			resources.CommonPrefixes = appendUniq(resources.CommonPrefixes, resources.Prefix+delimitedName)
		} else {
			resources.CommonPrefixes = appendUniq(resources.CommonPrefixes, delimitedName)
		}
	}
	return resources, keys
}
// ListObjects - list objects from memory // ListObjects - list objects from memory
func (memory memoryDriver) ListObjects(bucket string, resources drivers.BucketResourcesMetadata) ([]drivers.ObjectMetadata, drivers.BucketResourcesMetadata, error) { func (memory memoryDriver) ListObjects(bucket string, resources drivers.BucketResourcesMetadata) ([]drivers.ObjectMetadata, drivers.BucketResourcesMetadata, error) {
if _, ok := memory.bucketdata[bucket]; ok == false { if _, ok := memory.bucketdata[bucket]; ok == false {
@ -183,7 +200,7 @@ func (memory memoryDriver) ListObjects(bucket string, resources drivers.BucketRe
for key := range memory.objectdata { for key := range memory.objectdata {
switch true { switch true {
// Prefix absent, delimit object key based on delimiter // Prefix absent, delimit object key based on delimiter
case resources.Delimiter != "" && resources.Prefix == "": case resources.IsDelimiterSet():
delimitedName := delimiter(key, resources.Delimiter) delimitedName := delimiter(key, resources.Delimiter)
switch true { switch true {
case delimitedName == "" || delimitedName == key: case delimitedName == "" || delimitedName == key:
@ -192,31 +209,20 @@ func (memory memoryDriver) ListObjects(bucket string, resources drivers.BucketRe
resources.CommonPrefixes = appendUniq(resources.CommonPrefixes, delimitedName) resources.CommonPrefixes = appendUniq(resources.CommonPrefixes, delimitedName)
} }
// Prefix present, delimit object key with prefix key based on delimiter // Prefix present, delimit object key with prefix key based on delimiter
case resources.Delimiter != "" && resources.Prefix != "" && strings.HasPrefix(key, resources.Prefix): case resources.IsDelimiterPrefixSet():
trimmedName := strings.TrimPrefix(key, resources.Prefix) if strings.HasPrefix(key, resources.Prefix) {
delimitedName := delimiter(trimmedName, resources.Delimiter) trimmedName := strings.TrimPrefix(key, resources.Prefix)
switch true { delimitedName := delimiter(trimmedName, resources.Delimiter)
case key == resources.Prefix: resources, keys = memory.filterDelimiterPrefix(keys, key, delimitedName, resources)
keys = appendUniq(keys, key)
// DelimitedName - requires resources.Prefix as it was trimmed off earlier in the flow
case key == resources.Prefix+delimitedName:
keys = appendUniq(keys, key)
case delimitedName != "":
if delimitedName == resources.Delimiter {
resources.CommonPrefixes = appendUniq(resources.CommonPrefixes, resources.Prefix+delimitedName)
} else {
resources.CommonPrefixes = appendUniq(resources.CommonPrefixes, delimitedName)
}
} }
// Prefix present, nothing to delimit // Prefix present, nothing to delimit
case resources.Delimiter == "" && resources.Prefix != "" && strings.HasPrefix(key, resources.Prefix): case resources.IsPrefixSet():
keys = appendUniq(keys, key) keys = appendUniq(keys, key)
// Prefix and delimiter absent // Prefix and delimiter absent
case resources.Prefix == "" && resources.Delimiter == "": case resources.IsDefault():
keys = appendUniq(keys, key) keys = appendUniq(keys, key)
} }
} }
sort.Strings(keys) sort.Strings(keys)
for _, key := range keys { for _, key := range keys {
if len(results) == resources.Maxkeys { if len(results) == resources.Maxkeys {

@ -74,7 +74,6 @@ func erasureReader(readers []io.ReadCloser, donutMetadata map[string]string, wri
} }
curChunkSize := erasure.GetEncodedBlockLen(curBlockSize, uint8(k)) curChunkSize := erasure.GetEncodedBlockLen(curBlockSize, uint8(k))
encodedBytes := make([][]byte, 16) encodedBytes := make([][]byte, 16)
for i, reader := range readers { for i, reader := range readers {
defer reader.Close() defer reader.Close()

Loading…
Cancel
Save