Merge pull request #450 from harshavardhana/pr_out_update_to_new_changes_at_minio_io_objectdriver_and_minio_io_donut
commit
81e2dafe36
@ -0,0 +1,223 @@ |
||||
package donut |
||||
|
||||
import ( |
||||
"bytes" |
||||
"io" |
||||
"io/ioutil" |
||||
"os" |
||||
"path" |
||||
"strconv" |
||||
"testing" |
||||
"time" |
||||
|
||||
. "github.com/minio-io/check" |
||||
) |
||||
|
||||
// Test hooks gocheck into the standard "go test" runner.
func Test(t *testing.T) { TestingT(t) }

// MySuite is the gocheck suite that holds all donut tests in this file.
type MySuite struct{}

// Register the suite with gocheck at init time.
var _ = Suite(&MySuite{})
||||
|
||||
// create a dummy TestNodeDiskMap
|
||||
func createTestNodeDiskMap(p string) map[string][]string { |
||||
nodes := make(map[string][]string) |
||||
nodes["localhost"] = make([]string, 16) |
||||
for i := 0; i < len(nodes["localhost"]); i++ { |
||||
diskPath := path.Join(p, strconv.Itoa(i)) |
||||
if _, err := os.Stat(diskPath); err != nil { |
||||
if os.IsNotExist(err) { |
||||
os.MkdirAll(diskPath, 0700) |
||||
} |
||||
} |
||||
nodes["localhost"][i] = diskPath |
||||
} |
||||
return nodes |
||||
} |
||||
|
||||
func (s *MySuite) TestEmptyBucket(c *C) { |
||||
root, err := ioutil.TempDir(os.TempDir(), "donut-") |
||||
c.Assert(err, IsNil) |
||||
defer os.RemoveAll(root) |
||||
donut, err := NewDonut("test", createTestNodeDiskMap(root)) |
||||
c.Assert(err, IsNil) |
||||
|
||||
// check buckets are empty
|
||||
buckets, err := donut.ListBuckets() |
||||
c.Assert(err, IsNil) |
||||
c.Assert(buckets, IsNil) |
||||
} |
||||
|
||||
// TestBucketWithoutNameFails checks that empty and all-whitespace bucket
// names are rejected by MakeBucket.
func (s *MySuite) TestBucketWithoutNameFails(c *C) {
	root, err := ioutil.TempDir(os.TempDir(), "donut-")
	c.Assert(err, IsNil)
	defer os.RemoveAll(root)
	donut, err := NewDonut("test", createTestNodeDiskMap(root))
	c.Assert(err, IsNil)
	// fail to create new bucket without a name
	err = donut.MakeBucket("")
	c.Assert(err, Not(IsNil))

	// a single space is whitespace-only and must also be rejected
	err = donut.MakeBucket(" ")
	c.Assert(err, Not(IsNil))
}
||||
|
||||
// TestMakeBucketAndList creates one bucket and confirms it shows up in
// ListBuckets.
func (s *MySuite) TestMakeBucketAndList(c *C) {
	root, err := ioutil.TempDir(os.TempDir(), "donut-")
	c.Assert(err, IsNil)
	defer os.RemoveAll(root)
	donut, err := NewDonut("test", createTestNodeDiskMap(root))
	c.Assert(err, IsNil)
	// create bucket
	err = donut.MakeBucket("foo")
	c.Assert(err, IsNil)

	// check bucket exists
	buckets, err := donut.ListBuckets()
	c.Assert(err, IsNil)
	c.Assert(buckets, DeepEquals, []string{"foo"})
}
||||
|
||||
// TestMakeBucketWithSameNameFails checks that bucket names are unique:
// creating "foo" a second time must fail.
func (s *MySuite) TestMakeBucketWithSameNameFails(c *C) {
	root, err := ioutil.TempDir(os.TempDir(), "donut-")
	c.Assert(err, IsNil)
	defer os.RemoveAll(root)
	donut, err := NewDonut("test", createTestNodeDiskMap(root))
	c.Assert(err, IsNil)
	err = donut.MakeBucket("foo")
	c.Assert(err, IsNil)

	err = donut.MakeBucket("foo")
	c.Assert(err, Not(IsNil))
}
||||
|
||||
// TestCreateMultipleBucketsAndList creates several buckets and checks
// that ListBuckets always returns the full set in sorted order.
func (s *MySuite) TestCreateMultipleBucketsAndList(c *C) {
	root, err := ioutil.TempDir(os.TempDir(), "donut-")
	c.Assert(err, IsNil)
	defer os.RemoveAll(root)
	donut, err := NewDonut("test", createTestNodeDiskMap(root))
	c.Assert(err, IsNil)
	// add a second bucket
	err = donut.MakeBucket("foo")
	c.Assert(err, IsNil)

	err = donut.MakeBucket("bar")
	c.Assert(err, IsNil)

	// listing must be lexicographically sorted
	buckets, err := donut.ListBuckets()
	c.Assert(err, IsNil)
	c.Assert(buckets, DeepEquals, []string{"bar", "foo"})

	err = donut.MakeBucket("foobar")
	c.Assert(err, IsNil)

	buckets, err = donut.ListBuckets()
	c.Assert(err, IsNil)
	c.Assert(buckets, DeepEquals, []string{"bar", "foo", "foobar"})
}
||||
|
||||
func (s *MySuite) TestNewObjectFailsWithoutBucket(c *C) { |
||||
root, err := ioutil.TempDir(os.TempDir(), "donut-") |
||||
c.Assert(err, IsNil) |
||||
defer os.RemoveAll(root) |
||||
donut, err := NewDonut("test", createTestNodeDiskMap(root)) |
||||
c.Assert(err, IsNil) |
||||
err = donut.PutObject("foo", "obj", nil, nil) |
||||
c.Assert(err, Not(IsNil)) |
||||
} |
||||
|
||||
// TestNewObjectFailsWithEmptyName checks that empty and all-whitespace
// object names are rejected by PutObject.
func (s *MySuite) TestNewObjectFailsWithEmptyName(c *C) {
	root, err := ioutil.TempDir(os.TempDir(), "donut-")
	c.Assert(err, IsNil)
	defer os.RemoveAll(root)
	donut, err := NewDonut("test", createTestNodeDiskMap(root))
	c.Assert(err, IsNil)

	err = donut.PutObject("foo", "", nil, nil)
	c.Assert(err, Not(IsNil))

	// whitespace-only names must also be rejected
	err = donut.PutObject("foo", " ", nil, nil)
	c.Assert(err, Not(IsNil))
}
||||
|
||||
// TestNewObjectCanBeWritten round-trips one object: put, get, byte-compare
// the content, then verify the stored metadata (md5, size, created).
func (s *MySuite) TestNewObjectCanBeWritten(c *C) {
	root, err := ioutil.TempDir(os.TempDir(), "donut-")
	c.Assert(err, IsNil)
	defer os.RemoveAll(root)
	donut, err := NewDonut("test", createTestNodeDiskMap(root))
	c.Assert(err, IsNil)

	err = donut.MakeBucket("foo")
	c.Assert(err, IsNil)

	metadata := make(map[string]string)
	metadata["contentType"] = "application/octet-stream"

	data := "Hello World"
	reader := ioutil.NopCloser(bytes.NewReader([]byte(data)))

	err = donut.PutObject("foo", "obj", reader, metadata)
	c.Assert(err, IsNil)

	reader, size, err := donut.GetObject("foo", "obj")
	c.Assert(err, IsNil)
	c.Assert(size, Equals, int64(len(data)))

	// drain the returned reader and compare bytes exactly
	var actualData bytes.Buffer
	_, err = io.Copy(&actualData, reader)
	c.Assert(err, IsNil)
	c.Assert(actualData.Bytes(), DeepEquals, []byte(data))

	actualMetadata, err := donut.GetObjectMetadata("foo", "obj")
	c.Assert(err, IsNil)
	// precomputed md5("Hello World"); len("Hello World") == 11
	c.Assert("b10a8db164e0754105b7a99be72e3fe5", Equals, actualMetadata["md5"])
	c.Assert("11", Equals, actualMetadata["size"])
	// "created" must be a parseable RFC3339Nano timestamp
	_, err = time.Parse(time.RFC3339Nano, actualMetadata["created"])
	c.Assert(err, IsNil)
}
||||
|
||||
// TestMultipleNewObjects writes two objects, reads both back, and then
// exercises ListObjects with a prefix, including truncation at maxkeys=1.
func (s *MySuite) TestMultipleNewObjects(c *C) {
	root, err := ioutil.TempDir(os.TempDir(), "donut-")
	c.Assert(err, IsNil)
	defer os.RemoveAll(root)
	donut, err := NewDonut("test", createTestNodeDiskMap(root))
	c.Assert(err, IsNil)

	c.Assert(donut.MakeBucket("foo"), IsNil)

	one := ioutil.NopCloser(bytes.NewReader([]byte("one")))
	err = donut.PutObject("foo", "obj1", one, nil)
	c.Assert(err, IsNil)

	two := ioutil.NopCloser(bytes.NewReader([]byte("two")))
	err = donut.PutObject("foo", "obj2", two, nil)
	c.Assert(err, IsNil)

	obj1, size, err := donut.GetObject("foo", "obj1")
	c.Assert(err, IsNil)
	c.Assert(size, Equals, int64(len([]byte("one"))))

	// CopyN with the reported size verifies size and content agree
	var readerBuffer1 bytes.Buffer
	_, err = io.CopyN(&readerBuffer1, obj1, size)
	c.Assert(err, IsNil)
	c.Assert(readerBuffer1.Bytes(), DeepEquals, []byte("one"))

	obj2, size, err := donut.GetObject("foo", "obj2")
	c.Assert(err, IsNil)
	c.Assert(size, Equals, int64(len([]byte("two"))))

	var readerBuffer2 bytes.Buffer
	_, err = io.CopyN(&readerBuffer2, obj2, size)
	c.Assert(err, IsNil)
	c.Assert(readerBuffer2.Bytes(), DeepEquals, []byte("two"))
	// test list objects
	// NOTE(review): expecting the maxkeys=1 page to be exactly {"obj1"}
	// requires ListObjects to sort keys before truncating — verify, since
	// Go map iteration order is randomized.
	listObjects, _, isTruncated, err := donut.ListObjects("foo", "o", "", "", 1)
	c.Assert(err, IsNil)
	c.Assert(isTruncated, Equals, true)
	c.Assert(listObjects, DeepEquals, []string{"obj1"})

	listObjects, _, isTruncated, err = donut.ListObjects("foo", "o", "", "", 10)
	c.Assert(err, IsNil)
	c.Assert(isTruncated, Equals, false)
	c.Assert(listObjects, DeepEquals, []string{"obj1", "obj2"})
}
@ -0,0 +1,67 @@ |
||||
package donut |
||||
|
||||
import ( |
||||
"encoding/json" |
||||
"errors" |
||||
"path" |
||||
) |
||||
|
||||
// Heal is a stub for future self-healing of degraded donut data.
func (d donut) Heal() error {
	return errors.New("Not Implemented")
}
||||
|
||||
func (d donut) Info() (nodeDiskMap map[string][]string, err error) { |
||||
nodeDiskMap = make(map[string][]string) |
||||
for nodeName, node := range d.nodes { |
||||
disks, err := node.ListDisks() |
||||
if err != nil { |
||||
return nil, err |
||||
} |
||||
diskList := make([]string, len(disks)) |
||||
for diskName, disk := range disks { |
||||
diskList[disk.GetOrder()] = diskName |
||||
} |
||||
nodeDiskMap[nodeName] = diskList |
||||
} |
||||
return nodeDiskMap, nil |
||||
} |
||||
|
||||
// AttachNode registers node in the donut's node map, keyed by its node
// name. Nil nodes are rejected rather than stored.
func (d donut) AttachNode(node Node) error {
	if node == nil {
		return errors.New("invalid argument")
	}
	d.nodes[node.GetNodeName()] = node
	return nil
}
||||
func (d donut) DetachNode(node Node) error { |
||||
delete(d.nodes, node.GetNodeName()) |
||||
return nil |
||||
} |
||||
|
||||
func (d donut) SaveConfig() error { |
||||
nodeDiskMap := make(map[string][]string) |
||||
for hostname, node := range d.nodes { |
||||
disks, err := node.ListDisks() |
||||
if err != nil { |
||||
return err |
||||
} |
||||
for _, disk := range disks { |
||||
donutConfigPath := path.Join(d.name, donutConfig) |
||||
donutConfigWriter, err := disk.MakeFile(donutConfigPath) |
||||
defer donutConfigWriter.Close() |
||||
if err != nil { |
||||
return err |
||||
} |
||||
nodeDiskMap[hostname][disk.GetOrder()] = disk.GetPath() |
||||
jenc := json.NewEncoder(donutConfigWriter) |
||||
if err := jenc.Encode(nodeDiskMap); err != nil { |
||||
return err |
||||
} |
||||
} |
||||
} |
||||
return nil |
||||
} |
||||
|
||||
// LoadConfig is a stub; reading the saved donut config back is not yet
// implemented (see SaveConfig for the write side).
func (d donut) LoadConfig() error {
	return errors.New("Not Implemented")
}
@ -0,0 +1,162 @@ |
||||
package donut |
||||
|
||||
import ( |
||||
"errors" |
||||
"io" |
||||
"sort" |
||||
"strconv" |
||||
"strings" |
||||
|
||||
"github.com/minio-io/iodine" |
||||
) |
||||
|
||||
func (d donut) MakeBucket(bucket string) error { |
||||
if bucket == "" || strings.TrimSpace(bucket) == "" { |
||||
return errors.New("invalid argument") |
||||
} |
||||
return d.makeBucket(bucket) |
||||
} |
||||
|
||||
// GetBucketMetadata is a stub; per-bucket metadata is not yet supported.
func (d donut) GetBucketMetadata(bucket string) (map[string]string, error) {
	return nil, errors.New("Not implemented")
}
||||
|
||||
// SetBucketMetadata is a stub; per-bucket metadata is not yet supported.
func (d donut) SetBucketMetadata(bucket string, metadata map[string]string) error {
	return errors.New("Not implemented")
}
||||
|
||||
func (d donut) ListBuckets() (results []string, err error) { |
||||
err = d.getAllBuckets() |
||||
if err != nil { |
||||
return nil, iodine.New(err, nil) |
||||
} |
||||
for name := range d.buckets { |
||||
results = append(results, name) |
||||
} |
||||
sort.Strings(results) |
||||
return results, nil |
||||
} |
||||
|
||||
func (d donut) ListObjects(bucket, prefix, marker, delimiter string, maxkeys int) ([]string, []string, bool, error) { |
||||
// TODO: Marker is not yet handled please handle it
|
||||
errParams := map[string]string{ |
||||
"bucket": bucket, |
||||
"prefix": prefix, |
||||
"marker": marker, |
||||
"delimiter": delimiter, |
||||
"maxkeys": strconv.Itoa(maxkeys), |
||||
} |
||||
err := d.getAllBuckets() |
||||
if err != nil { |
||||
return nil, nil, false, iodine.New(err, errParams) |
||||
} |
||||
if _, ok := d.buckets[bucket]; !ok { |
||||
return nil, nil, false, iodine.New(errors.New("bucket does not exist"), errParams) |
||||
} |
||||
objectList, err := d.buckets[bucket].ListObjects() |
||||
if err != nil { |
||||
return nil, nil, false, iodine.New(err, errParams) |
||||
} |
||||
var donutObjects []string |
||||
for objectName := range objectList { |
||||
donutObjects = append(donutObjects, objectName) |
||||
} |
||||
if maxkeys <= 0 { |
||||
maxkeys = 1000 |
||||
} |
||||
if strings.TrimSpace(prefix) != "" { |
||||
donutObjects = filterPrefix(donutObjects, prefix) |
||||
donutObjects = removePrefix(donutObjects, prefix) |
||||
} |
||||
|
||||
var actualObjects []string |
||||
var commonPrefixes []string |
||||
var isTruncated bool |
||||
if strings.TrimSpace(delimiter) != "" { |
||||
actualObjects = filterDelimited(donutObjects, delimiter) |
||||
commonPrefixes = filterNotDelimited(donutObjects, delimiter) |
||||
commonPrefixes = extractDir(commonPrefixes, delimiter) |
||||
commonPrefixes = uniqueObjects(commonPrefixes) |
||||
} else { |
||||
actualObjects = donutObjects |
||||
} |
||||
var results []string |
||||
for _, objectName := range actualObjects { |
||||
if len(results) >= maxkeys { |
||||
isTruncated = true |
||||
break |
||||
} |
||||
results = append(results, prefix+objectName) |
||||
} |
||||
sort.Strings(results) |
||||
return results, commonPrefixes, isTruncated, nil |
||||
} |
||||
|
||||
func (d donut) PutObject(bucket, object string, reader io.ReadCloser, metadata map[string]string) error { |
||||
errParams := map[string]string{ |
||||
"bucket": bucket, |
||||
"object": object, |
||||
} |
||||
if bucket == "" || strings.TrimSpace(bucket) == "" { |
||||
return iodine.New(errors.New("invalid argument"), errParams) |
||||
} |
||||
if object == "" || strings.TrimSpace(object) == "" { |
||||
return iodine.New(errors.New("invalid argument"), errParams) |
||||
} |
||||
err := d.getAllBuckets() |
||||
if err != nil { |
||||
return iodine.New(err, errParams) |
||||
} |
||||
if _, ok := d.buckets[bucket]; !ok { |
||||
return iodine.New(errors.New("bucket does not exist"), nil) |
||||
} |
||||
err = d.buckets[bucket].PutObject(object, reader, metadata) |
||||
if err != nil { |
||||
return iodine.New(err, errParams) |
||||
} |
||||
return nil |
||||
} |
||||
|
||||
func (d donut) GetObject(bucket, object string) (reader io.ReadCloser, size int64, err error) { |
||||
errParams := map[string]string{ |
||||
"bucket": bucket, |
||||
"object": object, |
||||
} |
||||
if bucket == "" || strings.TrimSpace(bucket) == "" { |
||||
return nil, 0, iodine.New(errors.New("invalid argument"), errParams) |
||||
} |
||||
if object == "" || strings.TrimSpace(object) == "" { |
||||
return nil, 0, iodine.New(errors.New("invalid argument"), errParams) |
||||
} |
||||
err = d.getAllBuckets() |
||||
if err != nil { |
||||
return nil, 0, iodine.New(err, nil) |
||||
} |
||||
if _, ok := d.buckets[bucket]; !ok { |
||||
return nil, 0, iodine.New(errors.New("bucket does not exist"), errParams) |
||||
} |
||||
return d.buckets[bucket].GetObject(object) |
||||
} |
||||
|
||||
func (d donut) GetObjectMetadata(bucket, object string) (map[string]string, error) { |
||||
errParams := map[string]string{ |
||||
"bucket": bucket, |
||||
"object": object, |
||||
} |
||||
err := d.getAllBuckets() |
||||
if err != nil { |
||||
return nil, iodine.New(err, errParams) |
||||
} |
||||
if _, ok := d.buckets[bucket]; !ok { |
||||
return nil, iodine.New(errors.New("bucket does not exist"), errParams) |
||||
} |
||||
objectList, err := d.buckets[bucket].ListObjects() |
||||
if err != nil { |
||||
return nil, iodine.New(err, errParams) |
||||
} |
||||
objectStruct, ok := objectList[object] |
||||
if !ok { |
||||
return nil, iodine.New(errors.New("object does not exist"), errParams) |
||||
} |
||||
return objectStruct.GetObjectMetadata() |
||||
} |
@ -0,0 +1,129 @@ |
||||
package donut |
||||
|
||||
import ( |
||||
"errors" |
||||
"fmt" |
||||
"path" |
||||
"sort" |
||||
"strings" |
||||
) |
||||
|
||||
// filterPrefix returns the subset of object names that begin with prefix.
// A nil slice is returned when nothing matches.
func filterPrefix(objects []string, prefix string) []string {
	var matched []string
	for _, name := range objects {
		if !strings.HasPrefix(name, prefix) {
			continue
		}
		matched = append(matched, name)
	}
	return matched
}
||||
|
||||
// removePrefix strips prefix from every name; names lacking the prefix
// pass through unchanged (strings.TrimPrefix semantics).
func removePrefix(objects []string, prefix string) []string {
	var stripped []string
	for _, name := range objects {
		stripped = append(stripped, strings.TrimPrefix(name, prefix))
	}
	return stripped
}
||||
|
||||
// filterDelimited returns only the names containing no occurrence of
// delim, i.e. the "leaf" objects at the current listing level.
func filterDelimited(objects []string, delim string) []string {
	var leaves []string
	for _, name := range objects {
		if strings.Contains(name, delim) {
			continue
		}
		leaves = append(leaves, name)
	}
	return leaves
}
||||
|
||||
// filterNotDelimited returns only the names containing delim at least
// once, i.e. objects that live "below" a common prefix.
func filterNotDelimited(objects []string, delim string) []string {
	var nested []string
	for _, name := range objects {
		if !strings.Contains(name, delim) {
			continue
		}
		nested = append(nested, name)
	}
	return nested
}
||||
|
||||
// extractDir maps each delimited object name to its top-level "directory"
// component: everything up to and including the first delimiter.
//
// Fix: the previous version always appended a literal "/", which is wrong
// for any delimiter other than "/" — S3 common prefixes end with the
// delimiter that produced them. SplitN(…, 2) also avoids splitting past
// the first occurrence.
func extractDir(objects []string, delim string) []string {
	var results []string
	for _, object := range objects {
		parts := strings.SplitN(object, delim, 2)
		results = append(results, parts[0]+delim)
	}
	return results
}
||||
|
||||
// uniqueObjects returns the distinct names in objects, sorted.
func uniqueObjects(objects []string) []string {
	// map[string]struct{} is the idiomatic set: the old map[string]string
	// stored every key twice for no benefit.
	seen := make(map[string]struct{}, len(objects))
	for _, v := range objects {
		seen[v] = struct{}{}
	}
	var results []string
	for k := range seen {
		results = append(results, k)
	}
	sort.Strings(results)
	return results
}
||||
|
||||
// makeBucket creates the on-disk layout for a new bucket: a directory
// named "<bucket>$<nodeNumber>$<diskOrder>" under the donut root on every
// disk of every node, and registers the bucket in d.buckets.
// Returns an error if the bucket already exists.
func (d donut) makeBucket(bucketName string) error {
	err := d.getAllBuckets()
	if err != nil {
		return err
	}
	if _, ok := d.buckets[bucketName]; ok {
		return errors.New("bucket exists")
	}
	bucket, err := NewBucket(bucketName, d.name, d.nodes)
	if err != nil {
		return err
	}
	nodeNumber := 0
	d.buckets[bucketName] = bucket
	// NOTE(review): nodeNumber follows Go map iteration order, which is
	// randomized — confirm the bucket-slice names do not need a stable
	// node numbering across calls.
	for _, node := range d.nodes {
		disks, err := node.ListDisks()
		if err != nil {
			return err
		}
		for _, disk := range disks {
			// slice directory name encodes bucket, node index, disk order
			bucketSlice := fmt.Sprintf("%s$%d$%d", bucketName, nodeNumber, disk.GetOrder())
			err := disk.MakeDir(path.Join(d.name, bucketSlice))
			if err != nil {
				return err
			}
		}
		nodeNumber = nodeNumber + 1
	}
	return nil
}
||||
|
||||
// getAllBuckets scans every disk of every node for bucket-slice
// directories under the donut root and (re)populates d.buckets with a
// Bucket handle per distinct bucket name.
func (d donut) getAllBuckets() error {
	for _, node := range d.nodes {
		disks, err := node.ListDisks()
		if err != nil {
			return err
		}
		for _, disk := range disks {
			dirs, err := disk.ListDir(d.name)
			if err != nil {
				return err
			}
			for _, dir := range dirs {
				// slice directories are named "<bucket>$<node>$<order>"
				splitDir := strings.Split(dir.Name(), "$")
				if len(splitDir) < 3 {
					return errors.New("corrupted backend")
				}
				bucketName := splitDir[0]
				// we dont need this NewBucket once we cache these
				bucket, err := NewBucket(bucketName, d.name, d.nodes)
				if err != nil {
					return err
				}
				d.buckets[bucketName] = bucket
			}
		}
	}
	return nil
}
@ -0,0 +1,45 @@ |
||||
{ |
||||
"ImportPath": "github.com/minio-io/objectdriver", |
||||
"GoVersion": "go1.4", |
||||
"Packages": [ |
||||
"./..." |
||||
], |
||||
"Deps": [ |
||||
{ |
||||
"ImportPath": "github.com/minio-io/check", |
||||
"Rev": "bc4e66da8cd7ff58a4b9b84301f906352b8f2c94" |
||||
}, |
||||
{ |
||||
"ImportPath": "github.com/minio-io/donut", |
||||
"Rev": "1df31a9834eb6acef9ff0fb7ee3e597776faf966" |
||||
}, |
||||
{ |
||||
"ImportPath": "github.com/minio-io/erasure", |
||||
"Rev": "8a72b14991a6835b4d30403e7cb201f373b7cb3a" |
||||
}, |
||||
{ |
||||
"ImportPath": "github.com/minio-io/iodine", |
||||
"Rev": "55cc4d4256c68fbd6f0775f1a25e37e6a2f6457e" |
||||
}, |
||||
{ |
||||
"ImportPath": "github.com/minio-io/minio/pkg/utils/log", |
||||
"Rev": "81e4a3332cd3e1b44eb21fdbb7171ba715625ab7" |
||||
}, |
||||
{ |
||||
"ImportPath": "github.com/minio-io/minio/pkg/utils/split", |
||||
"Rev": "81e4a3332cd3e1b44eb21fdbb7171ba715625ab7" |
||||
}, |
||||
{ |
||||
"ImportPath": "github.com/stretchr/objx", |
||||
"Rev": "cbeaeb16a013161a98496fad62933b1d21786672" |
||||
}, |
||||
{ |
||||
"ImportPath": "github.com/stretchr/testify/assert", |
||||
"Rev": "e4ec8152c15fc46bd5056ce65997a07c7d415325" |
||||
}, |
||||
{ |
||||
"ImportPath": "github.com/stretchr/testify/mock", |
||||
"Rev": "e4ec8152c15fc46bd5056ce65997a07c7d415325" |
||||
} |
||||
] |
||||
} |
@ -0,0 +1,5 @@ |
||||
This directory tree is generated automatically by godep. |
||||
|
||||
Please do not edit. |
||||
|
||||
See https://github.com/tools/godep for more information. |
@ -0,0 +1,66 @@ |
||||
MINIOPATH=$(GOPATH)/src/github.com/minio-io/objectdriver
|
||||
|
||||
all: getdeps install |
||||
|
||||
checkdeps: |
||||
@echo "Checking deps:"
|
||||
@(env bash $(PWD)/buildscripts/checkdeps.sh)
|
||||
|
||||
checkgopath: |
||||
@echo "Checking if project is at ${MINIOPATH}"
|
||||
@if [ ! -d ${MINIOPATH} ]; then echo "Project not found in $GOPATH, please follow instructions provided at https://github.com/Minio-io/minio/blob/master/CONTRIBUTING.md#setup-your-minio-github-repository" && exit 1; fi
|
||||
|
||||
getdeps: checkdeps checkgopath |
||||
@go get github.com/minio-io/godep && echo "Installed godep:"
|
||||
@go get github.com/golang/lint/golint && echo "Installed golint:"
|
||||
@go get golang.org/x/tools/cmd/vet && echo "Installed vet:"
|
||||
@go get github.com/fzipp/gocyclo && echo "Installed gocyclo:"
|
||||
|
||||
verifiers: getdeps vet fmt lint cyclo |
||||
|
||||
vet: |
||||
@echo "Running $@:"
|
||||
@go vet ./...
|
||||
fmt: |
||||
@echo "Running $@:"
|
||||
@test -z "$$(gofmt -s -l . | grep -v Godeps/_workspace/src/ | tee /dev/stderr)" || \
|
||||
echo "+ please format Go code with 'gofmt -s'"
|
||||
lint: |
||||
@echo "Running $@:"
|
||||
@test -z "$$(golint ./... | grep -v Godeps/_workspace/src/ | tee /dev/stderr)"
|
||||
|
||||
cyclo: |
||||
@echo "Running $@:"
|
||||
@test -z "$$(gocyclo -over 15 . | grep -v Godeps/_workspace/src/ | tee /dev/stderr)"
|
||||
|
||||
build-all: verifiers |
||||
@echo "Building Libraries:"
|
||||
@godep go generate github.com/minio-io/erasure
|
||||
@godep go generate ./...
|
||||
@godep go build -a ./... # have no stale packages
|
||||
|
||||
test-all: build-all |
||||
@echo "Running Test Suites:"
|
||||
@godep go test -race ./...
|
||||
|
||||
test: test-all |
||||
|
||||
minio: build-all test-all |
||||
|
||||
install: minio |
||||
|
||||
save: restore |
||||
@godep save ./...
|
||||
|
||||
restore: |
||||
@godep restore
|
||||
|
||||
env: |
||||
@godep go env
|
||||
|
||||
docs-deploy: |
||||
@mkdocs gh-deploy --clean
|
||||
|
||||
clean: |
||||
@echo "Cleaning up all the generated files:"
|
||||
@rm -fv cover.out
|
201
Godeps/_workspace/src/github.com/minio-io/objectdriver/buildscripts/checkdeps.sh
generated
vendored
201
Godeps/_workspace/src/github.com/minio-io/objectdriver/buildscripts/checkdeps.sh
generated
vendored
@ -0,0 +1,201 @@ |
||||
#!/usr/bin/env bash |
||||
# |
||||
# Minimalist Object Storage, (C) 2015 Minio, Inc. |
||||
# |
||||
# Licensed under the Apache License, Version 2.0 (the "License"); |
||||
# you may not use this file except in compliance with the License. |
||||
# You may obtain a copy of the License at |
||||
# |
||||
# http://www.apache.org/licenses/LICENSE-2.0 |
||||
# |
||||
# Unless required by applicable law or agreed to in writing, software |
||||
# distributed under the License is distributed on an "AS IS" BASIS, |
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
# See the License for the specific language governing permissions and |
||||
# limitations under the License. |
||||
# |
||||
|
||||
_init() { |
||||
## Minimum required versions for build dependencies |
||||
GCC_VERSION="4.0" |
||||
CLANG_VERSION="3.5" |
||||
YASM_VERSION="1.2.0" |
||||
GIT_VERSION="1.0" |
||||
GO_VERSION="1.4" |
||||
OSX_VERSION="10.8" |
||||
UNAME=$(uname -sm) |
||||
|
||||
## Check all dependencies are present |
||||
MISSING="" |
||||
} |
||||
|
||||
### |
||||
# |
||||
# Takes two arguments |
||||
# arg1: version number in `x.x.x` format |
||||
# arg2: version number in `x.x.x` format |
||||
# |
||||
# example: check_version "$version1" "$version2" |
||||
# |
||||
# returns: |
||||
# 0 - Installed version is equal to required |
||||
# 1 - Installed version is greater than required |
||||
# 2 - Installed version is lesser than required |
||||
# 3 - If args have length zero |
||||
# |
||||
#### |
||||
check_version () { |
||||
## validate args |
||||
[[ -z "$1" ]] && return 3 |
||||
[[ -z "$2" ]] && return 3 |
||||
|
||||
if [[ $1 == $2 ]]; then |
||||
return 0 |
||||
fi |
||||
|
||||
local IFS=. |
||||
local i ver1=($1) ver2=($2) |
||||
# fill empty fields in ver1 with zeros |
||||
for ((i=${#ver1[@]}; i<${#ver2[@]}; i++)); do |
||||
ver1[i]=0 |
||||
done |
||||
for ((i=0; i<${#ver1[@]}; i++)); do |
||||
if [[ -z ${ver2[i]} ]]; then |
||||
# fill empty fields in ver2 with zeros |
||||
ver2[i]=0 |
||||
fi |
||||
if ((10#${ver1[i]} > 10#${ver2[i]})); then |
||||
|
||||
return 1 |
||||
fi |
||||
if ((10#${ver1[i]} < 10#${ver2[i]})); then |
||||
## Installed version is lesser than required - Bad condition |
||||
return 2 |
||||
fi |
||||
done |
||||
return 0 |
||||
} |
||||
|
||||
check_golang_env() { |
||||
echo ${GOROOT:?} 2>&1 >/dev/null |
||||
if [ $? -eq 1 ]; then |
||||
echo "ERROR" |
||||
echo "GOROOT environment variable missing, please refer to Go installation document" |
||||
echo "https://github.com/Minio-io/minio/blob/master/BUILDDEPS.md#install-go-13" |
||||
exit 1 |
||||
fi |
||||
|
||||
echo ${GOPATH:?} 2>&1 >/dev/null |
||||
if [ $? -eq 1 ]; then |
||||
echo "ERROR" |
||||
echo "GOPATH environment variable missing, please refer to Go installation document" |
||||
echo "https://github.com/Minio-io/minio/blob/master/BUILDDEPS.md#install-go-13" |
||||
exit 1 |
||||
fi |
||||
} |
||||
|
||||
is_supported_os() { |
||||
case ${UNAME%% *} in |
||||
"Linux") |
||||
os="linux" |
||||
;; |
||||
"Darwin") |
||||
osx_host_version=$(env sw_vers -productVersion) |
||||
check_version "${osx_host_version}" "${OSX_VERSION}" |
||||
[[ $? -ge 2 ]] && die "Minimum OSX version supported is ${OSX_VERSION}" |
||||
;; |
||||
"*") |
||||
echo "Exiting.. unsupported operating system found" |
||||
exit 1; |
||||
esac |
||||
} |
||||
|
||||
is_supported_arch() { |
||||
local supported |
||||
case ${UNAME##* } in |
||||
"x86_64") |
||||
supported=1 |
||||
;; |
||||
*) |
||||
supported=0 |
||||
;; |
||||
esac |
||||
if [ $supported -eq 0 ]; then |
||||
echo "Invalid arch: ${UNAME} not supported, please use x86_64/amd64" |
||||
exit 1; |
||||
fi |
||||
} |
||||
|
||||
check_deps() { |
||||
check_version "$(env go version 2>/dev/null | sed 's/^.* go\([0-9.]*\).*$/\1/')" "${GO_VERSION}" |
||||
if [ $? -ge 2 ]; then |
||||
MISSING="${MISSING} golang(1.4)" |
||||
fi |
||||
|
||||
check_version "$(env git --version 2>/dev/null | sed -e 's/^.* \([0-9.\].*\).*$/\1/' -e 's/^\([0-9.\]*\).*/\1/g')" "${GIT_VERSION}" |
||||
if [ $? -ge 2 ]; then |
||||
MISSING="${MISSING} git" |
||||
fi |
||||
|
||||
case ${UNAME%% *} in |
||||
"Linux") |
||||
check_version "$(env gcc --version 2>/dev/null | sed 's/^.* \([0-9.]*\).*$/\1/' | head -1)" "${GCC_VERSION}" |
||||
if [ $? -ge 2 ]; then |
||||
MISSING="${MISSING} build-essential" |
||||
fi |
||||
;; |
||||
"Darwin") |
||||
check_version "$(env gcc --version 2>/dev/null | sed 's/^.* \([0-9.]*\).*$/\1/' | head -1)" "${CLANG_VERSION}" |
||||
if [ $? -ge 2 ]; then |
||||
MISSING="${MISSING} xcode-cli" |
||||
fi |
||||
;; |
||||
"*") |
||||
;; |
||||
esac |
||||
|
||||
check_version "$(env yasm --version 2>/dev/null | sed 's/^.* \([0-9.]*\).*$/\1/' | head -1)" "${YASM_VERSION}" |
||||
if [ $? -ge 2 ]; then |
||||
MISSING="${MISSING} yasm(1.2.0)" |
||||
fi |
||||
|
||||
env mkdocs help >/dev/null 2>&1 |
||||
if [ $? -ne 0 ]; then |
||||
MISSING="${MISSING} mkdocs" |
||||
fi |
||||
} |
||||
|
||||
main() { |
||||
echo -n "Check for supported arch.. " |
||||
is_supported_arch |
||||
|
||||
echo -n "Check for supported os.. " |
||||
is_supported_os |
||||
|
||||
echo -n "Checking if proper environment variables are set.. " |
||||
check_golang_env |
||||
|
||||
echo "Done" |
||||
echo "Using GOPATH=${GOPATH} and GOROOT=${GOROOT}" |
||||
|
||||
echo -n "Checking dependencies for Minio.. " |
||||
check_deps |
||||
|
||||
## If dependencies are missing, warn the user and abort |
||||
if [ "x${MISSING}" != "x" ]; then |
||||
echo "ERROR" |
||||
echo |
||||
echo "The following build tools are missing:" |
||||
echo |
||||
echo "** ${MISSING} **" |
||||
echo |
||||
echo "Please install them " |
||||
echo "${MISSING}" |
||||
echo |
||||
echo "Follow https://github.com/Minio-io/minio/blob/master/BUILDDEPS.md for further instructions" |
||||
exit 1 |
||||
fi |
||||
echo "Done" |
||||
} |
||||
|
||||
_init && main "$@" |
247
Godeps/_workspace/src/github.com/minio-io/objectdriver/buildscripts/verifier.go
generated
vendored
247
Godeps/_workspace/src/github.com/minio-io/objectdriver/buildscripts/verifier.go
generated
vendored
@ -0,0 +1,247 @@ |
||||
package main |
||||
|
||||
import ( |
||||
"flag" |
||||
"fmt" |
||||
"go/ast" |
||||
"go/parser" |
||||
"go/token" |
||||
"os" |
||||
"path/filepath" |
||||
"sort" |
||||
"strings" |
||||
) |
||||
|
||||
var exitCode int |
||||
var dirs []string |
||||
|
||||
func appendUniq(slice []string, i string) []string { |
||||
for _, ele := range slice { |
||||
if ele == i { |
||||
return slice |
||||
} |
||||
} |
||||
return append(slice, i) |
||||
} |
||||
|
||||
// error formats the error to standard error, adding program
|
||||
// identification and a newline
|
||||
func errorf(format string, args ...interface{}) { |
||||
fmt.Fprintf(os.Stderr, "verifier: "+format+"\n", args...) |
||||
exitCode = 2 |
||||
} |
||||
|
||||
type goPackage struct { |
||||
p *ast.Package |
||||
fs *token.FileSet |
||||
decl map[string]ast.Node |
||||
missingcomments map[string]ast.Node |
||||
used map[string]bool |
||||
} |
||||
|
||||
type usedWalker goPackage |
||||
|
||||
// Walks through the AST marking used identifiers.
|
||||
func (p *usedWalker) Visit(node ast.Node) ast.Visitor { |
||||
// just be stupid and mark all *ast.Ident
|
||||
switch n := node.(type) { |
||||
case *ast.Ident: |
||||
p.used[n.Name] = true |
||||
} |
||||
return p |
||||
} |
||||
|
||||
type report struct { |
||||
pos token.Pos |
||||
name string |
||||
} |
||||
type reports []report |
||||
|
||||
// Len
|
||||
func (l reports) Len() int { return len(l) } |
||||
|
||||
// Less
|
||||
func (l reports) Less(i, j int) bool { return l[i].pos < l[j].pos } |
||||
|
||||
// Swap
|
||||
func (l reports) Swap(i, j int) { l[i], l[j] = l[j], l[i] } |
||||
|
||||
// Visits files for used nodes.
|
||||
func (p *goPackage) Visit(node ast.Node) ast.Visitor { |
||||
u := usedWalker(*p) // hopefully p fields are references.
|
||||
switch n := node.(type) { |
||||
// don't walk whole file, but only:
|
||||
case *ast.ValueSpec: |
||||
// - variable initializers
|
||||
for _, value := range n.Values { |
||||
ast.Walk(&u, value) |
||||
} |
||||
// variable types.
|
||||
if n.Type != nil { |
||||
ast.Walk(&u, n.Type) |
||||
} |
||||
case *ast.BlockStmt: |
||||
// - function bodies
|
||||
for _, stmt := range n.List { |
||||
ast.Walk(&u, stmt) |
||||
} |
||||
case *ast.FuncDecl: |
||||
// - function signatures
|
||||
ast.Walk(&u, n.Type) |
||||
case *ast.TypeSpec: |
||||
// - type declarations
|
||||
ast.Walk(&u, n.Type) |
||||
} |
||||
return p |
||||
} |
||||
|
||||
func getAllMinioPkgs(path string, fl os.FileInfo, err error) error { |
||||
if err != nil { |
||||
return err |
||||
} |
||||
if fl.IsDir() { |
||||
// Skip godeps
|
||||
if strings.Contains(path, "Godeps") { |
||||
return nil |
||||
} |
||||
dirs = appendUniq(dirs, path) |
||||
} |
||||
return nil |
||||
} |
||||
|
||||
func doDir(name string) { |
||||
notests := func(info os.FileInfo) bool { |
||||
if !info.IsDir() && strings.HasSuffix(info.Name(), ".go") && |
||||
!strings.HasSuffix(info.Name(), "_test.go") { |
||||
return true |
||||
} |
||||
return false |
||||
} |
||||
fs := token.NewFileSet() |
||||
pkgs, err := parser.ParseDir(fs, name, notests, parser.ParseComments|parser.Mode(0)) |
||||
if err != nil { |
||||
errorf("%s", err) |
||||
return |
||||
} |
||||
for _, pkg := range pkgs { |
||||
doPackage(fs, pkg) |
||||
} |
||||
} |
||||
|
||||
func doDecl(p *goPackage, decl interface{}, cmap ast.CommentMap) bool { |
||||
switch n := decl.(type) { |
||||
case *ast.GenDecl: |
||||
// var, const, types
|
||||
for _, spec := range n.Specs { |
||||
switch s := spec.(type) { |
||||
case *ast.ValueSpec: |
||||
// constants and variables.
|
||||
for _, name := range s.Names { |
||||
p.decl[name.Name] = n |
||||
} |
||||
case *ast.TypeSpec: |
||||
// type definitions.
|
||||
p.decl[s.Name.Name] = n |
||||
} |
||||
} |
||||
case *ast.FuncDecl: |
||||
// if function is 'main', never check
|
||||
if n.Name.Name == "main" { |
||||
return true |
||||
} |
||||
// Do not be strict on non-exported functions
|
||||
if !ast.IsExported(n.Name.Name) { |
||||
return true |
||||
} |
||||
// Do not be strict for field list functions
|
||||
// if n.Recv != nil {
|
||||
// continue
|
||||
//}
|
||||
// Be strict for global functions
|
||||
_, ok := cmap[n] |
||||
if ok == false { |
||||
p.missingcomments[n.Name.Name] = n |
||||
} |
||||
|
||||
// function declarations
|
||||
// TODO(remy): do methods
|
||||
if n.Recv == nil { |
||||
p.decl[n.Name.Name] = n |
||||
} |
||||
} |
||||
return false |
||||
} |
||||
|
||||
func doPackage(fs *token.FileSet, pkg *ast.Package) { |
||||
p := &goPackage{ |
||||
p: pkg, |
||||
fs: fs, |
||||
decl: make(map[string]ast.Node), |
||||
missingcomments: make(map[string]ast.Node), |
||||
used: make(map[string]bool), |
||||
} |
||||
for _, file := range pkg.Files { |
||||
cmap := ast.NewCommentMap(fs, file, file.Comments) |
||||
for _, decl := range file.Decls { |
||||
if doDecl(p, decl, cmap) { |
||||
continue |
||||
} |
||||
} |
||||
} |
||||
// init() is always used
|
||||
p.used["init"] = true |
||||
if pkg.Name != "main" { |
||||
// exported names are marked used for non-main packages.
|
||||
for name := range p.decl { |
||||
if ast.IsExported(name) { |
||||
p.used[name] = true |
||||
} |
||||
} |
||||
} else { |
||||
// in main programs, main() is called.
|
||||
p.used["main"] = true |
||||
} |
||||
for _, file := range pkg.Files { |
||||
// walk file looking for used nodes.
|
||||
ast.Walk(p, file) |
||||
} |
||||
|
||||
// reports.
|
||||
reports := reports(nil) |
||||
for name, node := range p.decl { |
||||
if !p.used[name] { |
||||
reports = append(reports, report{node.Pos(), name}) |
||||
} |
||||
} |
||||
sort.Sort(reports) |
||||
for _, report := range reports { |
||||
errorf("%s: %s is unused", fs.Position(report.pos), report.name) |
||||
} |
||||
|
||||
for name, node := range p.missingcomments { |
||||
errorf("%s: comment is missing for 'func %s'", fs.Position(node.Pos()), name) |
||||
} |
||||
} |
||||
|
||||
func main() { |
||||
flag.Parse() |
||||
if flag.NArg() == 0 { |
||||
doDir(".") |
||||
} else { |
||||
for _, name := range flag.Args() { |
||||
// Is it a directory?
|
||||
if fi, err := os.Stat(name); err == nil && fi.IsDir() { |
||||
err := filepath.Walk(name, getAllMinioPkgs) |
||||
if err != nil { |
||||
errorf(err.Error()) |
||||
} |
||||
for _, dir := range dirs { |
||||
doDir(dir) |
||||
} |
||||
} else { |
||||
errorf("not a directory: %s", name) |
||||
} |
||||
} |
||||
} |
||||
os.Exit(exitCode) |
||||
} |
@ -1,68 +0,0 @@ |
||||
package donut |
||||
|
||||
import ( |
||||
"bufio" |
||||
"bytes" |
||||
"strings" |
||||
|
||||
"github.com/minio-io/objectdriver" |
||||
) |
||||
|
||||
// delimiter returns the prefix of object up to and including the first
// occurrence of the delimiter's leading byte; when that byte does not
// occur, the whole object is returned.
func delimiter(object, delimiter string) string {
	r := bufio.NewReader(bytes.NewBufferString(object))
	sep, _ := strings.NewReader(delimiter).ReadByte()
	prefix, _ := r.ReadString(sep)
	return prefix
}
||||
|
||||
// appendUniq appends i to slice unless an equal element is already
// present, returning the (possibly unchanged) slice.
func appendUniq(slice []string, i string) []string {
	found := false
	for _, existing := range slice {
		if existing == i {
			found = true
			break
		}
	}
	if found {
		return slice
	}
	return append(slice, i)
}
||||
|
||||
func (d donutDriver) filter(objects []string, resources drivers.BucketResourcesMetadata) ([]string, []string) { |
||||
var actualObjects []string |
||||
var commonPrefixes []string |
||||
for _, name := range objects { |
||||
switch true { |
||||
// Both delimiter and Prefix is present
|
||||
case resources.IsDelimiterPrefixSet(): |
||||
if strings.HasPrefix(name, resources.Prefix) { |
||||
trimmedName := strings.TrimPrefix(name, resources.Prefix) |
||||
delimitedName := delimiter(trimmedName, resources.Delimiter) |
||||
if delimitedName != "" { |
||||
if delimitedName == resources.Delimiter { |
||||
commonPrefixes = appendUniq(commonPrefixes, resources.Prefix+delimitedName) |
||||
} else { |
||||
commonPrefixes = appendUniq(commonPrefixes, delimitedName) |
||||
} |
||||
if trimmedName == delimitedName { |
||||
actualObjects = appendUniq(actualObjects, name) |
||||
} |
||||
} |
||||
} |
||||
// Delimiter present and Prefix is absent
|
||||
case resources.IsDelimiterSet(): |
||||
delimitedName := delimiter(name, resources.Delimiter) |
||||
switch true { |
||||
case delimitedName == name: |
||||
actualObjects = appendUniq(actualObjects, name) |
||||
case delimitedName != "": |
||||
commonPrefixes = appendUniq(commonPrefixes, delimitedName) |
||||
} |
||||
case resources.IsPrefixSet(): |
||||
if strings.HasPrefix(name, resources.Prefix) { |
||||
actualObjects = appendUniq(actualObjects, name) |
||||
} |
||||
case resources.IsDefault(): |
||||
return objects, nil |
||||
} |
||||
} |
||||
return actualObjects, commonPrefixes |
||||
} |
Loading…
Reference in new issue