Remove slow AppendUniq code; linearly scanning a slice for each insert is inefficient

Remove it and use a map-based deduplication instead
master
Harshavardhana 10 years ago
parent 84810162f5
commit 38a6ce36e5
  1. 9
      pkg/storage/donut/bucket.go
  2. 60
      pkg/storage/donut/cache.go
  3. 27
      pkg/storage/donut/common.go
  4. 2
      pkg/storage/donut/donut_test.go

@ -145,7 +145,7 @@ func (b bucket) ListObjects(prefix, marker, delimiter string, maxkeys int) (List
for objectName := range bucketMetadata.Buckets[b.getBucketName()].BucketObjects { for objectName := range bucketMetadata.Buckets[b.getBucketName()].BucketObjects {
if strings.HasPrefix(objectName, strings.TrimSpace(prefix)) { if strings.HasPrefix(objectName, strings.TrimSpace(prefix)) {
if objectName > marker { if objectName > marker {
objects = AppendU(objects, objectName) objects = append(objects, objectName)
} }
} }
} }
@ -166,16 +166,19 @@ func (b bucket) ListObjects(prefix, marker, delimiter string, maxkeys int) (List
var commonPrefixes []string var commonPrefixes []string
for _, commonPrefix := range prefixes { for _, commonPrefix := range prefixes {
commonPrefixes = AppendU(commonPrefixes, prefix+commonPrefix) commonPrefixes = append(commonPrefixes, prefix+commonPrefix)
} }
filteredObjects = RemoveDuplicates(filteredObjects)
sort.Strings(filteredObjects) sort.Strings(filteredObjects)
for _, objectName := range filteredObjects { for _, objectName := range filteredObjects {
if len(results) >= maxkeys { if len(results) >= maxkeys {
isTruncated = true isTruncated = true
break break
} }
results = AppendU(results, prefix+objectName) results = append(results, prefix+objectName)
} }
results = RemoveDuplicates(results)
commonPrefixes = RemoveDuplicates(commonPrefixes)
sort.Strings(commonPrefixes) sort.Strings(commonPrefixes)
listObjects := ListObjects{} listObjects := ListObjects{}

@ -17,7 +17,6 @@
package donut package donut
import ( import (
"bufio"
"bytes" "bytes"
"crypto/md5" "crypto/md5"
"encoding/base64" "encoding/base64"
@ -419,63 +418,47 @@ func (cache Cache) CreateBucket(bucketName, acl string) error {
return nil return nil
} }
func delimiter(object, delimiter string) string { func (cache Cache) filterDelimiterPrefix(keys []string, key, prefix, delim string) ([]string, []string) {
readBuffer := bytes.NewBufferString(object) var commonPrefixes []string
reader := bufio.NewReader(readBuffer)
stringReader := strings.NewReader(delimiter)
delimited, _ := stringReader.ReadByte()
delimitedStr, _ := reader.ReadString(delimited)
return delimitedStr
}
func appendUniq(slice []string, i string) []string {
for _, ele := range slice {
if ele == i {
return slice
}
}
return append(slice, i)
}
func (cache Cache) filterDelimiterPrefix(keys []string, key, delim string, r BucketResourcesMetadata) ([]string, BucketResourcesMetadata) {
switch true { switch true {
case key == r.Prefix: case key == prefix:
keys = appendUniq(keys, key) keys = append(keys, key)
// delim - requires r.Prefix as it was trimmed off earlier // delim - requires r.Prefix as it was trimmed off earlier
case key == r.Prefix+delim: case key == prefix+delim:
keys = appendUniq(keys, key) keys = append(keys, key)
case delim != "": case delim != "":
r.CommonPrefixes = appendUniq(r.CommonPrefixes, r.Prefix+delim) commonPrefixes = append(commonPrefixes, prefix+delim)
} }
return keys, r return RemoveDuplicates(keys), RemoveDuplicates(commonPrefixes)
} }
func (cache Cache) listObjects(keys []string, key string, r BucketResourcesMetadata) ([]string, BucketResourcesMetadata) { func (cache Cache) listObjects(keys []string, key string, r BucketResourcesMetadata) ([]string, []string) {
var commonPrefixes []string
switch true { switch true {
// Prefix absent, delimit object key based on delimiter // Prefix absent, delimit object key based on delimiter
case r.IsDelimiterSet(): case r.IsDelimiterSet():
delim := delimiter(key, r.Delimiter) delim := Delimiter(key, r.Delimiter)
switch true { switch true {
case delim == "" || delim == key: case delim == "" || delim == key:
keys = appendUniq(keys, key) keys = append(keys, key)
case delim != "": case delim != "":
r.CommonPrefixes = appendUniq(r.CommonPrefixes, delim) commonPrefixes = append(commonPrefixes, delim)
} }
// Prefix present, delimit object key with prefix key based on delimiter // Prefix present, delimit object key with prefix key based on delimiter
case r.IsDelimiterPrefixSet(): case r.IsDelimiterPrefixSet():
if strings.HasPrefix(key, r.Prefix) { if strings.HasPrefix(key, r.Prefix) {
trimmedName := strings.TrimPrefix(key, r.Prefix) trimmedName := strings.TrimPrefix(key, r.Prefix)
delim := delimiter(trimmedName, r.Delimiter) delim := Delimiter(trimmedName, r.Delimiter)
keys, r = cache.filterDelimiterPrefix(keys, key, delim, r) keys, commonPrefixes = cache.filterDelimiterPrefix(keys, key, r.Prefix, delim)
} }
// Prefix present, nothing to delimit // Prefix present, nothing to delimit
case r.IsPrefixSet(): case r.IsPrefixSet():
keys = appendUniq(keys, key) keys = append(keys, key)
// Prefix and delimiter absent // Prefix and delimiter absent
case r.IsDefault(): case r.IsDefault():
keys = appendUniq(keys, key) keys = append(keys, key)
} }
return keys, r return RemoveDuplicates(keys), RemoveDuplicates(commonPrefixes)
} }
// ListObjects - list objects from cache // ListObjects - list objects from cache
@ -493,11 +476,12 @@ func (cache Cache) ListObjects(bucket string, resources BucketResourcesMetadata)
} }
var results []ObjectMetadata var results []ObjectMetadata
var keys []string var keys []string
var commonPrefixes []string
storedBucket := cache.storedBuckets[bucket] storedBucket := cache.storedBuckets[bucket]
for key := range storedBucket.objectMetadata { for key := range storedBucket.objectMetadata {
if strings.HasPrefix(key, bucket+"/") { if strings.HasPrefix(key, bucket+"/") {
key = key[len(bucket)+1:] key = key[len(bucket)+1:]
keys, resources = cache.listObjects(keys, key, resources) keys, commonPrefixes = cache.listObjects(keys, key, resources)
} }
} }
var newKeys []string var newKeys []string
@ -505,12 +489,13 @@ func (cache Cache) ListObjects(bucket string, resources BucketResourcesMetadata)
case resources.Marker != "": case resources.Marker != "":
for _, key := range keys { for _, key := range keys {
if key > resources.Marker { if key > resources.Marker {
newKeys = appendUniq(newKeys, key) newKeys = append(newKeys, key)
} }
} }
default: default:
newKeys = keys newKeys = keys
} }
newKeys = RemoveDuplicates(newKeys)
sort.Strings(newKeys) sort.Strings(newKeys)
for _, key := range newKeys { for _, key := range newKeys {
if len(results) == resources.Maxkeys { if len(results) == resources.Maxkeys {
@ -523,6 +508,7 @@ func (cache Cache) ListObjects(bucket string, resources BucketResourcesMetadata)
object := storedBucket.objectMetadata[bucket+"/"+key] object := storedBucket.objectMetadata[bucket+"/"+key]
results = append(results, object) results = append(results, object)
} }
resources.CommonPrefixes = commonPrefixes
return results, resources, nil return results, resources, nil
} }

@ -17,18 +17,33 @@
package donut package donut
import ( import (
"bufio"
"bytes"
"sort" "sort"
"strings" "strings"
) )
// AppendU append to an input slice if the element is unique and provides a new slice // Delimiter delims the string at delimiter
func AppendU(slice []string, i string) []string { func Delimiter(object, delimiter string) string {
for _, ele := range slice { readBuffer := bytes.NewBufferString(object)
if ele == i { reader := bufio.NewReader(readBuffer)
return slice stringReader := strings.NewReader(delimiter)
delimited, _ := stringReader.ReadByte()
delimitedStr, _ := reader.ReadString(delimited)
return delimitedStr
}
// RemoveDuplicates removes duplicate elements from a slice
func RemoveDuplicates(slice []string) []string {
newSlice := []string{}
seen := make(map[string]struct{})
for _, val := range slice {
if _, ok := seen[val]; !ok {
newSlice = append(newSlice, val)
seen[val] = struct{}{} // avoiding byte allocation
} }
} }
return append(slice, i) return newSlice
} }
// TrimPrefix trims off a prefix string from all the elements in a given slice // TrimPrefix trims off a prefix string from all the elements in a given slice

@ -94,7 +94,7 @@ func (s *MySuite) TestEmptyBucket(c *C) {
listObjects, err := donut.ListObjects("foo", "", "", "", 1) listObjects, err := donut.ListObjects("foo", "", "", "", 1)
c.Assert(err, IsNil) c.Assert(err, IsNil)
c.Assert(len(listObjects.Objects), Equals, 0) c.Assert(len(listObjects.Objects), Equals, 0)
c.Assert(listObjects.CommonPrefixes, IsNil) c.Assert(listObjects.CommonPrefixes, DeepEquals, []string{})
c.Assert(listObjects.IsTruncated, Equals, false) c.Assert(listObjects.IsTruncated, Equals, false)
} }

Loading…
Cancel
Save