fix: allow crawler to crawl on disks without usage constraints (#10677)

additionally, change the resolution of the usage-wise return of disks
to MiB, allowing small byte-level differences to be masked.
master
Harshavardhana committed 4 years ago via GitHub
parent 23773bb32b
commit f9be783f3e
35  cmd/erasure-common.go
 2  cmd/erasure.go

@@ -37,6 +37,37 @@ func (er erasureObjects) getLoadBalancedLocalDisks() (newDisks []StorageAPI) {
 	return newDisks
 }
 
+func (er erasureObjects) getOnlineDisks() (newDisks []StorageAPI) {
+	disks := er.getDisks()
+	var wg sync.WaitGroup
+	var mu sync.Mutex
+	for _, i := range hashOrder(UTCNow().String(), len(disks)) {
+		i := i
+		wg.Add(1)
+		go func() {
+			defer wg.Done()
+			if disks[i-1] == nil {
+				return
+			}
+			di, err := disks[i-1].DiskInfo(context.Background())
+			if err != nil || di.Healing {
+				// - Do not consume disks which are not reachable,
+				//   unformatted, or simply not accessible for some reason.
+				//
+				// - Do not consume disks which are being healed.
+				//
+				// - Future: skip busy disks.
+				return
+			}
+			mu.Lock()
+			newDisks = append(newDisks, disks[i-1])
+			mu.Unlock()
+		}()
+	}
+	wg.Wait()
+	return newDisks
+}
+
 // getLoadBalancedNDisks - fetches load balanced (sufficiently randomized) disk slice
 // with N disks online. If ndisks is zero or negative, it returns all disks;
 // the same applies when ndisks is greater than the total number of disks.
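In getOnlineDisks above, disks are indexed as disks[i-1] because hashOrder yields positions starting at 1, not 0. The following is a minimal, self-contained sketch of such a 1-based, key-salted ordering; it mirrors the shape of MinIO's helper for illustration, not necessarily its exact implementation:

	package main

	import (
		"fmt"
		"hash/crc32"
	)

	// hashOrder returns a deterministic, key-salted permutation of the
	// values 1..cardinality; callers therefore index slices with i-1.
	// Sketch only, not MinIO's verbatim code.
	func hashOrder(key string, cardinality int) []int {
		if cardinality <= 0 {
			return nil
		}
		nums := make([]int, cardinality)
		start := int(crc32.ChecksumIEEE([]byte(key)) % uint32(cardinality))
		for i := 1; i <= cardinality; i++ {
			nums[i-1] = 1 + ((start + i) % cardinality)
		}
		return nums
	}

	func main() {
		// Every value is in [1, cardinality], never 0 -- hence disks[i-1].
		fmt.Println(hashOrder("2020-10-14T15:04:05Z", 4)) // e.g. [2 3 4 1]
	}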
@@ -89,8 +120,8 @@ func (er erasureObjects) getLoadBalancedDisks(optimized bool) []StorageAPI {
 			}
 			mu.Lock()
-			// Capture disks usage wise
-			newDisks[di.Used] = append(newDisks[di.Used], disks[i-1])
+			// Capture disks usage wise, up to a resolution of MiB.
+			newDisks[di.Used/1024/1024] = append(newDisks[di.Used/1024/1024], disks[i-1])
 			mu.Unlock()
 		}()
 	}
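To see how the MiB resolution masks byte-level differences: two disks whose reported usage differs by a few hundred bytes get distinct map keys when keyed by exact byte count, but share one key at MiB resolution. A small self-contained illustration:

	package main

	import "fmt"

	func main() {
		const MiB = 1 << 20

		usedA := uint64(5*MiB + 100) // 5,242,980 bytes
		usedB := uint64(5*MiB + 900) // 5,243,780 bytes

		// Before: exact byte counts give two distinct map keys.
		fmt.Println(usedA != usedB) // true

		// After: at MiB resolution both disks share a key, so they
		// are treated as equally utilized.
		fmt.Println(usedA/1024/1024 == usedB/1024/1024) // true (both 5)
	}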

@@ -252,7 +252,7 @@ func (er erasureObjects) crawlAndGetDataUsage(ctx context.Context, buckets []Buc
 	}
 	// Collect disks we can use.
-	disks := er.getLoadBalancedDisks(true)
+	disks := er.getOnlineDisks()
 	if len(disks) == 0 {
 		logger.Info(color.Green("data-crawl:") + " all disks are offline or being healed, skipping crawl")
 		return nil
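The effect of the swap above is that the crawler's disk set is now filtered only by reachability and healing state, never by usage. A hypothetical, self-contained sketch of that contract (fakeDisk and its fields are stand-ins for illustration, not MinIO types):

	package main

	import "fmt"

	// fakeDisk is a hypothetical stand-in for MinIO's StorageAPI,
	// reduced to the three properties that matter for selection here.
	type fakeDisk struct {
		online  bool
		healing bool
		used    uint64
	}

	// onlineDisks keeps every reachable, non-healing disk with no usage
	// constraint -- the behavior this commit gives the crawler.
	func onlineDisks(disks []fakeDisk) (out []fakeDisk) {
		for _, d := range disks {
			if !d.online || d.healing {
				continue
			}
			out = append(out, d)
		}
		return out
	}

	func main() {
		disks := []fakeDisk{
			{online: true, used: 90 << 30}, // nearly full: still crawled
			{online: true, used: 1 << 30},  // mostly empty: crawled
			{online: true, healing: true},  // healing: skipped
			{online: false},                // offline: skipped
		}
		fmt.Println(len(onlineDisks(disks)), "of", len(disks), "disks crawled")
	}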
