Fix encoding bug in donut during encoding phase

Stream reading needs to check that the returned length is non-zero: after
Read() has filled a predefined buffer length, io.EOF may be returned
together with length == 0.

Erasure coding that zeroed buffer would wrongly encode it as part of the
existing data blocks, which leads to errors while decoding even when the
other contents are intact.
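
As an illustration, here is a minimal, self-contained sketch of the guarded
read loop (simplified from the diff below; encodeChunk is a hypothetical
stand-in for the donut erasure encoder, not part of this commit):

    package main

    import (
        "bytes"
        "fmt"
        "io"
    )

    // encodeChunk is a hypothetical stand-in for the erasure encoder; it only
    // copies the chunk so that this sketch stays self-contained.
    func encodeChunk(chunk []byte) []byte {
        out := make([]byte, len(chunk))
        copy(out, chunk)
        return out
    }

    // readAndEncode reads the stream in chunkSize pieces and encodes only the
    // bytes actually returned by Read. The length != 0 guard is the point of
    // the fix: Read may return io.EOF together with length == 0, and encoding
    // the untouched buffer would add a bogus, zero-filled chunk.
    func readAndEncode(objectData io.Reader, chunkSize int) (totalLength, chunkCount int, err error) {
        inputData := make([]byte, chunkSize)
        for err == nil {
            var length int
            length, err = objectData.Read(inputData)
            if length != 0 {
                _ = encodeChunk(inputData[0:length]) // encoded blocks would be written out here
                totalLength += length
                chunkCount++
            }
        }
        if err != io.EOF {
            return 0, 0, err
        }
        return totalLength, chunkCount, nil
    }

    func main() {
        total, chunks, err := readAndEncode(bytes.NewReader(make([]byte, 25)), 10)
        fmt.Println(total, chunks, err) // prints: 25 3 <nil>
    }
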
master
Harshavardhana 9 years ago
parent f9174632bb
commit 4ed50a8004
  1. pkg/donut/bucket.go (38 lines changed)
  2. server-api-signature.go (9 lines changed)

pkg/donut/bucket.go
@@ -444,27 +444,29 @@ func (b bucket) writeObjectData(k, m uint8, writers []io.WriteCloser, objectData
         var length int
         inputData := make([]byte, chunkSize)
         length, e = objectData.Read(inputData)
-        encodedBlocks, err := encoder.Encode(inputData)
-        if err != nil {
-            return 0, 0, err.Trace()
-        }
-        if _, err := hashWriter.Write(inputData[0:length]); err != nil {
-            return 0, 0, probe.NewError(err)
-        }
-        for blockIndex, block := range encodedBlocks {
-            errCh := make(chan error, 1)
-            go func(writer io.Writer, reader io.Reader, errCh chan<- error) {
-                defer close(errCh)
-                _, err := io.Copy(writer, reader)
-                errCh <- err
-            }(writers[blockIndex], bytes.NewReader(block), errCh)
-            if err := <-errCh; err != nil {
-                // Returning error is fine here CleanupErrors() would cleanup writers
-                return 0, 0, probe.NewError(err)
-            }
-        }
-        totalLength += length
-        chunkCount = chunkCount + 1
+        if length != 0 {
+            encodedBlocks, err := encoder.Encode(inputData[0:length])
+            if err != nil {
+                return 0, 0, err.Trace()
+            }
+            if _, err := hashWriter.Write(inputData[0:length]); err != nil {
+                return 0, 0, probe.NewError(err)
+            }
+            for blockIndex, block := range encodedBlocks {
+                errCh := make(chan error, 1)
+                go func(writer io.Writer, reader io.Reader, errCh chan<- error) {
+                    defer close(errCh)
+                    _, err := io.Copy(writer, reader)
+                    errCh <- err
+                }(writers[blockIndex], bytes.NewReader(block), errCh)
+                if err := <-errCh; err != nil {
+                    // Returning error is fine here CleanupErrors() would cleanup writers
+                    return 0, 0, probe.NewError(err)
+                }
+            }
+            totalLength += length
+            chunkCount = chunkCount + 1
+        }
     }
     if e != io.EOF {
         return 0, 0, probe.NewError(e)

server-api-signature.go
@@ -17,7 +17,6 @@
 package main

 import (
-    "bytes"
     "encoding/base64"
     "io"
     "io/ioutil"

@@ -131,7 +130,7 @@ func initSignatureV4(req *http.Request) (*signv4.Signature, *probe.Error) {
 func extractHTTPFormValues(reader *multipart.Reader) (io.Reader, map[string]string, *probe.Error) {
     /// HTML Form values
     formValues := make(map[string]string)
-    filePart := new(bytes.Buffer)
+    var filePart io.Reader
     var err error
     for err == nil {
         var part *multipart.Part

@@ -144,11 +143,7 @@ func extractHTTPFormValues(reader *multipart.Reader) (io.Reader, map[string]string, *probe.Error) {
                 }
                 formValues[part.FormName()] = string(buffer)
             } else {
-                // FIXME: this will hog memory
-                _, err := io.Copy(filePart, part)
-                if err != nil {
-                    return nil, nil, probe.NewError(err)
-                }
+                filePart = part
             }
         }
     }
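
For the server-api-signature.go change, the idea is to hand the file part back
as an io.Reader instead of copying it into a bytes.Buffer (the removed io.Copy,
flagged by the FIXME, buffered the whole upload in memory). Below is an
illustrative, self-contained sketch of that pattern, not the handler from this
commit: it breaks out of the loop once the file part is found so the part is
still readable when returned, and it assumes the file field arrives last in the
form (as S3-style POST uploads place it).

    package main

    import (
        "bytes"
        "fmt"
        "io"
        "mime/multipart"
    )

    // extractFormValues collects ordinary form fields into a map and returns
    // the file field as an io.Reader so callers can stream it instead of
    // holding the whole body in memory.
    func extractFormValues(reader *multipart.Reader) (io.Reader, map[string]string, error) {
        formValues := make(map[string]string)
        var filePart io.Reader
        for {
            part, err := reader.NextPart()
            if err == io.EOF {
                break
            }
            if err != nil {
                return nil, nil, err
            }
            if part.FileName() == "" {
                // Ordinary field: small, safe to read fully.
                value, err := io.ReadAll(part)
                if err != nil {
                    return nil, nil, err
                }
                formValues[part.FormName()] = string(value)
            } else {
                // File field: keep the reader itself rather than copying it.
                // Stop here so a further NextPart call does not drain it.
                filePart = part
                break
            }
        }
        return filePart, formValues, nil
    }

    func main() {
        // Build a small multipart body to exercise the helper.
        var body bytes.Buffer
        w := multipart.NewWriter(&body)
        _ = w.WriteField("key", "uploads/object")
        fw, _ := w.CreateFormFile("file", "object")
        _, _ = fw.Write([]byte("hello donut"))
        _ = w.Close()

        file, fields, err := extractFormValues(multipart.NewReader(&body, w.Boundary()))
        if err != nil {
            panic(err)
        }
        data, _ := io.ReadAll(file)
        fmt.Println(fields["key"], string(data)) // prints: uploads/object hello donut
    }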
