@@ -178,10 +178,14 @@ func (self *TreeChunker) split(depth int, treeSize int64, key Key, data io.Reade
 		// leaf nodes -> content chunks
 		chunkData := make([]byte, size+8)
 		binary.LittleEndian.PutUint64(chunkData[0:8], uint64(size))
-		_, err := data.Read(chunkData[8:])
-		if err != nil {
-			errC <- err
-			return
+		var readBytes int64
+		for readBytes < size {
+			n, err := data.Read(chunkData[8+readBytes:])
+			readBytes += int64(n)
+			if err != nil && !(err == io.EOF && readBytes == size) {
+				errC <- err
+				return
+			}
 		}
 		select {
 		case jobC <- &hashJob{key, chunkData, size, parentWg}:
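The hunk above replaces a single data.Read call with a read loop. io.Reader.Read may legally return fewer bytes than the buffer holds, so one call can leave chunkData only partially filled; the loop keeps reading until size bytes have arrived, and io.EOF is tolerated only when it coincides with a full buffer. A minimal, self-contained sketch of the same pattern follows; the helper name fillBuffer and the iotest.OneByteReader source are illustrative, not part of the patch.

package main

import (
	"bytes"
	"fmt"
	"io"
	"testing/iotest"
)

// fillBuffer keeps calling Read until buf is full, mirroring the loop the
// patch adds to split(). A single Read may return fewer bytes than
// requested, which is exactly the case the old single-call code mishandled.
func fillBuffer(r io.Reader, buf []byte) error {
	var readBytes int
	for readBytes < len(buf) {
		n, err := r.Read(buf[readBytes:])
		readBytes += n
		// io.EOF is acceptable only if it arrives once the buffer is full,
		// matching the condition in the patched split().
		if err != nil && !(err == io.EOF && readBytes == len(buf)) {
			return err
		}
	}
	return nil
}

func main() {
	// OneByteReader forces 1-byte reads, so the old single-Read version
	// would have returned a mostly empty buffer here.
	src := iotest.OneByteReader(bytes.NewReader([]byte("hello, chunker")))
	buf := make([]byte, 14)
	if err := fillBuffer(src, buf); err != nil {
		fmt.Println("read failed:", err)
		return
	}
	fmt.Printf("%s\n", buf) // hello, chunker
}

The standard library's io.ReadFull implements the same loop and reports a short read as io.ErrUnexpectedEOF, so it is the idiomatic off-the-shelf form of this pattern.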
@@ -371,7 +375,6 @@ func (self *LazyChunkReader) join(b []byte, off int64, eoff int64, depth int, tr
 	defer parentWg.Done()
 	// return NewDPA(&LocalStore{})
-
 	// chunk.Size = int64(binary.LittleEndian.Uint64(chunk.SData[0:8]))
 
 
 	// find appropriate block level
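For reference, the commented-out line above reverses the encoding from the first hunk: a chunk's SData starts with an 8-byte little-endian length prefix, written by PutUint64 in split() and decodable with Uint64 in join(). A small round-trip sketch of that layout, with illustrative variable names:

package main

import (
	"encoding/binary"
	"fmt"
)

func main() {
	payload := []byte("swarm chunk payload")
	size := int64(len(payload))

	// Encode: 8-byte little-endian size prefix followed by the payload,
	// matching the PutUint64 call in split() above.
	chunkData := make([]byte, size+8)
	binary.LittleEndian.PutUint64(chunkData[0:8], uint64(size))
	copy(chunkData[8:], payload)

	// Decode: the same read the commented line in join() performs on SData.
	decoded := int64(binary.LittleEndian.Uint64(chunkData[0:8]))
	fmt.Println(decoded, string(chunkData[8:8+decoded])) // 19 swarm chunk payload
}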