Mirror of https://github.com/restic/restic.git, synced 2025-03-30 00:00:14 +01:00
repository/packer: add unit test for Merge method
commit ccb92f5bf0
parent 37aa4f824f
2 changed files with 46 additions and 9 deletions
@@ -163,6 +163,8 @@ func makeHeader(blobs []restic.Blob) ([]byte, error) {
 	return buf, nil
 }
 
+// Merge merges another packer into the current packer. Both packers must not be
+// finalized yet.
 func (p *Packer) Merge(other *Packer, otherData io.Reader) error {
 	other.m.Lock()
 	defer other.m.Unlock()
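For context, here is a minimal usage sketch of the newly documented Merge method, assembled from the calls exercised by TestPackMerge further down in this commit. The standalone program wrapper and the import paths are assumptions for illustration only, not part of this change:

// Sketch only: combine two partially filled packers with Merge, then finalize.
// The import paths below are assumptions and may not match the real tree.
package main

import (
	"bytes"
	"crypto/sha256"
	"fmt"

	"github.com/restic/restic/internal/crypto" // assumed location
	"github.com/restic/restic/internal/pack"   // assumed location
	"github.com/restic/restic/internal/restic" // assumed location
)

func main() {
	k := crypto.NewRandomKey()

	// Two packers, each writing its blobs into its own buffer.
	var buf1, buf2 bytes.Buffer
	packer1 := pack.NewPacker(k, &buf1)
	packer2 := pack.NewPacker(k, &buf2)

	// Add one blob to each packer. The final argument mirrors what the test
	// passes (2*len(data)).
	tree := []byte("example tree blob")
	data := []byte("example data blob")
	if _, err := packer1.Add(restic.TreeBlob, sha256.Sum256(tree), tree, 2*len(tree)); err != nil {
		panic(err)
	}
	if _, err := packer2.Add(restic.DataBlob, sha256.Sum256(data), data, 2*len(data)); err != nil {
		panic(err)
	}

	// Merge packer2 into packer1 while both are still un-finalized, then
	// finalize the combined pack; buf1 now holds all blobs plus the header.
	if err := packer1.Merge(packer2, &buf2); err != nil {
		panic(err)
	}
	if err := packer1.Finalize(); err != nil {
		panic(err)
	}

	fmt.Println("blobs in merged pack:", packer1.Count(), "size:", packer1.Size())
}

As the new doc comment states, Merge must be called while both packers are still un-finalized; Finalize is then called once on the combined packer, which is exactly what the test added below verifies.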
|
|
|
@@ -25,15 +25,7 @@ type Buf struct {
 }
 
 func newPack(t testing.TB, k *crypto.Key, lengths []int) ([]Buf, []byte, uint) {
-	bufs := []Buf{}
+	bufs := createBuffers(t, lengths)
 
-	for _, l := range lengths {
-		b := make([]byte, l)
-		_, err := io.ReadFull(rand.Reader, b)
-		rtest.OK(t, err)
-		h := sha256.Sum256(b)
-		bufs = append(bufs, Buf{data: b, id: h})
-	}
-
 	// pack blobs
 	var buf bytes.Buffer
|
@@ -49,6 +41,18 @@ func newPack(t testing.TB, k *crypto.Key, lengths []int) ([]Buf, []byte, uint) {
 	return bufs, buf.Bytes(), p.Size()
 }
 
+func createBuffers(t testing.TB, lengths []int) []Buf {
+	bufs := []Buf{}
+	for _, l := range lengths {
+		b := make([]byte, l)
+		_, err := io.ReadFull(rand.Reader, b)
+		rtest.OK(t, err)
+		h := sha256.Sum256(b)
+		bufs = append(bufs, Buf{data: b, id: h})
+	}
+	return bufs
+}
+
 func verifyBlobs(t testing.TB, bufs []Buf, k *crypto.Key, rd io.ReaderAt, packSize uint) {
 	written := 0
 	for _, buf := range bufs {
@@ -144,3 +148,34 @@ func TestShortPack(t *testing.T) {
 	rtest.OK(t, b.Save(context.TODO(), handle, backend.NewByteReader(packData, b.Hasher())))
 	verifyBlobs(t, bufs, k, backend.ReaderAt(context.TODO(), b, handle), packSize)
 }
+
+func TestPackMerge(t *testing.T) {
+	k := crypto.NewRandomKey()
+
+	bufs := createBuffers(t, []int{1000, 5000, 2000, 3000, 4000, 1500})
+	splitAt := 3
+
+	// Fill packers
+	var buf1 bytes.Buffer
+	packer1 := pack.NewPacker(k, &buf1)
+	for _, b := range bufs[:splitAt] {
+		_, err := packer1.Add(restic.TreeBlob, b.id, b.data, 2*len(b.data))
+		rtest.OK(t, err)
+	}
+
+	var buf2 bytes.Buffer
+	packer2 := pack.NewPacker(k, &buf2)
+	for _, b := range bufs[splitAt:] {
+		_, err := packer2.Add(restic.DataBlob, b.id, b.data, 2*len(b.data))
+		rtest.OK(t, err)
+	}
+
+	err := packer1.Merge(packer2, &buf2)
+	rtest.OK(t, err)
+	err = packer1.Finalize()
+	rtest.OK(t, err)
+
+	// Verify all blobs are present in the merged pack
+	verifyBlobs(t, bufs, k, bytes.NewReader(buf1.Bytes()), packer1.Size())
+	rtest.Equals(t, len(bufs), packer1.Count())
+}
|
|