Diffstat (limited to 'vendor/github.com/containerd/stargz-snapshotter')
-rw-r--r--  vendor/github.com/containerd/stargz-snapshotter/estargz/estargz.go  34
-rw-r--r--  vendor/github.com/containerd/stargz-snapshotter/estargz/gzip.go     66
2 files changed, 70 insertions, 30 deletions
diff --git a/vendor/github.com/containerd/stargz-snapshotter/estargz/estargz.go b/vendor/github.com/containerd/stargz-snapshotter/estargz/estargz.go
index 3ef029116..e56319545 100644
--- a/vendor/github.com/containerd/stargz-snapshotter/estargz/estargz.go
+++ b/vendor/github.com/containerd/stargz-snapshotter/estargz/estargz.go
@@ -118,7 +118,7 @@ func Open(sr *io.SectionReader, opt ...OpenOption) (*Reader, error) {
 		}
 	}
 
-	gzipCompressors := []Decompressor{new(GzipDecompressor), new(legacyGzipDecompressor)}
+	gzipCompressors := []Decompressor{new(GzipDecompressor), new(LegacyGzipDecompressor)}
 	decompressors := append(gzipCompressors, opts.decompressors...)
 
 	// Determine the size to fetch. Try to fetch as many bytes as possible.
@@ -184,7 +184,7 @@ func OpenFooter(sr *io.SectionReader) (tocOffset int64, footerSize int64, rErr e
 		return 0, 0, fmt.Errorf("error reading footer: %v", err)
 	}
 	var allErr []error
-	for _, d := range []Decompressor{new(GzipDecompressor), new(legacyGzipDecompressor)} {
+	for _, d := range []Decompressor{new(GzipDecompressor), new(LegacyGzipDecompressor)} {
 		fSize := d.FooterSize()
 		fOffset := positive(int64(len(footer)) - fSize)
 		_, tocOffset, _, err := d.ParseFooter(footer[fOffset:])
@@ -279,12 +279,12 @@ func (r *Reader) initFields() error {
 			pdir := r.getOrCreateDir(pdirName)
 			ent.NumLink++ // at least one name(ent.Name) references this entry.
 			if ent.Type == "hardlink" {
-				if org, ok := r.m[cleanEntryName(ent.LinkName)]; ok {
-					org.NumLink++ // original entry is referenced by this ent.Name.
-					ent = org
-				} else {
-					return fmt.Errorf("%q is a hardlink but the linkname %q isn't found", ent.Name, ent.LinkName)
+				org, err := r.getSource(ent)
+				if err != nil {
+					return err
 				}
+				org.NumLink++ // original entry is referenced by this ent.Name.
+				ent = org
 			}
 			pdir.addChild(path.Base(name), ent)
 		}
@@ -303,6 +303,20 @@ func (r *Reader) initFields() error {
 	return nil
 }
 
+func (r *Reader) getSource(ent *TOCEntry) (_ *TOCEntry, err error) {
+	if ent.Type == "hardlink" {
+		org, ok := r.m[cleanEntryName(ent.LinkName)]
+		if !ok {
+			return nil, fmt.Errorf("%q is a hardlink but the linkname %q isn't found", ent.Name, ent.LinkName)
+		}
+		ent, err = r.getSource(org)
+		if err != nil {
+			return nil, err
+		}
+	}
+	return ent, nil
+}
+
 func parentDir(p string) string {
 	dir, _ := path.Split(p)
 	return strings.TrimSuffix(dir, "/")
@@ -464,7 +478,11 @@ func (r *Reader) Lookup(path string) (e *TOCEntry, ok bool) {
 	}
 	e, ok = r.m[path]
 	if ok && e.Type == "hardlink" {
-		e, ok = r.m[e.LinkName]
+		var err error
+		e, err = r.getSource(e)
+		if err != nil {
+			return nil, false
+		}
 	}
 	return
 }
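The estargz.go hunks above make hardlink resolution recursive: getSource follows a hardlink whose linkname is itself a hardlink until it reaches the original entry, and Lookup now reports a miss instead of returning a dangling link. A minimal, hedged usage sketch in Go (the blob path and entry name are illustrative, not taken from this diff):

package main

import (
	"fmt"
	"io"
	"os"

	"github.com/containerd/stargz-snapshotter/estargz"
)

func main() {
	// Open an eStargz blob from disk (the file name is illustrative).
	f, err := os.Open("layer.stargz")
	if err != nil {
		panic(err)
	}
	defer f.Close()
	fi, err := f.Stat()
	if err != nil {
		panic(err)
	}

	// estargz.Open reads the footer and TOC from the section reader.
	r, err := estargz.Open(io.NewSectionReader(f, 0, fi.Size()))
	if err != nil {
		panic(err)
	}

	// With this change, a hardlink pointing at another hardlink is resolved
	// recursively to the original entry; Lookup returns ok=false if the chain
	// is broken instead of handing back an intermediate link entry.
	if e, ok := r.Lookup("usr/bin/link-to-link"); ok {
		fmt.Println("resolved to:", e.Name)
	}
}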
diff --git a/vendor/github.com/containerd/stargz-snapshotter/estargz/gzip.go b/vendor/github.com/containerd/stargz-snapshotter/estargz/gzip.go
index 88e1283d8..7330849cb 100644
--- a/vendor/github.com/containerd/stargz-snapshotter/estargz/gzip.go
+++ b/vendor/github.com/containerd/stargz-snapshotter/estargz/gzip.go
@@ -38,30 +38,34 @@ import (
 )
 
 type gzipCompression struct {
-	*gzipCompressor
+	*GzipCompressor
 	*GzipDecompressor
 }
 
 func newGzipCompressionWithLevel(level int) Compression {
 	return &gzipCompression{
-		&gzipCompressor{level},
+		&GzipCompressor{level},
 		&GzipDecompressor{},
 	}
 }
 
-func NewGzipCompressorWithLevel(level int) Compressor {
-	return &gzipCompressor{level}
+func NewGzipCompressor() *GzipCompressor {
+	return &GzipCompressor{gzip.BestCompression}
 }
 
-type gzipCompressor struct {
+func NewGzipCompressorWithLevel(level int) *GzipCompressor {
+	return &GzipCompressor{level}
+}
+
+type GzipCompressor struct {
 	compressionLevel int
 }
 
-func (gc *gzipCompressor) Writer(w io.Writer) (io.WriteCloser, error) {
+func (gc *GzipCompressor) Writer(w io.Writer) (io.WriteCloser, error) {
 	return gzip.NewWriterLevel(w, gc.compressionLevel)
 }
 
-func (gc *gzipCompressor) WriteTOCAndFooter(w io.Writer, off int64, toc *JTOC, diffHash hash.Hash) (digest.Digest, error) {
+func (gc *GzipCompressor) WriteTOCAndFooter(w io.Writer, off int64, toc *JTOC, diffHash hash.Hash) (digest.Digest, error) {
 	tocJSON, err := json.MarshalIndent(toc, "", "\t")
 	if err != nil {
 		return "", err
@@ -155,17 +159,21 @@ func (gz *GzipDecompressor) FooterSize() int64 {
 	return FooterSize
 }
 
-type legacyGzipDecompressor struct{}
+func (gz *GzipDecompressor) DecompressTOC(r io.Reader) (tocJSON io.ReadCloser, err error) {
+	return decompressTOCEStargz(r)
+}
+
+type LegacyGzipDecompressor struct{}
 
-func (gz *legacyGzipDecompressor) Reader(r io.Reader) (io.ReadCloser, error) {
+func (gz *LegacyGzipDecompressor) Reader(r io.Reader) (io.ReadCloser, error) {
 	return gzip.NewReader(r)
 }
 
-func (gz *legacyGzipDecompressor) ParseTOC(r io.Reader) (toc *JTOC, tocDgst digest.Digest, err error) {
+func (gz *LegacyGzipDecompressor) ParseTOC(r io.Reader) (toc *JTOC, tocDgst digest.Digest, err error) {
 	return parseTOCEStargz(r)
 }
 
-func (gz *legacyGzipDecompressor) ParseFooter(p []byte) (blobPayloadSize, tocOffset, tocSize int64, err error) {
+func (gz *LegacyGzipDecompressor) ParseFooter(p []byte) (blobPayloadSize, tocOffset, tocSize int64, err error) {
 	if len(p) != legacyFooterSize {
 		return 0, 0, 0, fmt.Errorf("legacy: invalid length %d cannot be parsed", len(p))
 	}
@@ -188,29 +196,43 @@ func (gz *legacyGzipDecompressor) ParseFooter(p []byte) (blobPayloadSize, tocOffset, tocSize int64, err error) {
 	return tocOffset, tocOffset, 0, nil
 }
 
-func (gz *legacyGzipDecompressor) FooterSize() int64 {
+func (gz *LegacyGzipDecompressor) FooterSize() int64 {
 	return legacyFooterSize
 }
 
+func (gz *LegacyGzipDecompressor) DecompressTOC(r io.Reader) (tocJSON io.ReadCloser, err error) {
+	return decompressTOCEStargz(r)
+}
+
 func parseTOCEStargz(r io.Reader) (toc *JTOC, tocDgst digest.Digest, err error) {
+	tr, err := decompressTOCEStargz(r)
+	if err != nil {
+		return nil, "", err
+	}
+	dgstr := digest.Canonical.Digester()
+	toc = new(JTOC)
+	if err := json.NewDecoder(io.TeeReader(tr, dgstr.Hash())).Decode(&toc); err != nil {
+		return nil, "", fmt.Errorf("error decoding TOC JSON: %v", err)
+	}
+	if err := tr.Close(); err != nil {
+		return nil, "", err
+	}
+	return toc, dgstr.Digest(), nil
+}
+
+func decompressTOCEStargz(r io.Reader) (tocJSON io.ReadCloser, err error) {
 	zr, err := gzip.NewReader(r)
 	if err != nil {
-		return nil, "", fmt.Errorf("malformed TOC gzip header: %v", err)
+		return nil, fmt.Errorf("malformed TOC gzip header: %v", err)
 	}
-	defer zr.Close()
 	zr.Multistream(false)
 	tr := tar.NewReader(zr)
 	h, err := tr.Next()
 	if err != nil {
-		return nil, "", fmt.Errorf("failed to find tar header in TOC gzip stream: %v", err)
+		return nil, fmt.Errorf("failed to find tar header in TOC gzip stream: %v", err)
 	}
 	if h.Name != TOCTarName {
-		return nil, "", fmt.Errorf("TOC tar entry had name %q; expected %q", h.Name, TOCTarName)
+		return nil, fmt.Errorf("TOC tar entry had name %q; expected %q", h.Name, TOCTarName)
 	}
-	dgstr := digest.Canonical.Digester()
-	toc = new(JTOC)
-	if err := json.NewDecoder(io.TeeReader(tr, dgstr.Hash())).Decode(&toc); err != nil {
-		return nil, "", fmt.Errorf("error decoding TOC JSON: %v", err)
-	}
-	return toc, dgstr.Digest(), nil
+	return readCloser{tr, zr.Close}, nil
 }
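The gzip.go hunks export the gzip compressor and legacy decompressor as GzipCompressor and LegacyGzipDecompressor, and add NewGzipCompressor and DecompressTOC. A hedged sketch of constructing the now-exported compressor from outside the package, using only the constructor and Writer method shown above (everything else is illustrative):

package main

import (
	"bytes"
	"compress/gzip"
	"fmt"

	"github.com/containerd/stargz-snapshotter/estargz"
)

func main() {
	// NewGzipCompressorWithLevel now returns the exported *estargz.GzipCompressor.
	comp := estargz.NewGzipCompressorWithLevel(gzip.BestSpeed)

	// Writer wraps the destination in a gzip writer at the chosen level,
	// as shown in the GzipCompressor.Writer hunk above.
	var buf bytes.Buffer
	zw, err := comp.Writer(&buf)
	if err != nil {
		panic(err)
	}
	if _, err := zw.Write([]byte("hello eStargz")); err != nil {
		panic(err)
	}
	if err := zw.Close(); err != nil {
		panic(err)
	}
	fmt.Printf("compressed %d bytes\n", buf.Len())
}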