path: root/vendor/github.com/klauspost/compress/zstd
author     Daniel J Walsh <dwalsh@redhat.com>   2020-03-04 16:53:39 -0500
committer  Daniel J Walsh <dwalsh@redhat.com>   2020-03-04 16:53:39 -0500
commit     0015c376a300ed5aaf5170f90159736a9e58212e (patch)
tree       8282f254d1e12690a1b09d34d97b519c2e89a656 /vendor/github.com/klauspost/compress/zstd
parent     797da2a57b43d1190dd1fb6ec78dcc9ec76c3bd3 (diff)
Vendor buildah 1.14.2
Signed-off-by: Daniel J Walsh <dwalsh@redhat.com>
Diffstat (limited to 'vendor/github.com/klauspost/compress/zstd')
-rw-r--r--  vendor/github.com/klauspost/compress/zstd/blockenc.go                       8
-rw-r--r--  vendor/github.com/klauspost/compress/zstd/bytebuf.go                         4
-rw-r--r--  vendor/github.com/klauspost/compress/zstd/decoder.go                         2
-rw-r--r--  vendor/github.com/klauspost/compress/zstd/enc_dfast.go                      47
-rw-r--r--  vendor/github.com/klauspost/compress/zstd/enc_fast.go                       67
-rw-r--r--  vendor/github.com/klauspost/compress/zstd/encoder.go                         2
-rw-r--r--  vendor/github.com/klauspost/compress/zstd/framedec.go                        2
-rw-r--r--  vendor/github.com/klauspost/compress/zstd/fse_decoder.go                     2
-rw-r--r--  vendor/github.com/klauspost/compress/zstd/fse_encoder.go                     8
-rw-r--r--  vendor/github.com/klauspost/compress/zstd/internal/xxhash/xxhash_amd64.s     8
-rw-r--r--  vendor/github.com/klauspost/compress/zstd/zstd.go                           12
11 files changed, 94 insertions(+), 68 deletions(-)
diff --git a/vendor/github.com/klauspost/compress/zstd/blockenc.go b/vendor/github.com/klauspost/compress/zstd/blockenc.go
index 507757d52..4f0eba22f 100644
--- a/vendor/github.com/klauspost/compress/zstd/blockenc.go
+++ b/vendor/github.com/klauspost/compress/zstd/blockenc.go
@@ -806,7 +806,7 @@ func (b *blockEnc) genCodes() {
mlH[v]++
if v > mlMax {
mlMax = v
- if debug && mlMax > maxMatchLengthSymbol {
+ if debugAsserts && mlMax > maxMatchLengthSymbol {
panic(fmt.Errorf("mlMax > maxMatchLengthSymbol (%d), matchlen: %d", mlMax, seq.matchLen))
}
}
@@ -821,13 +821,13 @@ func (b *blockEnc) genCodes() {
}
return int(max)
}
- if mlMax > maxMatchLengthSymbol {
+ if debugAsserts && mlMax > maxMatchLengthSymbol {
panic(fmt.Errorf("mlMax > maxMatchLengthSymbol (%d)", mlMax))
}
- if ofMax > maxOffsetBits {
+ if debugAsserts && ofMax > maxOffsetBits {
panic(fmt.Errorf("ofMax > maxOffsetBits (%d)", ofMax))
}
- if llMax > maxLiteralLengthSymbol {
+ if debugAsserts && llMax > maxLiteralLengthSymbol {
panic(fmt.Errorf("llMax > maxLiteralLengthSymbol (%d)", llMax))
}
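
Note: the blockenc.go hunks above are part of a change that runs through this whole diff: the single debug flag is split so that internal sanity checks are gated by a new debugAsserts constant (defined in the zstd.go hunk at the end of this diff) rather than by the print-oriented debug flag, and the genCodes limit checks now only run in assert-enabled builds. A minimal sketch of the pattern, with an illustrative helper name not present in the package:

package zstd

import "fmt"

// Compile-time switches: when false, the compiler drops the guarded
// branches entirely, so release builds pay nothing for the checks.
const debug = false                 // debug printing
const debugAsserts = debug || false // extra internal consistency checks

// checkSymbolRange mirrors the genCodes assertions: panic only when
// assertions are compiled in and a symbol exceeds its table limit.
func checkSymbolRange(name string, v, max uint8) {
	if debugAsserts && v > max {
		panic(fmt.Errorf("%s (%d) > max (%d)", name, v, max))
	}
}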
diff --git a/vendor/github.com/klauspost/compress/zstd/bytebuf.go b/vendor/github.com/klauspost/compress/zstd/bytebuf.go
index 07321acb1..658ef7838 100644
--- a/vendor/github.com/klauspost/compress/zstd/bytebuf.go
+++ b/vendor/github.com/klauspost/compress/zstd/bytebuf.go
@@ -30,7 +30,7 @@ type byteBuffer interface {
type byteBuf []byte
func (b *byteBuf) readSmall(n int) []byte {
- if debug && n > 8 {
+ if debugAsserts && n > 8 {
panic(fmt.Errorf("small read > 8 (%d). use readBig", n))
}
bb := *b
@@ -82,7 +82,7 @@ type readerWrapper struct {
}
func (r *readerWrapper) readSmall(n int) []byte {
- if debug && n > 8 {
+ if debugAsserts && n > 8 {
panic(fmt.Errorf("small read > 8 (%d). use readBig", n))
}
n2, err := io.ReadFull(r.r, r.tmp[:n])
diff --git a/vendor/github.com/klauspost/compress/zstd/decoder.go b/vendor/github.com/klauspost/compress/zstd/decoder.go
index 35a3cda91..73ac3c630 100644
--- a/vendor/github.com/klauspost/compress/zstd/decoder.go
+++ b/vendor/github.com/klauspost/compress/zstd/decoder.go
@@ -315,7 +315,7 @@ func (d *Decoder) DecodeAll(input, dst []byte) ([]byte, error) {
if size > 1<<20 {
size = 1 << 20
}
- dst = make([]byte, 0, frame.WindowSize)
+ dst = make([]byte, 0, size)
}
dst, err = frame.runDecoder(dst, block)
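
Note: the decoder.go hunk is a one-line bug fix in DecodeAll. The surrounding code computes a capped starting size for the output slice (at most 1<<20 bytes), but the allocation then used frame.WindowSize, so the cap had no effect; the fix allocates the capped size and lets append grow the slice while decoding. A rough, hypothetical sketch of that sizing rule (the field names and the source of the initial size are simplified, not the exact decoder code):

// initialDstCap picks the starting capacity for DecodeAll's output buffer.
// declared stands in for a size hinted by the frame header (0 if unknown)
// and window for the frame's window size; both names are assumptions.
func initialDstCap(declared, window uint64) int {
	size := declared
	if size == 0 {
		size = window
	}
	if size > 1<<20 {
		size = 1 << 20 // cap the up-front allocation at 1 MiB
	}
	return int(size)
}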
diff --git a/vendor/github.com/klauspost/compress/zstd/enc_dfast.go b/vendor/github.com/klauspost/compress/zstd/enc_dfast.go
index ee3b09b02..0ffea7655 100644
--- a/vendor/github.com/klauspost/compress/zstd/enc_dfast.go
+++ b/vendor/github.com/klauspost/compress/zstd/enc_dfast.go
@@ -4,6 +4,8 @@
package zstd
+import "fmt"
+
const (
dFastLongTableBits = 17 // Bits used in the long match table
dFastLongTableSize = 1 << dFastLongTableBits // Size of the table
@@ -29,7 +31,7 @@ func (e *doubleFastEncoder) Encode(blk *blockEnc, src []byte) {
)
// Protect against e.cur wraparound.
- for e.cur > (1<<30)+e.maxMatchOff {
+ for e.cur >= bufferReset {
if len(e.hist) == 0 {
for i := range e.table[:] {
e.table[i] = tableEntry{}
@@ -61,6 +63,7 @@ func (e *doubleFastEncoder) Encode(blk *blockEnc, src []byte) {
e.longTable[i].offset = v
}
e.cur = e.maxMatchOff
+ break
}
s := e.addBlock(src)
@@ -110,7 +113,7 @@ encodeLoop:
canRepeat := len(blk.sequences) > 2
for {
- if debug && canRepeat && offset1 == 0 {
+ if debugAsserts && canRepeat && offset1 == 0 {
panic("offset0 was 0")
}
@@ -229,10 +232,10 @@ encodeLoop:
// Reference encoder checks all 8 bytes, we only check 4,
// but the likelihood of both the first 4 bytes and the hash matching should be enough.
t = candidateL.offset - e.cur
- if debug && s <= t {
- panic("s <= t")
+ if debugAsserts && s <= t {
+ panic(fmt.Sprintf("s (%d) <= t (%d)", s, t))
}
- if debug && s-t > e.maxMatchOff {
+ if debugAsserts && s-t > e.maxMatchOff {
panic("s - t >e.maxMatchOff")
}
if debugMatches {
@@ -266,13 +269,13 @@ encodeLoop:
}
t = candidateS.offset - e.cur
- if debug && s <= t {
- panic("s <= t")
+ if debugAsserts && s <= t {
+ panic(fmt.Sprintf("s (%d) <= t (%d)", s, t))
}
- if debug && s-t > e.maxMatchOff {
+ if debugAsserts && s-t > e.maxMatchOff {
panic("s - t >e.maxMatchOff")
}
- if debug && t < 0 {
+ if debugAsserts && t < 0 {
panic("t<0")
}
if debugMatches {
@@ -294,11 +297,11 @@ encodeLoop:
offset2 = offset1
offset1 = s - t
- if debug && s <= t {
- panic("s <= t")
+ if debugAsserts && s <= t {
+ panic(fmt.Sprintf("s (%d) <= t (%d)", s, t))
}
- if debug && canRepeat && int(offset1) > len(src) {
+ if debugAsserts && canRepeat && int(offset1) > len(src) {
panic("invalid offset")
}
@@ -424,7 +427,7 @@ func (e *doubleFastEncoder) EncodeNoHist(blk *blockEnc, src []byte) {
)
// Protect against e.cur wraparound.
- if e.cur > (1<<30)+e.maxMatchOff {
+ if e.cur >= bufferReset {
for i := range e.table[:] {
e.table[i] = tableEntry{}
}
@@ -545,10 +548,10 @@ encodeLoop:
// Reference encoder checks all 8 bytes, we only check 4,
// but the likelihood of both the first 4 bytes and the hash matching should be enough.
t = candidateL.offset - e.cur
- if debug && s <= t {
- panic("s <= t")
+ if debugAsserts && s <= t {
+ panic(fmt.Sprintf("s (%d) <= t (%d)", s, t))
}
- if debug && s-t > e.maxMatchOff {
+ if debugAsserts && s-t > e.maxMatchOff {
panic("s - t >e.maxMatchOff")
}
if debugMatches {
@@ -582,13 +585,13 @@ encodeLoop:
}
t = candidateS.offset - e.cur
- if debug && s <= t {
- panic("s <= t")
+ if debugAsserts && s <= t {
+ panic(fmt.Sprintf("s (%d) <= t (%d)", s, t))
}
- if debug && s-t > e.maxMatchOff {
+ if debugAsserts && s-t > e.maxMatchOff {
panic("s - t >e.maxMatchOff")
}
- if debug && t < 0 {
+ if debugAsserts && t < 0 {
panic("t<0")
}
if debugMatches {
@@ -610,8 +613,8 @@ encodeLoop:
offset2 = offset1
offset1 = s - t
- if debug && s <= t {
- panic("s <= t")
+ if debugAsserts && s <= t {
+ panic(fmt.Sprintf("s (%d) <= t (%d)", s, t))
}
// Extend the 4-byte match as long as possible.
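
Note: besides the debugAsserts renames and the more informative panic messages, the enc_dfast.go hunks change how Encode protects against e.cur wraparound: the ad-hoc threshold (1<<30)+e.maxMatchOff becomes the shared bufferReset constant, and a break is added so the reset loop runs at most once. A simplified sketch of that guard; the real encoder also shifts e.hist and rewrites the stored table offsets (visible in the surrounding context), which is omitted here:

package zstd

// guardWraparound resets the match table once cur reaches the reset line,
// so offsets stored as cur-relative int32 values never overflow.
// resetLine stands in for bufferReset from zstd.go.
func guardWraparound(cur int32, table []int32, maxMatchOff, resetLine int32, histLen int) int32 {
	for cur >= resetLine {
		if histLen == 0 {
			for i := range table {
				table[i] = 0 // no history to keep: drop every candidate
			}
			cur = maxMatchOff
			break // the newly added break: one reset is enough
		}
		// With history, the real encoder rebases hist and table entries;
		// either way it ends with cur back at maxMatchOff and breaks out.
		cur = maxMatchOff
		break
	}
	return cur
}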
diff --git a/vendor/github.com/klauspost/compress/zstd/enc_fast.go b/vendor/github.com/klauspost/compress/zstd/enc_fast.go
index 0bdddac5b..28134b158 100644
--- a/vendor/github.com/klauspost/compress/zstd/enc_fast.go
+++ b/vendor/github.com/klauspost/compress/zstd/enc_fast.go
@@ -5,6 +5,7 @@
package zstd
import (
+ "fmt"
"math/bits"
"github.com/klauspost/compress/zstd/internal/xxhash"
@@ -74,7 +75,7 @@ func (e *fastEncoder) Encode(blk *blockEnc, src []byte) {
)
// Protect against e.cur wraparound.
- for e.cur > (1<<30)+e.maxMatchOff {
+ for e.cur >= bufferReset {
if len(e.hist) == 0 {
for i := range e.table[:] {
e.table[i] = tableEntry{}
@@ -94,6 +95,7 @@ func (e *fastEncoder) Encode(blk *blockEnc, src []byte) {
e.table[i].offset = v
}
e.cur = e.maxMatchOff
+ break
}
s := e.addBlock(src)
@@ -151,7 +153,7 @@ encodeLoop:
canRepeat := len(blk.sequences) > 2
for {
- if debug && canRepeat && offset1 == 0 {
+ if debugAsserts && canRepeat && offset1 == 0 {
panic("offset0 was 0")
}
@@ -212,10 +214,10 @@ encodeLoop:
if coffset0 < e.maxMatchOff && uint32(cv) == candidate.val {
// found a regular match
t = candidate.offset - e.cur
- if debug && s <= t {
- panic("s <= t")
+ if debugAsserts && s <= t {
+ panic(fmt.Sprintf("s (%d) <= t (%d)", s, t))
}
- if debug && s-t > e.maxMatchOff {
+ if debugAsserts && s-t > e.maxMatchOff {
panic("s - t >e.maxMatchOff")
}
break
@@ -225,13 +227,13 @@ encodeLoop:
// found a regular match
t = candidate2.offset - e.cur
s++
- if debug && s <= t {
- panic("s <= t")
+ if debugAsserts && s <= t {
+ panic(fmt.Sprintf("s (%d) <= t (%d)", s, t))
}
- if debug && s-t > e.maxMatchOff {
+ if debugAsserts && s-t > e.maxMatchOff {
panic("s - t >e.maxMatchOff")
}
- if debug && t < 0 {
+ if debugAsserts && t < 0 {
panic("t<0")
}
break
@@ -246,11 +248,11 @@ encodeLoop:
offset2 = offset1
offset1 = s - t
- if debug && s <= t {
- panic("s <= t")
+ if debugAsserts && s <= t {
+ panic(fmt.Sprintf("s (%d) <= t (%d)", s, t))
}
- if debug && canRepeat && int(offset1) > len(src) {
+ if debugAsserts && canRepeat && int(offset1) > len(src) {
panic("invalid offset")
}
@@ -343,7 +345,7 @@ func (e *fastEncoder) EncodeNoHist(blk *blockEnc, src []byte) {
}
}
// Protect against e.cur wraparound.
- if e.cur > (1<<30)+e.maxMatchOff {
+ if e.cur >= bufferReset {
for i := range e.table[:] {
e.table[i] = tableEntry{}
}
@@ -456,10 +458,10 @@ encodeLoop:
if coffset0 < e.maxMatchOff && uint32(cv) == candidate.val {
// found a regular match
t = candidate.offset - e.cur
- if debug && s <= t {
- panic("s <= t")
+ if debugAsserts && s <= t {
+ panic(fmt.Sprintf("s (%d) <= t (%d)", s, t))
}
- if debug && s-t > e.maxMatchOff {
+ if debugAsserts && s-t > e.maxMatchOff {
panic("s - t >e.maxMatchOff")
}
break
@@ -469,13 +471,13 @@ encodeLoop:
// found a regular match
t = candidate2.offset - e.cur
s++
- if debug && s <= t {
- panic("s <= t")
+ if debugAsserts && s <= t {
+ panic(fmt.Sprintf("s (%d) <= t (%d)", s, t))
}
- if debug && s-t > e.maxMatchOff {
+ if debugAsserts && s-t > e.maxMatchOff {
panic("s - t >e.maxMatchOff")
}
- if debug && t < 0 {
+ if debugAsserts && t < 0 {
panic("t<0")
}
break
@@ -490,8 +492,8 @@ encodeLoop:
offset2 = offset1
offset1 = s - t
- if debug && s <= t {
- panic("s <= t")
+ if debugAsserts && s <= t {
+ panic(fmt.Sprintf("s (%d) <= t (%d)", s, t))
}
// Extend the 4-byte match as long as possible.
@@ -570,6 +572,9 @@ encodeLoop:
}
func (e *fastEncoder) addBlock(src []byte) int32 {
+ if debugAsserts && e.cur > bufferReset {
+ panic(fmt.Sprintf("ecur (%d) > buffer reset (%d)", e.cur, bufferReset))
+ }
// check if we have space already
if len(e.hist)+len(src) > cap(e.hist) {
if cap(e.hist) == 0 {
@@ -608,15 +613,18 @@ func (e *fastEncoder) matchlenNoHist(s, t int32, src []byte) int32 {
}
func (e *fastEncoder) matchlen(s, t int32, src []byte) int32 {
- if debug {
+ if debugAsserts {
if s < 0 {
- panic("s<0")
+ err := fmt.Sprintf("s (%d) < 0", s)
+ panic(err)
}
if t < 0 {
- panic("t<0")
+ err := fmt.Sprintf("t (%d) < 0", t)
+ panic(err)
}
if s-t > e.maxMatchOff {
- panic(s - t)
+ err := fmt.Sprintf("s (%d) - t (%d) > maxMatchOff (%d)", s, t, e.maxMatchOff)
+ panic(err)
}
}
s1 := int(s) + maxMatchLength - 4
@@ -650,7 +658,10 @@ func (e *fastEncoder) Reset() {
}
e.hist = make([]byte, 0, l)
}
- // We offset current position so everything will be out of reach
- e.cur += e.maxMatchOff + int32(len(e.hist))
+ // We offset current position so everything will be out of reach.
+ // If above reset line, history will be purged.
+ if e.cur < bufferReset {
+ e.cur += e.maxMatchOff + int32(len(e.hist))
+ }
e.hist = e.hist[:0]
}
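
Note: the enc_fast.go hunks mirror the dfast changes and additionally tighten the buffer bookkeeping: addBlock now asserts that e.cur has not passed bufferReset, and Reset only advances e.cur while it is still below the reset line, relying on the next Encode call to purge history once the line is reached. A small sketch of that Reset rule, with simplified names:

package zstd

// resetCur sketches the new Reset behaviour: push cur forward so every
// previously stored position is out of reach, but once cur is at or past
// resetLine (bufferReset in zstd.go) leave it alone and let the next
// Encode call clear the tables instead.
func resetCur(cur, maxMatchOff int32, histLen int, resetLine int32) int32 {
	if cur < resetLine {
		cur += maxMatchOff + int32(histLen)
	}
	return cur
}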
diff --git a/vendor/github.com/klauspost/compress/zstd/encoder.go b/vendor/github.com/klauspost/compress/zstd/encoder.go
index 366dd66bd..4032fb9fc 100644
--- a/vendor/github.com/klauspost/compress/zstd/encoder.go
+++ b/vendor/github.com/klauspost/compress/zstd/encoder.go
@@ -156,7 +156,7 @@ func (e *Encoder) Write(p []byte) (n int, err error) {
if err != nil {
return n, err
}
- if debug && len(s.filling) > 0 {
+ if debugAsserts && len(s.filling) > 0 {
panic(len(s.filling))
}
}
diff --git a/vendor/github.com/klauspost/compress/zstd/framedec.go b/vendor/github.com/klauspost/compress/zstd/framedec.go
index 40790747a..cda590b5f 100644
--- a/vendor/github.com/klauspost/compress/zstd/framedec.go
+++ b/vendor/github.com/klauspost/compress/zstd/framedec.go
@@ -50,7 +50,7 @@ type frameDec struct {
const (
// The minimum Window_Size is 1 KB.
MinWindowSize = 1 << 10
- MaxWindowSize = 1 << 30
+ MaxWindowSize = 1 << 29
)
var (
diff --git a/vendor/github.com/klauspost/compress/zstd/fse_decoder.go b/vendor/github.com/klauspost/compress/zstd/fse_decoder.go
index 9efe34feb..e002be98b 100644
--- a/vendor/github.com/klauspost/compress/zstd/fse_decoder.go
+++ b/vendor/github.com/klauspost/compress/zstd/fse_decoder.go
@@ -118,7 +118,7 @@ func (s *fseDecoder) readNCount(b *byteReader, maxSymbol uint16) error {
if int32(bitStream)&(threshold-1) < max {
count = int32(bitStream) & (threshold - 1)
- if debug && nbBits < 1 {
+ if debugAsserts && nbBits < 1 {
panic("nbBits underflow")
}
bitCount += nbBits - 1
diff --git a/vendor/github.com/klauspost/compress/zstd/fse_encoder.go b/vendor/github.com/klauspost/compress/zstd/fse_encoder.go
index 619836f52..aa9eba88b 100644
--- a/vendor/github.com/klauspost/compress/zstd/fse_encoder.go
+++ b/vendor/github.com/klauspost/compress/zstd/fse_encoder.go
@@ -327,7 +327,7 @@ func (s *fseEncoder) normalizeCount(length int) error {
if err != nil {
return err
}
- if debug {
+ if debugAsserts {
err = s.validateNorm()
if err != nil {
return err
@@ -336,7 +336,7 @@ func (s *fseEncoder) normalizeCount(length int) error {
return s.buildCTable()
}
s.norm[largest] += stillToDistribute
- if debug {
+ if debugAsserts {
err := s.validateNorm()
if err != nil {
return err
@@ -619,7 +619,7 @@ func (s *fseEncoder) writeCount(out []byte) ([]byte, error) {
func (s *fseEncoder) bitCost(symbolValue uint8, accuracyLog uint32) uint32 {
minNbBits := s.ct.symbolTT[symbolValue].deltaNbBits >> 16
threshold := (minNbBits + 1) << 16
- if debug {
+ if debugAsserts {
if !(s.actualTableLog < 16) {
panic("!s.actualTableLog < 16")
}
@@ -633,7 +633,7 @@ func (s *fseEncoder) bitCost(symbolValue uint8, accuracyLog uint32) uint32 {
// linear interpolation (very approximate)
normalizedDeltaFromThreshold := (deltaFromThreshold << accuracyLog) >> s.actualTableLog
bitMultiplier := uint32(1) << accuracyLog
- if debug {
+ if debugAsserts {
if s.ct.symbolTT[symbolValue].deltaNbBits+tableSize > threshold {
panic("s.ct.symbolTT[symbolValue].deltaNbBits+tableSize > threshold")
}
diff --git a/vendor/github.com/klauspost/compress/zstd/internal/xxhash/xxhash_amd64.s b/vendor/github.com/klauspost/compress/zstd/internal/xxhash/xxhash_amd64.s
index d580e32ae..2c9c5357a 100644
--- a/vendor/github.com/klauspost/compress/zstd/internal/xxhash/xxhash_amd64.s
+++ b/vendor/github.com/klauspost/compress/zstd/internal/xxhash/xxhash_amd64.s
@@ -179,13 +179,13 @@ TEXT ·writeBlocks(SB), NOSPLIT, $0-40
MOVQ ·prime2v(SB), R14
// Load slice.
- MOVQ b_base+8(FP), CX
- MOVQ b_len+16(FP), DX
+ MOVQ arg1_base+8(FP), CX
+ MOVQ arg1_len+16(FP), DX
LEAQ (CX)(DX*1), BX
SUBQ $32, BX
// Load vN from d.
- MOVQ d+0(FP), AX
+ MOVQ arg+0(FP), AX
MOVQ 0(AX), R8 // v1
MOVQ 8(AX), R9 // v2
MOVQ 16(AX), R10 // v3
@@ -209,7 +209,7 @@ blockLoop:
MOVQ R11, 24(AX)
// The number of bytes written is CX minus the old base pointer.
- SUBQ b_base+8(FP), CX
+ SUBQ arg1_base+8(FP), CX
MOVQ CX, ret+32(FP)
RET
diff --git a/vendor/github.com/klauspost/compress/zstd/zstd.go b/vendor/github.com/klauspost/compress/zstd/zstd.go
index 57a8a2f5b..5e0b64ccc 100644
--- a/vendor/github.com/klauspost/compress/zstd/zstd.go
+++ b/vendor/github.com/klauspost/compress/zstd/zstd.go
@@ -6,11 +6,20 @@ package zstd
import (
"errors"
"log"
+ "math"
"math/bits"
)
+// enable debug printing
const debug = false
+
+// Enable extra assertions.
+const debugAsserts = debug || false
+
+// print sequence details
const debugSequences = false
+
+// print detailed matching information
const debugMatches = false
// force encoder to use predefined tables.
@@ -19,6 +28,9 @@ const forcePreDef = false
// zstdMinMatch is the minimum zstd match length.
const zstdMinMatch = 3
+// Reset the buffer offset when reaching this.
+const bufferReset = math.MaxInt32 - MaxWindowSize
+
var (
// ErrReservedBlockType is returned when a reserved block type is found.
// Typically this indicates wrong or corrupted input.
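
Note: the framedec.go and zstd.go hunks are two halves of one change. MaxWindowSize drops from 1<<30 to 1<<29, and bufferReset is defined as math.MaxInt32 - MaxWindowSize = 2147483647 - 536870912 = 1610612735, so any encoder position below bufferReset can have a full window added to it without overflowing an int32; the e.cur >= bufferReset guards earlier in this diff rely on exactly that headroom. A tiny runnable check of the arithmetic (constant names mirror the diff but are local to this sketch):

package main

import (
	"fmt"
	"math"
)

const (
	maxWindowSize = 1 << 29                       // new MaxWindowSize from framedec.go
	bufferReset   = math.MaxInt32 - maxWindowSize // definition added in zstd.go
)

func main() {
	fmt.Println(bufferReset) // 1610612735
	// The largest position allowed before a reset, plus a full window,
	// still fits in an int32.
	fmt.Println(int64(bufferReset-1)+maxWindowSize <= math.MaxInt32) // true
}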