@@ -415,8 +415,8 @@ func TestStreamSetNbWorkers(t *testing.T) {
 }
 
 func TestStreamWindowSize(t *testing.T) {
-	dict := []byte("dictdata_for_compression_test")
-	data := []byte("hello world")
+	dict := []byte(strings.Repeat("dictdata_for_compression_test", 1000))
+	data := []byte(strings.Repeat("abcdefghijklmnopqrstuvwxyz", 10000))
 	testCases := []struct {
 		name string
 		dict []byte
@@ -464,7 +464,7 @@ func TestStreamWindowSize(t *testing.T) {
 }
 
 func TestStreamMaxWindowSize(t *testing.T) {
-	dict := []byte("dictdata_for_compression_test")
+	dict := []byte(strings.Repeat("dictdata_for_compression_test", 1000))
 	testCases := []struct {
 		name string
 		dict []byte
@@ -477,11 +477,11 @@ func TestStreamMaxWindowSize(t *testing.T) {
 	for _, tc := range testCases {
 		t.Run(tc.name, func(t *testing.T) {
 			// Create compressed data with a 128KB window size
-			data := strings.Repeat("abcdefghijklmnopqrstuvwxyz", 1000)
+			data := []byte(strings.Repeat("abcdefghijklmnopqrstuvwxyz", 10000))
 			var buf bytes.Buffer
 			w := NewWriterLevelDictWindowSize(&buf, DefaultCompression, tc.dict, 1<<17) // 128 KB
 
-			_, err := w.Write([]byte(data))
+			_, err := w.Write(data)
 			failOnError(t, "Write error", err)
 			failOnError(t, "Flush error", w.Flush())
 			failOnError(t, "Close error", w.Close())
@@ -492,7 +492,7 @@ func TestStreamMaxWindowSize(t *testing.T) {
 				r1 := NewReader(bytes.NewReader(compressedData))
 				decompressed1, err := ioutil.ReadAll(r1)
 				failOnError(t, "ReadAll error (normal)", err)
-				if !bytes.Equal(decompressed1, []byte(data)) {
+				if !bytes.Equal(decompressed1, data) {
 					t.Fatal("Regular decompression failed to match original data")
 				}
 				failOnError(t, "Reader close error", r1.Close())
@@ -503,7 +503,7 @@ func TestStreamMaxWindowSize(t *testing.T) {
 				r2 := NewReaderDictMaxWindowSize(bytes.NewReader(compressedData), tc.dict, 1<<18)
 				decompressed2, err := ioutil.ReadAll(r2)
 				failOnError(t, "ReadAll error (large max window)", err)
-				if !bytes.Equal(decompressed2, []byte(data)) {
+				if !bytes.Equal(decompressed2, data) {
 					t.Fatalf("Decompression with larger max window failed to match original data - got len=%d, want len=%d",
 						len(decompressed2), len(data))
 				}
@@ -512,6 +512,10 @@ func TestStreamMaxWindowSize(t *testing.T) {
 
 			// Decompression with max window size < original window should fail
 			t.Run("SmallerMaxWindowSize", func(t *testing.T) {
+				// workaround for regression when setting window size & using dictionary (facebook/zstd#2442)
+				if zstdVersion < 10409 && zstdVersion > 10405 && len(tc.dict) > 0 {
+					t.Skip("Skipping: Zstd v1.4.5 - v1.4.9 won't set window size when streaming with dictionary")
+				}
 				// We set it to 64KB, less than the 128KB used for compression
 				r3 := NewReaderDictMaxWindowSize(bytes.NewReader(compressedData), tc.dict, 1<<16)
 				_, err = ioutil.ReadAll(r3)
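For reference, a minimal sketch (not part of the diff) of the round trip these tests exercise, written against the constructors and helpers that appear above (NewWriterLevelDictWindowSize, NewReaderDictMaxWindowSize, failOnError). The helper name streamWithMaxWindowSketch is hypothetical; treat this as an illustration under those assumptions, not part of the change:

// Sketch only: assumes the same package and test helpers as the diff above.
func streamWithMaxWindowSketch(t *testing.T, dict, data []byte) {
	// Compress with an explicit 128 KB window.
	var buf bytes.Buffer
	w := NewWriterLevelDictWindowSize(&buf, DefaultCompression, dict, 1<<17)
	_, err := w.Write(data)
	failOnError(t, "Write error", err)
	failOnError(t, "Close error", w.Close())

	// A max window size at or above the compression window (256 KB here) should decompress cleanly.
	r := NewReaderDictMaxWindowSize(bytes.NewReader(buf.Bytes()), dict, 1<<18)
	out, err := ioutil.ReadAll(r)
	failOnError(t, "ReadAll error", err)
	failOnError(t, "Reader close error", r.Close())
	if !bytes.Equal(out, data) {
		t.Fatal("round trip mismatch")
	}

	// A smaller limit (64 KB) is expected to make ReadAll fail, as the
	// SmallerMaxWindowSize subtest above asserts.
	r2 := NewReaderDictMaxWindowSize(bytes.NewReader(buf.Bytes()), dict, 1<<16)
	if _, err := ioutil.ReadAll(r2); err == nil {
		t.Fatal("expected an error when max window size < compression window size")
	}
	r2.Close()
}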