Skip to content

Commit

Permalink
v3: backport decompression limit fix (#107)
Browse files Browse the repository at this point in the history
Backport from #106.

Note that there was a merge conflict on the CHANGELOG and I decided to
just accept all the v4 entries. It's okay for the v3 branch's CHANGELOG
to also talk about v4 releases. I'll send a separate PR updating the v4
CHANGELOG to include v3.0.3.
  • Loading branch information
jsha committed Mar 7, 2024
1 parent 11bb4e7 commit add6a28
Show file tree
Hide file tree
Showing 4 changed files with 110 additions and 4 deletions.
53 changes: 53 additions & 0 deletions CHANGELOG.md
@@ -1,3 +1,56 @@
# v4.0.1

## Fixed

- An attacker could send a JWE containing compressed data that used large
amounts of memory and CPU when decompressed by `Decrypt` or `DecryptMulti`.
Those functions now return an error if the decompressed data would exceed
250kB or 10x the compressed size (whichever is larger). Thanks to
Enze Wang@Alioth and Jianjun Chen@Zhongguancun Lab (@zer0yu and @chenjj)
for reporting.

# v4.0.0

This release makes some breaking changes in order to more thoroughly
address the vulnerabilities discussed in [Three New Attacks Against JSON Web
Tokens][1], "Sign/encrypt confusion", "Billion hash attack", and "Polyglot
token".

## Changed

- Limit JWT encryption types (exclude password or public key types) (#78)
- Enforce minimum length for HMAC keys (#85)
- jwt: match any audience in a list, rather than requiring all audiences (#81)
- jwt: accept only Compact Serialization (#75)
- jws: Add expected algorithms for signatures (#74)
- Require specifying expected algorithms for ParseEncrypted,
ParseSigned, ParseDetached, jwt.ParseEncrypted, jwt.ParseSigned,
jwt.ParseSignedAndEncrypted (#69, #74)
- Usually there is a small, known set of appropriate algorithms for a program
to use and it's a mistake to allow unexpected algorithms. For instance the
"billion hash attack" relies in part on programs accepting the PBES2
encryption algorithm and doing the necessary work even if they weren't
specifically configured to allow PBES2.
- Revert "Strip padding off base64 strings" (#82)
- The specs require base64url encoding without padding.
- Minimum supported Go version is now 1.21

## Added

- ParseSignedCompact, ParseSignedJSON, ParseEncryptedCompact, ParseEncryptedJSON.
- These allow parsing a specific serialization, as opposed to ParseSigned and
ParseEncrypted, which try to automatically detect which serialization was
provided. It's common to require a specific serialization for a specific
protocol - for instance JWT requires Compact serialization.

[1]: https://i.blackhat.com/BH-US-23/Presentations/US-23-Tervoort-Three-New-Attacks-Against-JSON-Web-Tokens.pdf

# v3.0.3

## Fixed

- Limit decompression output size to prevent a DoS. Backport from v4.0.1.

# v3.0.2

## Fixed
Expand Down
6 changes: 6 additions & 0 deletions crypter.go
Expand Up @@ -440,6 +440,9 @@ func (ctx *genericEncrypter) Options() EncrypterOptions {
//
// Note that ed25519 is only available for signatures, not encryption, so is
// not an option here.
//
// Automatically decompresses plaintext, but returns an error if the decompressed
// data would be >250kB or >10x the size of the compressed data, whichever is larger.
func (obj JSONWebEncryption) Decrypt(decryptionKey interface{}) ([]byte, error) {
headers := obj.mergedHeaders(nil)

Expand Down Expand Up @@ -511,6 +514,9 @@ func (obj JSONWebEncryption) Decrypt(decryptionKey interface{}) ([]byte, error)
//
// The decryptionKey argument must have one of the types allowed for the
// decryptionKey argument of Decrypt().
//
// Automatically decompresses plaintext, but returns an error if the decompressed
// data would be >250kB or >10x the size of the compressed data, whichever is larger.
func (obj JSONWebEncryption) DecryptMulti(decryptionKey interface{}) (int, Header, []byte, error) {
globalHeaders := obj.mergedHeaders(nil)

Expand Down
21 changes: 17 additions & 4 deletions encoding.go
Expand Up @@ -21,6 +21,7 @@ import (
"compress/flate"
"encoding/base64"
"encoding/binary"
"fmt"
"io"
"math/big"
"strings"
Expand Down Expand Up @@ -85,7 +86,7 @@ func decompress(algorithm CompressionAlgorithm, input []byte) ([]byte, error) {
}
}

// Compress with DEFLATE
// deflate compresses the input.
func deflate(input []byte) ([]byte, error) {
output := new(bytes.Buffer)

Expand All @@ -97,15 +98,27 @@ func deflate(input []byte) ([]byte, error) {
return output.Bytes(), err
}

// inflate decompresses the input.
//
// Errors if the decompressed data would be >250kB or >10x the size of the
// compressed data, whichever is larger.
func inflate(input []byte) ([]byte, error) {
	output := new(bytes.Buffer)
	reader := flate.NewReader(bytes.NewBuffer(input))

	// Cap the output at the larger of 250kB or 10x the compressed size to
	// prevent decompression bombs from consuming unbounded memory and CPU.
	maxDecompressedSize := 10 * int64(len(input))
	if maxDecompressedSize < 250000 {
		maxDecompressedSize = 250000
	}

	// Copy one byte past the cap: if CopyN reaches limit, the stream would
	// have decompressed to more than the allowed size.
	limit := maxDecompressedSize + 1
	n, err := io.CopyN(output, reader, limit)
	if err != nil && err != io.EOF {
		return nil, err
	}
	if n == limit {
		return nil, fmt.Errorf("uncompressed data would be too large (>%d bytes)", maxDecompressedSize)
	}

	// Close verifies the DEFLATE stream's integrity (checksum/trailer).
	err = reader.Close()
	return output.Bytes(), err
}
Expand Down
34 changes: 34 additions & 0 deletions encoding_test.go
Expand Up @@ -18,6 +18,8 @@ package jose

import (
"bytes"
"crypto/rand"
"io"
"strings"
"testing"
)
Expand Down Expand Up @@ -57,6 +59,38 @@ func TestInvalidCompression(t *testing.T) {
}
}

// TestLargeZip tests that we can decompress a large input, so long as its
// compression ratio is reasonable.
func TestLargeZip(t *testing.T) {
	// Random data is essentially incompressible, so even though the input
	// exceeds the 250kB floor, the 10x-ratio limit will not trigger.
	input := new(bytes.Buffer)
	_, err := io.CopyN(input, rand.Reader, 251000)
	if err != nil {
		t.Fatalf("generating input: %s", err)
	}
	compressed, err := compress(DEFLATE, input.Bytes())
	if err != nil {
		// Fatal, not Errorf: continuing with a failed compression result
		// would make the decompress check below meaningless.
		t.Fatalf("compressing: %s", err)
	}
	t.Logf("compression ratio: %g", float64(input.Len())/float64(len(compressed)))
	_, err = decompress(DEFLATE, compressed)
	if err != nil {
		t.Errorf("decompressing large input with low compression ratio: %s", err)
	}
}

// TestZipBomb tests that decompressing a highly-compressible input ("zip
// bomb") is rejected rather than expanded without bound.
func TestZipBomb(t *testing.T) {
	// 251kB of a single repeated byte compresses extremely well, so the
	// decompressed size exceeds both 250kB and 10x the compressed size.
	input := strings.Repeat("a", 251000)
	compressed, err := compress(DEFLATE, []byte(input))
	if err != nil {
		// Fatal, not Errorf: the bomb check below is meaningless if
		// compression itself failed.
		t.Fatalf("compressing: %s", err)
	}
	t.Logf("compression ratio: %d %g", len(compressed), float64(len(input))/float64(len(compressed)))
	out, err := decompress(DEFLATE, compressed)
	if err == nil {
		t.Errorf("expected error decompressing zip bomb, got none. output size %d", len(out))
	}
}

func TestByteBufferTrim(t *testing.T) {
buf := newBufferFromInt(1)
if !bytes.Equal(buf.data, []byte{1}) {
Expand Down

0 comments on commit add6a28

Please sign in to comment.