zlib: add maxOutputLength option

Fixes: https://github.com/nodejs/node/issues/27253

PR-URL: https://github.com/nodejs/node/pull/33516
Reviewed-By: Anna Henningsen <anna@addaleax.net>
Author: unknown
Date: 2020-05-22 13:22:09 -04:00
Committed-By: Anna Henningsen
Parent: 4678e44bb2
Commit: 278aae28e1

6 changed files with 59 additions and 12 deletions

View File

@@ -486,6 +486,9 @@ These advanced options are available for controlling decompression:
<!-- YAML
added: v0.11.1
changes:
- version: REPLACEME
pr-url: https://github.com/nodejs/node/pull/33516
description: The `maxOutputLength` option is supported now.
- version: v9.4.0
pr-url: https://github.com/nodejs/node/pull/16042
description: The `dictionary` option can be an `ArrayBuffer`.
@@ -514,6 +517,8 @@ ignored by the decompression classes.
* `dictionary` {Buffer|TypedArray|DataView|ArrayBuffer} (deflate/inflate only,
empty dictionary by default)
* `info` {boolean} (If `true`, returns an object with `buffer` and `engine`.)
* `maxOutputLength` {integer} Limits output size when using
[convenience methods][]. **Default:** [`buffer.kMaxLength`][]

See the [`deflateInit2` and `inflateInit2`][] documentation for more
information.
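Not part of the diff itself, but a minimal sketch of the intended use with a convenience method; the 64-byte and 4096-byte caps are arbitrary illustration values:

```js
const zlib = require('zlib');

// Compresses to a few bytes but inflates back to 1024 bytes.
const compressed = zlib.gzipSync(Buffer.alloc(1024));

zlib.gunzip(compressed, { maxOutputLength: 64 }, (err, buf) => {
  // The decompressed output would exceed the 64-byte cap, so the callback
  // receives an error instead of a buffer.
  console.log(err && err.code);  // 'ERR_BUFFER_TOO_LARGE'
});

zlib.gunzip(compressed, { maxOutputLength: 4096 }, (err, buf) => {
  // A cap at least as large as the real output behaves as before.
  console.log(err, buf.length);  // null 1024
});
```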
@@ -521,6 +526,10 @@ information.
## Class: `BrotliOptions`
<!-- YAML
added: v11.7.0
changes:
- version: REPLACEME
pr-url: https://github.com/nodejs/node/pull/33516
description: The `maxOutputLength` option is supported now.
-->
<!--type=misc-->
@@ -531,6 +540,8 @@ Each Brotli-based class takes an `options` object. All options are optional.
* `finishFlush` {integer} **Default:** `zlib.constants.BROTLI_OPERATION_FINISH`
* `chunkSize` {integer} **Default:** `16 * 1024`
* `params` {Object} Key-value object containing indexed [Brotli parameters][].
* `maxOutputLength` {integer} Limits output size when using
[convenience methods][]. **Default:** [`buffer.kMaxLength`][]
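A rough sketch (not part of this diff) of the same option on the Brotli convenience methods; the 64- and 2048-byte caps are arbitrary:

```js
const zlib = require('zlib');

const input = Buffer.from('a'.repeat(1024));
const compressed = zlib.brotliCompressSync(input);

// The decompressed output (1024 bytes) exceeds the 64-byte cap, so this throws.
try {
  zlib.brotliDecompressSync(compressed, { maxOutputLength: 64 });
} catch (err) {
  console.log(err.code);  // 'ERR_BUFFER_TOO_LARGE'
}

// A cap large enough for the output leaves the result unchanged.
const out = zlib.brotliDecompressSync(compressed, { maxOutputLength: 2048 });
console.log(out.equals(input));  // true
```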
For example:
@@ -1160,6 +1171,7 @@ Decompress a chunk of data with [`Unzip`][].
[`BrotliCompress`]: #zlib_class_zlib_brotlicompress
[`BrotliDecompress`]: #zlib_class_zlib_brotlidecompress
[`Buffer`]: buffer.html#buffer_class_buffer
[`buffer.kMaxLength`]: buffer.html#buffer_buffer_kmaxlength
[`Content-Encoding`]: https://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.11
[`DataView`]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/DataView
[`DeflateRaw`]: #zlib_class_zlib_deflateraw
@@ -1177,5 +1189,6 @@ Decompress a chunk of data with [`Unzip`][].
[Memory Usage Tuning]: #zlib_memory_usage_tuning
[RFC 7932]: https://www.rfc-editor.org/rfc/rfc7932.txt
[Streams API]: stream.md
[convenience methods]: #zlib_convenience_methods
[zlib documentation]: https://zlib.net/manual.html#Constants
[zlib.createGzip example]: #zlib_zlib

View File

@@ -47,8 +47,6 @@ const kTypes = [
'symbol'
];
- const { kMaxLength } = internalBinding('buffer');
const MainContextError = Error;
const ErrorToString = Error.prototype.toString;
const overrideStackTrace = new WeakMap();
@@ -768,7 +766,7 @@ E('ERR_BUFFER_OUT_OF_BOUNDS',
return 'Attempt to access memory outside buffer bounds';
}, RangeError);
E('ERR_BUFFER_TOO_LARGE',
- `Cannot create a Buffer larger than 0x${kMaxLength.toString(16)} bytes`,
+ 'Cannot create a Buffer larger than %s bytes',
RangeError);
E('ERR_CANNOT_WATCH_SIGINT', 'Cannot watch for SIGINT signals', Error);
E('ERR_CHILD_CLOSED_BEFORE_REPLY',
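The `'%s'` template lets the error report whichever limit was actually exceeded, instead of the hard-coded `buffer.kMaxLength` hex value. A hedged sketch of the user-visible result, assuming an arbitrary 64-byte cap:

```js
const zlib = require('zlib');

try {
  // 256 bytes of output against a 64-byte cap.
  zlib.gunzipSync(zlib.gzipSync(Buffer.alloc(256)), { maxOutputLength: 64 });
} catch (err) {
  console.log(err.code);     // 'ERR_BUFFER_TOO_LARGE'
  console.log(err.message);  // 'Cannot create a Buffer larger than 64 bytes'
}
```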

View File

@@ -124,6 +124,11 @@ function zlibBufferOnData(chunk) {
else
this.buffers.push(chunk);
this.nread += chunk.length;
if (this.nread > this._maxOutputLength) {
this.close();
this.removeAllListeners('end');
this.cb(new ERR_BUFFER_TOO_LARGE(this._maxOutputLength));
}
}
function zlibBufferOnError(err) {
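The added branch is an accumulate-and-abort pattern: stop buffering and fail the pending callback as soon as the running total passes the cap, rather than waiting for `'end'`. A simplified, hypothetical stand-alone version of the same idea (`collectWithCap` is not part of Node's API):

```js
const { Readable } = require('stream');

// Hypothetical helper mirroring the pattern above.
function collectWithCap(readable, maxOutputLength, cb) {
  const chunks = [];
  let nread = 0;
  readable.on('data', function onData(chunk) {
    nread += chunk.length;
    if (nread > maxOutputLength) {
      readable.removeListener('data', onData);
      readable.removeAllListeners('end');  // the callback must fire exactly once
      readable.destroy();                  // stop producing further output
      cb(new RangeError(`output exceeds ${maxOutputLength} bytes`));
      return;
    }
    chunks.push(chunk);
  });
  readable.on('end', () => cb(null, Buffer.concat(chunks, nread)));
}

// 80 bytes of input: fails against a 64-byte cap, succeeds against 128.
collectWithCap(Readable.from([Buffer.alloc(40), Buffer.alloc(40)]), 64,
               (err, buf) => console.log(err ? err.message : buf.length));
collectWithCap(Readable.from([Buffer.alloc(40), Buffer.alloc(40)]), 128,
               (err, buf) => console.log(err ? err.message : buf.length));
```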
@@ -134,9 +139,7 @@ function zlibBufferOnError(err) {
function zlibBufferOnEnd() {
let buf;
let err;
- if (this.nread >= kMaxLength) {
-   err = new ERR_BUFFER_TOO_LARGE();
- } else if (this.nread === 0) {
+ if (this.nread === 0) {
buf = Buffer.alloc(0);
} else {
const bufs = this.buffers;
@@ -231,6 +234,7 @@ const checkRangesOrGetDefault = hideStackFrames(
// The base class for all Zlib-style streams.
function ZlibBase(opts, mode, handle, { flush, finishFlush, fullFlush }) {
let chunkSize = Z_DEFAULT_CHUNK;
let maxOutputLength = kMaxLength;
// The ZlibBase class is not exported to user land, the mode should only be
// passed in by us.
assert(typeof mode === 'number');
@@ -253,6 +257,10 @@ function ZlibBase(opts, mode, handle, { flush, finishFlush, fullFlush }) {
opts.finishFlush, 'options.finishFlush',
Z_NO_FLUSH, Z_BLOCK, finishFlush);
maxOutputLength = checkRangesOrGetDefault(
opts.maxOutputLength, 'options.maxOutputLength',
1, kMaxLength, kMaxLength);
if (opts.encoding || opts.objectMode || opts.writableObjectMode) {
opts = { ...opts };
opts.encoding = null;
@@ -276,6 +284,7 @@ function ZlibBase(opts, mode, handle, { flush, finishFlush, fullFlush }) {
this._finishFlushFlag = finishFlush;
this._defaultFullFlushFlag = fullFlush;
this._info = opts && opts.info;
this._maxOutputLength = maxOutputLength;
}
ObjectSetPrototypeOf(ZlibBase.prototype, Transform.prototype);
ObjectSetPrototypeOf(ZlibBase, Transform);
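The new option is range-checked like the other numeric zlib options and then stored on the instance as `_maxOutputLength`. A hedged sketch of the observable behavior; the `ERR_OUT_OF_RANGE` code is an assumption based on how `checkRangesOrGetDefault` treats the other options:

```js
const zlib = require('zlib');
const input = zlib.gzipSync(Buffer.from('hello'));

// A limit below 1 (or above buffer.kMaxLength) should be rejected up front.
try {
  zlib.gunzipSync(input, { maxOutputLength: 0 });
} catch (err) {
  console.log(err.code);  // expected: 'ERR_OUT_OF_RANGE'
}

// Leaving the option out keeps the previous behavior: the default cap is
// buffer.kMaxLength, captured when lib/zlib.js is loaded.
console.log(zlib.gunzipSync(input).toString());  // 'hello'
```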
@@ -450,6 +459,12 @@ function processChunkSync(self, chunk, flushFlag) {
else
buffers.push(out);
nread += out.byteLength;
if (nread > self._maxOutputLength) {
_close(self);
throw new ERR_BUFFER_TOO_LARGE(self._maxOutputLength);
}
} else {
assert(have === 0, 'have should not go down');
}
@@ -476,10 +491,6 @@ function processChunkSync(self, chunk, flushFlag) {
self.bytesWritten = inputRead;
_close(self);
- if (nread >= kMaxLength) {
-   throw new ERR_BUFFER_TOO_LARGE();
- }
if (nread === 0)
return Buffer.alloc(0);
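The old end-of-stream `kMaxLength` check is gone because the cap is now enforced per chunk against `_maxOutputLength`, which still defaults to `kMaxLength`. A sketch contrasting the sync convenience path with a plain stream, again with an arbitrary 64-byte cap:

```js
const zlib = require('zlib');
const big = zlib.gzipSync(Buffer.alloc(1024));

// Sync convenience method: the handle is closed and the call throws once the
// accumulated output passes the cap.
try {
  zlib.gunzipSync(big, { maxOutputLength: 64 });
} catch (err) {
  console.log(err.code);  // 'ERR_BUFFER_TOO_LARGE'
}

// A streaming pipeline is not subject to the cap (the option only limits the
// convenience methods), so the same data still decompresses here.
const gunzip = zlib.createGunzip();
let total = 0;
gunzip.on('data', (chunk) => { total += chunk.length; });
gunzip.on('end', () => console.log(total));  // 1024
gunzip.end(big);
```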

View File

@@ -11,7 +11,7 @@ const assert = require('assert');
// large Buffers.
const buffer = require('buffer');
const oldkMaxLength = buffer.kMaxLength;
- buffer.kMaxLength = 128;
+ buffer.kMaxLength = 64;
const zlib = require('zlib');
buffer.kMaxLength = oldkMaxLength;

View File

@@ -11,7 +11,7 @@ const assert = require('assert');
// large Buffers.
const buffer = require('buffer');
const oldkMaxLength = buffer.kMaxLength;
- buffer.kMaxLength = 128;
+ buffer.kMaxLength = 64;
const zlib = require('zlib');
buffer.kMaxLength = oldkMaxLength;

View File

@@ -0,0 +1,25 @@
'use strict';
const common = require('../common');
const assert = require('assert');
const zlib = require('zlib');
const encoded = Buffer.from('G38A+CXCIrFAIAM=', 'base64');
// Async
zlib.brotliDecompress(encoded, { maxOutputLength: 64 }, common.expectsError({
code: 'ERR_BUFFER_TOO_LARGE',
message: 'Cannot create a Buffer larger than 64 bytes'
}));
// Sync
assert.throws(function() {
zlib.brotliDecompressSync(encoded, { maxOutputLength: 64 });
}, RangeError);
// Async
zlib.brotliDecompress(encoded, { maxOutputLength: 256 }, function(err) {
assert.strictEqual(err, null);
});
// Sync
zlib.brotliDecompressSync(encoded, { maxOutputLength: 256 });