diff --git a/src/test/unit_tests/jest_tests/test_chunked_content_decoder.test.js b/src/test/unit_tests/jest_tests/test_chunked_content_decoder.test.js index 3e22cff489..54b078dbac 100644 --- a/src/test/unit_tests/jest_tests/test_chunked_content_decoder.test.js +++ b/src/test/unit_tests/jest_tests/test_chunked_content_decoder.test.js @@ -25,17 +25,20 @@ describe('ChunkedContentDecoder', function() { // 0\r\n // \r\n + // for easier debugging you can set the number of iterations here: + const NUMBER_OF_ITERATIONS_IMPORTANT_CASE = 100; + const NUMBER_OF_ITERATIONS_DEFAULT = 2; + describe('expected to parse the input', function() { test_parse_output({ - name: 'two_chunks', + name: 'one_chunk', input: '3\r\n' + 'foo\r\n' + - '3\r\n' + - 'bar\r\n' + '0\r\n' + '\r\n', - output: 'foobar', + output: 'foo', + iterations: NUMBER_OF_ITERATIONS_DEFAULT, }); test_parse_output({ @@ -48,6 +51,7 @@ describe('ChunkedContentDecoder', function() { '0\r\n' + '\r\n', output: 'foobar', + iterations: NUMBER_OF_ITERATIONS_IMPORTANT_CASE, }); test_parse_output({ @@ -60,14 +64,15 @@ describe('ChunkedContentDecoder', function() { 'ff\r\n' + 'f'.repeat(255) + '\r\n' + '0\r\n' + - 'x-trailer-1: value\r\n' + - 'x-trailer-2: value\r\n' + + 'x-trailer-1:value\r\n' + + 'x-trailer-2:value\r\n' + '\r\n', output: 'foobarbaz' + 'f'.repeat(255), + iterations: NUMBER_OF_ITERATIONS_IMPORTANT_CASE, check: decoder => { assert.deepStrictEqual(decoder.trailers, [ - 'x-trailer-1: value', - 'x-trailer-2: value', + 'x-trailer-1:value', + 'x-trailer-2:value', ]); }, }); @@ -80,6 +85,7 @@ describe('ChunkedContentDecoder', function() { 'semi:trailer\r\n' + '\r\n', output: '', + iterations: NUMBER_OF_ITERATIONS_IMPORTANT_CASE, check: decoder => { assert.deepStrictEqual(decoder.trailers, [ 'movie:trailer', @@ -89,23 +95,38 @@ }); test_parse_output({ - name: 'one_chunk_with_ext', + name: 'one_chunk_with_extension', input: '3;crc=1a2b3c4d\r\n' + 'EXT\r\n' + '0\r\n' + 
'\r\n', output: 'EXT', + iterations: NUMBER_OF_ITERATIONS_IMPORTANT_CASE, }); test_parse_output({ - name: 'one_chunk_with_ext', + name: 'one_chunk_with_extension_and_trailer', input: '3;crc=1a2b3c4d\r\n' + 'EXT\r\n' + '0\r\n' + + create_trailers(1) + '\r\n', output: 'EXT', + iterations: NUMBER_OF_ITERATIONS_IMPORTANT_CASE, + }); + + test_parse_output({ + name: 'one_chunk_with_trailers', // lower than MAX_CHUNK_HEADER_SIZE + input: + '3\r\n' + + 'foo\r\n' + + '0\r\n' + + create_trailers(19) + + '\r\n', + output: 'foo', + iterations: NUMBER_OF_ITERATIONS_DEFAULT, }); }); @@ -116,18 +137,33 @@ describe('ChunkedContentDecoder', function() { name: 'chunk_size_not_hex', input: 'invalid\r\n\r\n', error_pos: 7, // end of header + iterations: NUMBER_OF_ITERATIONS_IMPORTANT_CASE, }); test_parse_error({ - name: 'chunk_size_too_big', + name: 'chunk_size_too_big', // according to MAX_CHUNK_SIZE input: '10000000001\r\n\r\n', error_pos: 11, // end of header + iterations: NUMBER_OF_ITERATIONS_IMPORTANT_CASE, }); test_parse_error({ name: 'header_too_long', // according to MAX_CHUNK_HEADER_SIZE input: '0' + ';'.repeat(1024) + '\r\n\r\n', error_pos: 1025, // end of header + iterations: NUMBER_OF_ITERATIONS_IMPORTANT_CASE, + }); + + test_parse_error({ + name: 'too_many_trailers', // according to MAX_CHUNK_HEADER_SIZE + input: + '3\r\n' + + 'foo\r\n' + + '0\r\n' + + create_trailers(21) + + '\r\n', + error_pos: 420, // last trailer position + iterations: NUMBER_OF_ITERATIONS_DEFAULT, }); }); @@ -137,14 +173,15 @@ describe('ChunkedContentDecoder', function() { * name: string, * input: string, * output: string, + * iterations?: number * check?: (decoder: ChunkedContentDecoder) => void, * }} params */ - function test_parse_output({ name, input, output, check }) { + function test_parse_output({ name, input, output, check, iterations = NUMBER_OF_ITERATIONS_DEFAULT}) { it(name, async function() { - for (let i = 0; i < 100; ++i) { + for (let i = 0; i < iterations; ++i) { const decoder = new 
ChunkedContentDecoder(); - console.log(`test_parse_output(${name}): decoder input`, input, decoder); + console.log(`test_parse_output(${name}): decoder input`, input, decoder.get_debug_info()); const readable = new stream.Readable({ read() { // split at random position @@ -157,7 +194,7 @@ describe('ChunkedContentDecoder', function() { const writable = buffer_utils.write_stream(); await stream.promises.pipeline(readable, decoder, writable); const decoded = buffer_utils.join(writable.buffers, writable.total_length); - console.log(`test_parse_output(${name}): decoder returned`, decoded, decoder); + console.log(`test_parse_output(${name}): decoder returned`, decoded, decoder.get_debug_info()); assert.deepStrictEqual(decoded, Buffer.from(output)); if (check) check(decoder); } @@ -169,13 +206,14 @@ describe('ChunkedContentDecoder', function() { * name: string, * input: string, * error_pos?: number, + * iterations?: number * }} params */ - function test_parse_error({ name, input, error_pos }) { + function test_parse_error({ name, input, error_pos, iterations = NUMBER_OF_ITERATIONS_DEFAULT }) { it(name, async function() { - for (let i = 0; i < 100; ++i) { + for (let i = 0; i < iterations; ++i) { const decoder = new ChunkedContentDecoder(); - console.log(`test_parse_error(${name}): decoder input`, input, decoder); + console.log(`test_parse_error(${name}): decoder input`, input, decoder.get_debug_info()); console.log(name, 'decode', decoder); try { const readable = new stream.Readable({ @@ -184,18 +222,17 @@ describe('ChunkedContentDecoder', function() { const sp = Math.floor(input.length * Math.random()); this.push(input.slice(0, sp)); this.push(input.slice(sp)); - // this.push(input); this.push(null); } }); const writable = buffer_utils.write_stream(); await stream.promises.pipeline(readable, decoder, writable); const decoded = buffer_utils.join(writable.buffers, writable.total_length); - console.log(`test_parse_error(${name}): decoder returned`, decoded, decoder); + 
console.log(`test_parse_error(${name}): decoder returned`, decoded, decoder.get_debug_info()); assert.fail('Should have failed'); } catch (err) { if (err.message === 'Should have failed') throw err; - console.log(`test_parse_error(${name}): decoder caught`, err, decoder); + console.log(`test_parse_error(${name}): decoder caught`, err, decoder.get_debug_info()); if (error_pos !== undefined) { assert.strictEqual(decoder.stream_pos, error_pos); } @@ -204,4 +241,19 @@ }); } + + /** + * create_trailers returns a single string containing the requested number of trailer lines + * @param {number} number_of_trailers + * @returns {string} + */ + function create_trailers(number_of_trailers) { + const trailers = []; + for (let index = 1; index <= number_of_trailers; ++index) { + const trailer = `x-trailer-${index}:value\r\n`; + trailers.push(trailer); + } + return trailers.join(''); + } + }); diff --git a/src/util/chunked_content_decoder.js b/src/util/chunked_content_decoder.js index 57fb38567b..393fdbb247 100644 --- a/src/util/chunked_content_decoder.js +++ b/src/util/chunked_content_decoder.js @@ -31,9 +31,9 @@ const MAX_TRAILERS = 20; * * Basic encoding structure: * --------------------------------------------------- - * 1fff;chunk-signature=1a2b\r\n - chunk header + * 1fff;chunk-signature=1a2b\r\n - chunk header (optional extension) * <1fff bytes of data>\r\n - chunk data - * 2fff;chunk-signature=1a2b\r\n - chunk header + * 2fff;chunk-signature=1a2b\r\n - chunk header (optional extension) * <2fff bytes of data>\r\n - chunk data * 0\r\n - last chunk * \r\n - optional trailer @@ -242,19 +242,25 @@ class ChunkedContentDecoder extends stream.Transform { index ||= 0; this.stream_pos += index; - const message = `Failed parsing aws-chunked data` + + const message = `Failed parsing aws-chunked data` + this.get_debug_info() + + // since the state machine changes according to each byte, attach a view of the next 10 bytes of the buffer + (buf ? 
` buf[index..10]=[${buf.toString('hex', index, index + 10)}]` : ''); + + this.state = STATE_ERROR; + this.emit('error', new Error(message)); + return false; + } + + get_debug_info() { + const debug_info = `Debug info` + ` pos=${this.stream_pos}` + ` state=${this.state}` + ` chunk_header=${this.chunk_header}` + ` chunk_size=${this.chunk_size}` + ` last_chunk=${this.last_chunk}` + ` trailer=${this.trailer}` + - ` trailers=${this.trailers}` + - (buf ? ` buf[index..10]=[${buf.toString('utf8', index, index + 10)}]` : ''); - - this.state = STATE_ERROR; - this.emit('error', new Error(message)); - return false; + ` trailers=${this.trailers}`; + return debug_info; } }