Add support for per-parser body limits #800

Merged 5 commits on Feb 27, 2018
6 changes: 5 additions & 1 deletion docs/ContentTypeParser.md
@@ -32,7 +32,7 @@ if (!fastify.hasContentTypeParser('application/jsoff')){
```

#### Body Parser
You can parse the body of the request in two ways. The first is shown above: add a custom content type parser and handle the request stream yourself. The second is to pass a `parseAs` option to the `addContentTypeParser` API, declaring how you want the body collected: either `'string'` or `'buffer'`. When `parseAs` is used, Fastify handles the stream internally and performs checks such as enforcing the [maximum size](https://github.com/fastify/fastify/blob/master/docs/Factory.md#factory-body-limit) of the body and validating the content length.
You can parse the body of the request in two ways. The first is shown above: add a custom content type parser and handle the request stream yourself. The second is to pass a `parseAs` option to the `addContentTypeParser` API, declaring how you want the body collected: either `'string'` or `'buffer'`. When `parseAs` is used, Fastify handles the stream internally and performs checks such as enforcing the [maximum size](https://github.com/fastify/fastify/blob/master/docs/Factory.md#factory-body-limit) of the body and validating the content length. If the limit is exceeded, the custom parser will not be invoked.
```js
fastify.addContentTypeParser('application/json', { parseAs: 'string' }, function (req, body, done) {
try {
@@ -48,6 +48,10 @@ As you can see, now the function signature is `(req, body, done)` instead of `(r

See [`example/parser.js`](https://github.com/fastify/fastify/blob/master/examples/parser.js) for an example.

##### Custom Parser Options
+ `parseAs` (string): Either `'string'` or `'buffer'` to designate how the incoming data should be collected. Default: `'buffer'`.
+ `bodyLimit` (number): The maximum payload size, in bytes, that the custom parser will accept. Defaults to the global body limit passed to the [`Fastify factory function`](https://github.com/fastify/fastify/blob/master/docs/Factory.md#bodylimit).
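For instance (an illustrative sketch only; the content type, limit, and handler below are arbitrary and not part of this PR), a parser that accepts small text payloads as a string could be registered like this:
```js
// Bodies larger than 1 KB are rejected with a 413 Payload Too Large
// before the parser function below is ever invoked.
fastify.addContentTypeParser('text/plain', { parseAs: 'string', bodyLimit: 1024 }, function (req, body, done) {
  done(null, body)
})
```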

#### Catch All
There are some cases where you need to catch all requests regardless of their content type. With Fastify, you just need to add the `'*'` content type.
```js
13 changes: 10 additions & 3 deletions fastify.js
@@ -157,7 +157,7 @@ function build (options) {
// custom parsers
fastify.addContentTypeParser = addContentTypeParser
fastify.hasContentTypeParser = hasContentTypeParser
fastify._contentTypeParser = new ContentTypeParser()
fastify._contentTypeParser = new ContentTypeParser(fastify._bodyLimit)

fastify.setSchemaCompiler = setSchemaCompiler
fastify.setSchemaCompiler(buildSchemaCompiler())
@@ -471,7 +471,6 @@ function build (options) {
opts.path = url
opts.prefix = prefix
opts.logLevel = opts.logLevel || _fastify._logLevel
opts.bodyLimit = opts.bodyLimit || _fastify._bodyLimit

// run 'onRoute' hooks
for (var h of onRouteHooks) {
@@ -556,7 +555,7 @@ function build (options) {
this.errorHandler = errorHandler
this._middie = null
this._parserOptions = {
limit: bodyLimit
limit: bodyLimit || null
}
this._fastify = fastify
this.logLevel = logLevel
@@ -636,6 +635,14 @@ function build (options) {
opts = {}
}

if (!opts) {
opts = {}
}

if (!opts.bodyLimit) {
opts.bodyLimit = this._bodyLimit
}

this._contentTypeParser.add(contentType, opts, parser)
return this
}
10 changes: 6 additions & 4 deletions lib/ContentTypeParser.js
@@ -3,9 +3,9 @@

const lru = require('tiny-lru')

function ContentTypeParser () {
function ContentTypeParser (bodyLimit) {
this.customParsers = {}
this.customParsers['application/json'] = new Parser(true, false, defaultJsonParser)
this.customParsers['application/json'] = new Parser(true, false, bodyLimit, defaultJsonParser)
this.parserList = ['application/json']
this.cache = lru(100)
}
@@ -28,6 +28,7 @@ ContentTypeParser.prototype.add = function (contentType, opts, parserFn) {
const parser = new Parser(
opts.parseAs === 'string',
opts.parseAs === 'buffer',
opts.bodyLimit,
parserFn
)

@@ -94,7 +95,7 @@ ContentTypeParser.prototype.run = function (contentType, handler, request, reply

function rawBody (request, reply, options, parser, done) {
var asString = parser.asString
var limit = options.limit
var limit = options.limit === null ? parser.bodyLimit : options.limit
var contentLength = request.headers['content-length'] === undefined
? NaN
: Number.parseInt(request.headers['content-length'], 10)
@@ -178,9 +179,10 @@ function buildContentTypeParser (c) {
return contentTypeParser
}

function Parser (asString, asBuffer, fn) {
function Parser (asString, asBuffer, bodyLimit, fn) {
this.asString = asString
this.asBuffer = asBuffer
this.bodyLimit = bodyLimit
this.fn = fn
}

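Taken together, these changes resolve the effective limit as follows: a route-level `bodyLimit` is forwarded to the parser as `options.limit` and takes precedence when present; otherwise the parser falls back to its own `bodyLimit`, which `addContentTypeParser` defaults to the instance-wide limit. A minimal sketch of that precedence (illustrative only, not code from this PR):
```js
// Mirrors the ternary in rawBody() above.
// routeLimit: the route's bodyLimit, or null when the route sets none
// parserLimit: the per-parser bodyLimit, already defaulted to the instance limit
function effectiveBodyLimit (routeLimit, parserLimit) {
  return routeLimit === null ? parserLimit : routeLimit
}
```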
78 changes: 78 additions & 0 deletions test/custom-parser.test.js
@@ -630,3 +630,81 @@ test('Wrong parseAs parameter', t => {
t.is(err.message, 'The body parser can only parse your data as \'string\' or \'buffer\', you asked \'fireworks\' which is not supported.')
}
})

test('Should allow defining the bodyLimit per parser', t => {
t.plan(3)
const fastify = Fastify()
t.tearDown(() => fastify.close())

fastify.post('/', (req, reply) => {
reply.send(req.body)
})

fastify.addContentTypeParser(
'x/foo',
{ parseAs: 'string', bodyLimit: 5 },
function (req, body, done) {
t.fail('should not be invoked')
done()
}
)

fastify.listen(0, err => {
t.error(err)

sget({
method: 'POST',
url: 'http://localhost:' + fastify.server.address().port,
body: '1234567890',
headers: {
'Content-Type': 'x/foo'
}
}, (err, response, body) => {
t.error(err)
t.strictDeepEqual(JSON.parse(body.toString()), {
statusCode: 413,
error: 'Payload Too Large',
message: 'Request body is too large'
})
fastify.close()
})
})
})

test('route bodyLimit should take precedence over a custom parser bodyLimit', t => {
t.plan(3)
const fastify = Fastify()
t.tearDown(() => fastify.close())

fastify.post('/', { bodyLimit: 5 }, (request, reply) => {
reply.send(request.body)
})

fastify.addContentTypeParser(
'x/foo',
{ parseAs: 'string', bodyLimit: 100 },
function (req, body, done) {
t.fail('should not be invoked')
done()
}
)

fastify.listen(0, err => {
t.error(err)

sget({
method: 'POST',
url: 'http://localhost:' + fastify.server.address().port,
body: '1234567890',
headers: { 'Content-Type': 'x/foo' }
}, (err, response, body) => {
t.error(err)
t.strictDeepEqual(JSON.parse(body.toString()), {
statusCode: 413,
error: 'Payload Too Large',
message: 'Request body is too large'
})
fastify.close()
})
})
})