Skip to content

Commit 23800fc

Browse files
Eomm and UlisesGascon authored
Merge commit from fork
* fix: limit the cache size * types: add cacheSize and mark default as optional --------- Co-authored-by: Ulises Gascon <[email protected]>
1 parent f5d876c commit 23800fc

6 files changed

Lines changed: 82 additions & 8 deletions

File tree

README.md

Lines changed: 12 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -51,7 +51,8 @@ fastify.register(require('@fastify/accepts-serializer'), {
5151
serializer: body => msgpack.encode(body)
5252
}
5353
],
54-
default: 'application/yaml' // MIME type used if Accept header does not match anything
54+
default: 'application/yaml', // MIME type used if Accept header does not match anything
55+
cacheSize: 100 // max number of Accept header combinations to cache (default: 100)
5556
})
5657

5758
// Per-router serializers
@@ -69,6 +70,14 @@ fastify.get('/request', { config }, function (req, reply) {
6970
})
7071
```
7172

73+
## Options
74+
75+
| Option | Type | Default | Description |
76+
|--------|------|---------|-------------|
77+
| `serializers` | `Array` | `[]` | List of serializer definitions, each with a `regex` and a `serializer` function |
78+
| `default` | `string` || MIME type to use when no serializer matches the `Accept` header. If omitted, unmatched requests receive a `406` response |
79+
| `cacheSize` | `number` | `100` | Maximum number of distinct `Accept` header combinations to cache. Entries are evicted in LRU order once the limit is reached |
80+
7281
## Behavior
7382

7483
For each route, a SerializerManager is defined, which has both per-route and global serializer definitions.
@@ -77,6 +86,8 @@ The MIME type `application/json` is always handled by `fastify` if no serializer
7786

7887
If no `default` key is specified in configuration, all requests with an unknown `Accept` header will be replied to with a 406 response (a boom error is used).
7988

89+
Serializer selection results are cached by `Accept` header value using an LRU cache bounded by `cacheSize`. This prevents unbounded memory growth from attacker-controlled `Accept` header variants.
90+
8091
## License
8192

8293
Licensed under [MIT](./LICENSE).

SerializerManager.js

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -18,7 +18,7 @@ class SerializerManager {
1818
}
1919

2020
findSerializer (types) {
21-
const cacheValue = this.cache[types]
21+
const cacheValue = this.cache.get(types)
2222

2323
if (cacheValue) return cacheValue
2424

@@ -28,7 +28,7 @@ class SerializerManager {
2828
for (let j = 0; j < this.serializers.length; j++) {
2929
const serializer = this.serializers[j]
3030
if (serializer.isAble(type)) {
31-
this.cache[types] = { serializer, type }
31+
this.cache.set(types, { serializer, type })
3232
return { serializer, type }
3333
}
3434
}

index.js

Lines changed: 6 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,12 +1,14 @@
11
'use strict'
22

33
const fp = require('fastify-plugin')
4+
const { LruObject } = require('toad-cache')
45
const SerializerManager = require('./SerializerManager')
56

67
const FASTIFY_DEFAULT_SERIALIZE_MIME_TYPE = 'application/json'
78

89
function fastifyAcceptsSerializer (fastify, options, next) {
9-
const serializerCache = {}
10+
const cacheSize = options.cacheSize ?? 100
11+
const serializerCache = new LruObject(cacheSize)
1012
options.cache = serializerCache
1113

1214
const globalSerializerManager = SerializerManager.build(options)
@@ -22,7 +24,9 @@ function fastifyAcceptsSerializer (fastify, options, next) {
2224

2325
if (request.routeOptions.config.serializers) {
2426
// keep route level cache in config to prevent messing with global cache
25-
request.routeOptions.config.serializers.cache = Object.assign({}, request.routeOptions.config.serializers.cache)
27+
if (!request.routeOptions.config.serializers.cache) {
28+
request.routeOptions.config.serializers.cache = new LruObject(cacheSize)
29+
}
2630
reply.serializer.serializerManager = SerializerManager.expand({
2731
serializers: request.routeOptions.config.serializers,
2832
cache: request.routeOptions.config.serializers.cache

package.json

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -63,7 +63,8 @@
6363
},
6464
"dependencies": {
6565
"@fastify/accepts": "^5.0.0",
66-
"fastify-plugin": "^5.0.0"
66+
"fastify-plugin": "^5.0.0",
67+
"toad-cache": "^3.7.0"
6768
},
6869
"publishConfig": {
6970
"access": "public"

test/test.js

Lines changed: 58 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -458,7 +458,7 @@ test('serializer cache', async t => {
458458
t.plan(6)
459459

460460
fastify.get('/request', function (_req, reply) {
461-
t.assert.deepStrictEqual(Object.keys(reply.serializer.cache), ['application/cache'])
461+
t.assert.deepStrictEqual(reply.serializer.cache.keys(), ['application/cache'])
462462

463463
reply.send('cache')
464464
})
@@ -488,3 +488,60 @@ test('serializer cache', async t => {
488488
t.assert.deepStrictEqual(response2.payload, 'cache')
489489
})
490490
})
491+
492+
test('serializer cache is bounded (GHSA-qxhc-wx3p-2wmg)', async t => {
493+
t.plan(2)
494+
495+
const MAX_CACHE = 5
496+
const fastify = Fastify()
497+
498+
fastify.register(plugin, {
499+
cacheSize: MAX_CACHE,
500+
serializers: [
501+
{
502+
regex: /^application\/poc$/,
503+
serializer: body => JSON.stringify(body)
504+
}
505+
]
506+
})
507+
508+
fastify.get('/', async function (_request, reply) {
509+
return reply.code(204).send()
510+
})
511+
512+
// Registered before any inject so the server is not yet started
513+
let cacheSize = 0
514+
let cacheMax = 0
515+
fastify.get('/cache-size', function (_req, reply) {
516+
cacheSize = reply.serializer.cache.size
517+
cacheMax = reply.serializer.cache.max
518+
reply.send({ size: cacheSize })
519+
})
520+
521+
// Send more distinct Accept header variants than the cache max.
522+
// An attacker can craft headers like "application/vnd.dummy-0001, application/poc" to create
523+
// a separate cache entry per request while still matching the configured serializer.
524+
const attackVariants = Array.from(
525+
{ length: MAX_CACHE + 10 },
526+
(_, i) => `application/vnd.dummy-${String(i).padStart(4, '0')}, application/poc`
527+
)
528+
529+
for (const accept of attackVariants) {
530+
await fastify.inject({ method: 'GET', url: '/', headers: { accept } })
531+
}
532+
533+
await fastify.inject({ method: 'GET', url: '/cache-size', headers: { accept: 'application/json' } })
534+
535+
await t.test('cache max is set to the configured cacheSize option', t => {
536+
t.plan(1)
537+
t.assert.strictEqual(cacheMax, MAX_CACHE)
538+
})
539+
540+
await t.test('cache size does not exceed cacheSize even under attacker-controlled Accept variants', t => {
541+
t.plan(1)
542+
t.assert.ok(
543+
cacheSize <= MAX_CACHE,
544+
`Cache grew to ${cacheSize} entries after ${attackVariants.length} distinct Accept variants — unbounded growth confirmed`
545+
)
546+
})
547+
})

types/index.d.ts

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -16,7 +16,8 @@ declare namespace fastifyAcceptsSerializer {
1616

1717
export interface FastifyAcceptsSerializerPluginOptions {
1818
serializers: SerializerConfig[];
19-
default: string;
19+
default?: string;
20+
cacheSize?: number;
2021
}
2122

2223
export const fastifyAcceptsSerializer: FastifyAcceptsSerializer

0 commit comments

Comments (0)