-
-
Notifications
You must be signed in to change notification settings - Fork 675
Description
As pointed out by @ronag in #3657, we can improve our SQLite store design by using a composite cache key as the ID of the table instead of an auto increment field.
One specific problem of our SQLiteStore design is that on reading the cache, we read all the values for a given route, independently of the vary headers:
undici/lib/cache/sqlite-cache-store.js
Lines 375 to 421 in e49b575
| #findValue (key, canBeExpired = false) { | |
| const url = this.#makeValueUrl(key) | |
| /** | |
| * @type {SqliteStoreValue[]} | |
| */ | |
| const values = this.#getValuesQuery.all(url, key.method) | |
| if (values.length === 0) { | |
| // No responses, let's just return early | |
| return undefined | |
| } | |
| const now = Date.now() | |
| for (const value of values) { | |
| if (now >= value.deleteAt && !canBeExpired) { | |
| this.#deleteExpiredValuesQuery.run(now) | |
| return undefined | |
| } | |
| let matches = true | |
| if (value.vary) { | |
| if (!key.headers) { | |
| // Request doesn't have headers so it can't fulfill the vary | |
| // requirements no matter what, let's return early | |
| return undefined | |
| } | |
| value.vary = JSON.parse(value.vary) | |
| for (const header in value.vary) { | |
| if (key.headers[header] !== value.vary[header]) { | |
| matches = false | |
| break | |
| } | |
| } | |
| } | |
| if (matches) { | |
| return value | |
| } | |
| } | |
| return undefined | |
| } | |
| } |
For similar reasons, when writing we first need to read the cache to find whether a matching row already exists, and then go update it.
undici/lib/cache/sqlite-cache-store.js
Lines 276 to 315 in e49b575
| final (callback) { | |
| if (body === null) { | |
| return callback() | |
| } | |
| /** | |
| * @type {SqliteStoreValue | undefined} | |
| */ | |
| const existingValue = findValue(key, true) | |
| if (existingValue) { | |
| // Updating an existing response, let's delete it | |
| updateValueQuery.run( | |
| JSON.stringify(stringifyBufferArray(body)), | |
| value.deleteAt, | |
| value.statusCode, | |
| value.statusMessage, | |
| value.rawHeaders ? JSON.stringify(stringifyBufferArray(value.rawHeaders)) : null, | |
| value.etag, | |
| value.cachedAt, | |
| value.staleAt, | |
| value.deleteAt, | |
| existingValue.id | |
| ) | |
| } else { | |
| // New response, let's insert it | |
| insertValueQuery.run( | |
| url, | |
| key.method, | |
| JSON.stringify(stringifyBufferArray(body)), | |
| value.deleteAt, | |
| value.statusCode, | |
| value.statusMessage, | |
| value.rawHeaders ? JSON.stringify(stringifyBufferArray(value.rawHeaders)) : null, | |
| value.etag ? value.etag : null, | |
| value.vary ? JSON.stringify(value.vary) : null, | |
| value.cachedAt, | |
| value.staleAt, | |
| value.deleteAt | |
| ) | |
| } |
We should be able to fix both problems at the same time by using a primary key that includes the vary headers, so we can avoid loading every stored response for a route when reading the cache.
For the algorithm to compute the key, check #3657 (comment).