
Commit 64a5ff9

[vitest-pool-workers] Bump to [email protected] and add support for wrangler.{toml,json} configs (#5060)
* chore: bump `vitest-pool-workers` to `[email protected]`
* fix: ensure `vitest-pool-workers` is externalised during dev
* feat: add support for merging `miniflare` `WorkerOptions`
* feat: add support for getting `WorkerOptions` from `wrangler`
* feat: add support for Wrangler config loading to Vitest pool
* chore: remove `getScheduledResult()` function

  I noticed a message on the Discord recently stating `ScheduledController#noRetry()` doesn't actually do anything in prod. Therefore, there's no reason to set it, and no reason to check that it's set in tests. This change therefore removes the `getScheduledResult()` function, since nothing else useful was reported from it.

* fix: re-enable module mocking test

  vitest-dev/vitest#5050 rolled back the change that broke this for us

* ci: only update prerelease dependency versions after `turbo build`
1 parent d0fb7b8 commit 64a5ff9
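The headline change is that the Workers Vitest pool can now derive Miniflare `WorkerOptions` from a project's `wrangler.toml`/`wrangler.json` instead of requiring every option to be declared inline. As a rough sketch only (the `defineWorkersConfig` helper and the `wrangler.configPath` / `miniflare` option names are assumptions about the pool's config surface, not shown in this diff), a project might opt in from its Vitest config like this:

// vitest.config.ts — illustrative sketch; option names are assumed
import { defineWorkersConfig } from "@cloudflare/vitest-pool-workers/config";

export default defineWorkersConfig({
	test: {
		poolOptions: {
			workers: {
				// Load bindings/compatibility settings from the Wrangler config
				wrangler: { configPath: "./wrangler.toml" },
				// Extra inline Miniflare options, combined with the Wrangler-derived
				// options via the new `mergeWorkerOptions` added in this commit
				miniflare: { compatibilityFlags: ["nodejs_compat"] },
			},
		},
	},
});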

19 files changed: +667, -230 lines


.github/prereleases/1-versions.mjs

Lines changed: 4 additions & 22 deletions
@@ -1,9 +1,5 @@
 import { execSync } from "node:child_process";
-import {
-	getPackagesForPrerelease,
-	getPrereleaseArtifactUrl,
-	setPackage,
-} from "./0-packages.mjs";
+import { getPackagesForPrerelease, setPackage } from "./0-packages.mjs";
 
 function getPrereleaseVersion() {
 	const sha = execSync("git rev-parse --short HEAD", { encoding: "utf8" });
@@ -18,26 +14,12 @@ function updateVersions(pkgs, newVersion) {
 	for (const pkg of pkgs) pkg.json.version = newVersion;
 }
 
-/**
- * @param {~Package[]} pkgs
- * @param {string} newVersion
- */
-function updateDependencyVersions(pkgs, newVersion) {
-	const prereleaseNames = new Set(pkgs.map((pkg) => pkg.json.name));
-	for (const pkg of pkgs) {
-		for (const dependency of Object.keys(pkg.json.dependencies ?? {})) {
-			if (prereleaseNames.has(dependency)) {
-				pkg.json.dependencies[dependency] =
-					getPrereleaseArtifactUrl(dependency);
-			}
-		}
-	}
-}
-
 {
 	const pkgs = getPackagesForPrerelease();
 	const newVersion = getPrereleaseVersion();
 	updateVersions(pkgs, newVersion);
-	updateDependencyVersions(pkgs, newVersion);
+	// Ideally, we'd update dependency versions here too, but Turborepo doesn't
+	// respect `https://` version constraints for building dependent packages
+	// first.
 	pkgs.forEach(setPackage);
 }

.github/prereleases/2-build-pack-upload.mjs

Lines changed: 21 additions & 0 deletions
@@ -5,7 +5,9 @@ import { DefaultArtifactClient } from "@actions/artifact";
 import {
 	getPackagesForPrerelease,
 	getPrereleaseArtifactName,
+	getPrereleaseArtifactUrl,
 	projectRoot,
+	setPackage,
 } from "./0-packages.mjs";
 
 const artifact = new DefaultArtifactClient();
@@ -14,6 +16,19 @@ function buildAllPackages() {
 	execSync("pnpm build", { cwd: projectRoot, stdio: "inherit" });
 }
 
+/** @param {~Package[]} pkgs */
+function updateDependencyVersions(pkgs) {
+	const prereleaseNames = new Set(pkgs.map((pkg) => pkg.json.name));
+	for (const pkg of pkgs) {
+		for (const dependency of Object.keys(pkg.json.dependencies ?? {})) {
+			if (prereleaseNames.has(dependency)) {
+				pkg.json.dependencies[dependency] =
+					getPrereleaseArtifactUrl(dependency);
+			}
+		}
+	}
+}
+
 /**
  * @param {~Package} pkg
  * @returns {string}
@@ -38,6 +53,12 @@ async function uploadPackageTarball(pkg, tarballPath) {
 {
 	buildAllPackages();
 	const pkgs = getPackagesForPrerelease();
+
+	// Update dependency versions *after* the build, so Turborepo knows to build
+	// dependent packages first
+	updateDependencyVersions(pkgs);
+	pkgs.forEach(setPackage);
+
 	for (const pkg of pkgs) {
 		const tarballPath = packPackage(pkg);
 		await uploadPackageTarball(pkg, tarballPath);

fixtures/dev-env/package.json

Lines changed: 1 addition & 1 deletion
@@ -16,7 +16,7 @@
 		"@types/ws": "^8.5.7",
 		"@cloudflare/workers-tsconfig": "workspace:^",
 		"get-port": "^7.0.0",
-		"miniflare": "3.20240129.3",
+		"miniflare": "workspace:*",
 		"undici": "^5.28.3",
 		"wrangler": "workspace:*",
 		"ws": "^8.14.2"

packages/miniflare/src/index.ts

Lines changed: 1 addition & 0 deletions
@@ -1705,4 +1705,5 @@ export * from "./plugins";
 export * from "./runtime";
 export * from "./shared";
 export * from "./workers";
+export * from "./merge";
 export * from "./zod-format";

packages/miniflare/src/merge.ts

Lines changed: 111 additions & 0 deletions
@@ -0,0 +1,111 @@
+import { WorkerOptions } from "./plugins";
+
+// https://github.com/Rich-Harris/devalue/blob/50af63e2b2c648f6e6ea29904a14faac25a581fc/src/utils.js#L31-L51
+const objectProtoNames = Object.getOwnPropertyNames(Object.prototype)
+	.sort()
+	.join("\0");
+function isPlainObject(value: unknown): value is Record<string, unknown> {
+	const proto = Object.getPrototypeOf(value);
+	return (
+		proto === Object.prototype ||
+		proto === null ||
+		Object.getOwnPropertyNames(proto).sort().join("\0") === objectProtoNames
+	);
+}
+
+// Get all the keys in `WorkerOptions` whose values can be either an array or
+// a record (e.g. `kvNamespaces` which can either be a `string[]` of namespaces
+// or a `Record<string, string>` mapping binding name to namespace ID)
+type ArrayRecordKeys<O extends object, K extends keyof O> = K extends unknown
+	? Extract<O[K], unknown[]> extends never
+		? never
+		: Extract<O[K], Record<string, unknown>> extends never
+			? never
+			: K
+	: never;
+// "kvNamespaces" | "r2Buckets" | "queueProducers" | "queueConsumers" | ...
+type WorkerOptionsArrayRecordKeys = ArrayRecordKeys<
+	WorkerOptions,
+	keyof WorkerOptions
+>;
+// Get the record type that can be used for key `K` in `WorkerOptions`
+type WorkerOptionsRecord<K extends WorkerOptionsArrayRecordKeys> = Extract<
+	WorkerOptions[K],
+	Record<string, unknown>
+>;
+/** Converts the array-form of key `K` in `WorkerOptions` to its object form */
+function convertWorkerOptionsArrayToObject<
+	K extends WorkerOptionsArrayRecordKeys
+>(key: K, array: Extract<WorkerOptions[K], unknown[]>): WorkerOptionsRecord<K> {
+	// eslint-disable-next-line @typescript-eslint/no-unused-vars
+	const _: string[] = array; // Static assert that `array` is a `string[]`
+	if (key === "queueConsumers") {
+		// Unfortunately, we can't just `return Object.fromEntries(...)` here, as
+		// TypeScript isn't smart enough to substitute "queueConsumers" as `K` in
+		// the return type. We'd still like to verify correct types, so try assign
+		// it to that first, then return by casting.
+		const object: WorkerOptionsRecord<"queueConsumers"> = Object.fromEntries(
+			array.map((item) => [item, {}])
+		);
+		return object as WorkerOptionsRecord<K>;
+	} else {
+		const object: WorkerOptionsRecord<
+			// `Exclude` encodes the `else` here
+			Exclude<WorkerOptionsArrayRecordKeys, "queueConsumers">
+		> = Object.fromEntries(array.map((item) => [item, item]));
+		return object as WorkerOptionsRecord<K>;
+	}
+}
+
+/**
+ * Merges all of `b`'s properties into `a`. Only merges 1 level deep, i.e.
+ * `kvNamespaces` will be fully-merged, but `durableObject` object-designators
+ * will be overwritten.
+ */
+export function mergeWorkerOptions(
+	/* mut */ a: Partial<WorkerOptions>,
+	b: Partial<WorkerOptions>
+): Partial<WorkerOptions> {
+	const aRecord = a as Record<string, unknown>;
+	for (const [key, bValue] of Object.entries(b)) {
+		const aValue = aRecord[key];
+		if (aValue === undefined) {
+			// Simple case: if `key` only exists in `b`, copy it over to `a`
+			aRecord[key] = bValue;
+			continue;
+		}
+
+		const aIsArray = Array.isArray(aValue);
+		const bIsArray = Array.isArray(bValue);
+		const aIsObject = isPlainObject(aValue);
+		const bIsObject = isPlainObject(bValue);
+		if (aIsArray && bIsArray) {
+			// Merge arrays by joining them together, de-duplicating primitives
+			aRecord[key] = Array.from(new Set(aValue.concat(bValue)));
+		} else if (aIsArray && bIsObject) {
+			// Merge arrays and objects by converting the array into object form,
+			// then assigning `b` to `a`.
+			const aNewValue = convertWorkerOptionsArrayToObject(
+				// Must be an array/record key if `aValue` & `bValue` are array/record
+				key as WorkerOptionsArrayRecordKeys,
+				aValue
+			);
+			Object.assign(aNewValue, bValue);
+			aRecord[key] = aNewValue;
+		} else if (aIsObject && bIsArray) {
+			const bNewValue = convertWorkerOptionsArrayToObject(
+				// Must be an array/record key if `aValue` & `bValue` are array/record
+				key as WorkerOptionsArrayRecordKeys,
+				bValue
+			);
+			Object.assign(aValue, bNewValue);
+		} else if (aIsObject && bIsObject) {
+			// Merge objects by assigning `b` to `a`
+			Object.assign(aValue, bValue);
+		} else {
+			// Merge primitives/complex objects by just using `b`'s value
+			aRecord[key] = bValue;
+		}
+	}
+	return a;
+}
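
For orientation, here is a minimal usage sketch of the new `mergeWorkerOptions` export (the binding names are illustrative; the new test file below specifies the full behaviour):

import { mergeWorkerOptions } from "miniflare";

// Array and object forms of the same option are reconciled into object form,
// with the second argument's values winning for primitives and nested
// object-designators. The first argument is mutated and returned.
const merged = mergeWorkerOptions(
	{ compatibilityDate: "2024-01-01", kvNamespaces: ["TEST_NAMESPACE"] },
	{ kvNamespaces: { TEST_NAMESPACE: "test-namespace-id" } }
);
// merged === {
//   compatibilityDate: "2024-01-01",
//   kvNamespaces: { TEST_NAMESPACE: "test-namespace-id" },
// }
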
Lines changed: 132 additions & 0 deletions
@@ -0,0 +1,132 @@
+import test from "ava";
+import { mergeWorkerOptions } from "miniflare";
+
+test("merges options", (t) => {
+	// Check options in `a` but not `b`
+	// Check options in `b` but not `a`
+	const a = { compatibilityDate: "2024-01-01" };
+	let result = mergeWorkerOptions(a, { compatibilityFlags: ["nodejs_compat"] });
+	t.deepEqual(result, {
+		compatibilityDate: "2024-01-01",
+		compatibilityFlags: ["nodejs_compat"],
+	});
+	t.is(result, a); // Check modifies `a`
+
+	// Check array-valued option in both `a` and `b`
+	result = mergeWorkerOptions(
+		{ kvNamespaces: ["NAMESPACE_1"] },
+		{ kvNamespaces: ["NAMESPACE_2"] }
+	);
+	t.deepEqual(result, {
+		kvNamespaces: ["NAMESPACE_1", "NAMESPACE_2"],
+	});
+	result = mergeWorkerOptions(
+		{ kvNamespaces: ["NAMESPACE_1"] },
+		{ kvNamespaces: ["NAMESPACE_1", "NAMESPACE_2"] }
+	);
+	t.deepEqual(result, {
+		kvNamespaces: ["NAMESPACE_1", "NAMESPACE_2"], // Primitives de-duped
+	});
+	result = mergeWorkerOptions(
+		{ compatibilityFlags: ["global_navigator", "nodejs_compat"] },
+		{ compatibilityFlags: ["nodejs_compat", "export_commonjs_default"] }
+	);
+	t.deepEqual(result, {
+		compatibilityFlags: [
+			"global_navigator",
+			"nodejs_compat",
+			"export_commonjs_default",
+		], // Primitives de-duped
+	});
+
+	// Check object-valued option in both `a` and `b`
+	result = mergeWorkerOptions(
+		{ d1Databases: { DATABASE_1: "database-1" } },
+		{ d1Databases: { DATABASE_2: "database-2" } }
+	);
+	t.deepEqual(result, {
+		d1Databases: { DATABASE_1: "database-1", DATABASE_2: "database-2" },
+	});
+	result = mergeWorkerOptions(
+		{ d1Databases: { DATABASE_1: "database-1" } },
+		{ d1Databases: { DATABASE_1: "database-one", DATABASE_2: "database-two" } }
+	);
+	t.deepEqual(result, {
+		d1Databases: { DATABASE_1: "database-one", DATABASE_2: "database-two" },
+	});
+
+	// Check array-valued option in `a` but object-valued option in `b`
+	// Check object-valued option in `b` but array-valued option in `a`
+	result = mergeWorkerOptions(
+		{
+			r2Buckets: ["BUCKET_1"],
+			queueConsumers: { "queue-1": { maxBatchTimeout: 0 } },
+		},
+		{
+			r2Buckets: { BUCKET_2: "bucket-2" },
+			queueConsumers: ["queue-2"],
+		}
+	);
+	t.deepEqual(result, {
+		r2Buckets: { BUCKET_1: "BUCKET_1", BUCKET_2: "bucket-2" },
+		queueConsumers: { "queue-1": { maxBatchTimeout: 0 }, "queue-2": {} },
+	});
+
+	// Check primitives in `a` and `b`
+	result = mergeWorkerOptions(
+		{ compatibilityDate: "2024-01-01" },
+		{ compatibilityDate: "2024-02-02" }
+	);
+	t.deepEqual(result, { compatibilityDate: "2024-02-02" });
+
+	// Check nested-objects not merged (e.g. service bindings, queue consumers, Durable Objects)
+	result = mergeWorkerOptions(
+		{
+			serviceBindings: {
+				DISK_SERVICE: { disk: { path: "/path/to/a", writable: true } },
+				OTHER_SERVICE: "worker",
+			},
+			queueConsumers: {
+				queue: { maxBatchTimeout: 0 },
+			},
+			durableObjects: {
+				OBJECT_1: "Object1",
+				OBJECT_2: {
+					className: "Object2",
+					scriptName: "worker2",
+				},
+			},
+		},
+		{
+			serviceBindings: {
+				DISK_SERVICE: { disk: { path: "/path/to/b" } },
+			},
+			queueConsumers: {
+				queue: { maxBatchSize: 1 },
+			},
+			durableObjects: {
+				OBJECT_1: {
+					className: "Object1",
+					scriptName: "worker1",
+				},
+				OBJECT_2: "Object2",
+			},
+		}
+	);
+	t.deepEqual(result, {
+		serviceBindings: {
+			DISK_SERVICE: { disk: { path: "/path/to/b" } },
+			OTHER_SERVICE: "worker",
+		},
+		queueConsumers: {
+			queue: { maxBatchSize: 1 },
+		},
+		durableObjects: {
+			OBJECT_1: {
+				className: "Object1",
+				scriptName: "worker1",
+			},
+			OBJECT_2: "Object2",
+		},
+	});
+});

packages/vitest-pool-workers/package.json

Lines changed: 4 additions & 3 deletions
@@ -44,12 +44,13 @@
 		"esbuild": "0.17.19",
 		"import-meta-resolve": "^4.0.0",
 		"miniflare": "workspace:*",
+		"wrangler": "workspace:*",
 		"zod": "^3.20.6"
 	},
 	"peerDependencies": {
-		"@vitest/runner": "1.1.3",
-		"@vitest/snapshot": "1.1.3",
-		"vitest": "1.1.3"
+		"@vitest/runner": "1.3.0",
+		"@vitest/snapshot": "1.3.0",
+		"vitest": "1.3.0"
 	},
 	"workers-sdk": {
 		"prerelease": true
