mirror of
https://github.com/openclaw/openclaw.git
synced 2026-03-27 09:21:35 +07:00
test: keep vitest on forks only
This commit is contained in:
@@ -119,7 +119,7 @@
|
||||
- Agents MUST NOT modify baseline, inventory, ignore, snapshot, or expected-failure files to silence failing checks without explicit approval in this chat.
|
||||
- For targeted/local debugging, keep using the wrapper: `pnpm test -- <path-or-filter> [vitest args...]` (for example `pnpm test -- src/commands/onboard-search.test.ts -t "shows registered plugin providers"`); do not default to raw `pnpm vitest run ...` because it bypasses wrapper config/profile/pool routing.
|
||||
- Do not set test workers above 16; tried already.
|
||||
- Do not reintroduce Vitest VM pools by default without fresh green evidence on current `main`; keep CI on `forks`.
|
||||
- Keep Vitest on `forks` only. Do not introduce or reintroduce any non-`forks` Vitest pool or alternate execution mode in configs, wrapper scripts, or default test commands without explicit approval in this chat. This includes `threads`, `vmThreads`, `vmForks`, and any future/nonstandard pool variant.
|
||||
- If local Vitest runs cause memory pressure (common on non-Mac-Studio hosts), use `OPENCLAW_TEST_PROFILE=low OPENCLAW_TEST_SERIAL_GATEWAY=1 pnpm test` for land/gate runs.
|
||||
- Live tests (real keys): `OPENCLAW_LIVE_TEST=1 pnpm test:live` (OpenClaw-only) or `LIVE=1 pnpm test:live` (includes provider live tests). Docker: `pnpm test:docker:live-models`, `pnpm test:docker:live-gateway`. Onboarding Docker E2E: `pnpm test:docker:onboard`.
|
||||
- Full kit + what’s covered: `docs/help/testing.md`.
|
||||
|
||||
@@ -595,6 +595,9 @@
|
||||
"build:docker": "node scripts/tsdown-build.mjs && node scripts/runtime-postbuild.mjs && node scripts/build-stamp.mjs && node --import tsx scripts/canvas-a2ui-copy.ts && node --import tsx scripts/copy-hook-metadata.ts && node --import tsx scripts/copy-export-html-templates.ts && node --import tsx scripts/write-build-info.ts && node --import tsx scripts/write-cli-startup-metadata.ts && node --import tsx scripts/write-cli-compat.ts",
|
||||
"build:plugin-sdk:dts": "tsc -p tsconfig.plugin-sdk.dts.json",
|
||||
"build:strict-smoke": "pnpm canvas:a2ui:bundle && node scripts/tsdown-build.mjs && node scripts/runtime-postbuild.mjs && node scripts/build-stamp.mjs && pnpm build:plugin-sdk:dts",
|
||||
"canon:check": "node scripts/canon.mjs check",
|
||||
"canon:check:json": "node scripts/canon.mjs check --json",
|
||||
"canon:enforce": "node scripts/canon.mjs enforce --json",
|
||||
"canvas:a2ui:bundle": "bash scripts/bundle-a2ui.sh",
|
||||
"check": "pnpm check:no-conflict-markers && pnpm check:host-env-policy:swift && pnpm check:base-config-schema && pnpm check:bundled-plugin-metadata && pnpm check:bundled-provider-auth-env-vars && pnpm format:check && pnpm tsgo && pnpm plugin-sdk:check-exports && pnpm lint && pnpm lint:tmp:no-random-messaging && pnpm lint:tmp:channel-agnostic-boundaries && pnpm lint:tmp:no-raw-channel-fetch && pnpm lint:agent:ingress-owner && pnpm lint:plugins:no-register-http-handler && pnpm lint:plugins:no-monolithic-plugin-sdk-entry-imports && pnpm lint:plugins:no-extension-src-imports && pnpm lint:plugins:no-extension-test-core-imports && pnpm lint:plugins:no-extension-imports && pnpm lint:plugins:plugin-sdk-subpaths-exported && pnpm lint:extensions:no-src-outside-plugin-sdk && pnpm lint:extensions:no-plugin-sdk-internal && pnpm lint:extensions:no-relative-outside-package && pnpm lint:web-search-provider-boundaries && pnpm lint:webhook:no-low-level-body-read && pnpm lint:auth:no-pairing-store-group && pnpm lint:auth:pairing-account-scope",
|
||||
"check:base-config-schema": "node --import tsx scripts/generate-base-config-schema.ts --check",
|
||||
@@ -734,7 +737,6 @@
|
||||
"test:parallels:npm-update": "bash scripts/e2e/parallels-npm-update-smoke.sh",
|
||||
"test:parallels:windows": "bash scripts/e2e/parallels-windows-smoke.sh",
|
||||
"test:perf:budget": "node scripts/test-perf-budget.mjs",
|
||||
"test:perf:find-thread-candidates": "node scripts/test-find-thread-candidates.mjs",
|
||||
"test:perf:hotspots": "node scripts/test-hotspots.mjs",
|
||||
"test:perf:imports": "OPENCLAW_VITEST_IMPORT_DURATIONS=1 OPENCLAW_VITEST_PRINT_IMPORT_BREAKDOWN=1 pnpm test",
|
||||
"test:perf:imports:changed": "OPENCLAW_VITEST_IMPORT_DURATIONS=1 OPENCLAW_VITEST_PRINT_IMPORT_BREAKDOWN=1 pnpm test -- --changed origin/main",
|
||||
|
||||
@@ -1,195 +0,0 @@
|
||||
import { spawnSync } from "node:child_process";
|
||||
import path from "node:path";
|
||||
import { pathToFileURL } from "node:url";
|
||||
import {
|
||||
booleanFlag,
|
||||
floatFlag,
|
||||
intFlag,
|
||||
parseFlagArgs,
|
||||
readEnvNumber,
|
||||
stringFlag,
|
||||
} from "./lib/arg-utils.mjs";
|
||||
import { formatMs } from "./lib/vitest-report-cli-utils.mjs";
|
||||
import { loadTestRunnerBehavior, loadUnitTimingManifest } from "./test-runner-manifest.mjs";
|
||||
|
||||
/**
 * Parse CLI argv into the benchmark options object.
 *
 * Defaults may be seeded from OPENCLAW_TEST_THREAD_CANDIDATE_* environment
 * variables; explicit flags always win. Positional (non-dash) arguments are
 * collected into `files`. Unknown dash-prefixed options throw.
 *
 * @param {string[]} argv - process.argv.slice(2)-style argument list.
 * @returns {{config: string, limit: number, minDurationMs: number,
 *           minGainMs: number, minGainPct: number, json: boolean,
 *           files: string[]}} parsed options.
 * @throws {Error} on an unrecognized `-`/`--` option.
 */
export function parseArgs(argv) {
  const envLimit = readEnvNumber("OPENCLAW_TEST_THREAD_CANDIDATE_LIMIT");
  // Environment-seeded defaults; the limit is clamped to a positive integer.
  const defaults = {
    config: "vitest.unit.config.ts",
    limit: Number.isFinite(envLimit) ? Math.max(1, Math.floor(envLimit)) : 20,
    minDurationMs: readEnvNumber("OPENCLAW_TEST_THREAD_CANDIDATE_MIN_DURATION_MS") ?? 250,
    minGainMs: readEnvNumber("OPENCLAW_TEST_THREAD_CANDIDATE_MIN_GAIN_MS") ?? 100,
    minGainPct: readEnvNumber("OPENCLAW_TEST_THREAD_CANDIDATE_MIN_GAIN_PCT") ?? 10,
    json: false,
    files: [],
  };
  const flagSpecs = [
    stringFlag("--config", "config"),
    intFlag("--limit", "limit", { min: 1 }),
    floatFlag("--min-duration-ms", "minDurationMs", { min: 0 }),
    floatFlag("--min-gain-ms", "minGainMs", { min: 0 }),
    // Percent gain must be strictly positive (includeMin: false excludes 0).
    floatFlag("--min-gain-pct", "minGainPct", { min: 0, includeMin: false }),
    booleanFlag("--json", "json"),
  ];
  return parseFlagArgs(argv, defaults, flagSpecs, {
    ignoreDoubleDash: true,
    onUnhandledArg(arg, args) {
      // Positionals are treated as explicit test-file filters.
      if (!arg.startsWith("-")) {
        args.files.push(arg);
        return "handled";
      }
      throw new Error(`Unknown option: ${arg}`);
    },
  });
}
|
||||
|
||||
/**
 * Collect every test file that is already pinned or isolated in the runner
 * behavior manifest, so the candidate scan never re-suggests it.
 *
 * Reads both the current bucket names (`threadPinned`) and the legacy alias
 * (`threadSingleton`); missing buckets are treated as empty.
 *
 * @param {object} behavior - loaded test-runner behavior manifest.
 * @returns {Set<string>} repo-relative file paths to exclude.
 */
export function getExistingThreadCandidateExclusions(behavior) {
  const buckets = [
    behavior.base?.threadPinned,
    behavior.base?.threadSingleton,
    behavior.unit?.isolated,
    behavior.unit?.threadPinned,
    behavior.unit?.threadSingleton,
  ];
  const excluded = new Set();
  for (const bucket of buckets) {
    for (const entry of bucket ?? []) {
      excluded.add(entry.file);
    }
  }
  return excluded;
}
|
||||
|
||||
/**
 * Pick the benchmark candidates: unexcluded files whose recorded duration is
 * at or above the floor, sorted slowest-first, capped at `limit`.
 *
 * Files missing from the timing manifest have a `null` duration; they are
 * kept only when `includeUnknownDuration` is set (explicit positional files)
 * and then sort as if their duration were 0 (i.e. last).
 *
 * @param {{files: string[], timings: {files: object}, exclude?: Set<string>,
 *          limit: number, minDurationMs: number,
 *          includeUnknownDuration?: boolean}} params
 * @returns {string[]} candidate file paths, slowest first.
 */
export function selectThreadCandidateFiles({
  files,
  timings,
  exclude = new Set(),
  limit,
  minDurationMs,
  includeUnknownDuration = false,
}) {
  const kept = [];
  for (const file of files) {
    if (exclude.has(file)) {
      continue;
    }
    const durationMs = timings.files[file]?.durationMs ?? null;
    const passesFloor =
      durationMs === null ? includeUnknownDuration : durationMs >= minDurationMs;
    if (passesFloor) {
      kept.push({ file, durationMs });
    }
  }
  // Stable descending sort; null coerces to 0 so unknown durations land last.
  kept.sort((a, b) => b.durationMs - a.durationMs);
  return kept.slice(0, limit).map((entry) => entry.file);
}
|
||||
|
||||
/**
 * Compare one file's forks-vs-threads benchmark runs and decide whether the
 * threads pool is worth recommending.
 *
 * A file is recommended only when BOTH runs exited 0, threads was strictly
 * faster, and the gain clears both the absolute (ms) and relative (%) floors.
 *
 * @param {{file: string,
 *          forks: {exitCode: number, elapsedMs: number},
 *          threads: {exitCode: number, elapsedMs: number},
 *          minGainMs: number, minGainPct: number}} params
 * @returns {{file, forks, threads, gainMs: number, gainPct: number,
 *           recommended: boolean}}
 */
export function summarizeThreadBenchmark({ file, forks, threads, minGainMs, minGainPct }) {
  const gainMs = forks.elapsedMs - threads.elapsedMs;
  // Guard the division: a zero forks time yields 0% rather than NaN/Infinity.
  const gainPct = forks.elapsedMs > 0 ? (gainMs / forks.elapsedMs) * 100 : 0;
  const bothGreen = forks.exitCode === 0 && threads.exitCode === 0;
  const recommended =
    bothGreen &&
    gainMs >= minGainMs &&
    gainPct >= minGainPct &&
    // Strict win required even when both thresholds are configured to 0.
    threads.elapsedMs < forks.elapsedMs;
  return { file, forks, threads, gainMs, gainPct, recommended };
}
|
||||
|
||||
/**
 * Run a single test file once through `pnpm vitest run` under the given pool
 * and measure the wall-clock time.
 *
 * Synchronous on purpose: sequential runs keep the forks/threads timings
 * comparable. A missing exit status (e.g. spawn failure) is reported as 1.
 *
 * @param {{config: string, file: string, pool: string}} params
 * @returns {{pool: string, exitCode: number, elapsedMs: number,
 *           stderr: string, stdout: string}}
 */
function benchmarkFile({ config, file, pool }) {
  const vitestArgs = ["vitest", "run", "--config", config, `--pool=${pool}`, file];
  const startedAt = process.hrtime.bigint();
  const run = spawnSync("pnpm", vitestArgs, {
    encoding: "utf8",
    env: process.env,
    // 20 MiB buffer so verbose reporter output cannot truncate the capture.
    maxBuffer: 20 * 1024 * 1024,
  });
  const elapsedNs = process.hrtime.bigint() - startedAt;
  return {
    pool,
    exitCode: run.status ?? 1,
    elapsedMs: Number(elapsedNs) / 1_000_000,
    stderr: run.stderr ?? "",
    stdout: run.stdout ?? "",
  };
}
|
||||
|
||||
/**
 * Flatten benchmark summaries into the compact JSON rows emitted by --json:
 * millisecond timings rounded to integers, percent gain to one decimal.
 *
 * @param {Array<object>} results - summaries from summarizeThreadBenchmark.
 * @returns {Array<object>} JSON-friendly rows, one per file.
 */
function buildOutput(results) {
  const rows = [];
  for (const { file, forks, threads, gainMs, gainPct, recommended } of results) {
    rows.push({
      file,
      forksMs: Math.round(forks.elapsedMs),
      threadsMs: Math.round(threads.elapsedMs),
      gainMs: Math.round(gainMs),
      gainPct: Number(gainPct.toFixed(1)),
      forksExitCode: forks.exitCode,
      threadsExitCode: threads.exitCode,
      recommended,
    });
  }
  return rows;
}
|
||||
|
||||
/**
 * CLI entry point: select candidate test files, benchmark each one under both
 * the forks and threads Vitest pools, and report the comparison either as
 * JSON rows (--json) or as a human-readable status table.
 */
async function main() {
  const opts = parseArgs(process.argv.slice(2));
  const behavior = loadTestRunnerBehavior();
  const timings = loadUnitTimingManifest();
  // Files already pinned/isolated in the behavior manifest are never re-suggested.
  const exclude = getExistingThreadCandidateExclusions(behavior);
  // No positional files => scan every file present in the timing manifest.
  const inputFiles = opts.files.length > 0 ? opts.files : Object.keys(timings.files);
  const candidates = selectThreadCandidateFiles({
    files: inputFiles,
    timings,
    exclude,
    limit: opts.limit,
    minDurationMs: opts.minDurationMs,
    // Explicitly listed files may be missing from the manifest; keep them anyway.
    includeUnknownDuration: opts.files.length > 0,
  });

  // Benchmarks run strictly sequentially so the forks/threads timings are not
  // skewed by concurrent load from sibling benchmark processes.
  const results = [];
  for (const file of candidates) {
    const forks = benchmarkFile({ config: opts.config, file, pool: "forks" });
    const threads = benchmarkFile({ config: opts.config, file, pool: "threads" });
    results.push(
      summarizeThreadBenchmark({
        file,
        forks,
        threads,
        minGainMs: opts.minGainMs,
        minGainPct: opts.minGainPct,
      }),
    );
  }

  if (opts.json) {
    console.log(JSON.stringify(buildOutput(results), null, 2));
    return;
  }

  // Human-readable report: one header line, then one status line per file.
  console.log(
    `[test-find-thread-candidates] tested=${String(results.length)} minGain=${formatMs(
      opts.minGainMs,
      0,
    )} minGainPct=${String(opts.minGainPct)}%`,
  );
  for (const result of results) {
    // Status precedence: a recommendation wins, then whichever pool failed.
    const status = result.recommended
      ? "recommend"
      : result.forks.exitCode !== 0
        ? "forks-failed"
        : result.threads.exitCode !== 0
          ? "threads-failed"
          : "skip";
    console.log(
      `${status.padEnd(14, " ")} ${result.file} forks=${formatMs(
        result.forks.elapsedMs,
        0,
      )} threads=${formatMs(result.threads.elapsedMs, 0)} gain=${formatMs(result.gainMs, 0)} (${result.gainPct.toFixed(1)}%)`,
    );
  }
}
|
||||
|
||||
// Run main() only when this module is executed directly (node scripts/...),
// not when its exports are imported by unit tests; the check compares the
// resolved argv[1] file URL against this module's own URL.
if (process.argv[1] && pathToFileURL(path.resolve(process.argv[1])).href === import.meta.url) {
  main().catch((error) => {
    console.error(error);
    process.exit(1);
  });
}
|
||||
@@ -94,11 +94,8 @@ const testProfile =
|
||||
const isMacMiniProfile = testProfile === "macmini";
|
||||
// Vitest executes Node tests through Vite's SSR/module-runner pipeline, so the
|
||||
// shared unit lane still retains transformed ESM/module state even when the
|
||||
// tests themselves are not "server rendering" a website. We previously kept
|
||||
// forks as the default after VM-pool regressions on constrained hosts. On
|
||||
// 2026-03-22, a direct full-unit threads run finished 1109/1110 green; the sole
|
||||
// correctness exception stayed on the manifest fork lane, so the wrapper now
|
||||
// defaults unit runs to threads while preserving explicit fork escapes.
|
||||
// tests themselves are not "server rendering" a website. Keep forks as the
|
||||
// only active pool so local and CI behavior stay aligned.
|
||||
const forceIsolation =
|
||||
process.env.OPENCLAW_TEST_ISOLATE === "1" || process.env.OPENCLAW_TEST_ISOLATE === "true";
|
||||
const disableIsolation =
|
||||
@@ -110,12 +107,6 @@ const includeChannelsSuite = process.env.OPENCLAW_TEST_INCLUDE_CHANNELS === "1";
|
||||
const includeExtensionsSuite = process.env.OPENCLAW_TEST_INCLUDE_EXTENSIONS === "1";
|
||||
const noIsolateArgs = disableIsolation ? ["--isolate=false"] : [];
|
||||
const skipDefaultRuns = process.env.OPENCLAW_TEST_SKIP_DEFAULT === "1";
|
||||
const parsePoolOverride = (value, fallback) => {
|
||||
if (value === "threads" || value === "forks") {
|
||||
return value;
|
||||
}
|
||||
return fallback;
|
||||
};
|
||||
// Even on low-memory or fully serial hosts, keep the unit lane split so
|
||||
// long-lived workers do not accumulate the whole unit transform graph.
|
||||
const shouldSplitUnitRuns = true;
|
||||
@@ -284,9 +275,9 @@ const channelIsolatedFiles = dedupeFilesPreserveOrder([
|
||||
),
|
||||
]);
|
||||
const channelIsolatedFileSet = new Set(channelIsolatedFiles);
|
||||
const defaultUnitPool = parsePoolOverride(process.env.OPENCLAW_TEST_UNIT_DEFAULT_POOL, "threads");
|
||||
const isTargetedIsolatedUnitFile = (fileFilter) =>
|
||||
unitForkIsolatedFiles.includes(fileFilter) || unitMemoryIsolatedFiles.includes(fileFilter);
|
||||
const isLegacyBasePinnedFile = (fileFilter) => baseThreadPinnedFiles.includes(fileFilter);
|
||||
const inferTarget = (fileFilter) => {
|
||||
const isolated =
|
||||
isTargetedIsolatedUnitFile(fileFilter) ||
|
||||
@@ -548,13 +539,13 @@ const unitFastEntries = unitFastBuckets.flatMap((files, index) => {
|
||||
"run",
|
||||
"--config",
|
||||
"vitest.unit.config.ts",
|
||||
`--pool=${defaultUnitPool}`,
|
||||
"--pool=forks",
|
||||
...noIsolateArgs,
|
||||
],
|
||||
}));
|
||||
});
|
||||
// Shared channel workers retain large transformed module graphs across files on
|
||||
// threads/non-isolated runs. Recycle that lane in bounded batches so the
|
||||
// non-isolated runs. Recycle that lane in bounded batches so the
|
||||
// process gets torn down before unrelated channel files inherit the full graph.
|
||||
const channelsSharedBatches = splitFilesByDurationBudget(
|
||||
channelSharedCandidateFiles,
|
||||
@@ -597,17 +588,17 @@ const unitHeavyEntries = heavyUnitBuckets.map((files, index) => ({
|
||||
...files,
|
||||
],
|
||||
}));
|
||||
const unitThreadEntries =
|
||||
const unitPinnedEntries =
|
||||
unitThreadPinnedFiles.length > 0
|
||||
? [
|
||||
{
|
||||
name: "unit-threads",
|
||||
name: "unit-pinned",
|
||||
args: [
|
||||
"vitest",
|
||||
"run",
|
||||
"--config",
|
||||
"vitest.unit.config.ts",
|
||||
"--pool=threads",
|
||||
"--pool=forks",
|
||||
...noIsolateArgs,
|
||||
...unitThreadPinnedFiles,
|
||||
],
|
||||
@@ -646,7 +637,7 @@ const baseRuns = [
|
||||
file,
|
||||
],
|
||||
})),
|
||||
...unitThreadEntries,
|
||||
...unitPinnedEntries,
|
||||
]
|
||||
: [
|
||||
{
|
||||
@@ -711,8 +702,6 @@ const resolveFilterMatches = (fileFilter) => {
|
||||
}
|
||||
return allKnownTestFiles.filter((file) => file.includes(normalizedFilter));
|
||||
};
|
||||
const isThreadPinnedUnitFile = (fileFilter) => unitThreadPinnedFiles.includes(fileFilter);
|
||||
const isBaseThreadPinnedFile = (fileFilter) => baseThreadPinnedFiles.includes(fileFilter);
|
||||
const createTargetedEntry = (owner, isolated, filters) => {
|
||||
const name = isolated ? `${owner}-isolated` : owner;
|
||||
const forceForks = isolated;
|
||||
@@ -724,27 +713,13 @@ const createTargetedEntry = (owner, isolated, filters) => {
|
||||
"run",
|
||||
"--config",
|
||||
"vitest.unit.config.ts",
|
||||
`--pool=${forceForks ? "forks" : defaultUnitPool}`,
|
||||
"--pool=forks",
|
||||
...noIsolateArgs,
|
||||
...filters,
|
||||
],
|
||||
};
|
||||
}
|
||||
if (owner === "unit-threads") {
|
||||
return {
|
||||
name,
|
||||
args: [
|
||||
"vitest",
|
||||
"run",
|
||||
"--config",
|
||||
"vitest.unit.config.ts",
|
||||
"--pool=threads",
|
||||
...noIsolateArgs,
|
||||
...filters,
|
||||
],
|
||||
};
|
||||
}
|
||||
if (owner === "base-threads") {
|
||||
if (owner === "base-pinned") {
|
||||
return {
|
||||
name,
|
||||
args: [
|
||||
@@ -752,7 +727,7 @@ const createTargetedEntry = (owner, isolated, filters) => {
|
||||
"run",
|
||||
"--config",
|
||||
"vitest.config.ts",
|
||||
"--pool=threads",
|
||||
"--pool=forks",
|
||||
...noIsolateArgs,
|
||||
...filters,
|
||||
],
|
||||
@@ -835,11 +810,7 @@ const formatPerFileEntryName = (owner, file) => {
|
||||
};
|
||||
const createPerFileTargetedEntry = (file) => {
|
||||
const target = inferTarget(file);
|
||||
const owner = isThreadPinnedUnitFile(file)
|
||||
? "unit-threads"
|
||||
: isBaseThreadPinnedFile(file)
|
||||
? "base-threads"
|
||||
: target.owner;
|
||||
const owner = isLegacyBasePinnedFile(file) ? "base-pinned" : target.owner;
|
||||
return {
|
||||
...createTargetedEntry(owner, target.isolated, [file]),
|
||||
name: `${formatPerFileEntryName(owner, file)}${target.isolated ? "-isolated" : ""}`,
|
||||
@@ -913,11 +884,7 @@ const targetedEntries = (() => {
|
||||
if (matchedFiles.length === 0) {
|
||||
const normalizedFile = normalizeRepoPath(fileFilter);
|
||||
const target = inferTarget(normalizedFile);
|
||||
const owner = isThreadPinnedUnitFile(normalizedFile)
|
||||
? "unit-threads"
|
||||
: isBaseThreadPinnedFile(normalizedFile)
|
||||
? "base-threads"
|
||||
: target.owner;
|
||||
const owner = isLegacyBasePinnedFile(normalizedFile) ? "base-pinned" : target.owner;
|
||||
const key = `${owner}:${target.isolated ? "isolated" : "default"}`;
|
||||
const files = acc.get(key) ?? [];
|
||||
files.push(normalizedFile);
|
||||
@@ -926,11 +893,7 @@ const targetedEntries = (() => {
|
||||
}
|
||||
for (const matchedFile of matchedFiles) {
|
||||
const target = inferTarget(matchedFile);
|
||||
const owner = isThreadPinnedUnitFile(matchedFile)
|
||||
? "unit-threads"
|
||||
: isBaseThreadPinnedFile(matchedFile)
|
||||
? "base-threads"
|
||||
: target.owner;
|
||||
const owner = isLegacyBasePinnedFile(matchedFile) ? "base-pinned" : target.owner;
|
||||
const key = `${owner}:${target.isolated ? "isolated" : "default"}`;
|
||||
const files = acc.get(key) ?? [];
|
||||
files.push(matchedFile);
|
||||
@@ -941,7 +904,7 @@ const targetedEntries = (() => {
|
||||
return Array.from(groups, ([key, filters]) => {
|
||||
const [owner, mode] = key.split(":");
|
||||
const uniqueFilters = [...new Set(filters)];
|
||||
if (mode === "isolated") {
|
||||
if (mode === "isolated" || owner === "base-pinned") {
|
||||
return uniqueFilters.map((file) => createPerFileTargetedEntry(file));
|
||||
}
|
||||
return [createTargetedEntry(owner, false, uniqueFilters)];
|
||||
@@ -1142,9 +1105,6 @@ const maxWorkersForRun = (name) => {
|
||||
if (isCI && isMacOS) {
|
||||
return 1;
|
||||
}
|
||||
if (name.endsWith("-threads")) {
|
||||
return 1;
|
||||
}
|
||||
if (name.endsWith("-isolated")) {
|
||||
return 1;
|
||||
}
|
||||
|
||||
@@ -1,138 +0,0 @@
|
||||
import { describe, expect, it } from "vitest";
|
||||
import {
|
||||
getExistingThreadCandidateExclusions,
|
||||
parseArgs,
|
||||
selectThreadCandidateFiles,
|
||||
summarizeThreadBenchmark,
|
||||
} from "../../scripts/test-find-thread-candidates.mjs";
|
||||
|
||||
// Unit coverage for the CLI flag parser of scripts/test-find-thread-candidates.mjs.
describe("scripts/test-find-thread-candidates parseArgs", () => {
  // Every flag supplied explicitly plus one positional file filter.
  it("parses explicit thresholds and positional files", () => {
    expect(
      parseArgs([
        "--limit",
        "4",
        "--min-duration-ms",
        "600",
        "--min-gain-ms",
        "120",
        "--min-gain-pct",
        "15",
        "--json",
        "src/a.test.ts",
      ]),
    ).toEqual({
      config: "vitest.unit.config.ts",
      limit: 4,
      minDurationMs: 600,
      minGainMs: 120,
      minGainPct: 15,
      json: true,
      files: ["src/a.test.ts"],
    });
  });

  // Zero is a valid threshold for deep scans; it must not fall back to defaults.
  it("accepts zero thresholds for explicit deep scans", () => {
    expect(parseArgs(["--min-duration-ms", "0", "--min-gain-ms", "0"])).toMatchObject({
      minDurationMs: 0,
      minGainMs: 0,
    });
  });
});
|
||||
|
||||
// Exclusion-set coverage: already-routed files must never be re-suggested.
describe("scripts/test-find-thread-candidates exclusions", () => {
  // Pinned/isolated entries from both the base and unit buckets are merged.
  it("collects already-pinned files across behavior buckets", () => {
    expect(
      getExistingThreadCandidateExclusions({
        base: {
          threadPinned: [{ file: "src/base-a.test.ts" }],
        },
        unit: {
          isolated: [{ file: "src/a.test.ts" }],
          threadPinned: [{ file: "src/c.test.ts" }],
        },
      }),
    ).toEqual(new Set(["src/base-a.test.ts", "src/a.test.ts", "src/c.test.ts"]));
  });

  // threadSingleton is the legacy alias for threadPinned; both spellings count.
  it("keeps backward-compatible aliases readable", () => {
    expect(
      getExistingThreadCandidateExclusions({
        base: {
          threadSingleton: [{ file: "src/base-a.test.ts" }],
        },
        unit: {
          isolated: [{ file: "src/a.test.ts" }],
          threadSingleton: [{ file: "src/c.test.ts" }],
        },
      }),
    ).toEqual(new Set(["src/base-a.test.ts", "src/a.test.ts", "src/c.test.ts"]));
  });
});
|
||||
|
||||
// Candidate-selection coverage: exclusion, duration floor, and unknown timings.
describe("scripts/test-find-thread-candidates selection", () => {
  // Excluded files and files below the duration floor are filtered out;
  // files absent from the manifest are dropped by default.
  it("keeps only known, unpinned files above the duration floor", () => {
    expect(
      selectThreadCandidateFiles({
        files: ["src/a.test.ts", "src/b.test.ts", "src/c.test.ts", "src/d.test.ts"],
        timings: {
          files: {
            "src/a.test.ts": { durationMs: 1100 },
            "src/b.test.ts": { durationMs: 700 },
            "src/c.test.ts": { durationMs: 300 },
          },
        },
        exclude: new Set(["src/b.test.ts"]),
        limit: 10,
        minDurationMs: 500,
      }),
    ).toEqual(["src/a.test.ts"]);
  });

  // Explicit positional files may lack timing data; includeUnknownDuration
  // keeps them (sorted after every known-duration file).
  it("allows explicit unknown-duration files when requested", () => {
    expect(
      selectThreadCandidateFiles({
        files: ["src/a.test.ts", "src/b.test.ts"],
        timings: {
          files: {
            "src/a.test.ts": { durationMs: 700 },
          },
        },
        exclude: new Set(),
        limit: 10,
        minDurationMs: 500,
        includeUnknownDuration: true,
      }),
    ).toEqual(["src/a.test.ts", "src/b.test.ts"]);
  });
});
|
||||
|
||||
// Recommendation-logic coverage for the forks-vs-threads comparison.
describe("scripts/test-find-thread-candidates summarizeThreadBenchmark", () => {
  // 220 ms / 22% gain clears the 100 ms and 10% floors with both runs green.
  it("recommends clear thread wins", () => {
    expect(
      summarizeThreadBenchmark({
        file: "src/a.test.ts",
        forks: { exitCode: 0, elapsedMs: 1000, stderr: "", stdout: "" },
        threads: { exitCode: 0, elapsedMs: 780, stderr: "", stdout: "" },
        minGainMs: 100,
        minGainPct: 10,
      }),
    ).toMatchObject({
      file: "src/a.test.ts",
      gainMs: 220,
      recommended: true,
    });
  });

  // A failing threads run can never be recommended, regardless of speed.
  it("rejects thread failures even when the measured wall time is lower", () => {
    expect(
      summarizeThreadBenchmark({
        file: "src/b.test.ts",
        forks: { exitCode: 0, elapsedMs: 1000, stderr: "", stdout: "" },
        threads: { exitCode: 1, elapsedMs: 400, stderr: "TypeError", stdout: "" },
        minGainMs: 100,
        minGainPct: 10,
      }).recommended,
    ).toBe(false);
  });
});
|
||||
@@ -145,4 +145,23 @@ describe("scripts/test-parallel lane planning", () => {
|
||||
expect(output).toContain("unit-fast-batch-");
|
||||
expect(output).not.toContain("unit-fast filters=all maxWorkers=");
|
||||
});
|
||||
|
||||
it("keeps legacy base-pinned targeted reruns on dedicated forks lanes", () => {
|
||||
const repoRoot = path.resolve(import.meta.dirname, "../..");
|
||||
const output = execFileSync(
|
||||
"node",
|
||||
["scripts/test-parallel.mjs", "src/auto-reply/reply/followup-runner.test.ts"],
|
||||
{
|
||||
cwd: repoRoot,
|
||||
env: {
|
||||
...process.env,
|
||||
OPENCLAW_TEST_LIST_LANES: "1",
|
||||
},
|
||||
encoding: "utf8",
|
||||
},
|
||||
);
|
||||
|
||||
expect(output).toContain("base-pinned-followup-runner");
|
||||
expect(output).not.toContain("base-followup-runner");
|
||||
});
|
||||
});
|
||||
|
||||
@@ -54,6 +54,7 @@ describe("scoped vitest configs", () => {
|
||||
|
||||
it("defaults channel tests to non-isolated mode", () => {
|
||||
expect(defaultChannelsConfig.test?.isolate).toBe(false);
|
||||
expect(defaultChannelsConfig.test?.pool).toBe("forks");
|
||||
});
|
||||
|
||||
it("loads channel include overrides from OPENCLAW_VITEST_INCLUDE_FILE", () => {
|
||||
@@ -82,6 +83,7 @@ describe("scoped vitest configs", () => {
|
||||
|
||||
it("defaults extension tests to non-isolated mode", () => {
|
||||
expect(defaultExtensionsConfig.test?.isolate).toBe(false);
|
||||
expect(defaultExtensionsConfig.test?.pool).toBe("forks");
|
||||
});
|
||||
|
||||
it("normalizes extension include patterns relative to the scoped dir", () => {
|
||||
|
||||
@@ -11,7 +11,7 @@ export function loadIncludePatternsFromEnv(
|
||||
export function createChannelsVitestConfig(env?: Record<string, string | undefined>) {
|
||||
return createScopedVitestConfig(loadIncludePatternsFromEnv(env) ?? channelTestInclude, {
|
||||
env,
|
||||
pool: "threads",
|
||||
pool: "forks",
|
||||
exclude: ["src/gateway/**"],
|
||||
passWithNoTests: true,
|
||||
});
|
||||
|
||||
@@ -14,7 +14,7 @@ export function createExtensionsVitestConfig(
|
||||
return createScopedVitestConfig(loadIncludePatternsFromEnv(env) ?? ["extensions/**/*.test.ts"], {
|
||||
dir: "extensions",
|
||||
env,
|
||||
pool: "threads",
|
||||
pool: "forks",
|
||||
passWithNoTests: true,
|
||||
// Channel implementations live under extensions/ but are tested by
|
||||
// vitest.channels.config.ts (pnpm test:channels) which provides
|
||||
|
||||
Reference in New Issue
Block a user