diff --git a/.github/workflows/stage-2-test.yaml b/.github/workflows/stage-2-test.yaml index 5adb30c7b..42b5c2734 100644 --- a/.github/workflows/stage-2-test.yaml +++ b/.github/workflows/stage-2-test.yaml @@ -100,12 +100,29 @@ jobs: with: node-version: ${{ inputs.nodejs_version }} GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + - name: "Install dependencies" + run: npm ci - name: "Setup Python" uses: actions/setup-python@v6 with: python-version: ${{ inputs.python_version }} cache: 'pip' cache-dependency-path: '**/requirements*.txt' + - name: "Cache generated dependencies" + id: schema-cache + uses: actions/cache@v4 + with: + path: | + schemas/digital-letters/ + output/digital-letters/ + src/digital-letters-events/types/ + src/digital-letters-events/validators/ + src/digital-letters-events/digital_letters_events/models/ + key: generated-deps-${{ runner.os }}-${{ hashFiles('src/cloudevents/**', 'src/typescript-schema-generator/**', 'src/python-schema-generator/**') }} + - name: "Generate dependencies" + if: steps.schema-cache.outputs.cache-hit != 'true' + run: | + npm run generate-dependencies - name: "Run unit test suite" run: | make test-unit diff --git a/.gitignore b/.gitignore index cf7d4b6ab..8732a35f1 100644 --- a/.gitignore +++ b/.gitignore @@ -59,3 +59,6 @@ coverage-*/ **/playwright-report **/test-results plugin-cache + +# Generated by npm run test:unit:parallel — do not commit +jest.config.cjs diff --git a/lambdas/core-notifier-lambda/src/__tests__/app/notify-api-client.test.ts b/lambdas/core-notifier-lambda/src/__tests__/app/notify-api-client.test.ts index 59af51fd2..e84eb293a 100644 --- a/lambdas/core-notifier-lambda/src/__tests__/app/notify-api-client.test.ts +++ b/lambdas/core-notifier-lambda/src/__tests__/app/notify-api-client.test.ts @@ -12,7 +12,10 @@ import { IAccessTokenRepository, NotifyClient } from 'app/notify-api-client'; import { RequestAlreadyReceivedError } from 'domain/request-already-received-error'; jest.mock('utils'); 
-jest.mock('node:crypto'); +jest.mock('node:crypto', () => ({ + ...jest.requireActual('node:crypto'), + randomUUID: jest.fn(), +})); jest.mock('axios', () => { const original: AxiosStatic = jest.requireActual('axios'); diff --git a/lambdas/core-notifier-lambda/src/__tests__/domain/mapper.test.ts b/lambdas/core-notifier-lambda/src/__tests__/domain/mapper.test.ts index e1bde3bc7..dfb1d3bfa 100644 --- a/lambdas/core-notifier-lambda/src/__tests__/domain/mapper.test.ts +++ b/lambdas/core-notifier-lambda/src/__tests__/domain/mapper.test.ts @@ -9,7 +9,10 @@ import { PDMResourceAvailable } from 'digital-letters-events'; import { randomUUID } from 'node:crypto'; jest.mock('utils'); -jest.mock('node:crypto'); +jest.mock('node:crypto', () => ({ + ...jest.requireActual('node:crypto'), + randomUUID: jest.fn(), +})); const mockLogger = jest.mocked(logger); const mockRandomUUID = jest.mocked(randomUUID); diff --git a/lambdas/mesh-acknowledge/pytest.ini b/lambdas/mesh-acknowledge/pytest.ini index e19306a77..7f80cf1af 100644 --- a/lambdas/mesh-acknowledge/pytest.ini +++ b/lambdas/mesh-acknowledge/pytest.ini @@ -7,6 +7,7 @@ addopts = -v --tb=short [coverage:run] relative_files = True +data_file = lambdas/mesh-acknowledge/.coverage omit = */mesh_acknowledge/__tests__/* */test_*.py diff --git a/lambdas/mesh-download/pytest.ini b/lambdas/mesh-download/pytest.ini index 303659aad..b2483b3c0 100644 --- a/lambdas/mesh-download/pytest.ini +++ b/lambdas/mesh-download/pytest.ini @@ -7,6 +7,7 @@ addopts = -v --tb=short [coverage:run] relative_files = True +data_file = lambdas/mesh-download/.coverage omit = */tests/* */test_*.py diff --git a/lambdas/mesh-poll/pytest.ini b/lambdas/mesh-poll/pytest.ini index 933720312..8657f96d1 100644 --- a/lambdas/mesh-poll/pytest.ini +++ b/lambdas/mesh-poll/pytest.ini @@ -7,6 +7,7 @@ addopts = -v --tb=short [coverage:run] relative_files = True +data_file = lambdas/mesh-poll/.coverage omit = */mesh_poll/__tests__/* */test_*.py diff --git 
a/lambdas/report-sender/pytest.ini b/lambdas/report-sender/pytest.ini index 91879c293..9402fd11a 100644 --- a/lambdas/report-sender/pytest.ini +++ b/lambdas/report-sender/pytest.ini @@ -7,6 +7,7 @@ addopts = -v --tb=short [coverage:run] relative_files = True +data_file = lambdas/report-sender/.coverage omit = */report_sender/__tests__/* */test_*.py diff --git a/package.json b/package.json index ca8080a48..459fe5fec 100644 --- a/package.json +++ b/package.json @@ -54,6 +54,7 @@ "lint:fix": "npm run lint:fix --workspaces", "start": "npm run start --workspace frontend", "test:unit": "npm run test:unit --workspaces", + "test:unit:parallel": "tsx scripts/generate-parallel-jest-config.ts && cross-env NODE_OPTIONS=\"$NODE_OPTIONS --experimental-vm-modules\" jest --config jest.config.cjs", "typecheck": "npm run typecheck --workspaces" }, "version": "0.0.1", diff --git a/scripts/generate-parallel-jest-config.ts b/scripts/generate-parallel-jest-config.ts new file mode 100644 index 000000000..ea7fe7714 --- /dev/null +++ b/scripts/generate-parallel-jest-config.ts @@ -0,0 +1,153 @@ +/** + * Generates jest.config.cjs from the individual workspace jest.config.ts files. + */ + +import { execFileSync } from 'node:child_process'; +import * as fs from 'node:fs'; +import * as os from 'node:os'; +import * as path from 'node:path'; + +const repoRoot = path.resolve(__dirname, '..'); +const outputPath = path.join(repoRoot, 'jest.config.cjs'); + +interface PackageJson { + workspaces?: string[]; +} + +const rootPkg = JSON.parse( + fs.readFileSync(path.join(repoRoot, 'package.json'), 'utf8'), +) as PackageJson; + +const workspaces: string[] = rootPkg.workspaces ?? []; + +/** + * Inline TypeScript written to a temp .ts file and executed by tsx so each + * workspace jest.config.ts is evaluated in an isolated Node process with a + * fresh module registry (preventing shared mutable baseJestConfig state). 
+ */ +const EVALUATOR = (configPath: string): string => ` +import config from ${JSON.stringify(configPath)}; +process.stdout.write(JSON.stringify(config)); +`; + +/** Serialise a plain JS value to source code, indented with the given prefix. */ +function serialise(value: unknown, indent = ' '): string { + if (value === null) return 'null'; + if (value === undefined) return 'undefined'; + if (typeof value === 'string') return JSON.stringify(value); + if (typeof value === 'number' || typeof value === 'boolean') + return String(value); + + if (Array.isArray(value)) { + if (value.length === 0) return '[]'; + const items = value + .map((v) => `${indent} ${serialise(v, indent + ' ')}`) + .join(',\n'); + return `[\n${items},\n${indent}]`; + } + + if (typeof value === 'object') { + const entries = Object.entries(value as Record<string, unknown>).filter( + ([, v]) => v !== undefined, + ); + if (entries.length === 0) return '{}'; + const lines = entries + .map( + ([k, v]) => + `${indent} ${JSON.stringify(k)}: ${serialise(v, indent + ' ')}`, + ) + .join(',\n'); + return `{\n${lines},\n${indent}}`; + } + + return String(value); +} + +interface ProjectEntry { + workspace: string; + config: Record<string, unknown>; +} + +function main(): void { + const projects: ProjectEntry[] = []; + + for (const ws of workspaces) { + const wsDir = path.join(repoRoot, ws); + + const hasCjs = fs.existsSync(path.join(wsDir, 'jest.config.cjs')); + const hasTs = fs.existsSync(path.join(wsDir, 'jest.config.ts')); + + if (hasCjs && !hasTs) { + throw new Error( + `${ws} has jest.config.cjs but no jest.config.ts. ` + + `Migrate it to jest.config.ts so the generator can handle it uniformly.`, + ); + } + + if (!hasTs) { + // No Jest config → no Jest tests (e.g. src/digital-letters-events) + continue; + } + + // Evaluate the workspace config in an isolated tsx subprocess so that the + // shared mutable `baseJestConfig` object is freshly initialised for every + // workspace.
Dynamic import() in the parent process would share the cached + // module instance and accumulate mutations. + const configPath = path.join(wsDir, 'jest.config.ts'); + const tsxBin = path.join(repoRoot, 'node_modules', '.bin', 'tsx'); + const tmpFile = path.join(os.tmpdir(), `jest-config-eval-${Date.now()}.ts`); + let json: string; + try { + fs.writeFileSync(tmpFile, EVALUATOR(configPath), 'utf8'); + json = execFileSync(tsxBin, [tmpFile], { + cwd: repoRoot, + encoding: 'utf8', + }); + } finally { + fs.rmSync(tmpFile, { force: true }); + } + const wsConfig = JSON.parse(json) as Record<string, unknown>; + + // Inject rootDir and displayName. Jest resolves all relative paths inside a + // project entry relative to that project's rootDir. + const entry: Record<string, unknown> = { + ...wsConfig, + rootDir: `<rootDir>/${ws}`, + displayName: ws, + }; + + projects.push({ workspace: ws, config: entry }); + } + + // Build the projects array source + const projectLines = projects.map((p) => { + const body = serialise(p.config, ' '); + return ` // ${p.workspace}\n ${body}`; + }); + + const banner = `/** + * Root Jest config — runs all TypeScript workspace test suites in + * parallel via Jest's native \`projects\` support. + * + * ⚠️ THIS FILE IS AUTO-GENERATED. Do not edit it directly. + * + * Generated by scripts/generate-parallel-jest-config.ts + */ + +/** @type {import('jest').Config} */ +module.exports = { + projects: [ +${projectLines.join(',\n\n')} + ], +}; +`; + + fs.writeFileSync(outputPath, banner, 'utf8'); + console.log(`Written: ${path.relative(repoRoot, outputPath)}`); + console.log(` ${projects.length} project(s) included`); + for (const p of projects) { + console.log(` ${p.workspace}`); + } +} + +main(); diff --git a/scripts/tests/unit.sh b/scripts/tests/unit.sh index 1a4a23e6c..1ea43322c 100755 --- a/scripts/tests/unit.sh +++ b/scripts/tests/unit.sh @@ -4,72 +4,100 @@ set -euo pipefail cd "$(git rev-parse --show-toplevel)" -# This file is for you! Edit it to call your unit test suite.
Note that the same -# file will be called if you run it locally as if you run it on CI. - -# Replace the following line with something like: -# -# rails test:unit -# python manage.py test -# npm run test -# -# or whatever is appropriate to your project. You should *only* run your fast -# tests from here. If you want to run other test suites, see the predefined -# tasks in scripts/test.mk. - -# run tests - -# TypeScript/JavaScript projects (npm workspace) -# Note: src/cloudevents is included in workspaces, so it will be tested here -npm ci -npm run generate-dependencies -npm run test:unit --workspaces - -# Python projects - asyncapigenerator -echo "Setting up and running asyncapigenerator tests..." -make -C ./src/asyncapigenerator install-dev -make -C ./src/asyncapigenerator coverage # Run with coverage to generate coverage.xml for SonarCloud - -# Python projects - cloudeventjekylldocs -echo "Setting up and running cloudeventjekylldocs tests..." -make -C ./src/cloudeventjekylldocs install-dev -make -C ./src/cloudeventjekylldocs coverage # Run with coverage to generate coverage.xml for SonarCloud - -# Python projects - eventcatalogasyncapiimporter -echo "Setting up and running eventcatalogasyncapiimporter tests..." -make -C ./src/eventcatalogasyncapiimporter install-dev -make -C ./src/eventcatalogasyncapiimporter coverage # Run with coverage to generate coverage.xml for SonarCloud - -# Python utility packages - py-utils -echo "Setting up and running py-utils tests..." -make -C ./utils/py-utils install-dev -make -C ./utils/py-utils coverage # Run with coverage to generate coverage.xml for SonarCloud - -# Python projects - python-schema-generator -echo "Setting up and running python-schema-generator tests..." -make -C ./src/python-schema-generator install-dev -make -C ./src/python-schema-generator coverage # Run with coverage to generate coverage.xml for SonarCloud - -# Python Lambda - mesh-acknowledge -echo "Setting up and running mesh-acknowledge tests..." 
-make -C ./lambdas/mesh-acknowledge install-dev -make -C ./lambdas/mesh-acknowledge coverage # Run with coverage to generate coverage.xml for SonarCloud - -# Python Lambda - mesh-poll -echo "Setting up and running mesh-poll tests..." -make -C ./lambdas/mesh-poll install-dev -make -C ./lambdas/mesh-poll coverage # Run with coverage to generate coverage.xml for SonarCloud - -# Python Lambda - mesh-download -echo "Setting up and running mesh-download tests..." -make -C ./lambdas/mesh-download install-dev -make -C ./lambdas/mesh-download coverage # Run with coverage to generate coverage.xml for SonarCloud - -# Python Lambda - report-sender -echo "Setting up and running report-sender tests..." -make -C ./lambdas/report-sender install-dev -make -C ./lambdas/report-sender coverage # Run with coverage to generate coverage.xml for SonarCloud +_timer_labels=() +_timer_seconds=() + +run_timed() { + local label="$1" + shift + local start + start=$(date +%s) + local rc=0 + "$@" || rc=$? + local end + end=$(date +%s) + _timer_labels+=("$label") + _timer_seconds+=("$((end - start))") + return "$rc" +} + +print_timing_summary() { + echo "" + echo "===== Timing Summary =====" + local total=0 + for i in "${!_timer_labels[@]}"; do + printf " %-55s %4ds\n" "${_timer_labels[$i]}" "${_timer_seconds[$i]}" + total=$((total + _timer_seconds[$i])) + done + echo " ---------------------------------------------------------" + printf " %-55s %4ds\n" "TOTAL" "$total" + echo "==========================" +} + +trap print_timing_summary EXIT + +run_timed "Node unit tests (parallel)" npm run test:unit:parallel || jest_exit=$? + +# ---- Phase 1: install all Python dev dependencies (sequential) ---- +# Discover Python projects dynamically: any directory under src/, utils/, or +# lambdas/ whose Makefile defines both an `install-dev` target (Python deps) +# and a `coverage` target (pytest). This avoids maintaining a hardcoded list. +echo "Installing Python dev dependencies..." 
+mapfile -t _python_projects < <( + grep -rl "^install-dev:" src/ utils/ lambdas/ --include="Makefile" 2>/dev/null \ + | xargs grep -l "^coverage:" \ + | xargs -I{} dirname {} \ + | sort +) +for proj in "${_python_projects[@]}"; do + run_timed "${proj}: install-dev" make -C "$proj" install-dev +done + +# ---- Phase 2: run all coverage steps in parallel ---- +echo "Running Python coverage in parallel..." + +_py_pids=() +_py_labels=() +_py_logs=() +_py_exits=() + +for proj in "${_python_projects[@]}"; do + label="${proj}: coverage" + logfile=$(mktemp) + make -C "$proj" coverage >"$logfile" 2>&1 & + _py_pids+=("$!") + _py_labels+=("$label") + _py_logs+=("$logfile") +done + +# Collect results in launch order (preserves deterministic output) +_py_start=$(date +%s) +for i in "${!_py_pids[@]}"; do + _py_rc=0; wait "${_py_pids[$i]}" || _py_rc=$? + _py_exits+=("$_py_rc") + echo "" + echo "--- ${_py_labels[$i]} ---" + cat "${_py_logs[$i]}" + rm -f "${_py_logs[$i]}" +done +_py_end=$(date +%s) +_timer_labels+=("Python unit tests (parallel)") +_timer_seconds+=("$((_py_end - _py_start))") # merge coverage reports
+run_timed "lcov-result-merger" \ + bash -c 'mkdir -p .reports && TMPDIR="./.reports" ./node_modules/.bin/lcov-result-merger "**/.reports/unit/coverage/lcov.info" ".reports/lcov.info" --ignore "node_modules" --prepend-source-files --prepend-path-fix "../../.."' + +# Propagate any Jest failure now that all other test suites have completed +if [ "${jest_exit:-0}" -ne 0 ]; then + echo "Jest tests failed with exit code ${jest_exit}" + exit "${jest_exit}" +fi + +# Propagate any Python coverage failure +for i in "${!_py_exits[@]}"; do + if [ "${_py_exits[$i]}" -ne 0 ]; then + echo "${_py_labels[$i]} failed with exit code ${_py_exits[$i]}" + exit "${_py_exits[$i]}" + fi +done diff --git a/src/asyncapigenerator/pytest.ini b/src/asyncapigenerator/pytest.ini index 94bd46ada..fcdb5a3fd 100644 --- a/src/asyncapigenerator/pytest.ini +++ b/src/asyncapigenerator/pytest.ini @@ -19,6 +19,7 @@ markers = [coverage:run] relative_files = True +data_file = src/asyncapigenerator/.coverage omit = */tests/* */test_*.py diff --git a/src/cloudeventjekylldocs/pytest.ini b/src/cloudeventjekylldocs/pytest.ini index dba4156ce..b79a593f4 100644 --- a/src/cloudeventjekylldocs/pytest.ini +++ b/src/cloudeventjekylldocs/pytest.ini @@ -18,6 +18,7 @@ markers = [coverage:run] relative_files = True +data_file = src/cloudeventjekylldocs/.coverage omit = */tests/* */test_*.py diff --git a/src/cloudevents/domains/common.mk b/src/cloudevents/domains/common.mk index e96dcd34e..e48919191 100644 --- a/src/cloudevents/domains/common.mk +++ b/src/cloudevents/domains/common.mk @@ -56,31 +56,23 @@ build-no-bundle: @echo "Building $(DOMAIN) schemas to output/..." 
@if [ -n "$(PROFILE_NAMES)" ]; then \ echo "Building profile schemas..."; \ - for schema in $(PROFILE_NAMES); do \ - echo " - $$schema"; \ - cd $(CLOUD_EVENTS_DIR) && npm run build -- --root-dir $(ROOT_DIR) $(SRC_DIR)/$$schema.schema.yaml $(OUTPUT_DIR) || exit 1; \ - done; \ + printf '%s\n' $(PROFILE_NAMES) | xargs -P 0 -I{} sh -c \ + 'cd $(CLOUD_EVENTS_DIR) && npm run build -- --root-dir $(ROOT_DIR) $(SRC_DIR)/{}.schema.yaml $(OUTPUT_DIR) || exit 1'; \ fi @if [ -n "$(DEFS_NAMES)" ]; then \ echo "Building defs schemas..."; \ - for schema in $(DEFS_NAMES); do \ - echo " - $$schema"; \ - cd $(CLOUD_EVENTS_DIR) && npm run build -- --root-dir $(ROOT_DIR) $(SRC_DIR)/defs/$$schema.yaml $(OUTPUT_DIR)/defs || exit 1; \ - done; \ + printf '%s\n' $(DEFS_NAMES) | xargs -P 0 -I{} sh -c \ + 'cd $(CLOUD_EVENTS_DIR) && npm run build -- --root-dir $(ROOT_DIR) $(SRC_DIR)/defs/{}.yaml $(OUTPUT_DIR)/defs || exit 1'; \ fi @if [ -n "$(DATA_NAMES)" ]; then \ echo "Building data schemas..."; \ - for schema in $(DATA_NAMES); do \ - echo " - $$schema"; \ - cd $(CLOUD_EVENTS_DIR) && npm run build -- --root-dir $(ROOT_DIR) $(SRC_DIR)/data/$$schema.yaml $(OUTPUT_DIR)/data || exit 1; \ - done; \ + printf '%s\n' $(DATA_NAMES) | xargs -P 0 -I{} sh -c \ + 'cd $(CLOUD_EVENTS_DIR) && npm run build -- --root-dir $(ROOT_DIR) $(SRC_DIR)/data/{}.yaml $(OUTPUT_DIR)/data || exit 1'; \ fi @if [ -n "$(EVENT_NAMES)" ]; then \ echo "Building event schemas..."; \ - for schema in $(EVENT_NAMES); do \ - echo " - $$schema"; \ - cd $(CLOUD_EVENTS_DIR) && npm run build -- --root-dir $(ROOT_DIR) $(SRC_DIR)/events/$$schema.schema.yaml $(OUTPUT_DIR)/events || exit 1; \ - done; \ + printf '%s\n' $(EVENT_NAMES) | xargs -P 0 -I{} sh -c \ + 'cd $(CLOUD_EVENTS_DIR) && npm run build -- --root-dir $(ROOT_DIR) $(SRC_DIR)/events/{}.schema.yaml $(OUTPUT_DIR)/events || exit 1'; \ fi publish-json: @@ -138,11 +130,9 @@ publish-json: publish-bundled-json: @if [ -n "$(EVENT_NAMES)" ]; then \ - @echo "Flattening published event 
schemas..."; \ - for schema in $(EVENT_NAMES); do \ - echo " - $$schema (flatten)"; \ - cd $(CLOUD_EVENTS_DIR) && npm run bundle -- --flatten --root-dir $(ROOT_DIR) --base-url $(SCHEMA_BASE_URL) $(OUTPUT_DIR)/events/$$schema.schema.json $(SCHEMAS_DIR)/events/$$schema.flattened.schema.json || exit 1; \ - done; \ + echo "Flattening published event schemas..."; \ + printf '%s\n' $(EVENT_NAMES) | xargs -P 0 -I{} sh -c \ + 'cd $(CLOUD_EVENTS_DIR) && npm run bundle -- --flatten --root-dir $(ROOT_DIR) --base-url $(SCHEMA_BASE_URL) $(OUTPUT_DIR)/events/{}.schema.json $(SCHEMAS_DIR)/events/{}.flattened.schema.json || exit 1'; \ fi publish-yaml: diff --git a/src/cloudevents/jest.config.cjs b/src/cloudevents/jest.config.cjs deleted file mode 100644 index f40450410..000000000 --- a/src/cloudevents/jest.config.cjs +++ /dev/null @@ -1,57 +0,0 @@ -/** @type {import('jest').Config} */ -module.exports = { - preset: 'ts-jest', - testEnvironment: 'node', - roots: [''], - testMatch: [ - '**/__tests__/**/*.ts', - '**/?(*.)+(spec|test).ts' - ], - transform: { - '^.+\\.ts$': ['ts-jest', { - tsconfig: { - esModuleInterop: true, - allowSyntheticDefaultImports: true, - allowImportingTsExtensions: true, - module: 'commonjs', - target: 'ES2020', - moduleResolution: 'node', - noEmit: true - }, - diagnostics: { - ignoreCodes: [1343] // Ignore TS1343: import.meta errors - } - }] - }, - collectCoverageFrom: [ - 'tools/**/*.{ts,js,cjs}', - '!tools/**/*.d.ts', - '!tools/**/__tests__/**', - '!tools/**/*.test.ts', - '!tools/**/*.spec.ts', - '!tools/builder/build-schema.ts', - '!tools/generator/generate-example.ts', - '!tools/generator/manual-bundle-schema.ts', - '!tools/validator/validate.ts' - ], - coverageDirectory: 'coverage', - coverageReporters: ['text', 'lcov', 'html', 'cobertura'], - coveragePathIgnorePatterns: [ - '/node_modules/', - '/__tests__/' - ], - coverageThreshold: { - global: { - branches: 60, - functions: 60, - lines: 60, - statements: 60 - } - }, - moduleFileExtensions: ['ts', 
'js', 'json'], - moduleNameMapper: { - '^(.*)\\.ts$': '$1', - }, - verbose: true, - testTimeout: 10000 -}; diff --git a/src/cloudevents/jest.config.ts b/src/cloudevents/jest.config.ts new file mode 100644 index 000000000..d03dd4d4d --- /dev/null +++ b/src/cloudevents/jest.config.ts @@ -0,0 +1,57 @@ +import type { Config } from 'jest'; + +const config: Config = { + preset: 'ts-jest', + testEnvironment: 'node', + roots: [''], + testMatch: ['**/__tests__/**/*.ts', '**/?(*.)+(spec|test).ts'], + transform: { + '^.+\\.ts$': [ + 'ts-jest', + { + tsconfig: { + esModuleInterop: true, + allowSyntheticDefaultImports: true, + allowImportingTsExtensions: true, + module: 'commonjs', + target: 'ES2020', + moduleResolution: 'node', + noEmit: true, + }, + diagnostics: { + ignoreCodes: [1343], // Ignore TS1343: import.meta errors + }, + }, + ], + }, + collectCoverageFrom: [ + 'tools/**/*.{ts,js,cjs}', + '!tools/**/*.d.ts', + '!tools/**/__tests__/**', + '!tools/**/*.test.ts', + '!tools/**/*.spec.ts', + '!tools/builder/build-schema.ts', + '!tools/generator/generate-example.ts', + '!tools/generator/manual-bundle-schema.ts', + '!tools/validator/validate.ts', + ], + coverageDirectory: 'coverage', + coverageReporters: ['text', 'lcov', 'html', 'cobertura'], + coveragePathIgnorePatterns: ['/node_modules/', '/__tests__/'], + coverageThreshold: { + global: { + branches: 60, + functions: 60, + lines: 60, + statements: 60, + }, + }, + moduleFileExtensions: ['ts', 'js', 'json'], + moduleNameMapper: { + '^(.*)\\.ts$': '$1', + }, + verbose: true, + testTimeout: 10000, +}; + +export default config; diff --git a/src/cloudevents/tools/builder/__tests__/build-schema.test.ts b/src/cloudevents/tools/builder/__tests__/build-schema.test.ts index f7c35f908..7c7fecf9b 100644 --- a/src/cloudevents/tools/builder/__tests__/build-schema.test.ts +++ b/src/cloudevents/tools/builder/__tests__/build-schema.test.ts @@ -9,6 +9,10 @@ import fs from 'fs'; import path from 'path'; import { execSync } from 
'child_process'; +// Resolve paths relative to this test file so the suite works whether Jest +// runs from the workspace directory or from the repository root. +const cloudEventsRoot = path.resolve(__dirname, '..', '..', '..'); + describe('build-schema CLI', () => { let testDir: string; let sourceDir: string; @@ -16,7 +20,7 @@ describe('build-schema CLI', () => { beforeAll(() => { // Create test directories - testDir = path.join(process.cwd(), 'test-build-' + Date.now()); + testDir = path.join(cloudEventsRoot, 'test-build-' + Date.now()); sourceDir = path.join(testDir, 'src'); outputDir = path.join(testDir, 'output'); @@ -52,7 +56,7 @@ describe('build-schema CLI', () => { execSync( `tsx tools/builder/build-schema.ts "${inputFile}" "${outputDir}"`, { - cwd: process.cwd(), + cwd: cloudEventsRoot, stdio: 'pipe' } ); @@ -91,7 +95,7 @@ properties: execSync( `tsx tools/builder/build-schema.ts "${inputFile}" "${outputDir}"`, { - cwd: process.cwd(), + cwd: cloudEventsRoot, stdio: 'pipe' } ); @@ -120,7 +124,7 @@ properties: const result = execSync( `tsx tools/builder/build-schema.ts "${inputFile}" "${outputDir}"`, { - cwd: process.cwd(), + cwd: cloudEventsRoot, stdio: 'pipe', encoding: 'utf-8' } @@ -139,7 +143,7 @@ properties: execSync( 'tsx tools/builder/build-schema.ts', { - cwd: process.cwd(), + cwd: cloudEventsRoot, stdio: 'pipe' } ); @@ -160,7 +164,7 @@ properties: execSync( `tsx tools/builder/build-schema.ts "${inputFile}" "${outputDir}" "https://example.com"`, { - cwd: process.cwd(), + cwd: cloudEventsRoot, stdio: 'pipe' } ); @@ -198,7 +202,7 @@ properties: execSync( `tsx tools/builder/build-schema.ts "${mainFile}" "${outputDir}"`, { - cwd: process.cwd(), + cwd: cloudEventsRoot, stdio: 'pipe' } ); @@ -224,7 +228,7 @@ properties: execSync( `tsx tools/builder/build-schema.ts "${yamlFile}" "${outputDir}"`, { - cwd: process.cwd(), + cwd: cloudEventsRoot, stdio: 'pipe' } ); @@ -246,7 +250,7 @@ properties: execSync( `tsx tools/builder/build-schema.ts "nonexistent.json" 
"${outputDir}"`, { - cwd: process.cwd(), + cwd: cloudEventsRoot, stdio: 'pipe' } ); @@ -266,7 +270,7 @@ properties: execSync( `tsx tools/builder/build-schema.ts "${invalidFile}" "${outputDir}"`, { - cwd: process.cwd(), + cwd: cloudEventsRoot, stdio: 'pipe' } ); @@ -282,7 +286,7 @@ properties: describe('module structure', () => { it('should export expected functions (if any)', () => { // build-schema-cli.ts contains the testable logic - const buildSchemaCliPath = path.join(process.cwd(), 'tools/builder/build-schema-cli.ts'); + const buildSchemaCliPath = path.join(cloudEventsRoot, 'tools/builder/build-schema-cli.ts'); expect(fs.existsSync(buildSchemaCliPath)).toBe(true); // Verify file has expected structure @@ -293,7 +297,7 @@ properties: }); it('should have proper imports', () => { - const buildSchemaCliPath = path.join(process.cwd(), 'tools/builder/build-schema-cli.ts'); + const buildSchemaCliPath = path.join(cloudEventsRoot, 'tools/builder/build-schema-cli.ts'); const content = fs.readFileSync(buildSchemaCliPath, 'utf-8'); expect(content).toContain('import fs from'); diff --git a/src/eventcatalogasyncapiimporter/pytest.ini b/src/eventcatalogasyncapiimporter/pytest.ini index 0b2a45512..41b39ff49 100644 --- a/src/eventcatalogasyncapiimporter/pytest.ini +++ b/src/eventcatalogasyncapiimporter/pytest.ini @@ -20,6 +20,7 @@ testpaths = tests [coverage:run] relative_files = True +data_file = src/eventcatalogasyncapiimporter/.coverage omit = */tests/* */test_*.py diff --git a/src/python-schema-generator/pytest.ini b/src/python-schema-generator/pytest.ini index 0d63d6be4..001fba94a 100644 --- a/src/python-schema-generator/pytest.ini +++ b/src/python-schema-generator/pytest.ini @@ -9,6 +9,7 @@ addopts = [coverage:run] relative_files = True +data_file = src/python-schema-generator/.coverage omit = */tests/* */test_*.py diff --git a/utils/py-utils/pytest.ini b/utils/py-utils/pytest.ini index f704cd777..b5bbd23b4 100644 --- a/utils/py-utils/pytest.ini +++ 
b/utils/py-utils/pytest.ini @@ -7,6 +7,7 @@ addopts = -v --tb=short [coverage:run] relative_files = True +data_file = utils/py-utils/.coverage omit = */dl_utils/__tests__/* */test_*.py