diff --git a/README.md b/README.md index db32ba4..8120509 100644 --- a/README.md +++ b/README.md @@ -26,7 +26,7 @@ Typical use cases include porting a 100k+ line Python monolith to TypeScript, a ## How It Works -AAMF treats the migration as a pipeline of **up to 9 phases** (7 standard + 2 optional), each driven by purpose-built agents defined as `.agent.md` prompt files. The runtime never performs reasoning itself — it is pure execution machinery that launches agents, feeds them minimal context, collects their output, and decides what to run next. +AAMF treats the migration as a pipeline of **9 phases** (0–8), each driven by purpose-built agents defined as `.agent.md` prompt files. The runtime never performs reasoning itself — it is pure execution machinery that launches agents, feeds them minimal context, collects their output, and decides what to run next. ``` ┌─────────────────────────────────────────────────────────────────────┐ @@ -69,17 +69,17 @@ AAMF treats the migration as a pipeline of **up to 9 phases** (7 standard + 2 op | Phase | Name | Agents | Optional | Critical | |-------|------|--------|----------|----------| -| 0 | **KB Indexing** | *(runtime logic — Lore)* | Yes | Yes | -| 1 | **Impact Assessment** | `impact-assessor` | No | Yes | +| 0 | **KB Indexing** | *(runtime logic — Lore)* | No | Yes | +| 1 | **Task Graph Construction** | *(runtime logic — Lore)* | No | Yes | | 2 | **Knowledge Base Construction** | `knowledge-builder` | No | Yes | -| 3 | **Migration Planning** | `migration-planner`, `task-decomposer`, `adjudicator` | No | Yes | -| 4 | **Iterative Migration** | `code-migrator`, `parity-verifier`, `test-writer`, `failure-adjudicator` | No | Yes | -| 5 | **Final Parity Verification** | `final-parity-checker` | No | No | -| 6 | **E2E Testing & Documentation** | `e2e-test-crafter`, `documentation-writer` | No | No | -| 8 | **Idiomatic Refactor** | `idiomatic-reviewer`, `idiomatic-refactorer` | Yes | No | -| 7 | **Completion** | *(none — summary 
only)* | No | No | +| 3 | **Migration Planning** | `migration-planner`, `adjudicator` | No | Yes | +| 4 | **Iterative Migration** | `code-migrator`, `parity-verifier`, `test-writer`, `parity-failure-resolver` | No | Yes | +| 5 | **Final Parity Verification** | `final-parity-checker` | No | Yes | +| 6 | **E2E Testing & Documentation** | `e2e-test-crafter`, `documentation-writer` | No | Yes | +| 7 | **Idiomatic Refactor** | `idiomatic-reviewer`, `idiomatic-refactorer` | Yes | Yes | +| 8 | **Completion** | *(none — summary only)* | No | Yes | -> Phase 0 requires `options.kbIndex.enabled` (or `AAMF_USE_KB_INDEX=1`). Phase 8 requires `options.idiomaticRefactor.enabled`. Execution order is 0→1→2→3→4→5→6→8→7. Critical phases abort the migration on failure. Non-critical phases log issues but allow the pipeline to continue. +> Phase 7 requires `options.idiomaticRefactor.enabled`. Execution order is 0→1→2→3→4→5→6→7→8. All phases are critical — failure in any phase halts the flow. --- @@ -156,15 +156,13 @@ AAMF defines 16 specialized agent roles. 
Each corresponds to a `.agent.md` file |-------|-------|---------| | `migration-orchestrator` | — | Top-level coordination logic (mirrored by the runtime) | | `migration-runner` | — | Entry point agent | -| `impact-assessor` | 1 | Analyzes source codebase scope, complexity, and risk | | `knowledge-builder` | 2 | Documents all modules, dependencies, and patterns | | `migration-planner` | 3 | Creates the task-level migration plan with dependency ordering | -| `task-decomposer` | 3 | Decomposes module groups into granular migration tasks | | `adjudicator` | 3 | Decides between competing migration strategies | | `code-migrator` | 4 | Translates source code to the target language/framework | | `parity-verifier` | 4 | Checks behavioral equivalence between source and migrated code | | `test-writer` | 4 | Generates unit tests for migrated code | -| `failure-adjudicator` | 4 | Decides whether exhausted retries are fixed, false positives, real gaps, or inconclusive | +| `parity-failure-resolver` | 4 | Decides whether exhausted retries are fixed, false positives, real gaps, or inconclusive | | `final-parity-checker` | 5 | Full-codebase parity sweep with loop-back fix capability | | `e2e-test-crafter` | 6 | Creates end-to-end integration tests | | `documentation-writer` | 6 | Produces migration documentation and guides | @@ -175,7 +173,7 @@ AAMF defines 16 specialized agent roles. Each corresponds to a `.agent.md` file ## Execution Details by Phase -### Phase 0 — KB Indexing (Optional) +### Phase 0 — KB Indexing When `options.kbIndex.enabled` is set (or `AAMF_USE_KB_INDEX=1`), the runtime uses `@jafreck/lore` to build a SQLite knowledge-base index from the source codebase. This phase: @@ -186,9 +184,9 @@ When `options.kbIndex.enabled` is set (or `AAMF_USE_KB_INDEX=1`), the runtime us The MCP server runs for the lifetime of the migration and is shut down in a `finally` block. 
-### Phase 1 — Impact Assessment +### Phase 1 — Task Graph Construction -A single `impact-assessor` invocation scans the source tree and produces `impact-assessment.md`: scope, file count, complexity ratings, risk areas, and estimated effort. +The runtime uses `@jafreck/lore` to build a deterministic call-graph: SCC contraction → greedy merge → topologically-sorted task list. ### Phase 2 — Knowledge Base Construction @@ -248,18 +246,16 @@ The `final-parity-checker` performs a codebase-wide parity sweep. If issues are ### Phase 6 — E2E Testing & Documentation -`e2e-test-crafter` and `documentation-writer` run **in parallel** (serialized when git automation is enabled). Neither is critical; failures are logged but do not abort the migration. +`e2e-test-crafter` and `documentation-writer` run **in parallel** (serialized when git automation is enabled). -### Phase 8 — Idiomatic Refactor (Optional) +### Phase 7 — Idiomatic Refactor (Optional) -When `options.idiomaticRefactor.enabled` is set, Phase 8 runs up to `maxIterations` (default: 2) review-and-refactor cycles: +When `options.idiomaticRefactor.enabled` is set, Phase 7 runs up to `maxIterations` (default: 2) review-and-refactor cycles: 1. `idiomatic-reviewer` scans the migrated codebase for non-idiomatic patterns. 2. For each flagged issue, `idiomatic-refactorer` applies targeted fixes with git commits. -Phase 8 executes before Phase 7 (Completion). - -### Phase 7 — Completion +### Phase 8 — Completion The runtime writes a final summary to the progress file and returns a `MigrationResult` with per-phase outcomes, token usage, and lists of failed/blocked tasks. @@ -271,13 +267,13 @@ The runtime writes a final summary to the progress file and returns a `Migration All state is persisted to `.aamf/migration/{projectName}/state/checkpoint.json` after every phase completion and task completion. 
The checkpoint records: -- Current phase and per-phase cursors for deterministic resume (Phases 4, 5, 6, 8) +- Current phase and per-phase cursors for deterministic resume (Phases 4, 5, 6, 7) - Completed phases and tasks (with per-task wall-clock durations) - Failed/blocked tasks with error details - Phase output file paths - Cumulative token usage (by phase and by agent) - Phase 0 source fingerprint (skip KB rebuild if unchanged) -- Phase 3 decomposer progress (per-module-group completion) +- Phase 2 knowledge-builder progress (per-module-group completion) - Adjudication waivers and auditable event history - Terminal exhaustion metadata for fail-fast policy - Metrics record count for JSONL resume alignment @@ -384,9 +380,9 @@ All migration state is organized under `.aamf/migration/{projectName}/`: │ │ └── competing-strategies.md # (if adjudication needed) │ ├── parity/ │ │ ├── final-parity-report.md # Phase 5 output -│ │ └── idiomatic-review-report.md # Phase 8 output +│ │ └── idiomatic-review-report.md # Phase 7 output │ ├── adjudication/ # Failure adjudication records -│ └── impact-assessment.md # Phase 1 output +│ └── impact-assessment.md # Phase 2 output ├── reports/ │ ├── progress.md # Human-readable status dashboard │ └── observability/ diff --git a/agents/templates/migration-orchestrator.md b/agents/templates/migration-orchestrator.md index 30a8dfd..5bad8b1 100644 --- a/agents/templates/migration-orchestrator.md +++ b/agents/templates/migration-orchestrator.md @@ -13,15 +13,13 @@ You are the **Migration Orchestrator** — the central coordinator for large-sca Execute these phases in order. On resume, skip completed phases (read from `state/checkpoint.json`). 
-### Phase 1: Impact Assessment & Cost Estimation -- Launch: `impact-assessor` -- Input: Source codebase path, target specification -- Output: `.aamf/migration/{projectName}/artifacts/impact-assessment.md` -- Parallelizable: YES (read-only) +### Phase 1: Task Graph Construction +- The runtime builds a deterministic call-graph: SCC contraction → greedy merge → topologically-sorted task list. +- Output: Structured task list with dependency ordering -### Phase 2: Investigation & Knowledge Base Construction +### Phase 2: Knowledge Base Construction - Launch: `knowledge-builder` -- Input: Source codebase path, impact assessment results +- Input: Source codebase path, task graph - Output: `.aamf/migration/{projectName}/knowledge-base/` directory containing high-level architecture, module, and integration documentation - Parallelizable: YES (read-only) @@ -62,7 +60,6 @@ Serial execution required for code-writing. Parity verification is read-only and | Agent | Purpose | Parallelizable | |-------|---------|----------------| -| `impact-assessor` | Impact assessment and cost estimation | Yes | | `knowledge-builder` | Investigation and knowledge base construction | Yes | | `migration-planner` | Plan migration implementation | No | | `adjudicator` | Decide between competing plans/solutions | No | diff --git a/claude.md b/claude.md index 9d09321..cd81953 100644 --- a/claude.md +++ b/claude.md @@ -55,16 +55,16 @@ migration.config.json → MigrationRuntime | Phase | Name | Agents | Critical | |-------|------|--------|----------| | 0 | KB Indexing | *(runtime — Lore)* | Yes | -| 1 | Impact Assessment | `impact-assessor` | Yes | +| 1 | Task Graph Construction | *(runtime — Lore)* | Yes | | 2 | Knowledge Base Construction | `knowledge-builder` | Yes | -| 3 | Migration Planning | `migration-planner`, `task-decomposer`, `adjudicator` | Yes | -| 4 | Iterative Migration | `code-migrator`, `parity-verifier`, `test-writer`, `failure-adjudicator` | Yes | -| 5 | Final Parity Verification | 
`final-parity-checker` | No | -| 6 | E2E Testing & Documentation | `e2e-test-crafter`, `documentation-writer` | No | -| 8 | Idiomatic Refactor (optional) | `idiomatic-reviewer`, `idiomatic-refactorer` | No | -| 7 | Completion | *(summary only)* | No | - -Execution order: 0→1→2→3→4→5→6→8→7. Critical phases abort on failure. Non-critical phases log issues and continue. +| 3 | Migration Planning | `migration-planner`, `adjudicator` | Yes | +| 4 | Iterative Migration | `code-migrator`, `parity-verifier`, `test-writer`, `parity-failure-resolver` | Yes | +| 5 | Final Parity Verification | `final-parity-checker` | Yes | +| 6 | E2E Testing & Documentation | `e2e-test-crafter`, `documentation-writer` | Yes | +| 7 | Idiomatic Refactor (optional) | `idiomatic-reviewer`, `idiomatic-refactorer` | Yes | +| 8 | Completion | *(summary only)* | Yes | + +Execution order: 0→1→2→3→4→5→6→7→8. All phases are critical — failure in any phase halts the flow. ## Agent Runtimes diff --git a/docs/configuration.md b/docs/configuration.md index c42663b..a02cf49 100644 --- a/docs/configuration.md +++ b/docs/configuration.md @@ -181,7 +181,7 @@ Create a `migration.config.json` file in your project root. Below is a full refe | `options.git.authorName` | `string` | `'AAMF Migration Bot'` | Git author name. | | `options.git.authorEmail` | `string` | `'aamf@local.invalid'` | Git author email. 
| -#### Idiomatic Refactor (Phase 8) +#### Idiomatic Refactor (Phase 7) | Field | Type | Default | Description | |-------|------|---------|-------------| @@ -262,21 +262,21 @@ npx aamf kb-server --db ./kb.db ### Migration Phases -The runtime executes migration as a sequence of up to 9 phases (7 standard + 2 optional), each driven by one or more specialized agents: +The runtime executes migration as a sequence of **9 phases** (0–8), each driven by one or more specialized agents: | Phase | Name | Description | |-------|------|-------------| -| 0 | **KB Indexing** *(optional)* | Builds a SQLite knowledge-base index from the source codebase using `@jafreck/lore` with tree-sitter parsing and optional embeddings. Starts an HTTP MCP server for downstream agent access. | -| 1 | **Impact Assessment** | Scans the source codebase to build a dependency graph, identify file roles, and estimate migration complexity. | +| 0 | **KB Indexing** | Builds a SQLite knowledge-base index from the source codebase using `@jafreck/lore` with tree-sitter parsing and optional embeddings. Starts an HTTP MCP server for downstream agent access. | +| 1 | **Task Graph Construction** | Builds a deterministic call-graph: SCC contraction → greedy merge → topologically-sorted task list. | | 2 | **Knowledge Base Construction** | Extracts patterns, idioms, and domain knowledge from the source code into a structured knowledge base that downstream agents reference. | | 3 | **Migration Planning** | Produces an ordered task list — module groups decomposed into granular tasks with dependency ordering. Optionally invokes adjudication for competing strategies. | | 4 | **Iterative Migration** | The main execution loop. Supports per-task and wave-barrier scheduling with migration/validation cycles, infrastructure error classification, failure adjudication, model routing, and git automation. 
| | 5 | **Final Parity Verification** | Compares the migrated codebase against the source to verify functional equivalence, with loopback fix capability. | | 6 | **E2E Testing & Documentation** | Generates end-to-end tests and migration documentation. | -| 8 | **Idiomatic Refactor** *(optional)* | Reviews migrated code for non-idiomatic patterns and applies targeted refactoring with git commits. | -| 7 | **Completion** | Finalizes artifacts, writes the summary report, generates the observability report, and cleans up. | +| 7 | **Idiomatic Refactor** *(optional)* | Reviews migrated code for non-idiomatic patterns and applies targeted refactoring with git commits. | +| 8 | **Completion** | Finalizes artifacts, writes the summary report, generates the observability report, and cleans up. | -Execution order is 0→1→2→3→4→5→6→8→7. Phase 0 requires `kbIndex.enabled`. Phase 8 requires `idiomaticRefactor.enabled`. +Execution order is 0→1→2→3→4→5→6→7→8. Phase 7 requires `idiomaticRefactor.enabled`. 
### Runtime ↔ Agent Boundary diff --git a/package-lock.json b/package-lock.json index 0a45c1c..adb7c11 100644 --- a/package-lock.json +++ b/package-lock.json @@ -9,7 +9,7 @@ "version": "0.1.0", "dependencies": { "@cadre-dev/framework": "0.2.1", - "@jafreck/lore": "^0.3.7", + "@jafreck/lore": "^0.3.8", "@modelcontextprotocol/sdk": "^1.27.1", "@types/better-sqlite3": "^7.6.13", "better-sqlite3": "^12.6.2", @@ -120,6 +120,28 @@ "url": "https://github.com/sponsors/colinhacks" } }, + "node_modules/@cspotcode/source-map-support": { + "version": "0.8.1", + "resolved": "https://registry.npmjs.org/@cspotcode/source-map-support/-/source-map-support-0.8.1.tgz", + "integrity": "sha512-IchNf6dN4tHoMFIn/7OE8LWZ19Y6q/67Bmf6vnGREv8RSbBVb9LPJxEcnwrcwX6ixSvaiGoomAUvu4YSxXrVgw==", + "license": "MIT", + "dependencies": { + "@jridgewell/trace-mapping": "0.3.9" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/@cspotcode/source-map-support/node_modules/@jridgewell/trace-mapping": { + "version": "0.3.9", + "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.9.tgz", + "integrity": "sha512-3Belt6tdc8bPgAtbcmdtNJlirVoTmEb5e2gC94PnkwEW9jI6CAHUeoG85tjWP5WquqfavoMtMwiG4P926ZKKuQ==", + "license": "MIT", + "dependencies": { + "@jridgewell/resolve-uri": "^3.0.3", + "@jridgewell/sourcemap-codec": "^1.4.10" + } + }, "node_modules/@emnapi/runtime": { "version": "1.9.0", "resolved": "https://registry.npmjs.org/@emnapi/runtime/-/runtime-1.9.0.tgz", @@ -605,6 +627,12 @@ "sharp": "^0.34.1" } }, + "node_modules/@iarna/toml": { + "version": "2.2.5", + "resolved": "https://registry.npmjs.org/@iarna/toml/-/toml-2.2.5.tgz", + "integrity": "sha512-trnsAYxU3xnS1gPHPyU961coFyLkh4gAD/0zQ5mymY4yOZ+CYvsPqUbOFSw0aDM4y0tV7tiFxL/1XfXPNC6IPg==", + "license": "ISC" + }, "node_modules/@img/colour": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/@img/colour/-/colour-1.1.0.tgz", @@ -1083,15 +1111,17 @@ } }, "node_modules/@jafreck/lore": { - "version": "0.3.7", - 
"resolved": "https://registry.npmjs.org/@jafreck/lore/-/lore-0.3.7.tgz", - "integrity": "sha512-ZbkYUbPnEK/ZbeFfBmZzAaoPhr/8Z3N5ruW3QKFJVBWC/yJ3Zr+1PqZ0W2qi1okx1HfayakQ9eJFTLp9p9yVrA==", + "version": "0.3.8", + "resolved": "https://registry.npmjs.org/@jafreck/lore/-/lore-0.3.8.tgz", + "integrity": "sha512-DCqKJcuXhhqpYH7cAfQp1oBqdx2z5Cv7IpPObm6c51l1W9aYdOWldsRTQZjOsRO6gXJS+dgRppJHkPmivSjlgg==", "license": "MIT", "dependencies": { "@bufbuild/protobuf": "^2.11.0", "@elm-tooling/tree-sitter-elm": "^5.9.0", "@huggingface/transformers": "^3.8.1", "@modelcontextprotocol/sdk": "^1.27.1", + "@sourcegraph/scip-python": "^0.6.6", + "@sourcegraph/scip-typescript": "^0.4.0", "@tree-sitter-grammars/tree-sitter-lua": "^0.4.1", "@tree-sitter-grammars/tree-sitter-zig": "^1.1.2", "better-sqlite3": "^12.6.2", @@ -1476,7 +1506,6 @@ "version": "3.1.2", "resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.2.tgz", "integrity": "sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==", - "dev": true, "license": "MIT", "engines": { "node": ">=6.0.0" @@ -1486,7 +1515,6 @@ "version": "1.5.5", "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.5.tgz", "integrity": "sha512-cYQ9310grqxueWbl+WuIUIaiUaDcj7WOq5fVhEljNVgRfOUhY9fy2zTvfoqWsnebh8Sl70VScFbICvJnLKB0Og==", - "dev": true, "license": "MIT" }, "node_modules/@jridgewell/trace-mapping": { @@ -2004,6 +2032,58 @@ "win32" ] }, + "node_modules/@sourcegraph/scip-python": { + "version": "0.6.6", + "resolved": "https://registry.npmjs.org/@sourcegraph/scip-python/-/scip-python-0.6.6.tgz", + "integrity": "sha512-qoKL1Rggg0o5newAFbCFAKlS0AjWxG5MA+mC28BtgxOv0DhO4zdL8u7151FxEppDpXMVvm7+yXSjXotoVH9cMQ==", + "license": "MIT", + "dependencies": { + "@iarna/toml": "^2.2.5", + "command-exists": "^1.2.9", + "commander": "^9.2.0", + "diff": "^5.0.0", + "glob": "^7.2.0", + "google-protobuf": "^3.19.3", + "ts-node": "^10.5.0", + "vscode-languageserver": 
"^7.0.0" + }, + "bin": { + "scip-python": "index.js" + } + }, + "node_modules/@sourcegraph/scip-python/node_modules/commander": { + "version": "9.5.0", + "resolved": "https://registry.npmjs.org/commander/-/commander-9.5.0.tgz", + "integrity": "sha512-KRs7WVDKg86PWiuAqhDrAQnTXZKraVcCc6vFdL14qrZ/DcWwuRo7VoiYXalXO7S5GKpqYiVEwCbgFDfxNHKJBQ==", + "license": "MIT", + "engines": { + "node": "^12.20.0 || >=14" + } + }, + "node_modules/@sourcegraph/scip-typescript": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/@sourcegraph/scip-typescript/-/scip-typescript-0.4.0.tgz", + "integrity": "sha512-k+AtsrqmS41Sd5qjkZlHcmvoSQIvBOonRj4jpgp0KNFM6aqvMGpdSuPUqrUcg8ENTKjUbfaUVszgQwq3bCOvwA==", + "license": "Apache-2.0", + "dependencies": { + "commander": "^12.1.0", + "google-protobuf": "^3.21.4", + "progress": "^2.0.3", + "typescript": "^5.6.2" + }, + "bin": { + "scip-typescript": "dist/src/main.js" + } + }, + "node_modules/@sourcegraph/scip-typescript/node_modules/commander": { + "version": "12.1.0", + "resolved": "https://registry.npmjs.org/commander/-/commander-12.1.0.tgz", + "integrity": "sha512-Vw8qHK3bZM9y/P10u3Vib8o/DdkvA2OtPtZvD871QKjy74Wj1WSKFILMPRPSdUSx5RFK1arlJzEtA4PkFgnbuA==", + "license": "MIT", + "engines": { + "node": ">=18" + } + }, "node_modules/@standard-schema/spec": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/@standard-schema/spec/-/spec-1.1.0.tgz", @@ -2049,6 +2129,30 @@ } } }, + "node_modules/@tsconfig/node10": { + "version": "1.0.12", + "resolved": "https://registry.npmjs.org/@tsconfig/node10/-/node10-1.0.12.tgz", + "integrity": "sha512-UCYBaeFvM11aU2y3YPZ//O5Rhj+xKyzy7mvcIoAjASbigy8mHMryP5cK7dgjlz2hWxh1g5pLw084E0a/wlUSFQ==", + "license": "MIT" + }, + "node_modules/@tsconfig/node12": { + "version": "1.0.11", + "resolved": "https://registry.npmjs.org/@tsconfig/node12/-/node12-1.0.11.tgz", + "integrity": "sha512-cqefuRsh12pWyGsIoBKJA9luFu3mRxCA+ORZvA4ktLSzIuCUtWVxGIuXigEwO5/ywWFMZ2QEGKWvkZG1zDMTag==", + "license": "MIT" + }, + 
"node_modules/@tsconfig/node14": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/@tsconfig/node14/-/node14-1.0.3.tgz", + "integrity": "sha512-ysT8mhdixWK6Hw3i1V2AeRqZ5WfXg1G43mqoYlM2nc6388Fq5jcXyr5mRsqViLx/GJYdoL0bfXD8nmF+Zn/Iow==", + "license": "MIT" + }, + "node_modules/@tsconfig/node16": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/@tsconfig/node16/-/node16-1.0.4.tgz", + "integrity": "sha512-vxhUy4J8lyeyinH7Azl1pdd43GJhZH/tP2weN8TntQblOY+A0XbT8DJk1/oCPuOOyg/Ja757rG0CgHcWC8OfMA==", + "license": "MIT" + }, "node_modules/@types/better-sqlite3": { "version": "7.6.13", "resolved": "https://registry.npmjs.org/@types/better-sqlite3/-/better-sqlite3-7.6.13.tgz", @@ -2247,6 +2351,30 @@ "node": ">= 0.6" } }, + "node_modules/acorn": { + "version": "8.16.0", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.16.0.tgz", + "integrity": "sha512-UVJyE9MttOsBQIDKw1skb9nAwQuR5wuGD3+82K6JgJlm/Y+KI92oNsMNGZCYdDsVtRHSak0pcV5Dno5+4jh9sw==", + "license": "MIT", + "bin": { + "acorn": "bin/acorn" + }, + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/acorn-walk": { + "version": "8.3.5", + "resolved": "https://registry.npmjs.org/acorn-walk/-/acorn-walk-8.3.5.tgz", + "integrity": "sha512-HEHNfbars9v4pgpW6SO1KSPkfoS0xVOM/9UzkJltjlsHZmJasxg8aXkuZa7SMf8vKGIBhpUsPluQSqhJFCqebw==", + "license": "MIT", + "dependencies": { + "acorn": "^8.11.0" + }, + "engines": { + "node": ">=0.4.0" + } + }, "node_modules/ajv": { "version": "8.18.0", "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.18.0.tgz", @@ -2280,6 +2408,12 @@ } } }, + "node_modules/arg": { + "version": "4.1.3", + "resolved": "https://registry.npmjs.org/arg/-/arg-4.1.3.tgz", + "integrity": "sha512-58S9QDqG0Xx27YwPSt9fJxivjYl432YCwfDMfZ+71RAqUrZef7LrKQZ3LHLOwCS4FLNBplP533Zx895SeOCHvA==", + "license": "MIT" + }, "node_modules/assertion-error": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/assertion-error/-/assertion-error-2.0.1.tgz", @@ -2302,6 +2436,12 @@ "js-tokens": 
"^10.0.0" } }, + "node_modules/balanced-match": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", + "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==", + "license": "MIT" + }, "node_modules/base64-js": { "version": "1.5.1", "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz", @@ -2387,6 +2527,16 @@ "deprecated": "Package no longer supported. Contact Support at https://www.npmjs.com/support for more info.", "license": "MIT" }, + "node_modules/brace-expansion": { + "version": "1.1.12", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz", + "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==", + "license": "MIT", + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, "node_modules/braces": { "version": "3.0.3", "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz", @@ -2492,6 +2642,12 @@ "node": ">=18" } }, + "node_modules/command-exists": { + "version": "1.2.9", + "resolved": "https://registry.npmjs.org/command-exists/-/command-exists-1.2.9.tgz", + "integrity": "sha512-LTQ/SGc+s0Xc0Fu5WaKnR0YiygZkm9eKFvyS+fRsU7/ZWFF8ykFM6Pc9aCVf1+xasOOZpO3BAVgVrKvsqKHV7w==", + "license": "MIT" + }, "node_modules/commander": { "version": "14.0.3", "resolved": "https://registry.npmjs.org/commander/-/commander-14.0.3.tgz", @@ -2501,6 +2657,12 @@ "node": ">=20" } }, + "node_modules/concat-map": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", + "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==", + "license": "MIT" + }, "node_modules/content-disposition": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/content-disposition/-/content-disposition-1.0.1.tgz", @@ -2558,6 +2720,12 @@ "url": 
"https://opencollective.com/express" } }, + "node_modules/create-require": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/create-require/-/create-require-1.1.1.tgz", + "integrity": "sha512-dcKFX3jn0MpIaXjisoRvexIJVEKzaq7z2rZKxf+MSr9TkdmHmsU4m2lcLojrj/FHl8mk5VxMmYA+ftRkP/3oKQ==", + "license": "MIT" + }, "node_modules/cross-spawn": { "version": "7.0.6", "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz", @@ -2681,6 +2849,15 @@ "integrity": "sha512-T0NIuQpnTvFDATNuHN5roPwSBG83rFsuO+MXXH9/3N1eFbn4wcPjttvjMLEPWJ0RGUYgQE7cGgS3tNxbqCGM7g==", "license": "MIT" }, + "node_modules/diff": { + "version": "5.2.2", + "resolved": "https://registry.npmjs.org/diff/-/diff-5.2.2.tgz", + "integrity": "sha512-vtcDfH3TOjP8UekytvnHH1o1P4FcUdt4eQ1Y+Abap1tk/OB2MWQvcwS2ClCd1zuIhc3JKOx6p3kod8Vfys3E+A==", + "license": "BSD-3-Clause", + "engines": { + "node": ">=0.3.1" + } + }, "node_modules/dunder-proto": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/dunder-proto/-/dunder-proto-1.0.1.tgz", @@ -3076,6 +3253,12 @@ "integrity": "sha512-y6OAwoSIf7FyjMIv94u+b5rdheZEjzR63GTyZJm5qh4Bi+2YgwLCcI/fPFZkL5PSixOt6ZNKm+w+Hfp/Bciwow==", "license": "MIT" }, + "node_modules/fs.realpath": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", + "integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==", + "license": "ISC" + }, "node_modules/fsevents": { "version": "2.3.3", "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz", @@ -3156,6 +3339,27 @@ "integrity": "sha512-SyHy3T1v2NUXn29OsWdxmK6RwHD+vkj3v8en8AOBZ1wBQ/hCAQ5bAQTD02kW4W9tUp/3Qh6J8r9EvntiyCmOOw==", "license": "MIT" }, + "node_modules/glob": { + "version": "7.2.3", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", + "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==", + "deprecated": "Old versions of 
glob are not supported, and contain widely publicized security vulnerabilities, which have been fixed in the current version. Please update. Support for old versions may be purchased (at exorbitant rates) by contacting i@izs.me", + "license": "ISC", + "dependencies": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.1.1", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + }, + "engines": { + "node": "*" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, "node_modules/glob-parent": { "version": "5.1.2", "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", @@ -3201,6 +3405,12 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/google-protobuf": { + "version": "3.21.4", + "resolved": "https://registry.npmjs.org/google-protobuf/-/google-protobuf-3.21.4.tgz", + "integrity": "sha512-MnG7N936zcKTco4Jd2PX2U96Kf9PxygAPKBug+74LHzmHXmceN16MmRcdgZv+DGef/S9YvQAfRsNCn4cjf9yyQ==", + "license": "(BSD-3-Clause AND Apache-2.0)" + }, "node_modules/gopd": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/gopd/-/gopd-1.2.0.tgz", @@ -3337,6 +3547,17 @@ ], "license": "BSD-3-Clause" }, + "node_modules/inflight": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", + "integrity": "sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==", + "deprecated": "This module is not supported, and leaks memory. Do not use it. 
Check out lru-cache if you want a good and tested way to coalesce async requests by a key value, which is much more comprehensive and powerful.", + "license": "ISC", + "dependencies": { + "once": "^1.3.0", + "wrappy": "1" + } + }, "node_modules/inherits": { "version": "2.0.4", "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", @@ -3526,6 +3747,12 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/make-error": { + "version": "1.3.6", + "resolved": "https://registry.npmjs.org/make-error/-/make-error-1.3.6.tgz", + "integrity": "sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw==", + "license": "ISC" + }, "node_modules/matcher": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/matcher/-/matcher-3.0.0.tgz", @@ -3639,6 +3866,18 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/minimatch": { + "version": "3.1.5", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.5.tgz", + "integrity": "sha512-VgjWUsnnT6n+NUk6eZq77zeFdpW2LWDzP6zFGrCbHXiYNul5Dzqk2HHQ5uFH2DNW5Xbp8+jVzaeNt94ssEEl4w==", + "license": "ISC", + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, "node_modules/minimist": { "version": "1.2.8", "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.8.tgz", @@ -3876,6 +4115,15 @@ "node": ">= 0.8" } }, + "node_modules/path-is-absolute": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", + "integrity": "sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==", + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/path-key": { "version": "3.1.1", "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", @@ -3993,6 +4241,15 @@ "node": ">=10" } }, + "node_modules/progress": { + "version": "2.0.3", + "resolved": 
"https://registry.npmjs.org/progress/-/progress-2.0.3.tgz", + "integrity": "sha512-7PiHtLll5LdnKIMw100I+8xJXR5gW2QwWYkT6iJva0bXitZKa/XMrSbdmg3r2Xnaidz9Qumd0VPaMrZlF9V9sA==", + "license": "MIT", + "engines": { + "node": ">=0.4.0" + } + }, "node_modules/protobufjs": { "version": "7.5.4", "resolved": "https://registry.npmjs.org/protobufjs/-/protobufjs-7.5.4.tgz", @@ -4986,6 +5243,58 @@ } } }, + "node_modules/ts-node": { + "version": "10.9.2", + "resolved": "https://registry.npmjs.org/ts-node/-/ts-node-10.9.2.tgz", + "integrity": "sha512-f0FFpIdcHgn8zcPSbf1dRevwt047YMnaiJM3u2w2RewrB+fob/zePZcrOyQoLMMO7aBIddLcQIEK5dYjkLnGrQ==", + "license": "MIT", + "dependencies": { + "@cspotcode/source-map-support": "^0.8.0", + "@tsconfig/node10": "^1.0.7", + "@tsconfig/node12": "^1.0.7", + "@tsconfig/node14": "^1.0.0", + "@tsconfig/node16": "^1.0.2", + "acorn": "^8.4.1", + "acorn-walk": "^8.1.1", + "arg": "^4.1.0", + "create-require": "^1.1.0", + "diff": "^4.0.1", + "make-error": "^1.1.1", + "v8-compile-cache-lib": "^3.0.1", + "yn": "3.1.1" + }, + "bin": { + "ts-node": "dist/bin.js", + "ts-node-cwd": "dist/bin-cwd.js", + "ts-node-esm": "dist/bin-esm.js", + "ts-node-script": "dist/bin-script.js", + "ts-node-transpile-only": "dist/bin-transpile.js", + "ts-script": "dist/bin-script-deprecated.js" + }, + "peerDependencies": { + "@swc/core": ">=1.2.50", + "@swc/wasm": ">=1.2.50", + "@types/node": "*", + "typescript": ">=2.7" + }, + "peerDependenciesMeta": { + "@swc/core": { + "optional": true + }, + "@swc/wasm": { + "optional": true + } + } + }, + "node_modules/ts-node/node_modules/diff": { + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/diff/-/diff-4.0.4.tgz", + "integrity": "sha512-X07nttJQkwkfKfvTPG/KSnE2OMdcUCao6+eXF3wmnIQRn2aPAHH3VxDbDOdegkd6JbPsXqShpvEOHfAT+nCNwQ==", + "license": "BSD-3-Clause", + "engines": { + "node": ">=0.3.1" + } + }, "node_modules/tslib": { "version": "2.8.1", "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.1.tgz", @@ -5055,7 +5364,6 
@@ "version": "5.9.3", "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.9.3.tgz", "integrity": "sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw==", - "dev": true, "license": "Apache-2.0", "bin": { "tsc": "bin/tsc", @@ -5086,6 +5394,12 @@ "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==", "license": "MIT" }, + "node_modules/v8-compile-cache-lib": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/v8-compile-cache-lib/-/v8-compile-cache-lib-3.0.1.tgz", + "integrity": "sha512-wa7YjyUGfNZngI/vtK0UHAN+lgDCxBPCylVXGp0zu59Fz5aiGtNXaq3DhIov063MorB+VfufLh3JlF2KdTK3xg==", + "license": "MIT" + }, "node_modules/vary": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/vary/-/vary-1.1.2.tgz", @@ -5248,6 +5562,43 @@ } } }, + "node_modules/vscode-jsonrpc": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/vscode-jsonrpc/-/vscode-jsonrpc-6.0.0.tgz", + "integrity": "sha512-wnJA4BnEjOSyFMvjZdpiOwhSq9uDoK8e/kpRJDTaMYzwlkrhG1fwDIZI94CLsLzlCK5cIbMMtFlJlfR57Lavmg==", + "license": "MIT", + "engines": { + "node": ">=8.0.0 || >=10.0.0" + } + }, + "node_modules/vscode-languageserver": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/vscode-languageserver/-/vscode-languageserver-7.0.0.tgz", + "integrity": "sha512-60HTx5ID+fLRcgdHfmz0LDZAXYEV68fzwG0JWwEPBode9NuMYTIxuYXPg4ngO8i8+Ou0lM7y6GzaYWbiDL0drw==", + "license": "MIT", + "dependencies": { + "vscode-languageserver-protocol": "3.16.0" + }, + "bin": { + "installServerIntoExtension": "bin/installServerIntoExtension" + } + }, + "node_modules/vscode-languageserver-protocol": { + "version": "3.16.0", + "resolved": "https://registry.npmjs.org/vscode-languageserver-protocol/-/vscode-languageserver-protocol-3.16.0.tgz", + "integrity": "sha512-sdeUoAawceQdgIfTI+sdcwkiK2KU+2cbEYA0agzM2uqaUy2UpnnGHtWTHVEtS0ES4zHU0eMFRGN+oQgDxlD66A==", + "license": "MIT", + "dependencies": 
{ + "vscode-jsonrpc": "6.0.0", + "vscode-languageserver-types": "3.16.0" + } + }, + "node_modules/vscode-languageserver-types": { + "version": "3.16.0", + "resolved": "https://registry.npmjs.org/vscode-languageserver-types/-/vscode-languageserver-types-3.16.0.tgz", + "integrity": "sha512-k8luDIWJWyenLc5ToFQQMaSrqCHiLwyKPHKPQZ5zz21vM+vIVUSvsRpcbiECH4WR88K2XZqc4ScRcZ7nk/jbeA==", + "license": "MIT" + }, "node_modules/which": { "version": "2.0.2", "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", @@ -5295,6 +5646,15 @@ "node": ">=18" } }, + "node_modules/yn": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/yn/-/yn-3.1.1.tgz", + "integrity": "sha512-Ux4ygGWsu2c7isFWe8Yu1YluJmqVhxqK2cLXNQA5AcC3QfbGNpM7fu0Y8b/z16pXLnFxZYvWhd3fhBY9DLmC6Q==", + "license": "MIT", + "engines": { + "node": ">=6" + } + }, "node_modules/yocto-queue": { "version": "1.2.2", "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-1.2.2.tgz", diff --git a/package.json b/package.json index 74c2d0f..595373d 100644 --- a/package.json +++ b/package.json @@ -20,7 +20,7 @@ }, "dependencies": { "@cadre-dev/framework": "0.2.1", - "@jafreck/lore": "^0.3.7", + "@jafreck/lore": "^0.3.8", "@modelcontextprotocol/sdk": "^1.27.1", "@types/better-sqlite3": "^7.6.13", "better-sqlite3": "^12.6.2", diff --git a/src/agents/context-builder.ts b/src/agents/context-builder.ts index 031906a..42ec362 100644 --- a/src/agents/context-builder.ts +++ b/src/agents/context-builder.ts @@ -211,7 +211,7 @@ export class ContextBuilder { }; case 'test-writer': { - // Phase 7 per-suite E2E path: payload carries a full suite brief + // Phase 6 per-suite E2E path: payload carries a full suite brief if (this.isRecord(payload?.e2eSuiteBrief)) { const brief = payload!.e2eSuiteBrief as Record; const targetFiles = Array.isArray(brief.targetFiles) ? 
(brief.targetFiles as string[]) : []; @@ -222,7 +222,7 @@ export class ContextBuilder { agentPayload: { taskId, testType: 'e2e', e2eSuiteBrief: brief }, }; } - // Phase 5 unit-test path (unchanged) + // Phase 4 unit-test path (unchanged) return { inputFiles: [ ...(payload?.targetFile ? [String(payload.targetFile)] : []), @@ -311,7 +311,7 @@ export class ContextBuilder { /** * Build the execution-strategy descriptor from the current config. * Injected into the planning agents' payload so they can reason about - * how Phase 5 will execute their task graph. + * how Phase 4 will execute their task graph. */ private buildExecutionStrategy(): import('./types.js').ExecutionStrategy { const opts = this.config.options; diff --git a/src/agents/registry.ts b/src/agents/registry.ts index f3f0042..6a83f9b 100644 --- a/src/agents/registry.ts +++ b/src/agents/registry.ts @@ -177,7 +177,7 @@ export const AGENT_REGISTRY: Record = { modulesDocumented: { type: 'integer', minimum: 0 }, }, }), - phases: [3], + phases: [2], copilotTools: ['read', 'edit', 'search', 'execute'], claudeTools: CLAUDE_TOOLS, }, @@ -193,7 +193,7 @@ export const AGENT_REGISTRY: Record = { strategy: { type: 'string' }, }, }), - phases: [4], + phases: [3], copilotTools: ['read', 'edit', 'search'], claudeTools: CLAUDE_TOOLS, }, @@ -214,7 +214,7 @@ export const AGENT_REGISTRY: Record = { decision: { type: 'string', minLength: 1 }, }, }), - phases: [4], + phases: [3], copilotTools: ['read', 'edit', 'search', 'execute'], claudeTools: CLAUDE_TOOLS, }, @@ -238,7 +238,7 @@ export const AGENT_REGISTRY: Record = { issues: { type: 'array' }, }, }), - phases: [5], + phases: [4], copilotTools: ['read', 'edit', 'search', 'execute'], claudeTools: CLAUDE_TOOLS, }, @@ -276,7 +276,7 @@ export const AGENT_REGISTRY: Record = { }, }, }), - phases: [5], + phases: [4], copilotTools: ['read', 'edit', 'search', 'execute'], claudeTools: CLAUDE_TOOLS, }, @@ -299,7 +299,7 @@ export const AGENT_REGISTRY: Record = { taskId: { type: 'string', 
minLength: 1 }, }, }), - phases: [5], + phases: [4], copilotTools: ['read', 'edit', 'search', 'execute'], claudeTools: CLAUDE_TOOLS, }, @@ -325,7 +325,7 @@ export const AGENT_REGISTRY: Record = { scopeReduced: { type: 'boolean' }, }, }), - phases: [5], + phases: [4], copilotTools: ['read', 'edit', 'search', 'execute'], claudeTools: CLAUDE_TOOLS, }, @@ -354,7 +354,7 @@ export const AGENT_REGISTRY: Record = { }, }, }), - phases: [6], + phases: [5], copilotTools: ['read', 'edit', 'search', 'execute'], claudeTools: CLAUDE_TOOLS, }, @@ -370,7 +370,7 @@ export const AGENT_REGISTRY: Record = { suitesCompleted: { type: 'integer', minimum: 0 }, }, }), - phases: [7], + phases: [6], copilotTools: ['read', 'edit', 'search', 'execute'], claudeTools: CLAUDE_TOOLS, }, @@ -387,7 +387,7 @@ export const AGENT_REGISTRY: Record = { documentsWritten: { type: 'integer', minimum: 0 }, }, }), - phases: [7], + phases: [6], copilotTools: ['read', 'edit', 'search'], claudeTools: CLAUDE_TOOLS, }, @@ -431,7 +431,7 @@ export const AGENT_REGISTRY: Record = { }, }, }), - phases: [8], + phases: [7], copilotTools: ['read', 'search'], claudeTools: CLAUDE_TOOLS, }, @@ -447,7 +447,7 @@ export const AGENT_REGISTRY: Record = { }, }), outputJsonSchema: outputSchema('idiomatic-refactorer'), - phases: [8], + phases: [7], copilotTools: ['read', 'edit'], claudeTools: CLAUDE_TOOLS, }, diff --git a/src/agents/types.ts b/src/agents/types.ts index caa6382..9f82ba3 100644 --- a/src/agents/types.ts +++ b/src/agents/types.ts @@ -229,7 +229,7 @@ export interface AgentContext { /** * Execution-topology context passed to planning agents (`migration-planner`, * `migration-planner`) so they can tailor task granularity, grouping, and - * dependency design to the actual Phase 5 execution mode. + * dependency design to the actual Phase 4 execution mode. 
* * Agents that receive this in their `payload.executionStrategy` can, for * example, co-locate related files into the same wave-friendly grouping, @@ -237,7 +237,7 @@ export interface AgentContext { * calibrate task complexity against the available recovery budget. */ export interface ExecutionStrategy { - /** Phase 5 execution mode: `'per-task'` (serial) or `'wave-barrier'` (concurrent waves). */ + /** Phase 4 execution mode: `'per-task'` (serial) or `'wave-barrier'` (concurrent waves). */ executionMode: 'per-task' | 'wave-barrier'; /** Maximum number of agent subprocesses running in parallel. */ @@ -277,7 +277,7 @@ export interface ExecutionStrategy { * Location details describing which wave/task/check failed and needs remediation. */ export interface RemediationTargetContext { - /** Phase 5 wave number when the failure occurred. */ + /** Phase 4 wave number when the failure occurred. */ wave?: number; /** Task identifier associated with the failure. */ taskId?: string; @@ -350,7 +350,7 @@ export interface PriorRecoveryAttempt { // ─── Terminal Exhaustion Contracts ──────────────────────────────────────────── -/** Canonical terminal exhaustion reason codes for Phase 5 fail-fast outcomes. */ +/** Canonical terminal exhaustion reason codes for Phase 4 fail-fast outcomes. */ export type TerminalReasonCode = | 'wave-convergence-exhausted' | 'task-retries-exhausted' @@ -362,7 +362,7 @@ export type TerminalReasonCode = /** * A discrete E2E test suite definition extracted from the e2e-test-crafter's * plan output. Each brief describes one test suite that will be handed to - * an independent `test-writer` agent invocation during Phase 7 fan-out. + * an independent `test-writer` agent invocation during Phase 6 fan-out. */ export interface E2eSuiteBrief { /** Unique suite identifier, e.g. `"suite-001"`. 
*/ diff --git a/src/budget/token-tracker.ts b/src/budget/token-tracker.ts index 35e5376..0cff426 100644 --- a/src/budget/token-tracker.ts +++ b/src/budget/token-tracker.ts @@ -50,7 +50,7 @@ export class TokenTracker { /** * Return token usage keyed by task ID. * Only contains entries for invocations where a taskId was provided - * (typically Phase 5 migration tasks). + * (typically Phase 4 migration tasks). */ getByTask(): Record { return { ...this.byTask }; diff --git a/src/config/schema.ts b/src/config/schema.ts index f286053..3ab370a 100644 --- a/src/config/schema.ts +++ b/src/config/schema.ts @@ -49,20 +49,20 @@ export const MigrationConfigSchema = z.object({ */ buildConcurrency: z.number().int().min(0).max(10).default(1), /** - * Maximum number of concurrent test-writer suites in Phase 7 fan-out. + * Maximum number of concurrent test-writer suites in Phase 6 fan-out. * Controls how many E2E test suites can be generated in parallel, - * separate from the general Phase 5 parallelism (`maxParallelAgents`). + * separate from the general Phase 4 parallelism (`maxParallelAgents`). * When omitted, defaults to the value of `maxParallelAgents`. */ maxE2eSuiteConcurrency: z.number().int().min(1).max(10).optional(), /** - * Phase 5 execution strategy. + * Phase 4 execution strategy. * - `per-task`: existing behavior (migrate + validate task-by-task). * - `wave-barrier`: migrate in waves, then validate between waves. */ executionMode: z.enum(['per-task', 'wave-barrier']).default('per-task'), /** - * Controls for Phase 5 wave/barrier execution mode. + * Controls for Phase 4 wave/barrier execution mode. * These values are ignored in `per-task` mode. */ waveControl: z.object({ @@ -74,11 +74,11 @@ export const MigrationConfigSchema = z.object({ * Whether to continue executing independent tasks when one is blocked. * When `true` (default), the orchestrator skips blocked tasks and their * dependents, continuing with any remaining ready tasks. 
- * When `false`, Phase 5 halts on the first blocked task. + * When `false`, Phase 4 halts on the first blocked task. */ continueOnBlocked: z.boolean().default(true), /** - * Maximum number of blocked tasks before Phase 5 is halted. + * Maximum number of blocked tasks before Phase 4 is halted. * Only applies when `continueOnBlocked` is `true`. Default: unlimited (0). */ maxBlockedTasks: z.number().int().min(0).default(1), @@ -93,9 +93,9 @@ export const MigrationConfigSchema = z.object({ maxInfraRetries: z.number().int().min(0).max(10).default(3), /** - * Options for the optional idiomatic refactor phase (Phase 8). + * Options for the optional idiomatic refactor phase (Phase 7). * When enabled, the idiomatic-reviewer and idiomatic-refactorer agents - * run after Phase 7 to improve code idiomaticness. + * run after Phase 6 to improve code idiomaticness. */ idiomaticRefactor: z.object({ enabled: z.boolean().default(false), @@ -200,7 +200,7 @@ export const MigrationConfigSchema = z.object({ * * When enabled, AAMF ensures `target.outputPath` is a Git repository and * creates granular commits during migration (per code-modifying agent and - * per completed Phase 5 task). + * per completed Phase 4 task). */ git: z.object({ /** Enable automatic git init/add/commit operations. */ @@ -209,7 +209,7 @@ export const MigrationConfigSchema = z.object({ autoInit: z.boolean().default(true), /** Commit after successful code-modifying agent invocations. */ commitByAgent: z.boolean().default(true), - /** Commit after each successfully completed Phase 5 task. */ + /** Commit after each successfully completed Phase 4 task. */ commitPerTask: z.boolean().default(true), /** Allow empty git commits for task-level markers when no files changed. 
*/ allowEmptyTaskCommits: z.boolean().default(true), diff --git a/src/core/checkpoint.ts b/src/core/checkpoint.ts index 7a5d2e2..d5cde62 100644 --- a/src/core/checkpoint.ts +++ b/src/core/checkpoint.ts @@ -16,24 +16,24 @@ export interface Phase4TaskSubstepState { lastSuccessfulStep?: string; } -export interface Phase5Cursor { +export interface Phase4Cursor { tasks: Record; } -export interface Phase6Cursor { +export interface Phase5Cursor { iteration: number; fixIndex: number; lastSuccessfulStep?: string; hadUnresolvedFixes?: boolean; } -export interface Phase7Cursor { +export interface Phase6Cursor { completedAgents: string[]; completedSuites?: string[]; lastSuccessfulStep?: string; } -export interface Phase8Cursor { +export interface Phase7Cursor { iteration: number; issueIndex: number; currentFile?: string; @@ -41,10 +41,10 @@ export interface Phase8Cursor { } export interface PhaseCursorMap { + '4'?: Phase4Cursor; '5'?: Phase5Cursor; '6'?: Phase6Cursor; '7'?: Phase7Cursor; - '8'?: Phase8Cursor; } export interface CheckpointState { @@ -71,12 +71,12 @@ export interface CheckpointState { phase3aComplete?: boolean; /** True once the target-repo scaffold has been generated from compilation units. */ scaffoldComplete?: boolean; - completedPhase3Groups?: string[]; + completedPhase2Groups?: string[]; /** Number of JSONL metric records written; used to skip on resume. */ metricsCount: number; /** Source fingerprint from last successful Phase 0 build; used to skip re-indexing on resume. */ phase0Fingerprint?: string; - /** Terminal Phase 5 exhaustion metadata, when execution stopped fail-fast. */ + /** Terminal Phase 4 exhaustion metadata, when execution stopped fail-fast. */ terminalExhaustion?: TerminalExhaustionState; /** Persisted waiver records for adjudicated false-positive findings. 
*/ adjudicationWaivers?: AdjudicationWaiverRecord[]; @@ -86,8 +86,8 @@ export interface CheckpointState { phaseCursors?: PhaseCursorMap; /** Top-level flow checkpoint snapshot (managed by AamfFlowCheckpointAdapter). */ __flowCheckpoint?: unknown; - /** Phase 5 nested flow checkpoint snapshot (managed by Phase5CheckpointAdapter). */ - __phase5FlowCheckpoint?: unknown; + /** Phase 4 nested flow checkpoint snapshot (managed by Phase4CheckpointAdapter). */ + __phase4FlowCheckpoint?: unknown; } export interface CheckpointFailedTask { @@ -234,12 +234,12 @@ export class CheckpointManager { // Remove from blocked if it was there state.blockedTasks = state.blockedTasks.filter(id => id !== taskId); state.phaseCursors ??= {}; - state.phaseCursors['5'] ??= { tasks: {} }; - state.phaseCursors['5'].tasks[taskId] ??= { completedSubsteps: [] }; - if (!state.phaseCursors['5'].tasks[taskId].completedSubsteps.includes('completed')) { - state.phaseCursors['5'].tasks[taskId].completedSubsteps.push('completed'); + state.phaseCursors['4'] ??= { tasks: {} }; + state.phaseCursors['4'].tasks[taskId] ??= { completedSubsteps: [] }; + if (!state.phaseCursors['4'].tasks[taskId].completedSubsteps.includes('completed')) { + state.phaseCursors['4'].tasks[taskId].completedSubsteps.push('completed'); } - state.phaseCursors['5'].tasks[taskId].lastSuccessfulStep = 'completed'; + state.phaseCursors['4'].tasks[taskId].lastSuccessfulStep = 'completed'; await this.save(state); } @@ -305,7 +305,7 @@ export class CheckpointManager { await this.save(state); } - /** Persist terminal exhaustion metadata for fail-fast Phase 5 exits. */ + /** Persist terminal exhaustion metadata for fail-fast Phase 4 exits. */ async setTerminalExhaustion(terminalExhaustion: TerminalExhaustionState): Promise { const state = this.getState(); state.terminalExhaustion = terminalExhaustion; @@ -313,13 +313,13 @@ export class CheckpointManager { } /** - * Mark Phase 4 migration strategy (migration-planner) as complete. 
+ * Mark Phase 3 migration strategy (migration-planner) as complete. * Subsequent resumes will skip re-running the migration-planner. */ async completePhase3a(): Promise { const state = this.getState(); state.phase3aComplete = true; - state.completedPhase3Groups ??= []; + state.completedPhase2Groups ??= []; await this.save(state); } @@ -336,9 +336,9 @@ export class CheckpointManager { /** Record that a specific module group finished successfully. On resume, completed groups are skipped. */ async completePhase3Group(groupId: string): Promise { const state = this.getState(); - state.completedPhase3Groups ??= []; - if (!state.completedPhase3Groups.includes(groupId)) { - state.completedPhase3Groups.push(groupId); + state.completedPhase2Groups ??= []; + if (!state.completedPhase2Groups.includes(groupId)) { + state.completedPhase2Groups.push(groupId); } await this.save(state); } @@ -383,7 +383,7 @@ export class CheckpointManager { completedTaskDurationsMs: [], phase3aComplete: false, scaffoldComplete: false, - completedPhase3Groups: [], + completedPhase2Groups: [], metricsCount: 0, terminalExhaustion: undefined, adjudicationWaivers: [], @@ -397,20 +397,20 @@ export class CheckpointManager { state.completedTaskDurationsMs ??= []; state.phase3aComplete ??= false; state.scaffoldComplete ??= false; - state.completedPhase3Groups ??= []; + state.completedPhase2Groups ??= []; state.metricsCount ??= 0; state.phase0Fingerprint ??= undefined; state.terminalExhaustion ??= undefined; state.adjudicationWaivers ??= []; state.adjudicationEvents ??= []; state.phaseCursors ??= {}; - state.phaseCursors['5'] ??= { tasks: {} }; - state.phaseCursors['6'] ??= { iteration: 0, fixIndex: 0 }; - state.phaseCursors['7'] ??= { completedAgents: [] }; - state.phaseCursors['8'] ??= { iteration: 0, issueIndex: 0 }; - state.phaseCursors['5'].tasks ??= {}; - state.phaseCursors['7'].completedAgents ??= []; - state.phaseCursors['7'].completedSuites ??= []; + state.phaseCursors['4'] ??= { tasks: {} }; + 
state.phaseCursors['5'] ??= { iteration: 0, fixIndex: 0 }; + state.phaseCursors['6'] ??= { completedAgents: [] }; + state.phaseCursors['7'] ??= { iteration: 0, issueIndex: 0 }; + state.phaseCursors['4'].tasks ??= {}; + state.phaseCursors['6'].completedAgents ??= []; + state.phaseCursors['6'].completedSuites ??= []; } private async resolveCheckpointReadPath(): Promise { diff --git a/src/core/kb-server-process.ts b/src/core/kb-server-process.ts index c7a5018..e812c2d 100644 --- a/src/core/kb-server-process.ts +++ b/src/core/kb-server-process.ts @@ -112,7 +112,7 @@ export class KbServerProcess { const transport = new StreamableHTTPServerTransport({ sessionIdGenerator: () => randomUUID(), }); - const mcpServer = createLoreMcpServer(db, dbPath, embedder, serverOptions); + const mcpServer = await createLoreMcpServer(db, dbPath, embedder, serverOptions); await mcpServer.connect(transport); // Store session once the transport assigns an ID. diff --git a/src/core/progress.ts b/src/core/progress.ts index b7a13e1..57cde27 100644 --- a/src/core/progress.ts +++ b/src/core/progress.ts @@ -45,12 +45,12 @@ export class ProgressWriter { this.retryTargets = []; this.terminalExhaustion = undefined; this.adjudicationEvents = []; - this.phases.set(1, { name: 'Knowledge Base Construction', status: 'pending' }); - this.phases.set(2, { name: 'Migration Planning', status: 'pending' }); - this.phases.set(3, { name: 'Iterative Migration', status: 'pending' }); - this.phases.set(4, { name: 'Final Parity Verification', status: 'pending' }); - this.phases.set(5, { name: 'E2E Testing & Documentation', status: 'pending' }); - this.phases.set(6, { name: 'Completion', status: 'pending' }); + this.phases.set(2, { name: 'Knowledge Base Construction', status: 'pending' }); + this.phases.set(3, { name: 'Migration Planning', status: 'pending' }); + this.phases.set(4, { name: 'Iterative Migration', status: 'pending' }); + this.phases.set(5, { name: 'Final Parity Verification', status: 'pending' }); + 
this.phases.set(6, { name: 'E2E Testing & Documentation', status: 'pending' }); + this.phases.set(8, { name: 'Completion', status: 'pending' }); await this.write(config.projectName); } @@ -58,10 +58,14 @@ export class ProgressWriter { /** Reconstruct progress state from a checkpoint (used on resume). */ reconstructFromCheckpoint(state: CheckpointState): void { // Ensure phase definitions exist - const phaseNames = ['Knowledge Base Construction', 'Migration Planning', 'Iterative Migration', 'Final Parity Verification', 'E2E Testing & Documentation', 'Completion']; - for (let i = 0; i < phaseNames.length; i++) { - if (!this.phases.has(i + 1)) { - this.phases.set(i + 1, { name: phaseNames[i]!, status: 'pending' }); + const phaseNames: [number, string][] = [ + [2, 'Knowledge Base Construction'], [3, 'Migration Planning'], + [4, 'Iterative Migration'], [5, 'Final Parity Verification'], + [6, 'E2E Testing & Documentation'], [8, 'Completion'], + ]; + for (const [id, name] of phaseNames) { + if (!this.phases.has(id)) { + this.phases.set(id, { name, status: 'pending' }); } } @@ -72,7 +76,7 @@ export class ProgressWriter { } // Mark current phase as in-progress - if (state.currentPhase <= 7) { + if (state.currentPhase <= 8) { const current = this.phases.get(state.currentPhase); if (current) current.status = 'in-progress'; } @@ -125,7 +129,7 @@ export class ProgressWriter { this.totalTasks = count; } - /** Update task progress within Phase 5 */ + /** Update task progress within Phase 4 */ async updateTask(taskId: string, status: string, details?: TaskDetails): Promise { this.tasks.set(taskId, { status, details }); await this.writeCurrentState(); @@ -267,7 +271,7 @@ export class ProgressWriter { md += '\n'; } - // Task progress bar (Phase 5) + // Task progress bar (Phase 4) if (this.totalTasks > 0) { md += `## Task Progress\n\n`; const migratedTasks = [...this.tasks.values()].filter(t => t.status === 'migrated').length; diff --git a/src/execution/task-queue.ts 
b/src/execution/task-queue.ts index ff3a913..ebadf02 100644 --- a/src/execution/task-queue.ts +++ b/src/execution/task-queue.ts @@ -15,7 +15,7 @@ export interface PipelinedTaskResult { } /** - * Dependency-aware task queue for Phase 5 execution. + * Dependency-aware task queue for Phase 4 execution. * * Tracks task completion and blocked status, and determines which tasks * are ready based on dependency satisfaction. Supports checkpoint resume diff --git a/src/flow/checkpoint-adapter.ts b/src/flow/checkpoint-adapter.ts index 47ec07e..7ebb3b5 100644 --- a/src/flow/checkpoint-adapter.ts +++ b/src/flow/checkpoint-adapter.ts @@ -50,15 +50,15 @@ export class AamfFlowCheckpointAdapter implements FlowCheckpointAdapter { +export class Phase4CheckpointAdapter implements FlowCheckpointAdapter { constructor(private readonly checkpoint: CheckpointManager) {} async load(flowId: string): Promise | null> { const state = this.checkpoint.getState(); - const stored = state.__phase5FlowCheckpoint; + const stored = state.__phase4FlowCheckpoint; if (!stored || typeof stored !== 'object') return null; const snapshot = stored as FlowCheckpointSnapshot; if (snapshot.flowId !== flowId) return null; @@ -77,7 +77,7 @@ export class Phase5CheckpointAdapter implements FlowCheckpointAdapter; /** Cumulative projected escalation cost (USD) */ escalationCostUsd: number; - /** Phase 5 observability counters */ - phase5Snapshot?: Phase4MetricsSnapshot; + /** Phase 4 observability counters */ + phase4Snapshot?: Phase4MetricsSnapshot; /** When true, per-task/per-agent git commits are suppressed (wave-barrier mode) */ deferGitCommits: boolean; } diff --git a/src/flow/index.ts b/src/flow/index.ts index 5e47130..bade08c 100644 --- a/src/flow/index.ts +++ b/src/flow/index.ts @@ -5,7 +5,7 @@ */ export { migrationFlow, buildFlowUpToPhase, nodeIdToPhase } from './migration-flow.js'; -export { AamfFlowCheckpointAdapter, Phase5CheckpointAdapter } from './checkpoint-adapter.js'; +export { 
AamfFlowCheckpointAdapter, Phase4CheckpointAdapter } from './checkpoint-adapter.js'; export type { MigrationFlowContext } from './context.js'; export type { TaskGraphOutput } from './steps/task-graph.js'; export type { diff --git a/src/flow/migration-flow.ts b/src/flow/migration-flow.ts index 817a610..d799a36 100644 --- a/src/flow/migration-flow.ts +++ b/src/flow/migration-flow.ts @@ -60,13 +60,13 @@ function budgetOk(ctx: { context: MigrationFlowContext }): boolean { * * Phase 0 → KB Indexing (deterministic) * Phase 1 → Task Graph Construction (deterministic) - * Phase 3 → Knowledge Base Construction → budget gate - * Phase 4 → Migration Strategy → budget gate - * Phase 5 → Iterative Migration → budget gate - * Phase 6 → Final Parity (loop: check → fix, until no fixes or max 3) - * Phase 7 → E2E Testing & Documentation (parallel: suites + docs) - * Phase 8 → Idiomatic Refactor (conditional → loop: review → refactor) - * Phase 9 → Completion + * Phase 2 → Knowledge Base Construction → budget gate + * Phase 3 → Migration Strategy → budget gate + * Phase 4 → Iterative Migration → budget gate + * Phase 5 → Final Parity (loop: check → fix, until no fixes or max 3) + * Phase 6 → E2E Testing & Documentation (parallel: suites + docs) + * Phase 7 → Idiomatic Refactor (conditional → loop: review → refactor) + * Phase 8 → Completion */ export const migrationFlow: FlowDefinition = defineFlow( 'aamf-migration', @@ -84,47 +84,47 @@ export const migrationFlow: FlowDefinition = defineFlow({ id: 'kb-construction', dependsOn: ['task-graph-construction'], run: launchKnowledgeBuilder, }), gate({ - id: 'budget-check-3', + id: 'budget-check-2', dependsOn: ['kb-construction'], evaluate: budgetOk, }), - // ── Phase 4 — Migration Strategy + budget gate ── + // ── Phase 3 — Migration Strategy + budget gate ── step({ id: 'migration-planning', - dependsOn: ['budget-check-3'], + dependsOn: ['budget-check-2'], run: launchMigrationPlanner, }), gate({ - id: 'budget-check-4', + id: 
'budget-check-3', dependsOn: ['migration-planning'], evaluate: budgetOk, }), - // ── Phase 5 — Iterative Migration + budget gate ── + // ── Phase 4 — Iterative Migration + budget gate ── step({ id: 'iterative-migration', - dependsOn: ['budget-check-4'], + dependsOn: ['budget-check-3'], input: fromStep('task-graph-construction'), run: (ctx, input) => executeIterativeMigration(ctx, input as TaskGraphOutput | undefined), }), gate({ - id: 'budget-check-5', + id: 'budget-check-4', dependsOn: ['iterative-migration'], evaluate: budgetOk, }), - // ── Phase 6 — Final Parity Verification (loopback) ── + // ── Phase 5 — Final Parity Verification (loopback) ── loop({ id: 'final-parity-loop', - dependsOn: ['budget-check-5'], + dependsOn: ['budget-check-4'], maxIterations: 3, do: [ step({ @@ -135,7 +135,7 @@ export const migrationFlow: FlowDefinition = defineFlow({ id: 'e2e-test-plan', dependsOn: ['final-parity-loop'], @@ -160,7 +160,7 @@ export const migrationFlow: FlowDefinition = defineFlow({ id: 'idiomatic-refactor-gate', dependsOn: ['finalization'], @@ -180,14 +180,14 @@ export const migrationFlow: FlowDefinition = defineFlow({ id: 'completion', dependsOn: ['idiomatic-refactor-gate'], run: finalizeAndReport, }), ], - 'AAMF migration pipeline — 9 phases (0-1, 3-9) from KB indexing through completion', + 'AAMF migration pipeline — 9 phases (0-8) from KB indexing through completion', ); /** @@ -197,22 +197,22 @@ export function nodeIdToPhase(nodeId: string): number { const map: Record = { 'kb-index': 0, 'task-graph-construction': 1, - 'kb-construction': 3, + 'kb-construction': 2, + 'budget-check-2': 2, + 'migration-planning': 3, 'budget-check-3': 3, - 'migration-planning': 4, + 'iterative-migration': 4, 'budget-check-4': 4, - 'iterative-migration': 5, - 'budget-check-5': 5, - 'final-parity-loop': 6, - 'final-parity-iteration': 6, - 'e2e-test-plan': 7, - 'finalization': 7, - 'e2e-suite-writers': 7, - 'documentation-writer': 7, - 'idiomatic-refactor-gate': 8, - 'idiomatic-loop': 
8, - 'idiomatic-iteration': 8, - 'completion': 9, + 'final-parity-loop': 5, + 'final-parity-iteration': 5, + 'e2e-test-plan': 6, + 'finalization': 6, + 'e2e-suite-writers': 6, + 'documentation-writer': 6, + 'idiomatic-refactor-gate': 7, + 'idiomatic-loop': 7, + 'idiomatic-iteration': 7, + 'completion': 8, }; return map[nodeId] ?? -1; } @@ -221,17 +221,16 @@ export function nodeIdToPhase(nodeId: string): number { * Ordered list of phase boundary node IDs for `--phase` filtering. * Each entry is the last top-level node ID belonging to that phase. */ -const PHASE_BOUNDARY_NODE_IDS: readonly (string | undefined)[] = [ +const PHASE_BOUNDARY_NODE_IDS: readonly string[] = [ 'kb-index', // Phase 0 'task-graph-construction', // Phase 1 - undefined, // Phase 2 (removed — impact assessor) + 'budget-check-2', // Phase 2 'budget-check-3', // Phase 3 'budget-check-4', // Phase 4 - 'budget-check-5', // Phase 5 - 'final-parity-loop', // Phase 6 - 'finalization', // Phase 7 - 'idiomatic-refactor-gate', // Phase 8 - 'completion', // Phase 9 + 'final-parity-loop', // Phase 5 + 'finalization', // Phase 6 + 'idiomatic-refactor-gate', // Phase 7 + 'completion', // Phase 8 ]; /** @@ -239,7 +238,7 @@ const PHASE_BOUNDARY_NODE_IDS: readonly (string | undefined)[] = [ * Used to implement `--phase N` (run/resume up to and including phase N). 
*/ export function buildFlowUpToPhase(maxPhase: number): FlowDefinition { - if (maxPhase >= 9) return migrationFlow; + if (maxPhase >= 8) return migrationFlow; const lastNodeId = PHASE_BOUNDARY_NODE_IDS[maxPhase]; if (!lastNodeId) return migrationFlow; diff --git a/src/flow/steps/completion.ts b/src/flow/steps/completion.ts index 912a558..dd40080 100644 --- a/src/flow/steps/completion.ts +++ b/src/flow/steps/completion.ts @@ -1,5 +1,5 @@ /** - * Phase 9 — Completion + * Phase 8 — Completion */ import type { FlowExecutionContext } from '@cadre-dev/framework/flow'; @@ -13,5 +13,5 @@ export async function finalizeAndReport( const start = Date.now(); await ctx.progress.appendEvent('Migration pipeline complete — finalizing'); ctx.logger.info('All phases complete'); - return { phase: 9, name: 'Completion', success: true, outputPath: ctx.paths.root, duration: Date.now() - start }; + return { phase: 8, name: 'Completion', success: true, outputPath: ctx.paths.root, duration: Date.now() - start }; } diff --git a/src/flow/steps/final-parity.ts b/src/flow/steps/final-parity.ts index 92dd5e3..73f7b37 100644 --- a/src/flow/steps/final-parity.ts +++ b/src/flow/steps/final-parity.ts @@ -1,5 +1,5 @@ /** - * Phase 6 — Final Parity Verification + * Phase 5 — Final Parity Verification * * Exported as a single-iteration step for the `loop()` DSL node in * migration-flow.ts. 
Each iteration runs final-parity-checker, applies @@ -14,7 +14,7 @@ import { toAgentRemediationContext } from '../../agents/types.js'; import { buildInvocation, launchAgentWithEvents, recordTokens, commitForAgent, buildRemediationContext, - getPhase6Cursor, savePhase6Cursor, + getPhase5Cursor, savePhase5Cursor, assertPhaseSuccess, } from './shared.js'; @@ -26,17 +26,17 @@ export async function runFinalParityIteration( flowCtx: FlowExecutionContext, ): Promise<{ fixes: number }> { const ctx = flowCtx.context; - const phase6Cursor = getPhase6Cursor(ctx); + const phase5Cursor = getPhase5Cursor(ctx); // Run final-parity-checker - const ctxFile = await ctx.contextBuilder.buildContext('final-parity-checker', 6); - const inv = buildInvocation(ctx, 'final-parity-checker', ctxFile, 6); + const ctxFile = await ctx.contextBuilder.buildContext('final-parity-checker', 5); + const inv = buildInvocation(ctx, 'final-parity-checker', ctxFile, 5); const result = await launchAgentWithEvents(ctx, inv); - recordTokens(ctx, result, 6); + recordTokens(ctx, result, 5); if (!result.success) { const failResult: PhaseResult = { - phase: 6, name: 'Final Parity Verification', success: false, + phase: 5, name: 'Final Parity Verification', success: false, duration: 0, error: result.error, exitCode: result.exitCode ?? 
undefined, stderr: result.stderr, }; assertPhaseSuccess(failResult); @@ -49,7 +49,7 @@ export async function runFinalParityIteration( } else { ctx.logger.warn('Final-parity-checker structured output unavailable'); const failResult: PhaseResult = { - phase: 6, name: 'Final Parity Verification', success: false, + phase: 5, name: 'Final Parity Verification', success: false, duration: 0, error: 'No structured output with fixes array', }; assertPhaseSuccess(failResult); @@ -58,8 +58,8 @@ export async function runFinalParityIteration( if (fixes.length === 0) { ctx.logger.info('Final parity check passed — no fixes needed'); - await savePhase6Cursor(ctx, { - iteration: phase6Cursor.iteration + 1, fixIndex: 0, + await savePhase5Cursor(ctx, { + iteration: phase5Cursor.iteration + 1, fixIndex: 0, lastSuccessfulStep: 'no-fixes', }); return { fixes: 0 }; @@ -68,12 +68,12 @@ export async function runFinalParityIteration( ctx.logger.info(`Final parity found ${fixes.length} issue(s), applying fixes`); // Apply fixes - const resumeFixIndex = Math.max(0, phase6Cursor.fixIndex); + const resumeFixIndex = Math.max(0, phase5Cursor.fixIndex); for (let fixIndex = resumeFixIndex; fixIndex < fixes.length; fixIndex++) { const fix = fixes[fixIndex]!; - const fixTaskId = `fix-${phase6Cursor.iteration}-${fixIndex}`; - await savePhase6Cursor(ctx, { - iteration: phase6Cursor.iteration, fixIndex, + const fixTaskId = `fix-${phase5Cursor.iteration}-${fixIndex}`; + await savePhase5Cursor(ctx, { + iteration: phase5Cursor.iteration, fixIndex, lastSuccessfulStep: 'fix-started', }); @@ -87,7 +87,7 @@ export async function runFinalParityIteration( expectedSuccessCondition: `Parity issue resolved: ${fix.description}`, }); - const fixCtx = await ctx.contextBuilder.buildContext('code-migrator', 5, fixTaskId, { + const fixCtx = await ctx.contextBuilder.buildContext('code-migrator', 4, fixTaskId, { sourceFiles: fix.sourceFile ? [fix.sourceFile] : [], targetFiles: fix.targetFile ? 
[fix.targetFile] : [], taskScope: { @@ -97,21 +97,21 @@ export async function runFinalParityIteration( }, remediationContext: toAgentRemediationContext(fixRemediation), }); - const fixInv = buildInvocation(ctx, 'code-migrator', fixCtx, 6, fixTaskId); + const fixInv = buildInvocation(ctx, 'code-migrator', fixCtx, 5, fixTaskId); const fixResult = await launchAgentWithEvents(ctx, fixInv); - recordTokens(ctx, fixResult, 6); + recordTokens(ctx, fixResult, 5); if (fixResult.success) { - await commitForAgent(ctx, 'code-migrator', 6, fixTaskId); - await savePhase6Cursor(ctx, { - iteration: phase6Cursor.iteration, fixIndex: fixIndex + 1, + await commitForAgent(ctx, 'code-migrator', 5, fixTaskId); + await savePhase5Cursor(ctx, { + iteration: phase5Cursor.iteration, fixIndex: fixIndex + 1, lastSuccessfulStep: 'fix-applied', }); } } - await savePhase6Cursor(ctx, { - iteration: phase6Cursor.iteration + 1, fixIndex: 0, + await savePhase5Cursor(ctx, { + iteration: phase5Cursor.iteration + 1, fixIndex: 0, lastSuccessfulStep: 'iteration-complete', }); @@ -119,7 +119,7 @@ export async function runFinalParityIteration( } /** - * `until` predicate for the Phase 6 loop — returns true when no fixes + * `until` predicate for the Phase 5 loop — returns true when no fixes * were found in the last iteration (the loop node output). 
*/ export function noFixesNeeded( diff --git a/src/flow/steps/finalization.ts b/src/flow/steps/finalization.ts index dd65e0a..b25f81a 100644 --- a/src/flow/steps/finalization.ts +++ b/src/flow/steps/finalization.ts @@ -1,5 +1,5 @@ /** - * Phase 7 — E2E Testing & Documentation + * Phase 6 — E2E Testing & Documentation * * Split into three exported functions for the flow DSL: * - launchE2eTestCrafter: step() — creates e2e-test-plan.md @@ -18,7 +18,7 @@ import { fileExists } from '../../util/fs.js'; import { buildInvocation, launchAgentWithEvents, recordTokens, commitForAgent, isGitAutomationEnabled, - getPhase7Cursor, savePhase7Cursor, + getPhase6Cursor, savePhase6Cursor, assertPhaseSuccess, } from './shared.js'; @@ -28,28 +28,28 @@ export async function launchE2eTestCrafter( flowCtx: FlowExecutionContext, ): Promise<AgentResult> { const ctx = flowCtx.context; - const phase7Cursor = getPhase7Cursor(ctx); - const completedAgents = new Set(phase7Cursor.completedAgents); + const phase6Cursor = getPhase6Cursor(ctx); + const completedAgents = new Set(phase6Cursor.completedAgents); if (completedAgents.has('e2e-test-crafter')) { return { agent: 'e2e-test-crafter', workItemId: '', exitCode: 0, success: true, timedOut: false, duration: 0, stdout: '', stderr: '', tokenUsage: null, outputPath: '', outputExists: false, extensions: {} }; } const e2eCtx = await ctx.contextBuilder.buildContext('e2e-test-crafter', 6, undefined, { planOnly: true }); - const crafterResult = await launchAgentWithEvents(ctx, buildInvocation(ctx, 'e2e-test-crafter', e2eCtx, 7)); - recordTokens(ctx, crafterResult, 7); + const crafterResult = await launchAgentWithEvents(ctx, buildInvocation(ctx, 'e2e-test-crafter', e2eCtx, 6)); + recordTokens(ctx, crafterResult, 6); if (crafterResult.success) { - if (isGitAutomationEnabled(ctx)) await commitForAgent(ctx, 'e2e-test-crafter', 7); + if (isGitAutomationEnabled(ctx)) await commitForAgent(ctx, 'e2e-test-crafter', 6); completedAgents.add('e2e-test-crafter'); - await
savePhase7Cursor(ctx, { + await savePhase6Cursor(ctx, { completedAgents: Array.from(completedAgents), - completedSuites: Array.from(getPhase7Cursor(ctx).completedSuites), + completedSuites: Array.from(getPhase6Cursor(ctx).completedSuites), lastSuccessfulStep: 'completed-e2e-test-crafter', }); } else { assertPhaseSuccess({ - phase: 7, name: 'E2E Testing & Documentation', success: false, + phase: 6, name: 'E2E Testing & Documentation', success: false, duration: 0, error: crafterResult.error ?? 'e2e-test-crafter failed', }); } @@ -63,9 +63,9 @@ export async function launchE2eSuiteWriters( flowCtx: FlowExecutionContext, ): Promise<AgentResult[]> { const ctx = flowCtx.context; - const phase7Cursor = getPhase7Cursor(ctx); - const completedAgents = new Set(phase7Cursor.completedAgents); - const completedSuites = new Set(phase7Cursor.completedSuites); + const phase6Cursor = getPhase6Cursor(ctx); + const completedAgents = new Set(phase6Cursor.completedAgents); + const completedSuites = new Set(phase6Cursor.completedSuites); const results: AgentResult[] = []; const planPath = join(ctx.config.target.outputPath, 'e2e', 'e2e-test-plan.md'); @@ -103,7 +103,7 @@ export async function launchE2eSuiteWriters( if (!allSuccess) { const errors = results.filter(r => !r.success).map(r => r.error); assertPhaseSuccess({ - phase: 7, name: 'E2E Testing & Documentation', success: false, + phase: 6, name: 'E2E Testing & Documentation', success: false, duration: 0, error: errors.join('; '), }); } @@ -117,28 +117,28 @@ export async function launchDocWriter( flowCtx: FlowExecutionContext, ): Promise<AgentResult> { const ctx = flowCtx.context; - const phase7Cursor = getPhase7Cursor(ctx); - const completedAgents = new Set(phase7Cursor.completedAgents); + const phase6Cursor = getPhase6Cursor(ctx); + const completedAgents = new Set(phase6Cursor.completedAgents); if (completedAgents.has('documentation-writer')) { return { agent: 'documentation-writer', workItemId: '', exitCode: 0, success: true, timedOut: false, duration: 0,
stdout: '', stderr: '', tokenUsage: null, outputPath: '', outputExists: false, extensions: {} }; } - const docCtx = await ctx.contextBuilder.buildContext('documentation-writer', 7); - const docResult = await launchAgentWithEvents(ctx, buildInvocation(ctx, 'documentation-writer', docCtx, 7)); - recordTokens(ctx, docResult, 7); + const docCtx = await ctx.contextBuilder.buildContext('documentation-writer', 6); + const docResult = await launchAgentWithEvents(ctx, buildInvocation(ctx, 'documentation-writer', docCtx, 6)); + recordTokens(ctx, docResult, 6); if (docResult.success) { - if (isGitAutomationEnabled(ctx)) await commitForAgent(ctx, 'documentation-writer', 7); + if (isGitAutomationEnabled(ctx)) await commitForAgent(ctx, 'documentation-writer', 6); completedAgents.add('documentation-writer'); - await savePhase7Cursor(ctx, { + await savePhase6Cursor(ctx, { completedAgents: Array.from(completedAgents), - completedSuites: Array.from(getPhase7Cursor(ctx).completedSuites), + completedSuites: Array.from(getPhase6Cursor(ctx).completedSuites), lastSuccessfulStep: 'completed-documentation-writer', }); } else { assertPhaseSuccess({ - phase: 7, name: 'E2E Testing & Documentation', success: false, + phase: 6, name: 'E2E Testing & Documentation', success: false, duration: 0, error: docResult.error ?? 
'documentation-writer failed', }); } @@ -158,14 +158,14 @@ async function executeSuiteWithRetry( const suiteCtx = await ctx.contextBuilder.buildContext('test-writer', 6, suite.id, { e2eSuiteBrief: suite }); const retryExec = new RetryExecutor(inv => launchAgentWithEvents(ctx, inv), ctx.logger); const suiteResult = await retryExec.executeWithRetry( - buildInvocation(ctx, 'test-writer', suiteCtx, 7, suite.id), + buildInvocation(ctx, 'test-writer', suiteCtx, 6, suite.id), { maxAttempts: ctx.config.options.maxRetriesPerTask }, ); - recordTokens(ctx, suiteResult, 7); + recordTokens(ctx, suiteResult, 6); if (suiteResult.success) { - if (isGitAutomationEnabled(ctx)) await commitForAgent(ctx, 'test-writer', 7, suite.id, suite.name); + if (isGitAutomationEnabled(ctx)) await commitForAgent(ctx, 'test-writer', 6, suite.id, suite.name); completedSuites.add(suite.id); - await savePhase7Cursor(ctx, { + await savePhase6Cursor(ctx, { completedAgents: Array.from(completedAgents), completedSuites: Array.from(completedSuites), lastSuccessfulStep: `completed-suite-${suite.id}`, @@ -194,7 +194,7 @@ async function executeParallelSuiteFanOut( const invocations = []; for (const suite of budgetFiltered) { const suiteCtx = await ctx.contextBuilder.buildContext('test-writer', 6, suite.id, { e2eSuiteBrief: suite }); - invocations.push(buildInvocation(ctx, 'test-writer', suiteCtx, 7, suite.id)); + invocations.push(buildInvocation(ctx, 'test-writer', suiteCtx, 6, suite.id)); } const retryExec = new RetryExecutor(inv => launchAgentWithEvents(ctx, inv), ctx.logger); const parallel = new ParallelExecutor( @@ -208,10 +208,10 @@ async function executeParallelSuiteFanOut( const suite = budgetFiltered[i]!; const result = parallelResults[i]!; results.push(result); - recordTokens(ctx, result, 7); + recordTokens(ctx, result, 6); if (result.success) completedSuites.add(suite.id); } - await savePhase7Cursor(ctx, { + await savePhase6Cursor(ctx, { completedAgents: Array.from(completedAgents), 
completedSuites: Array.from(completedSuites), lastSuccessfulStep: completedSuites.size === allSuites.length diff --git a/src/flow/steps/idiomatic-refactor.ts b/src/flow/steps/idiomatic-refactor.ts index d5d15f5..c306f28 100644 --- a/src/flow/steps/idiomatic-refactor.ts +++ b/src/flow/steps/idiomatic-refactor.ts @@ -1,5 +1,5 @@ /** - * Phase 8 — Idiomatic Refactor + * Phase 7 — Idiomatic Refactor * * Exported as a single-iteration step for the `loop()` DSL node. * Each iteration runs idiomatic-reviewer, then idiomatic-refactorer for @@ -12,7 +12,7 @@ import type { PhaseResult } from '../../agents/types.js'; import { buildInvocation, launchAgentWithEvents, recordTokens, commitForAgent, runCommand, - getPhase8Cursor, savePhase8Cursor, + getPhase7Cursor, savePhase7Cursor, assertPhaseSuccess, } from './shared.js'; @@ -24,18 +24,18 @@ export async function runIdiomaticReviewIteration( flowCtx: FlowExecutionContext, ): Promise<{ issues: number }> { const ctx = flowCtx.context; - const phase8Cursor = getPhase8Cursor(ctx); + const phase7Cursor = getPhase7Cursor(ctx); const start = Date.now(); // Review - const reviewCtx = await ctx.contextBuilder.buildContext('idiomatic-reviewer', 8); - const reviewInv = buildInvocation(ctx, 'idiomatic-reviewer', reviewCtx, 8); + const reviewCtx = await ctx.contextBuilder.buildContext('idiomatic-reviewer', 7); + const reviewInv = buildInvocation(ctx, 'idiomatic-reviewer', reviewCtx, 7); const reviewResult = await launchAgentWithEvents(ctx, reviewInv); - recordTokens(ctx, reviewResult, 8); + recordTokens(ctx, reviewResult, 7); if (!reviewResult.success) { const failResult: PhaseResult = { - phase: 8, name: 'Idiomatic Refactor', success: false, + phase: 7, name: 'Idiomatic Refactor', success: false, duration: Date.now() - start, error: reviewResult.error, exitCode: reviewResult.exitCode ?? 
undefined, stderr: reviewResult.stderr, }; @@ -48,7 +48,7 @@ export async function runIdiomaticReviewIteration( } else { ctx.logger.warn('Idiomatic-reviewer structured output unavailable'); const failResult: PhaseResult = { - phase: 8, name: 'Idiomatic Refactor', success: false, + phase: 7, name: 'Idiomatic Refactor', success: false, duration: Date.now() - start, error: 'No structured issues output', }; assertPhaseSuccess(failResult); @@ -57,55 +57,55 @@ export async function runIdiomaticReviewIteration( if (issues.length === 0) { ctx.logger.info('Idiomatic review found no issues'); - await savePhase8Cursor(ctx, { - iteration: phase8Cursor.iteration + 1, issueIndex: 0, + await savePhase7Cursor(ctx, { + iteration: phase7Cursor.iteration + 1, issueIndex: 0, lastSuccessfulStep: 'no-issues', }); return { issues: 0 }; } ctx.logger.info(`Idiomatic review found ${issues.length} issue(s), refactoring`); - const resumeIssueIndex = Math.max(0, phase8Cursor.issueIndex); + const resumeIssueIndex = Math.max(0, phase7Cursor.issueIndex); for (let issueIndex = resumeIssueIndex; issueIndex < issues.length; issueIndex++) { const issue = issues[issueIndex]!; - await savePhase8Cursor(ctx, { - iteration: phase8Cursor.iteration, issueIndex, + await savePhase7Cursor(ctx, { + iteration: phase7Cursor.iteration, issueIndex, currentFile: issue.file, lastSuccessfulStep: 'refactor-started', }); - const refactorCtx = await ctx.contextBuilder.buildContext('idiomatic-refactorer', 8, undefined, { + const refactorCtx = await ctx.contextBuilder.buildContext('idiomatic-refactorer', 7, undefined, { targetFile: issue.file, issue, }); - const refactorInv = buildInvocation(ctx, 'idiomatic-refactorer', refactorCtx, 8); + const refactorInv = buildInvocation(ctx, 'idiomatic-refactorer', refactorCtx, 7); const refactorResult = await launchAgentWithEvents(ctx, refactorInv); - recordTokens(ctx, refactorResult, 8); + recordTokens(ctx, refactorResult, 7); if (refactorResult.success) { if 
(ctx.config.target.formatCommand) { - const fmtResult = await runCommand(ctx, 'format', ctx.config.target.formatCommand, `phase8-${issue.file}`); - if (!fmtResult.success) ctx.logger.warn(`Phase 8 format failed for ${issue.file}: ${fmtResult.error ?? 'unknown'}`); + const fmtResult = await runCommand(ctx, 'format', ctx.config.target.formatCommand, `phase7-${issue.file}`); + if (!fmtResult.success) ctx.logger.warn(`Phase 7 format failed for ${issue.file}: ${fmtResult.error ?? 'unknown'}`); } - await commitForAgent(ctx, 'idiomatic-refactorer', 8, issue.file); + await commitForAgent(ctx, 'idiomatic-refactorer', 7, issue.file); if (ctx.config.target.lintCommand) { - const lintResult = await runCommand(ctx, 'lint', ctx.config.target.lintCommand, `phase8-${issue.file}`); - if (!lintResult.success) ctx.logger.warn(`Phase 8 lint failed for ${issue.file}: ${lintResult.error ?? 'unknown'}`); + const lintResult = await runCommand(ctx, 'lint', ctx.config.target.lintCommand, `phase7-${issue.file}`); + if (!lintResult.success) ctx.logger.warn(`Phase 7 lint failed for ${issue.file}: ${lintResult.error ?? 
'unknown'}`); } - await savePhase8Cursor(ctx, { - iteration: phase8Cursor.iteration, issueIndex: issueIndex + 1, + await savePhase7Cursor(ctx, { + iteration: phase7Cursor.iteration, issueIndex: issueIndex + 1, lastSuccessfulStep: 'refactor-complete', }); } else { const failResult: PhaseResult = { - phase: 8, name: 'Idiomatic Refactor', success: false, + phase: 7, name: 'Idiomatic Refactor', success: false, duration: Date.now() - start, }; assertPhaseSuccess(failResult); } } - await savePhase8Cursor(ctx, { - iteration: phase8Cursor.iteration + 1, issueIndex: 0, + await savePhase7Cursor(ctx, { + iteration: phase7Cursor.iteration + 1, issueIndex: 0, lastSuccessfulStep: 'iteration-complete', }); @@ -113,7 +113,7 @@ export async function runIdiomaticReviewIteration( } /** - * `until` predicate for the Phase 8 loop — returns true when no + * `until` predicate for the Phase 7 loop — returns true when no * idiomatic issues were found in the last iteration. */ export function noIdiomaticIssues( diff --git a/src/flow/steps/kb-construction.ts b/src/flow/steps/kb-construction.ts index 7a66b3a..5c1c5cf 100644 --- a/src/flow/steps/kb-construction.ts +++ b/src/flow/steps/kb-construction.ts @@ -1,5 +1,5 @@ /** - * Phase 3 — Knowledge Base Construction (agentic, single agent) + * Phase 2 — Knowledge Base Construction (agentic, single agent) */ import type { FlowExecutionContext } from '@cadre-dev/framework/flow'; @@ -15,24 +15,24 @@ export async function launchKnowledgeBuilder( const outputPath = ctx.paths.knowledgeBaseDir; const checkpointState = ctx.checkpoint.getState(); - if (checkpointState.completedPhases.includes(3)) { - ctx.logger.info('Phase 3 skipped on resume — knowledge base already built'); - return { phase: 3, name: 'Knowledge Base Construction', success: true, outputPath, duration: Date.now() - start }; + if (checkpointState.completedPhases.includes(2)) { + ctx.logger.info('Phase 2 skipped on resume — knowledge base already built'); + return { phase: 2, name: 
'Knowledge Base Construction', success: true, outputPath, duration: Date.now() - start }; } - const kbContext = await ctx.contextBuilder.buildContext('knowledge-builder', 3); - const kbInv = buildInvocation(ctx, 'knowledge-builder', kbContext, 3); + const kbContext = await ctx.contextBuilder.buildContext('knowledge-builder', 2); + const kbInv = buildInvocation(ctx, 'knowledge-builder', kbContext, 2); const kbResult = await launchAgentWithEvents(ctx, kbInv); - recordTokens(ctx, kbResult, 3); + recordTokens(ctx, kbResult, 2); if (!kbResult.success) { const failResult: PhaseResult = { - phase: 3, name: 'Knowledge Base Construction', success: false, + phase: 2, name: 'Knowledge Base Construction', success: false, duration: Date.now() - start, error: kbResult.error, exitCode: kbResult.exitCode ?? undefined, stderr: kbResult.stderr, }; assertPhaseSuccess(failResult); } - return { phase: 3, name: 'Knowledge Base Construction', success: true, outputPath, duration: Date.now() - start }; + return { phase: 2, name: 'Knowledge Base Construction', success: true, outputPath, duration: Date.now() - start }; } diff --git a/src/flow/steps/migration.ts b/src/flow/steps/migration.ts index 4f33908..6fb84bb 100644 --- a/src/flow/steps/migration.ts +++ b/src/flow/steps/migration.ts @@ -1,5 +1,5 @@ /** - * Phase 5 — Iterative Migration + * Phase 4 — Iterative Migration * * The core migration loop, supporting both per-task and wave-barrier * execution modes. Extracted from orchestrator.ts (previously ~2500 lines). 
@@ -20,7 +20,7 @@ import { ParallelExecutor } from '../../execution/parallel-executor.js'; import { TaskQueue } from '../../execution/task-queue.js'; import { RetryExecutor } from '../../execution/retry.js'; import { CostEstimator } from '../../budget/cost-estimator.js'; -import { Phase5CheckpointAdapter } from '../checkpoint-adapter.js'; +import { Phase4CheckpointAdapter } from '../checkpoint-adapter.js'; import { fileExists, readJson, countFileLines } from '../../util/fs.js'; import { @@ -41,7 +41,7 @@ import { // ─── Substep Functions ─────────────────────────────────────────────── // Each function executes one logical substep within a per-task migration. -// The framework's checkpoint skip replaces the manual hasPhase5Substep guards. +// The framework's checkpoint skip replaces the manual hasPhase4Substep guards. async function runMigrateSubstep( ctx: MigrationFlowContext, task: MigrationTask, retryExec: RetryExecutor, @@ -423,7 +423,7 @@ function buildPerTaskFlow( })); } - return defineFlow('phase-5-per-task', nodes); + return defineFlow('phase-4-per-task', nodes); } /** @@ -449,7 +449,7 @@ function buildWaveBarrierFlow( id: `wave-${w}-start`, dependsOn: prevDep, run: async (c) => { - if (c.context.phase5Snapshot) c.context.phase5Snapshot.waveCount++; + if (c.context.phase4Snapshot) c.context.phase4Snapshot.waveCount++; c.context.logger.info(`Wave ${w}: migrating ${waveTasksCopy.length} task(s)`); c.context.logger.event({ type: 'wave-started', wave: w, taskIds: waveTaskIds }); await c.context.progress.appendWaveLifecycle({ wave: w, milestone: 'started' }); @@ -538,7 +538,7 @@ function buildWaveBarrierFlow( })); } - return defineFlow('phase-5-wave-barrier', nodes); + return defineFlow('phase-4-wave-barrier', nodes); } /** @@ -603,7 +603,7 @@ export async function executeIterativeMigration( tasks = await readJson(mergedPlanPath); } else { const failResultNoPlan: PhaseResult = { - phase: 5, name: 'Iterative Migration', success: false, duration: Date.now() - 
start, + phase: 4, name: 'Iterative Migration', success: false, duration: Date.now() - start, error: 'migration-plan.md and tasks-merged.json not found — Phase 1 may not have completed', }; assertPhaseSuccess(failResultNoPlan); @@ -616,7 +616,7 @@ export async function executeIterativeMigration( } if (tasks.length === 0) { ctx.logger.warn('No tasks found in migration plan'); - return { phase: 5, name: 'Iterative Migration', success: true, outputPath: ctx.config.target.outputPath, duration: Date.now() - start }; + return { phase: 4, name: 'Iterative Migration', success: true, outputPath: ctx.config.target.outputPath, duration: Date.now() - start }; } // 1b. Validate maxLinesPerTask @@ -641,15 +641,15 @@ export async function executeIterativeMigration( const model = getConfiguredRuntimeModel(ctx); const projected = ctx.costEstimator.estimateFromTotal(model, estimatedTotalTokens); ctx.logger.info( - `Phase 5: ${taskCount} tasks, estimated ~${estimatedTotalTokens.toLocaleString()} tokens, ` + + `Phase 4: ${taskCount} tasks, estimated ~${estimatedTotalTokens.toLocaleString()} tokens, ` + `projected cost: ${CostEstimator.formatCost(projected.total)} (${model})`, ); - await ctx.progress.appendEvent(`Phase 5 projection: ${taskCount} tasks, ~${CostEstimator.formatCost(projected.total)} estimated`); + await ctx.progress.appendEvent(`Phase 4 projection: ${taskCount} tasks, ~${CostEstimator.formatCost(projected.total)} estimated`); if (ctx.config.options.tokenBudget) { const currentUsage = ctx.tokenTracker.getTotal(); if (currentUsage + estimatedTotalTokens > ctx.config.options.tokenBudget) { ctx.logger.warn( - `Projected Phase 5 usage (${estimatedTotalTokens.toLocaleString()}) plus current (${currentUsage.toLocaleString()}) exceeds budget`, + `Projected Phase 4 usage (${estimatedTotalTokens.toLocaleString()}) plus current (${currentUsage.toLocaleString()}) exceeds budget`, ); } } @@ -686,16 +686,16 @@ export async function executeIterativeMigration( sortedTasks = 
TaskQueue.topologicalSort(tasks); } - // 3. Build and execute nested Phase 5 flow + // 3. Build and execute nested Phase 4 flow const retryExec = new RetryExecutor( (inv) => launchAgentWithEvents(ctx, inv), ctx.logger, ); const executionMode = ctx.config.options.executionMode ?? 'per-task'; - const phase5Concurrency = + const phase4Concurrency = isGitAutomationEnabled(ctx) && executionMode !== 'wave-barrier' ? 1 : ctx.config.options.maxParallelAgents; - ctx.phase5Snapshot = { + ctx.phase4Snapshot = { executionMode, phase4DurationMs: 0, completedTaskCount: 0, waveCount: 0, waveValidationRuns: 0, waveConvergenceIterations: 0, waveConvergenceFailures: 0, waveConvergenceLimitHits: 0, @@ -709,20 +709,20 @@ export async function executeIterativeMigration( ctx.deferGitCommits = true; } - const phase5Flow = executionMode === 'wave-barrier' + const phase4Flow = executionMode === 'wave-barrier' ? buildWaveBarrierFlow(ctx, sortedTasks, retryExec) : buildPerTaskFlow(ctx, sortedTasks, retryExec); - const phase5Checkpoint = new Phase5CheckpointAdapter(ctx.checkpoint); + const phase4Checkpoint = new Phase4CheckpointAdapter(ctx.checkpoint); const runner = new FlowRunner(); let flowSuccess = false; let flowError: string | undefined; let flowResult: FlowRunResult | undefined; try { - flowResult = await runner.run(phase5Flow, ctx, { - checkpoint: phase5Checkpoint, - concurrency: phase5Concurrency, + flowResult = await runner.run(phase4Flow, ctx, { + checkpoint: phase4Checkpoint, + concurrency: phase4Concurrency, }); flowSuccess = flowResult.status === 'completed'; if (flowResult.error) flowError = flowResult.error.message; @@ -734,7 +734,7 @@ export async function executeIterativeMigration( } else { flowError = error instanceof Error ? 
error.message : String(error); } - ctx.logger.error(`Phase 5 nested flow failed: ${flowError}`); + ctx.logger.error(`Phase 4 nested flow failed: ${flowError}`); flowSuccess = false; } finally { if (executionMode === 'wave-barrier') { @@ -762,29 +762,29 @@ export async function executeIterativeMigration( waveEndGateError = await runWaveEndQualityGates(ctx, sortedTasks); } - if (ctx.phase5Snapshot) { - ctx.phase5Snapshot.phase4DurationMs = Date.now() - start; - ctx.phase5Snapshot.completedTaskCount = completedTaskCount; - ctx.metricsCollector.setPhase4Snapshot(ctx.phase5Snapshot); - ctx.phase5Snapshot = undefined; + if (ctx.phase4Snapshot) { + ctx.phase4Snapshot.phase4DurationMs = Date.now() - start; + ctx.phase4Snapshot.completedTaskCount = completedTaskCount; + ctx.metricsCollector.setPhase4Snapshot(ctx.phase4Snapshot); + ctx.phase4Snapshot = undefined; } - const phase5Result: PhaseResult = { - phase: 5, name: 'Iterative Migration', + const phase4Result: PhaseResult = { + phase: 4, name: 'Iterative Migration', success: flowSuccess && !waveEndGateError, outputPath: ctx.config.target.outputPath, duration: Date.now() - start, error: !flowSuccess - ? flowError ?? 'Phase 5 nested flow did not complete successfully' + ? flowError ?? 'Phase 4 nested flow did not complete successfully' : waveEndGateError ?? 
undefined, }; - assertPhaseSuccess(phase5Result); - return phase5Result; + assertPhaseSuccess(phase4Result); + return phase4Result; } // ─── Helpers ────────────────────────────────────────────────────────── async function runWaveValidation(ctx: MigrationFlowContext, wave: number): Promise { - if (ctx.phase5Snapshot) ctx.phase5Snapshot.waveValidationRuns++; + if (ctx.phase4Snapshot) ctx.phase4Snapshot.waveValidationRuns++; const waveTaskId = `wave-${wave}`; if (ctx.config.target.formatCommand) { const format = await runCommand(ctx, 'format', ctx.config.target.formatCommand, waveTaskId); diff --git a/src/flow/steps/planning.ts b/src/flow/steps/planning.ts index 6c7719a..32751a6 100644 --- a/src/flow/steps/planning.ts +++ b/src/flow/steps/planning.ts @@ -1,5 +1,5 @@ /** - * Phase 4 — Migration Strategy (planning + adjudication + scaffold) + * Phase 3 — Migration Strategy (planning + adjudication + scaffold) */ import { join } from 'node:path'; @@ -26,14 +26,14 @@ export async function launchMigrationPlanner( // Step 4a: migration-planner + optional adjudicator if (!checkpointState.phase3aComplete) { - const planContext = await ctx.contextBuilder.buildContext('migration-planner', 4); - const planInv = buildInvocation(ctx, 'migration-planner', planContext, 4); + const planContext = await ctx.contextBuilder.buildContext('migration-planner', 3); + const planInv = buildInvocation(ctx, 'migration-planner', planContext, 3); const planResult = await launchAgentWithEvents(ctx, planInv); - recordTokens(ctx, planResult, 4); + recordTokens(ctx, planResult, 3); if (!planResult.success) { const failResult: PhaseResult = { - phase: 4, name: 'Migration Strategy', success: false, duration: Date.now() - start, + phase: 3, name: 'Migration Strategy', success: false, duration: Date.now() - start, error: planResult.error, exitCode: planResult.exitCode ?? 
undefined, stderr: planResult.stderr, }; assertPhaseSuccess(failResult); @@ -42,12 +42,12 @@ export async function launchMigrationPlanner( // Adjudicator const adjudicationFile = ctx.paths.competingStrategiesFile; if (await fileExists(adjudicationFile)) { - const adjCtx = await ctx.contextBuilder.buildContext('adjudicator', 4, undefined, { + const adjCtx = await ctx.contextBuilder.buildContext('adjudicator', 3, undefined, { competingStrategiesFile: adjudicationFile, decisionType: 'migration-strategy', }); - const adjInv = buildInvocation(ctx, 'adjudicator', adjCtx, 4); + const adjInv = buildInvocation(ctx, 'adjudicator', adjCtx, 3); const adjResult = await launchAgentWithEvents(ctx, adjInv); - recordTokens(ctx, adjResult, 4); + recordTokens(ctx, adjResult, 3); } else { try { const planningEntries = await readdir(planningDir); @@ -65,9 +65,9 @@ export async function launchMigrationPlanner( } await ctx.checkpoint.completePhase3a(); - ctx.logger.info('Phase 4 step 4a complete: migration-planner wrote strategy'); + ctx.logger.info('Phase 3 step 3a complete: migration-planner wrote strategy'); } else { - ctx.logger.info('Resuming Phase 4 — strategy already complete'); + ctx.logger.info('Resuming Phase 3 — strategy already complete'); } // Step 4b: scaffold @@ -104,11 +104,11 @@ export async function launchMigrationPlanner( } await ctx.checkpoint.completeScaffold(); } else { - ctx.logger.info('Resuming Phase 4 — scaffold already generated'); + ctx.logger.info('Resuming Phase 3 — scaffold already generated'); } return { - phase: 4, name: 'Migration Strategy', success: true, + phase: 3, name: 'Migration Strategy', success: true, outputPath: join(planningDir, 'strategy.md'), duration: Date.now() - start, }; } diff --git a/src/flow/steps/shared.ts b/src/flow/steps/shared.ts index b598e11..81c2419 100644 --- a/src/flow/steps/shared.ts +++ b/src/flow/steps/shared.ts @@ -42,7 +42,7 @@ import { TaskQueue } from '../../execution/task-queue.js'; /** Hardcoded average token 
estimate per migration task for cost projections. */ export const AVG_TOKENS_PER_TASK = 100_000; -/** Hardcoded retry-overhead multiplier for aggregate Phase 5 cost projections. */ +/** Hardcoded retry-overhead multiplier for aggregate Phase 4 cost projections. */ export const RETRY_OVERHEAD_MULTIPLIER = 1.25; // ─── Infrastructure Error Detection ────────────────────────────────────── @@ -81,7 +81,7 @@ export class TerminalExhaustionError extends Error { details.check ? `check=${details.check}` : undefined, ].filter((part): part is string => !!part); const location = locationParts.length > 0 ? ` (${locationParts.join(', ')})` : ''; - super(`Phase 5 terminal exhaustion: ${details.reasonCode}${location} - ${details.summary}`); + super(`Phase 4 terminal exhaustion: ${details.reasonCode}${location} - ${details.summary}`); this.name = 'TerminalExhaustionError'; } } @@ -552,11 +552,11 @@ export async function runCommand( command: string, taskId: string, ): Promise { - if (ctx.phase5Snapshot) { - if (label === 'build') ctx.phase5Snapshot.buildCommandRuns++; - if (label === 'test') ctx.phase5Snapshot.testCommandRuns++; - if (label === 'format') ctx.phase5Snapshot.formatCommandRuns++; - if (label === 'lint') ctx.phase5Snapshot.lintCommandRuns++; + if (ctx.phase4Snapshot) { + if (label === 'build') ctx.phase4Snapshot.buildCommandRuns++; + if (label === 'test') ctx.phase4Snapshot.testCommandRuns++; + if (label === 'format') ctx.phase4Snapshot.formatCommandRuns++; + if (label === 'lint') ctx.phase4Snapshot.lintCommandRuns++; } return ctx.buildLimiter(async () => { const timeout = getRuntimeTimeout(ctx); @@ -631,7 +631,7 @@ export async function runCommandWithRecovery( let infraAttempt = 0; while (cmdResult.infraError && infraAttempt < maxInfraRetries) { infraAttempt++; - if (ctx.phase5Snapshot) ctx.phase5Snapshot.commandInfraRetries++; + if (ctx.phase4Snapshot) ctx.phase4Snapshot.commandInfraRetries++; const backoffMs = Math.min(1000 * Math.pow(2, infraAttempt - 1), 30_000); 
ctx.logger.warn( `${label} failed for ${task.id} with infra error "${cmdResult.infraError}", ` + @@ -640,7 +640,7 @@ export async function runCommandWithRecovery( await new Promise(resolve => setTimeout(resolve, backoffMs)); cmdResult = await runCommand(ctx, label, command, task.id); if (cmdResult.success) { - if (ctx.phase5Snapshot) ctx.phase5Snapshot.recoveryLoopTimeMs += Date.now() - recoveryLoopStartedAt; + if (ctx.phase4Snapshot) ctx.phase4Snapshot.recoveryLoopTimeMs += Date.now() - recoveryLoopStartedAt; ctx.logger.info(`${label} recovered for ${task.id} after infra retry ${infraAttempt}`); return true; } @@ -649,7 +649,7 @@ export async function runCommandWithRecovery( // Code-quality recovery loop for (let attempt = 1; attempt <= maxAttempts; attempt++) { - if (ctx.phase5Snapshot) ctx.phase5Snapshot.commandRecoveryAttempts++; + if (ctx.phase4Snapshot) ctx.phase4Snapshot.commandRecoveryAttempts++; await recordRetryTarget(ctx, { scope: retryScope, attempt, maxAttempts, taskId: task.id, wave: options?.wave, check: label, @@ -703,13 +703,13 @@ export async function runCommandWithRecovery( cmdResult = await runCommand(ctx, label, command, task.id); if (cmdResult.success) { - if (ctx.phase5Snapshot) ctx.phase5Snapshot.recoveryLoopTimeMs += Date.now() - recoveryLoopStartedAt; + if (ctx.phase4Snapshot) ctx.phase4Snapshot.recoveryLoopTimeMs += Date.now() - recoveryLoopStartedAt; ctx.logger.info(`${label} recovered for ${task.id} on attempt ${attempt}`); return true; } } - if (ctx.phase5Snapshot) ctx.phase5Snapshot.recoveryLoopTimeMs += Date.now() - recoveryLoopStartedAt; + if (ctx.phase4Snapshot) ctx.phase4Snapshot.recoveryLoopTimeMs += Date.now() - recoveryLoopStartedAt; if (options?.suppressTerminalOnExhaustion) { ctx.logger.warn( @@ -861,7 +861,7 @@ function rehydrateParityFromLog(ctx: MigrationFlowContext, taskId: string): Pari } } -// ─── Phase 5 Checkpoint Helpers ──────────────────────────────────────── +// ─── Phase 4 Checkpoint Helpers 
────────────────────────────────────────── function getPhaseCursors(ctx: MigrationFlowContext) { const state = ctx.checkpoint.getState(); @@ -869,63 +869,63 @@ function getPhaseCursors(ctx: MigrationFlowContext) { return state.phaseCursors; } -export function getPhase5TaskState(ctx: MigrationFlowContext, taskId: string): { completedSubsteps: string[]; lastSuccessfulStep?: string } { +export function getPhase4TaskState(ctx: MigrationFlowContext, taskId: string): { completedSubsteps: string[]; lastSuccessfulStep?: string } { const phaseCursors = getPhaseCursors(ctx); - phaseCursors['5'] ??= { tasks: {} }; - phaseCursors['5'].tasks ??= {}; - phaseCursors['5'].tasks[taskId] ??= { completedSubsteps: [] }; - return phaseCursors['5'].tasks[taskId]; + phaseCursors['4'] ??= { tasks: {} }; + phaseCursors['4'].tasks ??= {}; + phaseCursors['4'].tasks[taskId] ??= { completedSubsteps: [] }; + return phaseCursors['4'].tasks[taskId]; } -export function hasPhase5Substep(ctx: MigrationFlowContext, taskId: string, substep: string): boolean { - return getPhase5TaskState(ctx, taskId).completedSubsteps.includes(substep); +export function hasPhase4Substep(ctx: MigrationFlowContext, taskId: string, substep: string): boolean { + return getPhase4TaskState(ctx, taskId).completedSubsteps.includes(substep); } -export async function markPhase5Substep(ctx: MigrationFlowContext, taskId: string, substep: string): Promise { - const taskState = getPhase5TaskState(ctx, taskId); +export async function markPhase4Substep(ctx: MigrationFlowContext, taskId: string, substep: string): Promise { + const taskState = getPhase4TaskState(ctx, taskId); if (!taskState.completedSubsteps.includes(substep)) taskState.completedSubsteps.push(substep); taskState.lastSuccessfulStep = substep; await ctx.checkpoint.save(ctx.checkpoint.getState()); } -// Phase 6/7/8 cursors +// Phase 5/6/7 cursors -export function getPhase6Cursor(ctx: MigrationFlowContext): { iteration: number; fixIndex: number; lastSuccessfulStep?: string; 
hadUnresolvedFixes?: boolean } { +export function getPhase5Cursor(ctx: MigrationFlowContext): { iteration: number; fixIndex: number; lastSuccessfulStep?: string; hadUnresolvedFixes?: boolean } { const phaseCursors = getPhaseCursors(ctx); - phaseCursors['6'] ??= { iteration: 0, fixIndex: 0 }; - phaseCursors['6'].iteration ??= 0; - phaseCursors['6'].fixIndex ??= 0; - phaseCursors['6'].hadUnresolvedFixes ??= false; - return phaseCursors['6']; + phaseCursors['5'] ??= { iteration: 0, fixIndex: 0 }; + phaseCursors['5'].iteration ??= 0; + phaseCursors['5'].fixIndex ??= 0; + phaseCursors['5'].hadUnresolvedFixes ??= false; + return phaseCursors['5']; } -export async function savePhase6Cursor(ctx: MigrationFlowContext, cursor: { iteration: number; fixIndex: number; lastSuccessfulStep?: string; hadUnresolvedFixes?: boolean }): Promise { - getPhaseCursors(ctx)['6'] = cursor; +export async function savePhase5Cursor(ctx: MigrationFlowContext, cursor: { iteration: number; fixIndex: number; lastSuccessfulStep?: string; hadUnresolvedFixes?: boolean }): Promise { + getPhaseCursors(ctx)['5'] = cursor; await ctx.checkpoint.save(ctx.checkpoint.getState()); } -export function getPhase7Cursor(ctx: MigrationFlowContext): { completedAgents: string[]; completedSuites: string[]; lastSuccessfulStep?: string } { +export function getPhase6Cursor(ctx: MigrationFlowContext): { completedAgents: string[]; completedSuites: string[]; lastSuccessfulStep?: string } { const phaseCursors = getPhaseCursors(ctx); - phaseCursors['7'] ??= { completedAgents: [] }; - phaseCursors['7'].completedAgents ??= []; - phaseCursors['7'].completedSuites ??= []; - return phaseCursors['7'] as { completedAgents: string[]; completedSuites: string[]; lastSuccessfulStep?: string }; + phaseCursors['6'] ??= { completedAgents: [] }; + phaseCursors['6'].completedAgents ??= []; + phaseCursors['6'].completedSuites ??= []; + return phaseCursors['6'] as { completedAgents: string[]; completedSuites: string[]; lastSuccessfulStep?: 
string }; } -export async function savePhase7Cursor(ctx: MigrationFlowContext, cursor: { completedAgents: string[]; completedSuites?: string[]; lastSuccessfulStep?: string }): Promise { - getPhaseCursors(ctx)['7'] = cursor; +export async function savePhase6Cursor(ctx: MigrationFlowContext, cursor: { completedAgents: string[]; completedSuites?: string[]; lastSuccessfulStep?: string }): Promise { + getPhaseCursors(ctx)['6'] = cursor; await ctx.checkpoint.save(ctx.checkpoint.getState()); } -export function getPhase8Cursor(ctx: MigrationFlowContext): { iteration: number; issueIndex: number; currentFile?: string; lastSuccessfulStep?: string } { +export function getPhase7Cursor(ctx: MigrationFlowContext): { iteration: number; issueIndex: number; currentFile?: string; lastSuccessfulStep?: string } { const phaseCursors = getPhaseCursors(ctx); - phaseCursors['8'] ??= { iteration: 0, issueIndex: 0 }; - phaseCursors['8'].iteration ??= 0; - phaseCursors['8'].issueIndex ??= 0; - return phaseCursors['8']; + phaseCursors['7'] ??= { iteration: 0, issueIndex: 0 }; + phaseCursors['7'].iteration ??= 0; + phaseCursors['7'].issueIndex ??= 0; + return phaseCursors['7']; } -export async function savePhase8Cursor(ctx: MigrationFlowContext, cursor: { iteration: number; issueIndex: number; currentFile?: string; lastSuccessfulStep?: string }): Promise { - getPhaseCursors(ctx)['8'] = cursor; +export async function savePhase7Cursor(ctx: MigrationFlowContext, cursor: { iteration: number; issueIndex: number; currentFile?: string; lastSuccessfulStep?: string }): Promise { + getPhaseCursors(ctx)['7'] = cursor; await ctx.checkpoint.save(ctx.checkpoint.getState()); } diff --git a/src/index.ts b/src/index.ts index a10750c..e9ee06d 100644 --- a/src/index.ts +++ b/src/index.ts @@ -15,7 +15,7 @@ program .requiredOption('-c, --config ', 'Path to migration.config.json') .option('--resume', 'Resume from last checkpoint') .option('--dry-run', 'Validate config and produce plan only') - .option('--phase ', 
'Run up to and including this phase (0-9)', parseInt) + .option('--phase ', 'Run up to and including this phase (0-8)', parseInt) .option('--log-level ', 'Log level (debug|info|warn|error)', 'info') .action(async (opts) => { try { diff --git a/src/observability/metrics-collector.ts b/src/observability/metrics-collector.ts index 92cc36f..5bb3f11 100644 --- a/src/observability/metrics-collector.ts +++ b/src/observability/metrics-collector.ts @@ -54,11 +54,11 @@ export interface MetricsAggregate { totalEscalationCostUsd: number; /** Routed invocations (tier != normal) that succeeded on first attempt. */ retriesAvoidedByRouting: number; - /** Phase 5 execution strategy used for this run. */ + /** Phase 4 execution strategy used for this run. */ phase4ExecutionMode: 'per-task' | 'wave-barrier' | 'unknown'; - /** Total duration of phase 5 in milliseconds. */ + /** Total duration of phase 4 in milliseconds. */ phase4DurationMs: number; - /** Number of phase 5 tasks marked completed. */ + /** Number of phase 4 tasks marked completed. */ completedPhase4Tasks: number; /** Number of migration waves executed in wave-barrier mode. */ waveCount: number; @@ -70,9 +70,9 @@ export interface MetricsAggregate { waveConvergenceFailures: number; /** Number of waves that hit the convergence iteration cap. */ waveConvergenceLimitHits: number; - /** Number of build command invocations in phase 5. */ + /** Number of build command invocations in phase 4. */ buildCommandRuns: number; - /** Number of test command invocations in phase 5. */ + /** Number of test command invocations in phase 4. */ testCommandRuns: number; /** Number of format command invocations. */ formatCommandRuns: number; @@ -84,9 +84,9 @@ export interface MetricsAggregate { commandInfraRetries: number; /** Time spent in build/test recovery loops in milliseconds. */ recoveryLoopTimeMs: number; - /** Build/test command invocations per completed phase 5 task. */ + /** Build/test command invocations per completed phase 4 task. 
*/ buildTestInvocationsPerCompletedTask: number; - /** Retry invocations per completed phase 5 task. */ + /** Retry invocations per completed phase 4 task. */ retryVolumePerCompletedTask: number; } diff --git a/tests/agents/context-builder.test.ts b/tests/agents/context-builder.test.ts index 43d2607..3e76659 100644 --- a/tests/agents/context-builder.test.ts +++ b/tests/agents/context-builder.test.ts @@ -412,7 +412,7 @@ describe('ContextBuilder', () => { expect(context.payload?.testType).toBe('unit'); }); - it('should route Phase 7 per-suite test-writer with e2eSuiteBrief payload', async () => { + it('should route Phase 6 per-suite test-writer with e2eSuiteBrief payload', async () => { const suiteBrief = { id: 'suite-001', name: 'Auth E2E', @@ -461,7 +461,7 @@ describe('ContextBuilder', () => { expect(context.outputPath).toBe('/tmp/target'); }); - it('should not change Phase 5 test-writer context when e2eSuiteBrief is absent', async () => { + it('should not change Phase 4 test-writer context when e2eSuiteBrief is absent', async () => { const { contextPath } = await builder.buildContext('test-writer', 4, 'task-001', { targetFile: 'src/auth.ts', kbEntry: 'kb/auth.md', @@ -493,7 +493,7 @@ describe('ContextBuilder', () => { expect(context.outputPath).toBe('/tmp/target/tests/e2e/bad'); }); - it('should fall through to Phase 5 path when e2eSuiteBrief is not a record', async () => { + it('should fall through to Phase 4 path when e2eSuiteBrief is not a record', async () => { const { contextPath } = await builder.buildContext('test-writer', 6, 'suite-str', { e2eSuiteBrief: 'not-an-object', targetFile: 'src/payments.ts', diff --git a/tests/core/checkpoint.test.ts b/tests/core/checkpoint.test.ts index 0948239..edd665f 100644 --- a/tests/core/checkpoint.test.ts +++ b/tests/core/checkpoint.test.ts @@ -350,14 +350,14 @@ describe('CheckpointManager', () => { const manager3 = new CheckpointManager(tempDir, logger); const loaded = await manager3.load('old-project'); - 
expect(loaded.phaseCursors?.['5']?.tasks).toEqual({}); - expect(loaded.phaseCursors?.['6']?.iteration).toBe(0); - expect(loaded.phaseCursors?.['7']?.completedAgents).toEqual([]); - expect(loaded.phaseCursors?.['7']?.completedSuites).toEqual([]); - expect(loaded.phaseCursors?.['8']?.issueIndex).toBe(0); + expect(loaded.phaseCursors?.['4']?.tasks).toEqual({}); + expect(loaded.phaseCursors?.['5']?.iteration).toBe(0); + expect(loaded.phaseCursors?.['6']?.completedAgents).toEqual([]); + expect(loaded.phaseCursors?.['6']?.completedSuites).toEqual([]); + expect(loaded.phaseCursors?.['7']?.issueIndex).toBe(0); }); - it('should default completedSuites to [] when loading legacy Phase6Cursor without it (backward compat)', async () => { + it('should default completedSuites to [] when loading legacy Phase4Cursor without it (backward compat)', async () => { const { writeJson } = await import('../../src/util/fs.js'); const oldState = { projectName: 'old-project', @@ -377,7 +377,7 @@ describe('CheckpointManager', () => { completedTaskDurationsMs: [], metricsCount: 0, phaseCursors: { - '7': { completedAgents: ['e2e-test-crafter'] }, + '6': { completedAgents: ['e2e-test-crafter'] }, }, }; await ensureDir(join(tempDir, 'state')); @@ -385,8 +385,8 @@ describe('CheckpointManager', () => { const manager3 = new CheckpointManager(tempDir, logger); const loaded = await manager3.load('old-project'); - expect(loaded.phaseCursors?.['7']?.completedAgents).toEqual(['e2e-test-crafter']); - expect(loaded.phaseCursors?.['7']?.completedSuites).toEqual([]); + expect(loaded.phaseCursors?.['6']?.completedAgents).toEqual(['e2e-test-crafter']); + expect(loaded.phaseCursors?.['6']?.completedSuites).toEqual([]); }); it('should ignore existing checkpoint state on fresh load', async () => { diff --git a/tests/core/progress.test.ts b/tests/core/progress.test.ts index 36bcb60..721b656 100644 --- a/tests/core/progress.test.ts +++ b/tests/core/progress.test.ts @@ -43,7 +43,7 @@ describe('ProgressWriter', () => 
{ it('should update phase status', async () => { await writer.initialize(config); - await writer.updatePhase(1, 'completed'); + await writer.updatePhase(2, 'completed'); const content = await readFile(progressFile, 'utf-8'); expect(content).toContain('✅'); @@ -51,7 +51,7 @@ describe('ProgressWriter', () => { it('should persist exitCode and stderr in phase record when provided', async () => { await writer.initialize(config); - await writer.updatePhase(1, 'failed', 'agent crashed', 127, 'command not found'); + await writer.updatePhase(2, 'failed', 'agent crashed', 127, 'command not found'); const content = await readFile(progressFile, 'utf-8'); expect(content).toContain('exitCode: 127'); @@ -61,7 +61,7 @@ describe('ProgressWriter', () => { it('should not include exitCode or stderr when not provided', async () => { await writer.initialize(config); - await writer.updatePhase(1, 'failed', 'some error'); + await writer.updatePhase(2, 'failed', 'some error'); const content = await readFile(progressFile, 'utf-8'); expect(content).toContain('some error'); @@ -230,9 +230,9 @@ describe('ProgressWriter', () => { describe('Resume & Edge Cases', () => { it('should rewrite all phases to pending on re-initialization', async () => { await writer.initialize(config); - await writer.updatePhase(1, 'completed'); await writer.updatePhase(2, 'completed'); await writer.updatePhase(3, 'completed'); + await writer.updatePhase(4, 'completed'); // Re-initialize should reset everything await writer.initialize(config); @@ -403,9 +403,9 @@ describe('ProgressWriter', () => { const state = { projectName: 'test-project', version: 1, - currentPhase: 3, + currentPhase: 4, currentTask: null, - completedPhases: [1, 2], + completedPhases: [2, 3], completedTasks: [], failedTasks: [], blockedTasks: [], @@ -435,9 +435,9 @@ describe('ProgressWriter', () => { const state = { projectName: 'test-project', version: 1, - currentPhase: 3, + currentPhase: 4, currentTask: null, - completedPhases: [1, 2], + 
completedPhases: [2, 3], completedTasks: [], failedTasks: [], blockedTasks: [], @@ -475,9 +475,9 @@ describe('ProgressWriter', () => { const state = { projectName: 'test-project', version: 1, - currentPhase: 4, + currentPhase: 5, currentTask: null, - completedPhases: [1, 2, 3], + completedPhases: [2, 3, 4], completedTasks: ['task-001'], failedTasks: [], blockedTasks: [], diff --git a/tests/core/runtime.test.ts b/tests/core/runtime.test.ts index 46da132..ff38148 100644 --- a/tests/core/runtime.test.ts +++ b/tests/core/runtime.test.ts @@ -392,9 +392,9 @@ describe('MigrationRuntime', () => { it('handles MigrationError from flow runner and records failed phase', async () => { const { FlowRunner } = await import('@cadre-dev/framework/flow'); const { MigrationError } = await import('../../src/flow/steps/shared.js'); - const failedPhaseResult = { phase: 3, name: 'KB Construction', success: false, duration: 100, error: 'KB build failed' }; + const failedPhaseResult = { phase: 2, name: 'KB Construction', success: false, duration: 100, error: 'KB build failed' }; const flowRunnerRunSpy = vi.spyOn(FlowRunner.prototype, 'run').mockRejectedValue( - new MigrationError(3, 'KB Construction', failedPhaseResult), + new MigrationError(2, 'KB Construction', failedPhaseResult), ); const runtime = new MigrationRuntime() as any; @@ -459,11 +459,11 @@ describe('MigrationRuntime', () => { expect(result.success).toBe(false); expect(result.phases.length).toBe(1); - expect(result.phases[0]!.phase).toBe(3); + expect(result.phases[0]!.phase).toBe(2); expect(result.phases[0]!.success).toBe(false); // Should have recorded the failed phase event expect(runtime.logger.event).toHaveBeenCalledWith( - expect.objectContaining({ type: 'phase-failed', phase: 3 }), + expect.objectContaining({ type: 'phase-failed', phase: 2 }), ); flowRunnerRunSpy.mockRestore(); @@ -575,13 +575,13 @@ describe('MigrationRuntime', () => { const runtime = new MigrationRuntime() as any; const state = { projectName: 'demo', - 
currentPhase: 5, + currentPhase: 4, currentTask: 'task-x', - completedPhases: [1, 2, 3, 4], + completedPhases: [1, 2, 3], completedTasks: ['a', 'b'], failedTasks: ['f1'], blockedTasks: ['b1'], - phaseOutputs: { 1: {}, 2: {}, 3: {}, 4: {}, 5: {}, 6: {}, 7: {} } as Record, + phaseOutputs: { 0: {}, 1: {}, 2: {}, 3: {}, 4: {}, 5: {}, 6: {} } as Record, tokenUsage: { total: 100, byPhase: {}, byAgent: {} }, }; runtime.config = { projectName: 'demo' }; @@ -591,15 +591,15 @@ describe('MigrationRuntime', () => { }; runtime.logger = { info: vi.fn() }; - await runtime.reset(4); + await runtime.reset(3); - expect(state.completedPhases).toEqual([1, 2, 3]); - expect(state.currentPhase).toBe(4); + expect(state.completedPhases).toEqual([1, 2]); + expect(state.currentPhase).toBe(3); expect(state.currentTask).toBeNull(); - expect(state.phaseOutputs[1]).toBeDefined(); - expect(state.phaseOutputs[3]).toBeDefined(); - expect(state.phaseOutputs[4]).toBeUndefined(); - expect(state.phaseOutputs[7]).toBeUndefined(); + expect(state.phaseOutputs[0]).toBeDefined(); + expect(state.phaseOutputs[2]).toBeDefined(); + expect(state.phaseOutputs[3]).toBeUndefined(); + expect(state.phaseOutputs[6]).toBeUndefined(); expect(runtime.checkpoint.save).toHaveBeenCalledWith(state); }); diff --git a/tests/core/task-graph-builder.test.ts b/tests/core/task-graph-builder.test.ts index 2737190..874f4ee 100644 --- a/tests/core/task-graph-builder.test.ts +++ b/tests/core/task-graph-builder.test.ts @@ -60,6 +60,15 @@ function createTestDb(dbPath: string): Database.Database { ref_line INTEGER NOT NULL, ref_character INTEGER, resolved_type_signature TEXT, definition_uri TEXT, definition_path TEXT ); + CREATE TABLE IF NOT EXISTS symbol_metrics ( + symbol_id INTEGER PRIMARY KEY REFERENCES symbols(id) ON DELETE CASCADE, + line_count INTEGER NOT NULL, + param_count INTEGER NOT NULL, + cyclomatic INTEGER NOT NULL, + max_nesting INTEGER NOT NULL, + layer TEXT NOT NULL DEFAULT 'baseline', + generation INTEGER NOT NULL 
DEFAULT 0 + ); `); return db; } diff --git a/tests/e2e/e2e-full-migration.test.ts b/tests/e2e/e2e-full-migration.test.ts index 343d356..dddd2e4 100644 --- a/tests/e2e/e2e-full-migration.test.ts +++ b/tests/e2e/e2e-full-migration.test.ts @@ -118,8 +118,8 @@ describe.skipIf(!runE2E)('E2E Full Migration', () => { expect(phase!.name).toBe('E2E Testing & Documentation'); }); - it('Phase 7 (Completion) should succeed', () => { - const phase = result.phases.find(p => p.phase === 7); + it('Phase 8 (Completion) should succeed', () => { + const phase = result.phases.find(p => p.phase === 8); expect(phase).toBeDefined(); expect(phase!.success).toBe(true); expect(phase!.name).toBe('Completion'); diff --git a/tests/e2e/e2e-jq-csharp.test.ts b/tests/e2e/e2e-jq-csharp.test.ts index 09d628c..8565e65 100644 --- a/tests/e2e/e2e-jq-csharp.test.ts +++ b/tests/e2e/e2e-jq-csharp.test.ts @@ -269,8 +269,8 @@ describe.skipIf(!runE2E)('E2E jq C → C# (.NET 9) Migration', () => { expect(phase!.name).toBe('E2E Testing & Documentation'); }); - it('Phase 7 (Completion) should succeed', () => { - const phase = result.phases.find(p => p.phase === 7); + it('Phase 8 (Completion) should succeed', () => { + const phase = result.phases.find(p => p.phase === 8); expect(phase).toBeDefined(); expect(phase!.success).toBe(true); expect(phase!.name).toBe('Completion'); diff --git a/tests/e2e/e2e-lz4-rust.test.ts b/tests/e2e/e2e-lz4-rust.test.ts index 53c4f34..4d675b0 100644 --- a/tests/e2e/e2e-lz4-rust.test.ts +++ b/tests/e2e/e2e-lz4-rust.test.ts @@ -270,8 +270,8 @@ describe.skipIf(!runE2E)('E2E lz4 C → Rust Migration', () => { expect(phase!.name).toBe('E2E Testing & Documentation'); }); - it('Phase 7 (Completion) should succeed', () => { - const phase = result.phases.find(p => p.phase === 7); + it('Phase 8 (Completion) should succeed', () => { + const phase = result.phases.find(p => p.phase === 8); expect(phase).toBeDefined(); expect(phase!.success).toBe(true); expect(phase!.name).toBe('Completion'); diff 
--git a/tests/e2e/e2e-protobuf-upb-rust.test.ts b/tests/e2e/e2e-protobuf-upb-rust.test.ts index f101a01..5cce15a 100644 --- a/tests/e2e/e2e-protobuf-upb-rust.test.ts +++ b/tests/e2e/e2e-protobuf-upb-rust.test.ts @@ -228,8 +228,8 @@ describe.skipIf(!runE2E)('E2E protobuf upb C -> Rust Migration', () => { expect(phase!.name).toBe('E2E Testing & Documentation'); }); - it('Phase 7 (Completion) should succeed', () => { - const phase = result.phases.find(p => p.phase === 7); + it('Phase 8 (Completion) should succeed', () => { + const phase = result.phases.find(p => p.phase === 8); expect(phase).toBeDefined(); expect(phase!.success).toBe(true); expect(phase!.name).toBe('Completion'); diff --git a/tests/e2e/e2e-smoke.test.ts b/tests/e2e/e2e-smoke.test.ts index c67be7c..c7f602f 100644 --- a/tests/e2e/e2e-smoke.test.ts +++ b/tests/e2e/e2e-smoke.test.ts @@ -53,11 +53,11 @@ describe.skipIf(!runE2E)('E2E Smoke Test', () => { expect(true).toBe(true); }); - it('should run Phase 3 (Knowledge Base Construction) and produce expected artefacts', async () => { + it('should run Phase 2 (Knowledge Base Construction) and produce expected artefacts', async () => { const runtime = new MigrationRuntime(); await runtime.initialize({ configPath, - phase: 3, + phase: 2, logLevel: 'info', }); diff --git a/tests/e2e/e2e-sqlite-csharp.test.ts b/tests/e2e/e2e-sqlite-csharp.test.ts index 0eb1021..7f7afec 100644 --- a/tests/e2e/e2e-sqlite-csharp.test.ts +++ b/tests/e2e/e2e-sqlite-csharp.test.ts @@ -248,8 +248,8 @@ describe.skipIf(!runE2E)('E2E SQLite C → C# (.NET 9) Migration', () => { expect(phase!.name).toBe('E2E Testing & Documentation'); }); - it('Phase 7 (Completion) should succeed', () => { - const phase = result.phases.find(p => p.phase === 7); + it('Phase 8 (Completion) should succeed', () => { + const phase = result.phases.find(p => p.phase === 8); expect(phase).toBeDefined(); expect(phase!.success).toBe(true); expect(phase!.name).toBe('Completion'); diff --git 
a/tests/e2e/e2e-zstd-rust.test.ts b/tests/e2e/e2e-zstd-rust.test.ts index ccb29f4..9dc401d 100644 --- a/tests/e2e/e2e-zstd-rust.test.ts +++ b/tests/e2e/e2e-zstd-rust.test.ts @@ -306,8 +306,8 @@ describe.skipIf(!runE2E)('E2E zstd C → Rust Migration', () => { expect(phase!.name).toBe('E2E Testing & Documentation'); }); - it('Phase 7 (Completion) should succeed', () => { - const phase = result.phases.find(p => p.phase === 7); + it('Phase 8 (Completion) should succeed', () => { + const phase = result.phases.find(p => p.phase === 8); expect(phase).toBeDefined(); expect(phase!.success).toBe(true); expect(phase!.name).toBe('Completion'); diff --git a/tests/execution/retry-executor.test.ts b/tests/execution/retry-executor.test.ts index 0a2a8fd..ae03911 100644 --- a/tests/execution/retry-executor.test.ts +++ b/tests/execution/retry-executor.test.ts @@ -26,7 +26,7 @@ describe('RetryExecutor', () => { agent: 'code-migrator', contextPath: '/tmp/context.json', outputPath: '', - phase: 5, + phase: 4, workItemId: 'task-001', ...overrides, }; @@ -228,7 +228,7 @@ describe('RetryExecutor', () => { agent: 'parity-failure-resolver', contextPath: '/tmp/recovery-ctx.json', outputPath: '', - phase: 5, + phase: 4, workItemId: 'task-001', }; @@ -284,7 +284,7 @@ describe('RetryExecutor', () => { agent: 'parity-failure-resolver', contextPath: '/tmp/recovery-ctx.json', outputPath: '', - phase: 5, + phase: 4, workItemId: 'task-001', }; @@ -316,7 +316,7 @@ describe('RetryExecutor', () => { agent: 'parity-failure-resolver', contextPath: '/tmp/recovery.json', outputPath: '', - phase: 5, + phase: 4, workItemId: 'task-001', }; @@ -468,7 +468,7 @@ describe('RetryExecutor', () => { agent: 'parity-failure-resolver', contextPath: '/tmp/recovery.json', outputPath: '', - phase: 5, + phase: 4, workItemId: 'task-001', }; diff --git a/tests/flow/checkpoint-adapter.test.ts b/tests/flow/checkpoint-adapter.test.ts index f15fe89..1f5b860 100644 --- a/tests/flow/checkpoint-adapter.test.ts +++ 
b/tests/flow/checkpoint-adapter.test.ts @@ -65,7 +65,7 @@ describe('AamfFlowCheckpointAdapter', () => { describe('flow/index re-exports', () => { it('should export MigrationError', () => { expect(MigrationError).toBeDefined(); - const err = new MigrationError(3, 'KB', { phase: 3, name: 'KB', success: false, duration: 0 }); + const err = new MigrationError(2, 'KB', { phase: 2, name: 'KB', success: false, duration: 0 }); expect(err).toBeInstanceOf(Error); }); diff --git a/tests/flow/migration-flow.test.ts b/tests/flow/migration-flow.test.ts index adbada6..d45c6f4 100644 --- a/tests/flow/migration-flow.test.ts +++ b/tests/flow/migration-flow.test.ts @@ -31,14 +31,13 @@ describe('migrationFlow', () => { it('should not contain the removed impact-assessment node', () => { const ids = migrationFlow.nodes.map((n) => n.id); expect(ids).not.toContain('impact-assessment'); - expect(ids).not.toContain('budget-check-2'); }); it('should have budget gates after phases 3-5', () => { const ids = migrationFlow.nodes.map((n) => n.id); + expect(ids).toContain('budget-check-2'); expect(ids).toContain('budget-check-3'); expect(ids).toContain('budget-check-4'); - expect(ids).toContain('budget-check-5'); }); it('should enforce declaration order (kb-index before task-graph before kb-construction)', () => { @@ -60,9 +59,9 @@ describe('migrationFlow', () => { it('should have budget gates depend on their corresponding step', () => { const nodeMap = new Map(migrationFlow.nodes.map(n => [n.id, n])); - expect(nodeMap.get('budget-check-3')!.dependsOn).toContain('kb-construction'); - expect(nodeMap.get('budget-check-4')!.dependsOn).toContain('migration-planning'); - expect(nodeMap.get('budget-check-5')!.dependsOn).toContain('iterative-migration'); + expect(nodeMap.get('budget-check-2')!.dependsOn).toContain('kb-construction'); + expect(nodeMap.get('budget-check-3')!.dependsOn).toContain('migration-planning'); + expect(nodeMap.get('budget-check-4')!.dependsOn).toContain('iterative-migration'); }); 
}); @@ -77,41 +76,41 @@ describe('nodeIdToPhase', () => { expect(nodeIdToPhase('task-graph-construction')).toBe(1); }); - it('should return 3 for kb-construction and its budget-check', () => { - expect(nodeIdToPhase('kb-construction')).toBe(3); - expect(nodeIdToPhase('budget-check-3')).toBe(3); + it('should return 2 for kb-construction and its budget-check', () => { + expect(nodeIdToPhase('kb-construction')).toBe(2); + expect(nodeIdToPhase('budget-check-2')).toBe(2); }); - it('should return 4 for migration-planning and its budget-check', () => { - expect(nodeIdToPhase('migration-planning')).toBe(4); - expect(nodeIdToPhase('budget-check-4')).toBe(4); + it('should return 3 for migration-planning and its budget-check', () => { + expect(nodeIdToPhase('migration-planning')).toBe(3); + expect(nodeIdToPhase('budget-check-3')).toBe(3); }); - it('should return 5 for iterative-migration and its budget-check', () => { - expect(nodeIdToPhase('iterative-migration')).toBe(5); - expect(nodeIdToPhase('budget-check-5')).toBe(5); + it('should return 4 for iterative-migration and its budget-check', () => { + expect(nodeIdToPhase('iterative-migration')).toBe(4); + expect(nodeIdToPhase('budget-check-4')).toBe(4); }); - it('should return 6 for final-parity-loop and final-parity-iteration', () => { - expect(nodeIdToPhase('final-parity-loop')).toBe(6); - expect(nodeIdToPhase('final-parity-iteration')).toBe(6); + it('should return 5 for final-parity-loop and final-parity-iteration', () => { + expect(nodeIdToPhase('final-parity-loop')).toBe(5); + expect(nodeIdToPhase('final-parity-iteration')).toBe(5); }); - it('should return 7 for all phase-7 nodes', () => { - expect(nodeIdToPhase('e2e-test-plan')).toBe(7); - expect(nodeIdToPhase('finalization')).toBe(7); - expect(nodeIdToPhase('e2e-suite-writers')).toBe(7); - expect(nodeIdToPhase('documentation-writer')).toBe(7); + it('should return 6 for all phase-6 nodes', () => { + expect(nodeIdToPhase('e2e-test-plan')).toBe(6); + 
expect(nodeIdToPhase('finalization')).toBe(6); + expect(nodeIdToPhase('e2e-suite-writers')).toBe(6); + expect(nodeIdToPhase('documentation-writer')).toBe(6); }); - it('should return 8 for idiomatic-refactor nodes', () => { - expect(nodeIdToPhase('idiomatic-refactor-gate')).toBe(8); - expect(nodeIdToPhase('idiomatic-loop')).toBe(8); - expect(nodeIdToPhase('idiomatic-iteration')).toBe(8); + it('should return 7 for idiomatic-refactor nodes', () => { + expect(nodeIdToPhase('idiomatic-refactor-gate')).toBe(7); + expect(nodeIdToPhase('idiomatic-loop')).toBe(7); + expect(nodeIdToPhase('idiomatic-iteration')).toBe(7); }); - it('should return 9 for completion', () => { - expect(nodeIdToPhase('completion')).toBe(9); + it('should return 8 for completion', () => { + expect(nodeIdToPhase('completion')).toBe(8); }); it('should return -1 for unknown node IDs', () => { @@ -148,11 +147,14 @@ describe('buildFlowUpToPhase', () => { expect(ids).not.toContain('kb-construction'); }); - it('should handle phase 2 (removed phase) gracefully', () => { - // Phase 2 was removed — buildFlowUpToPhase should return a valid flow + it('should truncate to phase 2 (up through budget-check-2)', () => { const flow = buildFlowUpToPhase(2); expect(flow).toBeDefined(); expect(flow.id).toBe('aamf-migration'); + const ids = flow.nodes.map((n) => n.id); + expect(ids).toContain('kb-construction'); + expect(ids).toContain('budget-check-2'); + expect(ids).not.toContain('migration-planning'); }); it('should truncate to phase 3 (up through budget-check-3)', () => { @@ -161,19 +163,19 @@ describe('buildFlowUpToPhase', () => { expect(ids).toContain('kb-index'); expect(ids).toContain('kb-construction'); expect(ids).toContain('budget-check-3'); - expect(ids).not.toContain('migration-planning'); + expect(ids).not.toContain('iterative-migration'); }); - it('should truncate to phase 5 (up through budget-check-5)', () => { - const flow = buildFlowUpToPhase(5); + it('should truncate to phase 4 (up through 
budget-check-4)', () => { + const flow = buildFlowUpToPhase(4); const ids = flow.nodes.map((n) => n.id); expect(ids).toContain('iterative-migration'); - expect(ids).toContain('budget-check-5'); + expect(ids).toContain('budget-check-4'); expect(ids).not.toContain('final-parity-loop'); }); it('should preserve the flow id in truncated flows', () => { - const flow = buildFlowUpToPhase(4); + const flow = buildFlowUpToPhase(3); expect(flow.id).toBe('aamf-migration'); }); }); @@ -182,27 +184,27 @@ describe('buildFlowUpToPhase', () => { describe('MigrationError', () => { it('should construct with phase, name, and result', () => { - const result = { phase: 3, name: 'KB Construction', success: false, duration: 100, error: 'test failure' }; - const err = new MigrationError(3, 'KB Construction', result); + const result = { phase: 2, name: 'KB Construction', success: false, duration: 100, error: 'test failure' }; + const err = new MigrationError(2, 'KB Construction', result); - expect(err.phaseId).toBe(3); + expect(err.phaseId).toBe(2); expect(err.phaseName).toBe('KB Construction'); expect(err.result).toBe(result); - expect(err.message).toContain('Phase 3'); + expect(err.message).toContain('Phase 2'); expect(err.message).toContain('KB Construction'); expect(err.message).toContain('test failure'); }); it('should have name "MigrationError"', () => { - const result = { phase: 3, name: 'KB', success: false, duration: 0 }; - const err = new MigrationError(3, 'KB', result); + const result = { phase: 2, name: 'KB', success: false, duration: 0 }; + const err = new MigrationError(2, 'KB', result); expect(err.name).toBe('MigrationError'); expect(err).toBeInstanceOf(Error); }); it('should fallback to "unknown error" when result has no error message', () => { - const result = { phase: 5, name: 'Iterative Migration', success: false, duration: 0 }; - const err = new MigrationError(5, 'Iterative Migration', result); + const result = { phase: 4, name: 'Iterative Migration', success: false, 
duration: 0 }; + const err = new MigrationError(4, 'Iterative Migration', result); expect(err.message).toContain('unknown error'); }); }); diff --git a/tests/flow/steps/edge-cases.test.ts b/tests/flow/steps/edge-cases.test.ts index f17c73e..c471153 100644 --- a/tests/flow/steps/edge-cases.test.ts +++ b/tests/flow/steps/edge-cases.test.ts @@ -46,7 +46,7 @@ describe('flow/index.ts barrel exports', () => { }); }); -// ─── Phase 6 failure paths ────────────────────────────────────────────────── +// ─── Phase 5 failure paths ────────────────────────────────────────────────── import { runFinalParityIteration } from '../../../src/flow/steps/final-parity.js'; import { @@ -71,7 +71,7 @@ describe('runFinalParityIteration — failure paths', () => { const launcherFn = createFailingLauncher(['final-parity-checker']); env = await setupFlowTestWithTasks(launcherFn); - await expect(runFinalParityIteration(env.flowCtx)).rejects.toThrow(/Phase 6.*failed/); + await expect(runFinalParityIteration(env.flowCtx)).rejects.toThrow(/Phase 5.*failed/); }); it('should throw MigrationError when structured output has no valid fixes array', async () => { @@ -88,7 +88,7 @@ describe('runFinalParityIteration — failure paths', () => { }); env = await setupFlowTestWithTasks(launcherFn); - await expect(runFinalParityIteration(env.flowCtx)).rejects.toThrow(/Phase 6.*failed/); + await expect(runFinalParityIteration(env.flowCtx)).rejects.toThrow(/Phase 5.*failed/); }); it('should throw MigrationError when outputParsed is false and fixes missing', async () => { @@ -103,11 +103,11 @@ describe('runFinalParityIteration — failure paths', () => { }); env = await setupFlowTestWithTasks(launcherFn); - await expect(runFinalParityIteration(env.flowCtx)).rejects.toThrow(/Phase 6.*failed/); + await expect(runFinalParityIteration(env.flowCtx)).rejects.toThrow(/Phase 5.*failed/); }); }); -// ─── Phase 7 — Finalization: suite retry & budget ──────────────────────────── +// ─── Phase 6 — Finalization: suite retry & 
budget ──────────────────────────── import { launchE2eSuiteWriters, launchE2eTestCrafter, launchDocWriter } from '../../../src/flow/steps/finalization.js'; @@ -145,7 +145,7 @@ describe('launchE2eSuiteWriters — budget and retry', () => { { id: 'suite-001', name: 'Auth E2E' }, ]); - await expect(launchE2eSuiteWriters(env.flowCtx)).rejects.toThrow(/Phase 7.*failed/); + await expect(launchE2eSuiteWriters(env.flowCtx)).rejects.toThrow(/Phase 6.*failed/); }); it('should report suites: 0 when plan contains empty suites', async () => { @@ -178,7 +178,7 @@ describe('launchE2eSuiteWriters — budget and retry', () => { // Mark suite as completed const state = env.checkpoint.getState(); state.phaseCursors ??= {}; - state.phaseCursors['7'] = { + state.phaseCursors['6'] = { completedAgents: [], completedSuites: ['suite-001'], lastSuccessfulStep: 'completed-suite-suite-001', @@ -196,7 +196,7 @@ describe('launchE2eTestCrafter — failure path', () => { const launcherFn = createFailingLauncher(['e2e-test-crafter']); env = await setupFlowTest(launcherFn); - await expect(launchE2eTestCrafter(env.flowCtx)).rejects.toThrow(/Phase 7.*failed/); + await expect(launchE2eTestCrafter(env.flowCtx)).rejects.toThrow(/Phase 6.*failed/); }); }); @@ -205,7 +205,7 @@ describe('launchDocWriter — failure path', () => { const launcherFn = createFailingLauncher(['documentation-writer']); env = await setupFlowTest(launcherFn); - await expect(launchDocWriter(env.flowCtx)).rejects.toThrow(/Phase 7.*failed/); + await expect(launchDocWriter(env.flowCtx)).rejects.toThrow(/Phase 6.*failed/); }); }); @@ -352,7 +352,7 @@ describe('launchMigrationPlanner — extended', () => { }); }); -// ─── Phase 8 — Idiomatic Refactor: failure + format/lint ───────────────────── +// ─── Phase 7 — Idiomatic Refactor: failure + format/lint ───────────────────── import { runIdiomaticReviewIteration, noIdiomaticIssues } from '../../../src/flow/steps/idiomatic-refactor.js'; @@ -361,7 +361,7 @@ describe('runIdiomaticReviewIteration — 
failure paths', () => { const launcherFn = createFailingLauncher(['idiomatic-reviewer']); env = await setupFlowTest(launcherFn); - await expect(runIdiomaticReviewIteration(env.flowCtx)).rejects.toThrow(/Phase 8.*failed/); + await expect(runIdiomaticReviewIteration(env.flowCtx)).rejects.toThrow(/Phase 7.*failed/); }); it('should throw when structured output has no issues array', async () => { @@ -391,7 +391,7 @@ describe('runIdiomaticReviewIteration — failure paths', () => { }); env = await setupFlowTest(launcherFn); - await expect(runIdiomaticReviewIteration(env.flowCtx)).rejects.toThrow(/Phase 8.*failed/); + await expect(runIdiomaticReviewIteration(env.flowCtx)).rejects.toThrow(/Phase 7.*failed/); }); it('should throw when idiomatic-refactorer fails', async () => { @@ -410,7 +410,7 @@ describe('runIdiomaticReviewIteration — failure paths', () => { }); env = await setupFlowTest(launcherFn); - await expect(runIdiomaticReviewIteration(env.flowCtx)).rejects.toThrow(/Phase 8.*failed/); + await expect(runIdiomaticReviewIteration(env.flowCtx)).rejects.toThrow(/Phase 7.*failed/); }); it('should run format and lint commands when configured', async () => { diff --git a/tests/flow/steps/final-parity-and-finalization.test.ts b/tests/flow/steps/final-parity-and-finalization.test.ts index 97aad4e..4d7e247 100644 --- a/tests/flow/steps/final-parity-and-finalization.test.ts +++ b/tests/flow/steps/final-parity-and-finalization.test.ts @@ -1,8 +1,8 @@ /** - * Phase 6 — Final Parity Verification (step-level tests) - * Phase 7 — E2E Testing & Documentation (step-level tests) - * Phase 8 — Idiomatic Refactor (step-level tests) - * Phase 9 — Completion (step-level tests) + * Phase 5 — Final Parity Verification (step-level tests) + * Phase 6 — E2E Testing & Documentation (step-level tests) + * Phase 7 — Idiomatic Refactor (step-level tests) + * Phase 8 — Completion (step-level tests) */ import { describe, it, expect, afterEach, vi } from 'vitest'; import { join } from 'node:path'; @@ 
-28,7 +28,7 @@ afterEach(async () => { if (env) await env.cleanup(); }); -// ─── Phase 6 — Final Parity Verification ──────────────────────────────────── +// ─── Phase 5 — Final Parity Verification ──────────────────────────────────── describe('runFinalParityIteration', () => { it('should return fixes: 0 when parity checker finds no issues', async () => { @@ -67,7 +67,7 @@ describe('runFinalParityIteration', () => { expect(result.fixes).toBe(2); const codeMigratorInPhase6 = env.mockLauncher.invocations.filter( - i => i.agent === 'code-migrator' && i.phase === 6, + i => i.agent === 'code-migrator' && i.phase === 5, ); expect(codeMigratorInPhase6).toHaveLength(2); }); @@ -93,14 +93,14 @@ describe('runFinalParityIteration', () => { // Set cursor to indicate fix 0 was already done const state = env.checkpoint.getState(); state.phaseCursors ??= {}; - state.phaseCursors['6'] = { iteration: 0, fixIndex: 1, lastSuccessfulStep: 'fix-started' }; + state.phaseCursors['5'] = { iteration: 0, fixIndex: 1, lastSuccessfulStep: 'fix-started' }; await env.checkpoint.save(state); await runFinalParityIteration(env.flowCtx); // Should only apply fix at index 1 (index 0 was already done) const fixTaskIds = env.mockLauncher.invocations - .filter(i => i.agent === 'code-migrator' && i.phase === 6) + .filter(i => i.agent === 'code-migrator' && i.phase === 5) .map(i => i.workItemId); expect(fixTaskIds).toContain('fix-0-1'); expect(fixTaskIds).not.toContain('fix-0-0'); @@ -131,7 +131,7 @@ describe('noFixesNeeded', () => { }); }); -// ─── Phase 7 — E2E Testing & Documentation ────────────────────────────────── +// ─── Phase 6 — E2E Testing & Documentation ────────────────────────────────── describe('launchE2eTestCrafter', () => { it('should invoke e2e-test-crafter agent', async () => { @@ -150,7 +150,7 @@ describe('launchE2eTestCrafter', () => { const state = env.checkpoint.getState(); state.phaseCursors ??= {}; - state.phaseCursors['7'] = { completedAgents: ['e2e-test-crafter'], 
lastSuccessfulStep: 'completed-e2e-test-crafter' }; + state.phaseCursors['6'] = { completedAgents: ['e2e-test-crafter'], lastSuccessfulStep: 'completed-e2e-test-crafter' }; await env.checkpoint.save(state); await launchE2eTestCrafter(env.flowCtx); @@ -196,7 +196,7 @@ describe('launchE2eSuiteWriters', () => { const state = env.checkpoint.getState(); state.phaseCursors ??= {}; - state.phaseCursors['7'] = { completedAgents: ['e2e-test-crafter'], completedSuites: ['suite-001'], lastSuccessfulStep: 'completed-suite-suite-001' }; + state.phaseCursors['6'] = { completedAgents: ['e2e-test-crafter'], completedSuites: ['suite-001'], lastSuccessfulStep: 'completed-suite-suite-001' }; await env.checkpoint.save(state); await launchE2eSuiteWriters(env.flowCtx); @@ -237,7 +237,7 @@ describe('launchDocWriter', () => { const state = env.checkpoint.getState(); state.phaseCursors ??= {}; - state.phaseCursors['7'] = { completedAgents: ['documentation-writer'], completedSuites: [], lastSuccessfulStep: 'completed-documentation-writer' }; + state.phaseCursors['6'] = { completedAgents: ['documentation-writer'], completedSuites: [], lastSuccessfulStep: 'completed-documentation-writer' }; await env.checkpoint.save(state); await launchDocWriter(env.flowCtx); @@ -246,7 +246,7 @@ describe('launchDocWriter', () => { }); }); -// ─── Phase 8 — Idiomatic Refactor ─────────────────────────────────────────── +// ─── Phase 7 — Idiomatic Refactor ─────────────────────────────────────────── describe('runIdiomaticReviewIteration', () => { it('should return issues: 0 when no idiomatic issues found', async () => { @@ -307,7 +307,7 @@ describe('runIdiomaticReviewIteration', () => { const state = env.checkpoint.getState(); state.phaseCursors ??= {}; - state.phaseCursors['8'] = { iteration: 0, issueIndex: 1, currentFile: 'src/b.ts', lastSuccessfulStep: 'refactor-started' }; + state.phaseCursors['7'] = { iteration: 0, issueIndex: 1, currentFile: 'src/b.ts', lastSuccessfulStep: 'refactor-started' }; await 
env.checkpoint.save(state); await runIdiomaticReviewIteration(env.flowCtx); @@ -317,16 +317,16 @@ describe('runIdiomaticReviewIteration', () => { }); }); -// ─── Phase 9 — Completion ─────────────────────────────────────────────────── +// ─── Phase 8 — Completion ─────────────────────────────────────────────────── describe('finalizeAndReport', () => { - it('should return success with phase 9', async () => { + it('should return success with phase 8', async () => { const launcherFn = createMockLauncher(); env = await setupFlowTest(launcherFn); const result = await finalizeAndReport(env.flowCtx); - expect(result.phase).toBe(9); + expect(result.phase).toBe(8); expect(result.name).toBe('Completion'); expect(result.success).toBe(true); }); diff --git a/tests/flow/steps/kb-construction-and-planning.test.ts b/tests/flow/steps/kb-construction-and-planning.test.ts index 9658c3e..52eec7b 100644 --- a/tests/flow/steps/kb-construction-and-planning.test.ts +++ b/tests/flow/steps/kb-construction-and-planning.test.ts @@ -1,6 +1,6 @@ /** - * Phase 3 — Knowledge Base Construction (step-level tests) - * Phase 4 — Migration Strategy (step-level tests) + * Phase 2 — Knowledge Base Construction (step-level tests) + * Phase 3 — Migration Strategy (step-level tests) * * Tests the individual step functions that replaced the orchestrator's * executePhase3() and executePhase4() methods. 
@@ -23,7 +23,7 @@ afterEach(async () => { if (env) await env.cleanup(); }); -// ─── Phase 3 — Knowledge Base Construction ────────────────────────────────── +// ─── Phase 2 — Knowledge Base Construction ────────────────────────────────── describe('launchKnowledgeBuilder', () => { it('should return success when knowledge-builder agent succeeds', async () => { @@ -32,7 +32,7 @@ describe('launchKnowledgeBuilder', () => { const result = await launchKnowledgeBuilder(env.flowCtx); - expect(result.phase).toBe(3); + expect(result.phase).toBe(2); expect(result.name).toBe('Knowledge Base Construction'); expect(result.success).toBe(true); }); @@ -41,21 +41,21 @@ describe('launchKnowledgeBuilder', () => { const launcherFn = createFailingLauncher(['knowledge-builder']); env = await setupFlowTest(launcherFn); - await expect(launchKnowledgeBuilder(env.flowCtx)).rejects.toThrow(/Phase 3.*failed/); + await expect(launchKnowledgeBuilder(env.flowCtx)).rejects.toThrow(/Phase 2.*failed/); }); - it('should skip when phase 3 is already completed (checkpoint resume)', async () => { + it('should skip when phase 2 is already completed (checkpoint resume)', async () => { const launcherFn = createMockLauncher(); env = await setupFlowTest(launcherFn); - // Mark phase 3 as complete in checkpoint + // Mark phase 2 as complete in checkpoint const state = env.checkpoint.getState(); - state.completedPhases.push(3); + state.completedPhases.push(2); await env.checkpoint.save(state); const result = await launchKnowledgeBuilder(env.flowCtx); - expect(result.phase).toBe(3); + expect(result.phase).toBe(2); expect(result.success).toBe(true); // Agent should not have been invoked (skipped on resume) expect(env.mockLauncher.invocations).toHaveLength(0); @@ -74,7 +74,7 @@ describe('launchKnowledgeBuilder', () => { }); }); -// ─── Phase 4 — Migration Strategy ─────────────────────────────────────────── +// ─── Phase 3 — Migration Strategy ─────────────────────────────────────────── 
describe('launchMigrationPlanner', () => { it('should return success when migration-planner agent succeeds', async () => { @@ -83,7 +83,7 @@ describe('launchMigrationPlanner', () => { const result = await launchMigrationPlanner(env.flowCtx); - expect(result.phase).toBe(4); + expect(result.phase).toBe(3); expect(result.name).toBe('Migration Strategy'); expect(result.success).toBe(true); }); @@ -92,7 +92,7 @@ describe('launchMigrationPlanner', () => { const launcherFn = createFailingLauncher(['migration-planner']); env = await setupFlowTest(launcherFn); - await expect(launchMigrationPlanner(env.flowCtx)).rejects.toThrow(/Phase 4.*failed/); + await expect(launchMigrationPlanner(env.flowCtx)).rejects.toThrow(/Phase 3.*failed/); }); it('should skip phase 4a when already completed in checkpoint', async () => { diff --git a/tests/flow/steps/migration-waves.test.ts b/tests/flow/steps/migration-waves.test.ts index 2633a26..e7fc9f7 100644 --- a/tests/flow/steps/migration-waves.test.ts +++ b/tests/flow/steps/migration-waves.test.ts @@ -1,5 +1,5 @@ /** - * Phase 5 — Iterative Migration: Wave-Barrier Mode (step-level tests) + * Phase 4 — Iterative Migration: Wave-Barrier Mode (step-level tests) * * Tests the wave-barrier execution path in executeIterativeMigration(), * covering: basic wave execution, convergence retry, terminal exhaustion @@ -48,7 +48,7 @@ describe('executeIterativeMigration — wave-barrier mode', () => { try { const result = await executeIterativeMigration(env.flowCtx); - expect(result.phase).toBe(5); + expect(result.phase).toBe(4); expect(result.success).toBe(true); // All tasks should be processed const codeMigratorInvs = env.mockLauncher.invocations.filter(i => i.agent === 'code-migrator'); @@ -208,16 +208,16 @@ describe('executeIterativeMigration — wave-barrier mode', () => { }, }); - // Pre-populate the Phase 5 nested flow checkpoint with task-001 completed. + // Pre-populate the Phase 4 nested flow checkpoint with task-001 completed. 
// The framework skips nodes whose execution ID is already completed. const state = env.checkpoint.getState(); - state.__phase5FlowCheckpoint = { - flowId: 'phase-5-wave-barrier', + state.__phase4FlowCheckpoint = { + flowId: 'phase-4-wave-barrier', status: 'completed', startedAt: new Date().toISOString(), updatedAt: new Date().toISOString(), completedExecutionIds: [ - 'phase-5-wave-barrier/wave-0-tasks/task-001/task-001/migrate', + 'phase-4-wave-barrier/wave-0-tasks/task-001/task-001/migrate', ], outputs: {}, executionOutputs: {}, diff --git a/tests/flow/steps/migration.test.ts b/tests/flow/steps/migration.test.ts index 97be3f5..af00a38 100644 --- a/tests/flow/steps/migration.test.ts +++ b/tests/flow/steps/migration.test.ts @@ -1,5 +1,5 @@ /** - * Phase 5 — Iterative Migration (step-level tests) + * Phase 4 — Iterative Migration (step-level tests) * * Tests the executeIterativeMigration() step function which is the heart * of the migration pipeline, covering per-task mode, budget projection, @@ -36,7 +36,7 @@ describe('executeIterativeMigration (Phase 5)', () => { const result = await executeIterativeMigration(env.flowCtx); - expect(result.phase).toBe(5); + expect(result.phase).toBe(4); expect(result.success).toBe(true); const codeMigratorInvocations = env.mockLauncher.invocations.filter(i => i.agent === 'code-migrator'); expect(codeMigratorInvocations.length).toBeGreaterThanOrEqual(2); @@ -48,7 +48,7 @@ describe('executeIterativeMigration (Phase 5)', () => { const result = await executeIterativeMigration(env.flowCtx); - expect(result.phase).toBe(5); + expect(result.phase).toBe(4); expect(result.success).toBe(true); }); @@ -60,7 +60,7 @@ describe('executeIterativeMigration (Phase 5)', () => { await executeIterativeMigration(env.flowCtx); const projectionLog = infoSpy.mock.calls.find( - (c) => typeof c[0] === 'string' && c[0].includes('Phase 5:'), + (c) => typeof c[0] === 'string' && c[0].includes('Phase 4:'), ); expect(projectionLog).toBeDefined(); }); @@ -414,15 
+414,15 @@ describe('executeIterativeMigration (Phase 5)', () => { // Pre-populate the Phase 5 nested flow checkpoint with completed execution IDs. // The framework skips nodes whose execution ID is already in completedExecutionIds. const state = env.checkpoint.getState(); - state.__phase5FlowCheckpoint = { - flowId: 'phase-5-per-task', + state.__phase4FlowCheckpoint = { + flowId: 'phase-4-per-task', status: 'completed', startedAt: new Date().toISOString(), updatedAt: new Date().toISOString(), completedExecutionIds: [ - 'phase-5-per-task/task-001/migrate', - 'phase-5-per-task/task-001/commit', - 'phase-5-per-task/task-001/parity', + 'phase-4-per-task/task-001/migrate', + 'phase-4-per-task/task-001/commit', + 'phase-4-per-task/task-001/parity', ], outputs: {}, executionOutputs: {}, diff --git a/tests/flow/steps/shared-extended.test.ts b/tests/flow/steps/shared-extended.test.ts index 8c7c6b6..d37198b 100644 --- a/tests/flow/steps/shared-extended.test.ts +++ b/tests/flow/steps/shared-extended.test.ts @@ -21,15 +21,15 @@ import { commitForTask, commitForWave, ensureGitRepositoryReady, - getPhase5TaskState, - hasPhase5Substep, - markPhase5Substep, + getPhase4TaskState, + hasPhase4Substep, + markPhase4Substep, + getPhase5Cursor, + savePhase5Cursor, getPhase6Cursor, savePhase6Cursor, getPhase7Cursor, savePhase7Cursor, - getPhase8Cursor, - savePhase8Cursor, launchAgentWithEvents, recordTokens, checkBudget, @@ -145,11 +145,11 @@ describe('runCommand', () => { } }); - it('should track command counters in phase5Snapshot', async () => { + it('should track command counters in phase4Snapshot', async () => { const launcherFn = createMockLauncher(); env = await setupFlowTest(launcherFn); - env.ctx.phase5Snapshot = { + env.ctx.phase4Snapshot = { executionMode: 'per-task', phase4DurationMs: 0, completedTaskCount: 0, waveCount: 0, waveValidationRuns: 0, waveConvergenceIterations: 0, @@ -167,16 +167,16 @@ describe('runCommand', () => { try { await runCommand(env.ctx, 'build', 'npm run 
build', 'task-001'); - expect(env.ctx.phase5Snapshot.buildCommandRuns).toBe(1); + expect(env.ctx.phase4Snapshot.buildCommandRuns).toBe(1); await runCommand(env.ctx, 'test', 'npm test', 'task-001'); - expect(env.ctx.phase5Snapshot.testCommandRuns).toBe(1); + expect(env.ctx.phase4Snapshot.testCommandRuns).toBe(1); await runCommand(env.ctx, 'format', 'prettier', 'task-001'); - expect(env.ctx.phase5Snapshot.formatCommandRuns).toBe(1); + expect(env.ctx.phase4Snapshot.formatCommandRuns).toBe(1); await runCommand(env.ctx, 'lint', 'eslint', 'task-001'); - expect(env.ctx.phase5Snapshot.lintCommandRuns).toBe(1); + expect(env.ctx.phase4Snapshot.lintCommandRuns).toBe(1); } finally { spawnSpy.mockRestore(); } @@ -411,7 +411,7 @@ describe('launchAgentWithEvents', () => { agent: 'code-migrator', contextPath: '/tmp/ctx.json', outputPath: '', - phase: 5, + phase: 4, workItemId: 'task-001', timeout: 300_000, }); @@ -434,7 +434,7 @@ describe('launchAgentWithEvents', () => { agent: 'code-migrator', contextPath: '/tmp/ctx.json', outputPath: '', - phase: 5, + phase: 4, workItemId: '', timeout: 300_000, }); @@ -443,14 +443,14 @@ describe('launchAgentWithEvents', () => { }); }); -// ─── Phase 5 Checkpoint Cursors ────────────────────────────────────────────── +// ─── Phase 4 Checkpoint Cursors ────────────────────────────────────────────── -describe('Phase 5 checkpoint cursors', () => { +describe('Phase 4 checkpoint cursors', () => { it('should initialize empty substeps for new task', async () => { const launcherFn = createMockLauncher(); env = await setupFlowTest(launcherFn); - const state = getPhase5TaskState(env.ctx, 'task-001'); + const state = getPhase4TaskState(env.ctx, 'task-001'); expect(state.completedSubsteps).toEqual([]); }); @@ -458,87 +458,87 @@ describe('Phase 5 checkpoint cursors', () => { const launcherFn = createMockLauncher(); env = await setupFlowTest(launcherFn); - expect(hasPhase5Substep(env.ctx, 'task-001', 'migrator')).toBe(false); - await markPhase5Substep(env.ctx, 
'task-001', 'migrator'); - expect(hasPhase5Substep(env.ctx, 'task-001', 'migrator')).toBe(true); + expect(hasPhase4Substep(env.ctx, 'task-001', 'migrator')).toBe(false); + await markPhase4Substep(env.ctx, 'task-001', 'migrator'); + expect(hasPhase4Substep(env.ctx, 'task-001', 'migrator')).toBe(true); }); it('should not duplicate substeps on repeated markings', async () => { const launcherFn = createMockLauncher(); env = await setupFlowTest(launcherFn); - await markPhase5Substep(env.ctx, 'task-001', 'migrator'); - await markPhase5Substep(env.ctx, 'task-001', 'migrator'); - const state = getPhase5TaskState(env.ctx, 'task-001'); + await markPhase4Substep(env.ctx, 'task-001', 'migrator'); + await markPhase4Substep(env.ctx, 'task-001', 'migrator'); + const state = getPhase4TaskState(env.ctx, 'task-001'); expect(state.completedSubsteps.filter(s => s === 'migrator')).toHaveLength(1); }); }); -// ─── Phase 6/7/8 Cursor Helpers ────────────────────────────────────────────── +// ─── Phase 5/6/7 Cursor Helpers ────────────────────────────────────────────── describe('Phase cursor helpers', () => { - it('should initialize Phase 6 cursor with defaults', async () => { + it('should initialize Phase 5 cursor with defaults', async () => { const launcherFn = createMockLauncher(); env = await setupFlowTest(launcherFn); - const cursor = getPhase6Cursor(env.ctx); + const cursor = getPhase5Cursor(env.ctx); expect(cursor.iteration).toBe(0); expect(cursor.fixIndex).toBe(0); }); - it('should save and retrieve Phase 6 cursor', async () => { + it('should save and retrieve Phase 5 cursor', async () => { const launcherFn = createMockLauncher(); env = await setupFlowTest(launcherFn); - await savePhase6Cursor(env.ctx, { iteration: 2, fixIndex: 3, lastSuccessfulStep: 'fix-applied' }); - const cursor = getPhase6Cursor(env.ctx); + await savePhase5Cursor(env.ctx, { iteration: 2, fixIndex: 3, lastSuccessfulStep: 'fix-applied' }); + const cursor = getPhase5Cursor(env.ctx); 
expect(cursor.iteration).toBe(2); expect(cursor.fixIndex).toBe(3); expect(cursor.lastSuccessfulStep).toBe('fix-applied'); }); - it('should initialize Phase 7 cursor with defaults', async () => { + it('should initialize Phase 6 cursor with defaults', async () => { const launcherFn = createMockLauncher(); env = await setupFlowTest(launcherFn); - const cursor = getPhase7Cursor(env.ctx); + const cursor = getPhase6Cursor(env.ctx); expect(cursor.completedAgents).toEqual([]); expect(cursor.completedSuites).toEqual([]); }); - it('should save and retrieve Phase 7 cursor', async () => { + it('should save and retrieve Phase 6 cursor', async () => { const launcherFn = createMockLauncher(); env = await setupFlowTest(launcherFn); - await savePhase7Cursor(env.ctx, { + await savePhase6Cursor(env.ctx, { completedAgents: ['e2e-test-crafter'], completedSuites: ['suite-001'], lastSuccessfulStep: 'completed-suite-suite-001', }); - const cursor = getPhase7Cursor(env.ctx); + const cursor = getPhase6Cursor(env.ctx); expect(cursor.completedAgents).toContain('e2e-test-crafter'); expect(cursor.completedSuites).toContain('suite-001'); }); - it('should initialize Phase 8 cursor with defaults', async () => { + it('should initialize Phase 7 cursor with defaults', async () => { const launcherFn = createMockLauncher(); env = await setupFlowTest(launcherFn); - const cursor = getPhase8Cursor(env.ctx); + const cursor = getPhase7Cursor(env.ctx); expect(cursor.iteration).toBe(0); expect(cursor.issueIndex).toBe(0); }); - it('should save and retrieve Phase 8 cursor', async () => { + it('should save and retrieve Phase 7 cursor', async () => { const launcherFn = createMockLauncher(); env = await setupFlowTest(launcherFn); - await savePhase8Cursor(env.ctx, { + await savePhase7Cursor(env.ctx, { iteration: 1, issueIndex: 2, currentFile: 'src/main.ts', lastSuccessfulStep: 'refactor-started', }); - const cursor = getPhase8Cursor(env.ctx); + const cursor = getPhase7Cursor(env.ctx); 
expect(cursor.iteration).toBe(1); expect(cursor.issueIndex).toBe(2); expect(cursor.currentFile).toBe('src/main.ts'); diff --git a/tests/flow/steps/shared.test.ts b/tests/flow/steps/shared.test.ts index c5b94c6..ef807b4 100644 --- a/tests/flow/steps/shared.test.ts +++ b/tests/flow/steps/shared.test.ts @@ -161,7 +161,7 @@ describe('assertPhaseSuccess', () => { }); it('should throw MigrationError for failed result', () => { - const result = { phase: 3, name: 'KB Construction', success: false, duration: 100, error: 'build failed' }; + const result = { phase: 2, name: 'KB Construction', success: false, duration: 100, error: 'build failed' }; expect(() => assertPhaseSuccess(result)).toThrow(MigrationError); }); }); @@ -170,12 +170,12 @@ describe('assertPhaseSuccess', () => { describe('MigrationError', () => { it('should construct with phase, name, and result', () => { - const result = { phase: 3, name: 'KB Construction', success: false, duration: 100, error: 'something' }; - const err = new MigrationError(3, 'KB Construction', result); - expect(err.phaseId).toBe(3); + const result = { phase: 2, name: 'KB Construction', success: false, duration: 100, error: 'something' }; + const err = new MigrationError(2, 'KB Construction', result); + expect(err.phaseId).toBe(2); expect(err.phaseName).toBe('KB Construction'); expect(err.result).toBe(result); - expect(err.message).toContain('Phase 3'); + expect(err.message).toContain('Phase 2'); expect(err.message).toContain('something'); expect(err.name).toBe('MigrationError'); }); @@ -578,10 +578,10 @@ describe('resolverReducedScope', () => { describe('buildInvocation', () => { it('should construct a basic invocation with agent and phase', () => { const ctx = mockContext(); - const inv = buildInvocation(ctx, 'knowledge-builder', { contextPath: '/tmp/ctx.json', outputPath: '/tmp/out' }, 3); + const inv = buildInvocation(ctx, 'knowledge-builder', { contextPath: '/tmp/ctx.json', outputPath: '/tmp/out' }, 2); 
expect(inv.agent).toBe('knowledge-builder'); expect(inv.contextPath).toBe('/tmp/ctx.json'); - expect(inv.phase).toBe(3); + expect(inv.phase).toBe(2); expect(inv.timeout).toBe(300_000); }); diff --git a/tests/helpers/flow-mocks.ts b/tests/helpers/flow-mocks.ts index 5b5f195..e045818 100644 --- a/tests/helpers/flow-mocks.ts +++ b/tests/helpers/flow-mocks.ts @@ -252,7 +252,7 @@ export async function setupFlowTest( parityResults: new Map(), routedTaskIds: new Set(), escalationCostUsd: 0, - phase5Snapshot: undefined, + phase4Snapshot: undefined, deferGitCommits: false, }; @@ -273,7 +273,7 @@ export async function setupFlowTest( } /** - * Convenience: set up a flow test env with tasks pre-loaded for Phase 5+. + * Convenience: set up a flow test env with tasks pre-loaded for Phase 4+. */ export async function setupFlowTestWithTasks( launcherFn: (inv: AgentInvocation) => Promise, diff --git a/tests/helpers/mocks.test.ts b/tests/helpers/mocks.test.ts index a180ca8..77b59c5 100644 --- a/tests/helpers/mocks.test.ts +++ b/tests/helpers/mocks.test.ts @@ -13,7 +13,7 @@ function makeInvocation(overrides?: Partial): AgentInvocation { agent: 'code-migrator', contextPath: '/tmp/context.json', outputPath: '/tmp/output', - phase: 5, + phase: 4, workItemId: 'task-001', ...overrides, }; diff --git a/tests/observability/metrics-collector.test.ts b/tests/observability/metrics-collector.test.ts index 28076d1..10ffff7 100644 --- a/tests/observability/metrics-collector.test.ts +++ b/tests/observability/metrics-collector.test.ts @@ -229,13 +229,12 @@ describe('MetricsCollector', () => { it('should count retries correctly', () => { collector.record(makeMetric({ wasRetry: false })); collector.record(makeMetric({ wasRetry: true, agentType: 'code-migrator', phase: 4 })); - collector.record(makeMetric({ wasRetry: true, agentType: 'test-writer', phase: 5 })); + collector.record(makeMetric({ wasRetry: true, agentType: 'test-writer', phase: 4 })); const agg = collector.getAggregates(); 
expect(agg.totalRetries).toBe(2); expect(agg.retriesByAgent['code-migrator']).toBe(1); expect(agg.retriesByAgent['test-writer']).toBe(1); - expect(agg.retriesByPhase[4]).toBe(1); - expect(agg.retriesByPhase[5]).toBe(1); + expect(agg.retriesByPhase[4]).toBe(2); }); it('should sum tokens and cost by agent', () => {