From 3d050b706f2e8d75e28e149b1492ad50ebbaad47 Mon Sep 17 00:00:00 2001 From: Shahzaib Date: Mon, 16 Mar 2026 16:32:34 -0700 Subject: [PATCH 1/4] Feature Orchestrator Plugin: Core Structure (1/5) Add the portable agent plugin scaffold: - plugin.json with metadata, skill/agent/command registrations - .mcp.json with ADO MCP server configuration - hooks/state-utils.js for feature state management - hooks/hooks.json (empty, placeholder) - schemas/orchestrator-config.schema.json for config validation - README.md with plugin overview and installation instructions --- .../.github/plugin/plugin.json | 36 +++ feature-orchestrator-plugin/.mcp.json | 27 +++ feature-orchestrator-plugin/README.md | 166 ++++++++++++++ feature-orchestrator-plugin/hooks/hooks.json | 3 + .../hooks/state-utils.js | 210 ++++++++++++++++++ .../schemas/orchestrator-config.schema.json | 69 ++++++ 6 files changed, 511 insertions(+) create mode 100644 feature-orchestrator-plugin/.github/plugin/plugin.json create mode 100644 feature-orchestrator-plugin/.mcp.json create mode 100644 feature-orchestrator-plugin/README.md create mode 100644 feature-orchestrator-plugin/hooks/hooks.json create mode 100644 feature-orchestrator-plugin/hooks/state-utils.js create mode 100644 feature-orchestrator-plugin/schemas/orchestrator-config.schema.json diff --git a/feature-orchestrator-plugin/.github/plugin/plugin.json b/feature-orchestrator-plugin/.github/plugin/plugin.json new file mode 100644 index 00000000..5de3f5e7 --- /dev/null +++ b/feature-orchestrator-plugin/.github/plugin/plugin.json @@ -0,0 +1,36 @@ +{ + "name": "feature-orchestrator", + "description": "AI-driven feature development pipeline: Design → Plan → Backlog → Dispatch → Monitor. 
Orchestrates codebase research, design specs, PBI creation in ADO, dispatch to Copilot coding agent, and PR monitoring.", + "version": "1.0.0", + "author": { + "name": "Android Auth Team" + }, + "license": "MIT", + "keywords": [ + "feature-development", + "ai-orchestration", + "design-spec", + "pbi", + "copilot-agent", + "devops" + ], + "agents": [ + "../../agents" + ], + "commands": [ + "../../commands" + ], + "hooks": "../../hooks/hooks.json", + "mcpServers": "../../.mcp.json", + "skills": [ + "../../skills/codebase-researcher", + "../../skills/design-author", + "../../skills/design-reviewer", + "../../skills/feature-planner", + "../../skills/pbi-creator", + "../../skills/pbi-dispatcher-github", + "../../skills/pbi-dispatcher-ado", + "../../skills/pbi-dispatcher-ado-swe", + "../../skills/pr-validator" + ] +} diff --git a/feature-orchestrator-plugin/.mcp.json b/feature-orchestrator-plugin/.mcp.json new file mode 100644 index 00000000..d83a9308 --- /dev/null +++ b/feature-orchestrator-plugin/.mcp.json @@ -0,0 +1,27 @@ +{ + "inputs": [ + { + "id": "ado_org", + "type": "promptString", + "description": "Azure DevOps organization name ONLY — do NOT enter a URL (e.g., 'IdentityDivision' not 'https://dev.azure.com/IdentityDivision')", + "default": "YOUR_ORG" + } + ], + "servers": { + "ado": { + "type": "stdio", + "command": "npx", + "args": [ + "-y", + "@azure-devops/mcp", + "${input:ado_org}", + "-d", + "core", + "work", + "work-items", + "repositories", + "pipelines" + ] + } + } +} diff --git a/feature-orchestrator-plugin/README.md b/feature-orchestrator-plugin/README.md new file mode 100644 index 00000000..d72dde02 --- /dev/null +++ b/feature-orchestrator-plugin/README.md @@ -0,0 +1,166 @@ +# Feature Orchestrator Plugin + +AI-driven feature development pipeline for GitHub Copilot. Automates the full lifecycle: + +**Design → Plan → Backlog → Dispatch → Monitor** + +1. **Design** — Research the codebase, write a design spec with solution options +2. 
**Plan** — Decompose the approved design into right-sized, repo-targeted work items +3. **Backlog** — Create work items in Azure DevOps with proper dependencies +4. **Dispatch** — Send work items to GitHub Copilot coding agent for implementation +5. **Monitor** — Track agent PRs and iterate on feedback + +## Installation + +### From VS Code + +1. Open the Extensions sidebar (`Ctrl+Shift+X`) +2. Search for `@agentPlugins` and browse available plugins +3. Install **feature-orchestrator** + +### Local Installation (for development) + +```jsonc +// In your VS Code settings.json: +"chat.plugins.paths": { + "/path/to/feature-orchestrator-plugin": true +} +``` + +## Setup + +After installing, configure the plugin for your project: + +1. Open a chat and run: `/feature-orchestrator-plugin:setup` +2. The setup wizard guides you through: + - **Project info** — name and description + - **Repository mapping** — which modules map to which GitHub repos + - **Azure DevOps** — organization, project, work item type + - **Design docs** — where to store design specs + - **Prerequisites** — checks for `gh` CLI, `node`, authentication + +This creates `.github/orchestrator-config.json` in your workspace. Commit it to share with your team. 
+ +## Quick Start + +After setup, describe a feature to start the pipeline: + +``` +/feature-orchestrator-plugin:feature-design I want to add retry logic with exponential backoff to the API client +``` + +Or use the agent directly: +``` +@feature-orchestrator-plugin:feature-orchestrator.agent Add push notification support for auth state changes +``` + +## Commands + +| Command | Description | +|---------|-------------| +| `/setup` | Configure the plugin for this project | +| `/feature-design` | Start a new feature — research + design spec | +| `/feature-plan` | Decompose approved design into work items | +| `/feature-backlog` | Create work items in Azure DevOps | +| `/feature-dispatch` | Send work items to Copilot coding agent | +| `/feature-status` | Check agent PR status | +| `/feature-continue` | Resume a feature from its current step | +| `/feature-pr-iterate` | Review and iterate on agent PRs | + +(All commands are prefixed with `feature-orchestrator-plugin:` in the UI) + +## Skills + +| Skill | Description | +|-------|-------------| +| `codebase-researcher` | Systematic codebase exploration with evidence-based findings | +| `design-author` | Write detailed design specs with solution options and trade-offs | +| `design-reviewer` | Address inline review comments on design specs | +| `feature-planner` | Decompose features into self-contained, right-sized work items | +| `pbi-creator` | Create and link work items in Azure DevOps | +| `pbi-dispatcher` | Dispatch work items to Copilot coding agent | + +## Configuration + +The plugin uses `.github/orchestrator-config.json` for project-specific settings: + +```jsonc +{ + "project": { + "name": "My Project", + "description": "Brief description" + }, + "repositories": { + "core-repo": { + "slug": "my-org/core-repo", + "host": "github", + "baseBranch": "main" + }, + "api-repo": { + "slug": "my-org/api-repo", + "host": "github", + "baseBranch": "dev", + "accountType": "emu" + } + }, + "modules": { + "core": { "repo": 
"core-repo", "path": "core/", "purpose": "Shared utilities and data models" }, + "api": { "repo": "core-repo", "path": "api/", "purpose": "Public API surface" }, + "service": { "repo": "api-repo", "purpose": "Backend service" } + }, + "ado": { + "org": "my-org", + "project": "Engineering", + "workItemType": "Product Backlog Item", + "iterationDepth": 6 + }, + "design": { + "docsPath": "docs/designs/", + "templatePath": null + } +} +``` + +### Per-Developer Config + +GitHub account mappings are stored per-developer (gitignored): + +`.github/developer-local.json`: +```json +{ + "github_accounts": { + "public": "your-github-username", + "emu": "your-emu-username" + } +} +``` + +## Prerequisites + +- **VS Code** 1.109+ with GitHub Copilot +- **GitHub CLI** (`gh`) — for dispatching and PR monitoring +- **Node.js** — for state management (`state-utils.js` installed to `~/.feature-orchestrator/`) +- **Azure DevOps MCP Server** — for work item management (optional but recommended) + +## Architecture + +``` +Plugin +├── agents/ # Orchestrator agent (conductor) +├── commands/ # Slash commands with agent routing +├── skills/ # Specialized skills for each phase +│ ├── codebase-researcher/ +│ ├── design-author/ +│ ├── design-reviewer/ +│ ├── feature-planner/ +│ │ └── references/pbi-template.md +│ ├── pbi-creator/ +│ └── pbi-dispatcher/ +├── hooks/ # State management CLI +├── schemas/ # Config JSON schema +└── .mcp.json # MCP server configuration +``` + +## License + +MIT diff --git a/feature-orchestrator-plugin/hooks/hooks.json b/feature-orchestrator-plugin/hooks/hooks.json new file mode 100644 index 00000000..deffac97 --- /dev/null +++ b/feature-orchestrator-plugin/hooks/hooks.json @@ -0,0 +1,3 @@ +{ + "hooks": {} +} diff --git a/feature-orchestrator-plugin/hooks/state-utils.js b/feature-orchestrator-plugin/hooks/state-utils.js new file mode 100644 index 00000000..007d748c --- /dev/null +++ b/feature-orchestrator-plugin/hooks/state-utils.js @@ -0,0 +1,210 @@ +#!/usr/bin/env 
node +/** + * Feature Orchestrator — State Management CLI + * + * Manages feature pipeline state for the orchestrator dashboard. + * State is stored at ~/.feature-orchestrator/state.json (fixed location). + * This script is installed to ~/.feature-orchestrator/state-utils.js during setup. + * + * Usage: + * node ~/.feature-orchestrator/state-utils.js add-feature '{"name": "...", "step": "designing"}' + * node ~/.feature-orchestrator/state-utils.js set-step "" + * node ~/.feature-orchestrator/state-utils.js set-design "" '{"docPath":"...","status":"approved"}' + * node ~/.feature-orchestrator/state-utils.js add-pbi "" '{"adoId":123,"title":"...","module":"...","status":"Committed"}' + * node ~/.feature-orchestrator/state-utils.js add-agent-pr "" '{"repo":"...","prNumber":1,"prUrl":"...","status":"open"}' + * node ~/.feature-orchestrator/state-utils.js list-features + * node ~/.feature-orchestrator/state-utils.js get-feature "" + * node ~/.feature-orchestrator/state-utils.js get + */ + +const fs = require('fs'); +const path = require('path'); +const os = require('os'); + +// Fixed state directory — always ~/.feature-orchestrator/ +const STATE_DIR = path.join(os.homedir(), '.feature-orchestrator'); +const STATE_FILE = path.join(STATE_DIR, 'state.json'); + +function ensureDir() { + if (!fs.existsSync(STATE_DIR)) fs.mkdirSync(STATE_DIR, { recursive: true }); +} + +function readState() { + if (!fs.existsSync(STATE_FILE)) return { version: 1, features: [], lastUpdated: 0 }; + try { return JSON.parse(fs.readFileSync(STATE_FILE, 'utf-8')); } + catch { return { version: 1, features: [], lastUpdated: 0 }; } +} + +function writeState(state) { + ensureDir(); + state.lastUpdated = Date.now(); + fs.writeFileSync(STATE_FILE, JSON.stringify(state, null, 2), 'utf-8'); +} + +function findFeature(state, identifier) { + if (!identifier) return null; + const byId = state.features.find(f => f.id === identifier); + if (byId) return byId; + const lower = identifier.toLowerCase(); + return 
state.features.find(f => f.name && f.name.toLowerCase() === lower) + || state.features.find(f => f.name && f.name.toLowerCase().includes(lower)) + || null; +} + +function checkAutoCompletion(feature) { + if (!feature.artifacts) return; + const pbis = feature.artifacts.pbis || []; + const prs = feature.artifacts.agentPrs || []; + if (pbis.length === 0) return; + const allPbisResolved = pbis.every(p => + ['Resolved', 'Done', 'Closed', 'Removed'].includes(p.status)); + const allPrsClosed = prs.length > 0 && prs.every(p => + ['merged', 'closed'].includes(p.status)); + if (allPbisResolved && allPrsClosed && feature.step !== 'completed') { + feature.step = 'completed'; + feature.completedAt = Date.now(); + if (!feature.phaseTimestamps) feature.phaseTimestamps = {}; + feature.phaseTimestamps.completed = Date.now(); + } +} + +const [,, command, ...args] = process.argv; + +switch (command) { + case 'get': { + const state = readState(); + console.log(JSON.stringify(state, null, 2)); + break; + } + case 'list-features': { + const state = readState(); + console.log(JSON.stringify(state.features.map(f => ({ + name: f.name, step: f.step, id: f.id, + updatedAt: new Date(f.updatedAt).toISOString() + })), null, 2)); + break; + } + case 'get-feature': { + const state = readState(); + const feature = findFeature(state, args[0]); + console.log(JSON.stringify(feature || null, null, 2)); + break; + } + case 'add-feature': { + const state = readState(); + const feature = JSON.parse(args[0]); + if (!feature.id) { + feature.id = 'feature-' + Date.now() + '-' + Math.random().toString(36).slice(2, 6); + } + const idx = state.features.findIndex(f => + f.name && feature.name && f.name.toLowerCase() === feature.name.toLowerCase()); + if (idx >= 0) { + state.features[idx] = { ...state.features[idx], ...feature, updatedAt: Date.now() }; + } else { + state.features.push({ + ...feature, + startedAt: Date.now(), + updatedAt: Date.now(), + artifacts: { designSpec: null, pbis: [], agentPrs: [] }, + 
phaseTimestamps: { [feature.step || 'designing']: Date.now() } + }); + } + writeState(state); + console.log(JSON.stringify({ ok: true, id: feature.id })); + break; + } + case 'set-step': { + const state = readState(); + const feature = findFeature(state, args[0]); + if (feature) { + feature.step = args[1]; + feature.updatedAt = Date.now(); + if (!feature.phaseTimestamps) feature.phaseTimestamps = {}; + feature.phaseTimestamps[args[1]] = Date.now(); + writeState(state); + console.log(JSON.stringify({ ok: true })); + } else { + console.log(JSON.stringify({ ok: false, error: 'Feature not found: ' + args[0] })); + } + break; + } + case 'set-design': { + const state = readState(); + const feature = findFeature(state, args[0]); + if (feature) { + const design = JSON.parse(args[1]); + if (!feature.artifacts) feature.artifacts = { designSpec: null, pbis: [], agentPrs: [] }; + feature.artifacts.designSpec = { ...feature.artifacts.designSpec, ...design }; + feature.updatedAt = Date.now(); + writeState(state); + console.log(JSON.stringify({ ok: true })); + } else { + console.log(JSON.stringify({ ok: false, error: 'Feature not found' })); + } + break; + } + case 'add-pbi': { + const state = readState(); + const feature = findFeature(state, args[0]); + if (feature) { + const pbi = JSON.parse(args[1]); + if (!feature.artifacts) feature.artifacts = { designSpec: null, pbis: [], agentPrs: [] }; + if (!feature.artifacts.pbis) feature.artifacts.pbis = []; + const existingIdx = feature.artifacts.pbis.findIndex(p => p.adoId === pbi.adoId); + if (existingIdx >= 0) { + feature.artifacts.pbis[existingIdx] = { ...feature.artifacts.pbis[existingIdx], ...pbi }; + } else { + feature.artifacts.pbis.push(pbi); + } + feature.updatedAt = Date.now(); + checkAutoCompletion(feature); + writeState(state); + console.log(JSON.stringify({ ok: true })); + } else { + console.log(JSON.stringify({ ok: false, error: 'Feature not found' })); + } + break; + } + case 'add-agent-pr': { + const state = 
readState(); + const feature = findFeature(state, args[0]); + if (feature) { + const pr = JSON.parse(args[1]); + if (!feature.artifacts) feature.artifacts = { designSpec: null, pbis: [], agentPrs: [] }; + if (!feature.artifacts.agentPrs) feature.artifacts.agentPrs = []; + const existingIdx = feature.artifacts.agentPrs.findIndex(p => + p.prNumber === pr.prNumber && p.repo === pr.repo); + if (existingIdx >= 0) { + feature.artifacts.agentPrs[existingIdx] = { ...feature.artifacts.agentPrs[existingIdx], ...pr }; + } else { + feature.artifacts.agentPrs.push(pr); + } + feature.updatedAt = Date.now(); + checkAutoCompletion(feature); + writeState(state); + console.log(JSON.stringify({ ok: true })); + } else { + console.log(JSON.stringify({ ok: false, error: 'Feature not found' })); + } + break; + } + case 'set-agent-info': { + const state = readState(); + const feature = findFeature(state, args[0]); + if (feature) { + const info = JSON.parse(args[1]); + feature.agentInfo = { ...feature.agentInfo, ...info }; + feature.updatedAt = Date.now(); + writeState(state); + console.log(JSON.stringify({ ok: true })); + } else { + console.log(JSON.stringify({ ok: false, error: 'Feature not found' })); + } + break; + } + default: + console.error('Feature Orchestrator State CLI'); + console.error('Commands: get, list-features, get-feature, add-feature, set-step,'); + console.error(' set-design, add-pbi, add-agent-pr, set-agent-info'); + process.exit(1); +} diff --git a/feature-orchestrator-plugin/schemas/orchestrator-config.schema.json b/feature-orchestrator-plugin/schemas/orchestrator-config.schema.json new file mode 100644 index 00000000..a31255e3 --- /dev/null +++ b/feature-orchestrator-plugin/schemas/orchestrator-config.schema.json @@ -0,0 +1,69 @@ +{ + "$schema": "https://json-schema.org/draft/2020-12/schema", + "description": "Configuration for the Feature Orchestrator plugin. 
Run /feature-orchestrator-plugin:setup to generate this file interactively.", + "type": "object", + "properties": { + "project": { + "type": "object", + "description": "Project metadata", + "properties": { + "name": { "type": "string", "description": "Short project name (e.g., 'Android Auth')" }, + "description": { "type": "string", "description": "One-line project description" } + } + }, + "repositories": { + "type": "object", + "description": "Repository mapping. Keys are short names, values describe the hosting details.", + "additionalProperties": { + "type": "object", + "properties": { + "slug": { "type": "string", "description": "GitHub org/repo or ADO org/project/repo" }, + "host": { "type": "string", "enum": ["github", "ado"], "default": "github", "description": "Where the repo is hosted" }, + "baseBranch": { "type": "string", "default": "main", "description": "Default base branch for PRs" }, + "accountType": { "type": "string", "default": "public", "description": "Account label for gh CLI switching (maps to a username in developer-local.json). Optional if only one GitHub account." } + }, + "required": ["slug"] + } + }, + "modules": { + "type": "object", + "description": "Module-to-repo mapping. Keys are module names referenced in work items. Each module maps to a repository.", + "additionalProperties": { + "type": "object", + "properties": { + "repo": { "type": "string", "description": "Key from the repositories map that this module belongs to" }, + "path": { "type": "string", "description": "Path within the repo (e.g., 'common/' or 'src/api/'). Optional — omit if module is the whole repo." }, + "purpose": { "type": "string", "description": "Brief description of what this module does (used by codebase-researcher)" } + }, + "required": ["repo"] + } + }, + "github": { + "type": "object", + "description": "GitHub configuration. 
Usernames are stored in developer-local.json (gitignored), NOT here.", + "properties": { + "configFile": { "type": "string", "default": ".github/developer-local.json", "description": "Path to per-developer config file (gitignored) that maps repos to GitHub usernames" } + } + }, + "ado": { + "type": "object", + "description": "Azure DevOps configuration", + "properties": { + "org": { "type": "string", "description": "ADO organization URL or name" }, + "project": { "type": "string", "description": "ADO project name (e.g., 'Engineering')" }, + "workItemType": { "type": "string", "default": "Product Backlog Item", "description": "Work item type for PBIs" }, + "iterationDepth": { "type": "integer", "default": 6, "description": "Depth for iteration discovery (6 for monthly sprints)" } + } + }, + "design": { + "type": "object", + "description": "Design spec configuration", + "properties": { + "docsPath": { "type": "string", "description": "Path to design docs folder (e.g., 'design-docs/' or 'docs/designs/')" }, + "templatePath": { "type": "string", "description": "Path to design spec template (optional)" }, + "folderPattern": { "type": "string", "default": "[{platform}] {featureName}", "description": "Folder naming pattern for new designs" }, + "reviewRepo": { "type": "string", "description": "ADO/GitHub repo for design review PRs (optional)" } + } + } + } +} From d366106c090ce8a1d23690c3c0947f3e595d437e Mon Sep 17 00:00:00 2001 From: Shahzaib Date: Mon, 16 Mar 2026 16:33:04 -0700 Subject: [PATCH 2/4] Feature Orchestrator Plugin: Skills (2/5) Add all specialized skills for the orchestration pipeline: - codebase-researcher: Systematic codebase exploration with SharePoint support - design-author: Design spec creation with multi-source (repo + SharePoint) - design-reviewer: Inline design review comment processing - feature-planner: PBI decomposition with mandatory file paths and API signatures - pbi-creator: ADO work item creation with dependency linking - 
pbi-dispatcher-github: Dispatch to Copilot coding agent on GitHub repos - pbi-dispatcher-ado: Dispatch via ADO Agency (future) - pbi-dispatcher-ado-swe: Dispatch via ADO Copilot SWE (tag + assign) - pr-validator: Validate agent PRs against PBI acceptance criteria - references/pbi-template.md: Standard PBI description template --- .../skills/codebase-researcher/SKILL.md | 137 +++++++++ .../skills/design-author/SKILL.md | 181 +++++++++++ .../skills/design-reviewer/SKILL.md | 84 ++++++ .../skills/feature-planner/SKILL.md | 282 ++++++++++++++++++ .../references/pbi-template.md | 71 +++++ .../skills/pbi-creator/SKILL.md | 265 ++++++++++++++++ .../skills/pbi-dispatcher-ado-swe/SKILL.md | 185 ++++++++++++ .../skills/pbi-dispatcher-ado/SKILL.md | 134 +++++++++ .../skills/pbi-dispatcher-github/SKILL.md | 199 ++++++++++++ .../skills/pr-validator/SKILL.md | 151 ++++++++++ 10 files changed, 1689 insertions(+) create mode 100644 feature-orchestrator-plugin/skills/codebase-researcher/SKILL.md create mode 100644 feature-orchestrator-plugin/skills/design-author/SKILL.md create mode 100644 feature-orchestrator-plugin/skills/design-reviewer/SKILL.md create mode 100644 feature-orchestrator-plugin/skills/feature-planner/SKILL.md create mode 100644 feature-orchestrator-plugin/skills/feature-planner/references/pbi-template.md create mode 100644 feature-orchestrator-plugin/skills/pbi-creator/SKILL.md create mode 100644 feature-orchestrator-plugin/skills/pbi-dispatcher-ado-swe/SKILL.md create mode 100644 feature-orchestrator-plugin/skills/pbi-dispatcher-ado/SKILL.md create mode 100644 feature-orchestrator-plugin/skills/pbi-dispatcher-github/SKILL.md create mode 100644 feature-orchestrator-plugin/skills/pr-validator/SKILL.md diff --git a/feature-orchestrator-plugin/skills/codebase-researcher/SKILL.md b/feature-orchestrator-plugin/skills/codebase-researcher/SKILL.md new file mode 100644 index 00000000..d049760b --- /dev/null +++ 
b/feature-orchestrator-plugin/skills/codebase-researcher/SKILL.md @@ -0,0 +1,137 @@ +--- +name: codebase-researcher +description: Systematically explore codebases to find implementations, patterns, and architecture. Use for "where is X implemented", "how does Y work", "trace the flow of", or any request requiring codebase exploration with evidence-based findings. +--- + +# Codebase Researcher + +Explore this codebase systematically with evidence-based findings. + +## Project Knowledge + +Read `.github/copilot-instructions.md` for project-wide conventions and coding standards. + +## Repository Structure + +Discover the repository structure by exploring the workspace — check for modules, +sub-directories with their own build files, and README files. + +| Module | Purpose | Key Paths | +|--------|---------|-----------| +| *Discover by exploring the workspace* | | | + +**⚠️ CRITICAL: Always search across ALL modules/directories.** Code is often shared or duplicated. + +## Core Principles + +1. **Never guess** — Only report what is actually found in the repo +2. **Always cite sources** — Every finding must include file path and line numbers +3. **Acknowledge gaps** — Explicitly state when something cannot be found +4. **Rate confidence** — Assign HIGH/MEDIUM/LOW to each finding +5. **Search all modules** — Check every relevant directory for each query + +## Research Workflow + +### Step 1: Understand the Target + +Clarify what to find: +- Feature/concept name +- Which layer (client, service, shared, etc.) +- Expected patterns (class names, function signatures) + +### Step 2: Search Strategy + +Execute searches in this order, **always searching across all modules**: + +1. **Semantic search** — Start with natural language query +2. **Grep search** — Exact patterns, class names, error codes +3. **File search** — Find by naming convention (e.g., `**/*Operation*.kt`) +4. **Directory exploration** — List relevant directories in each module +5. 
**Read files** — Confirm findings with actual code + +### Step 3: Trace Call Chains + +For the feature area being researched, trace the complete flow: +- Identify the entry point +- Follow across module boundaries +- Note threading model and error handling at each boundary + +### Step 4: Identify Invariants + +Search for constraints that govern the affected code: +- Threading annotations, synchronization +- Serialization contracts, protocol versions +- Lifecycle dependencies, feature flags + +### Step 5: Validate Findings + +For each potential finding: +- Read the actual code (don't rely only on search snippets) +- Identify which module it belongs to +- Note the exact location (file + line range) +- Assess confidence level + +### Step 6: Report Results + +```markdown +## Research: [Topic] + +### Findings + +#### Finding 1: [Brief description] +- **Module**: [which module] +- **File**: [path/to/file.ext](path/to/file.ext#L10-L25) +- **Confidence**: HIGH | MEDIUM | LOW +- **Evidence**: [What makes this the right code] + +[Code snippet if helpful] + +#### Finding 2: ... + +### Unknowns & Risk Areas + +- [Thing searched for but not found] +- Search attempts: [what was tried] +- [Areas that might be affected but couldn't confirm] + +### Suggested Next Steps + +- [Additional areas to explore] +- [Related code that might be relevant] +``` + +## Confidence Levels + +| Level | Criteria | +|-------|----------| +| **HIGH** | Exact match. Code clearly implements the feature. Names match. | +| **MEDIUM** | Likely match. Code appears related but naming differs or implementation is partial. | +| **LOW** | Possible match. Found tangentially related code, or inference required. | + +## Data Flow Investigation + +When asked about **what data is returned**, **how data flows**, or **what happens to data**: + +1. **Find the Data Structure** — Confirm the field exists, check serialization +2. **Find Construction/Population Code** — Search for Builder/factory methods +3. 
**Check Conditional Logic** — Search for `if` statements, feature flag checks, version checks +4. **Trace the Complete Flow** — Follow from entry → processing → response → return + +### Flow Investigation Pitfalls + +❌ Don't stop after finding a field definition — check actual behavior +❌ Don't assume data flows unchanged — check for filtering/transformation +❌ Don't ignore version/flag checks — behavior often changes based on these +✅ Search for Builder usage and construction patterns +✅ Look for Adapter/Converter classes in the flow +✅ Check for conditional logic based on configuration or feature flags + +## Anti-Patterns to Avoid + +| Anti-Pattern | Problem | Correct Approach | +|--------------|---------|------------------| +| Searching only one module | Miss cross-module code | Search ALL modules | +| "This is likely in..." | Speculation without evidence | Search first, report only found | +| Path without line numbers | Imprecise, hard to verify | Always include line numbers | +| Stopping at definition | Misses conditional logic | Trace to construction/adapter | +| Brief summary | Loses detail for next step | Be thorough and comprehensive | diff --git a/feature-orchestrator-plugin/skills/design-author/SKILL.md b/feature-orchestrator-plugin/skills/design-author/SKILL.md new file mode 100644 index 00000000..2ce531b6 --- /dev/null +++ b/feature-orchestrator-plugin/skills/design-author/SKILL.md @@ -0,0 +1,181 @@ +--- +name: design-author +description: Create detailed design specs for features. Use when asked to design a feature, create a design spec, write a design doc, or create an implementation plan. Triggers include "design this feature", "create a design spec", "write a design doc". +--- + +# Design Author + +Create detailed design specs for features, save them locally, and optionally open PRs for review. 
+ +## Configuration + +Read `.github/orchestrator-config.json` for: +- `design.docsPath` — where to save design docs (e.g., `design-docs/` or `docs/designs/`) +- `design.templatePath` — path to design spec template (optional) +- `design.folderPattern` — folder naming pattern (e.g., `[{platform}] {featureName}`) +- `design.reviewRepo` — repo for design review PRs (optional) + +If no config, save to `docs/designs/` and use the built-in template below. + +## Design Spec Template + +Key sections every design spec should include: + +1. **Title** — Feature name +2. **Components** — Which modules/repos affected +3. **Problem description** — User problem, business context, examples +4. **Requirements** — Functional requirements (must-have) +5. **System Qualities** — Performance, telemetry, security, supportability +6. **Solution options** — At least 2 options with pseudo code, pros/cons +7. **Solution Decision** — Recommended option with reasoning +8. **API surface** — Public/internal classes, methods (if applicable) +9. **Data flow** — Request/response flow across components +10. **Feature flag** — Flag name and gating strategy (if applicable) +11. **Telemetry** — Key metrics, span names, success/failure signals +12. **Testing strategy** — Unit tests, integration tests, E2E coverage +13. **Rollout plan** — Staged rollout, feature flag configuration +14. **Cross-repo impact** — Which repos need changes and in what order + +If a template file exists at the configured `design.templatePath`, follow that instead. + +## Workflow + +### Step 1: Understand the Feature + +Gather from the developer: +1. What the feature does and why it's needed +2. Which components/flows it affects +3. Scope boundaries (in/out) +4. 
Any existing designs to reference + +### Step 2: Research the Codebase + +Use the `codebase-researcher` skill to: +- Understand how related functionality currently works +- Identify which repos/files would be affected +- Find existing patterns to follow (feature flags, error handling, telemetry) +- Check for existing design docs on the same topic + +### Step 3: Research Existing Designs + +If `design.docsPath` is configured, search for related designs: +```bash +ls / | grep -i "" +``` +Use existing designs as **style reference and historical context**, not ground truth for behavior. + +### Step 4: Write the Design Spec + +Create the spec at: +``` +//.md +``` + +For the **Solution options** section: +- Always provide at least 2 options +- Include pseudo code / API signatures for each +- List concrete pros/cons +- Clear recommendation in Solution Decision + +### Agent Implementation Notes + +Write the design knowing a coding agent will implement it. Be explicit about: +- Class boundaries and responsibilities +- Threading model +- Error contracts +- Integration points with other modules + +### Step 5: Present Design for Review + +After writing, **STOP and present choices** using `askQuestion`: + +``` +askQuestion({ + question: "Design spec written. What would you like to do?", + options: [ + { label: "📖 Review locally", description: "Open in editor for inline review" }, + { label: "✅ Approve & plan PBIs", description: "Skip PR, move to work item planning" }, + { label: "📋 Open draft PR", description: "Push to review repo as draft PR" }, + { label: "🚀 Open published PR", description: "Push and publish PR for team review" }, + { label: "✏️ Request changes", description: "Tell me what to revise" } + ] +}) +``` + +**MANDATORY**: Wait for the developer's explicit choice. Do NOT auto-select. + +### Step 5a: Local Review (option 1) + +Open the file: `code ""` + +Tell the developer: +> "The spec is open. Here's how to review: +> 1. 
Click the **+ icon** in the gutter to add inline comments +> 2. When done, click the status bar button to submit comments +> 3. I'll address each comment and present choices again" + +### Step 5b: Push and Create PR (options 3 or 4) + +**Branch naming**: Discover alias from `git config user.email` (strip @domain): +```powershell +$alias = (git config user.email) -replace '@.*', '' +git checkout -b "$alias/design-" +``` + +**Git workflow** (from design docs directory): +```powershell +cd / +git add "" +git commit -m "Add design spec: " +git push origin $BRANCH_NAME +``` + +**Create PR**: Use `gh pr create` or ADO MCP tools if available. +- Set `--draft` for option 3, omit for option 4 +- **PR description**: Use actual line breaks or HTML formatting, NOT literal `\n` escape sequences +- Target branch: `main` (or the repo's default branch) + +Present the PR link: +```markdown +### PR Created +**PR**: [link to PR] +**Status**: Draft / Published + +### How to Review +1. Open the PR link above +2. Use inline commenting to leave feedback +3. When done, say: **"address my design review comments"** +4. I'll read the comments and update the spec + +When the team approves, say: **"design approved, plan the PBIs"** +``` + +### Step 6: Address Review Comments + +When asked to address comments (from PR or local review): +1. Read the feedback (from PR comments or `reviews.json`) +2. For each comment: + - Understand the feedback + - Edit the local design spec to address it + - If on a PR branch, reply to the thread confirming the resolution +3. Commit and push the updates to the same branch +4. Report a summary of changes made +5. Return to Step 5 (present choices again) + +### Step 7: Proceed to Implementation + +When the developer confirms the design is approved: +1. The PR can be completed/merged +2. 
Hand off to the `feature-planner` skill for PBI decomposition + +## Important Caveats + +- **Existing designs may be outdated** — last-minute PR discussions often cause code to deviate. + Always verify proposed patterns against the **current codebase**, not just existing designs. +- **Use existing designs as style reference**, not as ground truth for current behavior. +- For paths with brackets `[]` or spaces, use PowerShell with `-LiteralPath` + +### Open Questions + +If there are genuine unknowns during design, use `askQuestion` to resolve them interactively, +or list them in the spec for the team to discuss during review. diff --git a/feature-orchestrator-plugin/skills/design-reviewer/SKILL.md b/feature-orchestrator-plugin/skills/design-reviewer/SKILL.md new file mode 100644 index 00000000..d7f14931 --- /dev/null +++ b/feature-orchestrator-plugin/skills/design-reviewer/SKILL.md @@ -0,0 +1,84 @@ +--- +name: design-reviewer +description: Address review comments on design spec files. Use when a developer submits inline review comments and wants them addressed. Triggers include "address review comments", "handle my review", or review comment submission. +--- + +# Design Reviewer + +Address review comments on design spec files. + +## How Comments Are Stored + +Comments are stored in: +``` +.github/design-reviews/reviews.json +``` + +Format: +```json +{ + "reviews": { + "path/to/spec.md": [ + { "line": 30, "text": "Why is this needed?", "lineContent": "the line text" } + ] + } +} +``` + +## Workflow + +### Step 1: Read Review Comments + +1. Read `.github/design-reviews/reviews.json` +2. If a specific spec was mentioned, only process that spec's comments +3. If no comments found: + > "No review comments found. Add comments using the gutter icons in the editor." + +### Step 2: Read Spec Context + +For each comment, read ±5 lines around the comment's line number for full context. 
+ +### Step 3: Evaluate Each Comment + +| Comment Type | How to Identify | Action | +|-------------|----------------|--------| +| **Genuine issue** | Points out bug, inaccuracy, missing info | Update the spec | +| **Improvement** | Suggests better approach | Update if it improves clarity | +| **Question** | "why?", "what?", "how?" | Answer clearly; update spec if answer should be documented | +| **Challenge** | "Are you sure?" | Verify against codebase; update if wrong, explain if correct | +| **Acknowledgment** | "nice", "👍" | Acknowledge briefly, no change | + +### Step 4: Apply Changes + +For each comment requiring a spec update: +1. Read the current content around the target line +2. Make the edit using `replace_string_in_file` + +### Step 5: Clean Up reviews.json + +After addressing all comments for a spec, remove that spec's entry from `reviews.json`. +If no reviews remain, delete the file. + +### Step 6: Present Summary + +```markdown +## Review Comments Addressed + +--- + +### Comment 1: Line N — "[short quote]" + +**Type**: Question / Issue / Improvement / Acknowledgment +**Action**: [What was done or why no change was needed] + +--- + +### Comment 2: Line N — "..." +... +``` + +**Rules:** +- Use `###` heading for EVERY comment — never a table +- Use `---` separators between comments +- If spec was edited, mention what changed +- If no change needed, explain why diff --git a/feature-orchestrator-plugin/skills/feature-planner/SKILL.md b/feature-orchestrator-plugin/skills/feature-planner/SKILL.md new file mode 100644 index 00000000..2c297852 --- /dev/null +++ b/feature-orchestrator-plugin/skills/feature-planner/SKILL.md @@ -0,0 +1,282 @@ +--- +name: feature-planner +description: Decompose features into detailed, repo-targeted work items. Use when asked to "plan this feature", "break this down into PBIs", "decompose this into tasks". Produces a structured plan for developer review — actual work item creation is handled by pbi-creator. 
+--- + +# Feature Planner + +Decompose features into detailed, right-sized work items for implementation. + +**This skill does NOT create work items.** It produces a plan for developer review. +Once approved, the `pbi-creator` skill handles creation in your tracking system. + +## Configuration + +Read `.github/orchestrator-config.json` for: +- `repositories` — repo hosting details (slug, host, baseBranch, accountType) +- `modules` — module-to-repo mapping (each module has a `repo` key pointing to a repository) +- `design.docsPath` — where design specs are stored + +## Repository Routing + +Use the `modules` and `repositories` maps from config to route each work item: + +```json +// Example from config: +"repositories": { + "common-repo": { "slug": "org/common-repo", "baseBranch": "dev" }, + "service-repo": { "slug": "org/service-repo", "baseBranch": "main" } +}, +"modules": { + "core": { "repo": "common-repo", "path": "core/", "purpose": "Shared utilities" }, + "service": { "repo": "service-repo", "purpose": "Backend processing" } +} +``` + +Work items target a **module name**. To find the repo: `modules..repo` → `repositories.`. + +**Routing heuristic:** +1. Shared contracts/data models/utilities → shared module +2. Client-facing API changes → client module +3. Server/service-side processing → service module +4. Most features span a shared module + one consumer — create separate work items for each + +## Workflow + +### Step 1: Check for Approved Design + +1. Check configured `design.docsPath` for a matching design spec +2. If design exists and is approved, use it as the primary source +3. If no design exists, ask the developer whether to create one first +4. For small, single-repo changes, skip design and proceed directly + +### Step 2: Understand the Feature + +Gather: +1. **What** the feature does +2. **Why** it's needed +3. **Which flows** it affects +4. 
**Scope boundaries** (in/out) + +### Step 3: Research Current Implementation + +Use the `codebase-researcher` skill to understand: +- How related functionality currently works +- Which repos/files would need changes +- Existing patterns to follow +- Test patterns in affected areas + +### Step 4: Decompose into Work Items + +Rules: +1. **One work item per repo** — never span multiple repos +2. **Dependency ordering** — document dependencies explicitly +3. **Right-sized** — each should be implementable in one agent session (~1-3 files, <500 lines) +4. **Self-contained description** — everything the coding agent needs, inline +5. **No local file paths** — the coding agent runs in the cloud with only the target repo cloned + +### Step 5: Write Descriptions + +Each description MUST include: +- **Objective**: What to implement and where +- **Context**: Why this change is needed, how it fits the broader feature +- **Technical Requirements**: Specific implementation guidance — see mandatory rules below +- **Acceptance Criteria**: Concrete, verifiable checklist +- **Dependencies**: Use WI-N references (resolved to AB# later) +- **Files to Modify/Create**: Specific paths extracted from research (see rule below) +- **Testing**: What tests to write + +#### ⚠️ MANDATORY: Preserve Technical Detail from Design Spec + +The coding agent implements ONLY from the PBI description. It does NOT see the design spec, +codebase-context.md, or any other local file. Therefore: + +**Every technical detail the agent needs to write correct code MUST be in the PBI.** + +1. **API signatures**: If the design spec includes method signatures, class interfaces, enum values, + or return types — copy them **verbatim** into the PBI. Do NOT summarize code into prose. 
+ + **Bad** (prose summary — agent will guess the types wrong): + > "Create AuthTabManager that wraps AuthTabIntent.registerActivityResultLauncher() and launch()" + + **Good** (exact signatures from design spec — agent uses correct types): + > "Create `AuthTabManager` that wraps the AndroidX Browser 1.9.0 AuthTab API: + > ```kotlin + > // registerActivityResultLauncher returns ActivityResultLauncher, NOT + > // callback receives AuthTabIntent.AuthResult, NOT Uri + > fun registerLauncher(activity: ComponentActivity, callback: (AuthTabIntent.AuthResult) -> Unit): ActivityResultLauncher { + > return AuthTabIntent.registerActivityResultLauncher(activity, callback) + > } + > + > // launch() takes 3 params: launcher, uri, AND redirectScheme + > fun launch(launcher: ActivityResultLauncher, uri: Uri, redirectScheme: String) { + > AuthTabIntent.Builder().build().launch(launcher, uri, redirectScheme) + > } + > ```" + +2. **Rationale for changes**: Explain WHY something needs to change, not just what. The agent + makes better decisions when it understands the reason. + + **Bad**: "Change browserVersion from 1.7.0 to 1.9.0" + + **Good**: "Change `browserVersion` from `1.7.0` to `1.9.0` because AndroidX Browser 1.9.0 + introduces the `AuthTabIntent` API (Chrome 137+) which this feature depends on. Note: this + version bump changes the `onNewIntent` signature in `ComponentActivity` from + `onNewIntent(intent: Intent)` to `onNewIntent(intent: Intent?)` — any override in existing + code (e.g., `SwitchBrowserActivity`) must be updated to match." + +3. **Breaking side effects**: If a change in this PBI will break other code (even code not in + scope for this PBI), document it explicitly so the agent can fix it or the planner can + create a separate PBI. + + **Example**: "⚠️ Bumping browserVersion to 1.9.0 will break `SwitchBrowserActivity.onNewIntent()` + because the signature changed. Fix the override signature in this same PBI." + +4. 
**Third-party API details**: When wrapping a new library or API version, include: + - The exact dependency coordinates and version + - Key method signatures the agent needs to call (copied from docs or design spec) + - Any gotchas or differences from the agent's likely assumptions + - What the API returns and what types to expect + +5. **Code snippets from design spec**: If the design spec contains pseudocode, class skeletons, + or implementation patterns, include them in the PBI. The agent benefits enormously from + seeing a code sketch — even if it's pseudocode. + +#### ⚠️ MANDATORY: File Paths Rule + +The **"Files to Modify/Create"** field MUST list specific file paths from the research findings. +This is the single most important factor in coding agent success — it tells the agent WHERE +to look instead of forcing it to search blindly. + +**Good** (specific, extracted from research): +``` +Files to Modify/Create: +- common/common/src/main/java/com/microsoft/identity/common/internal/net/HttpClient.java — add retry logic +- common/common/src/main/java/com/microsoft/identity/common/internal/flight/CommonFlight.java — add RETRY_ENABLED flag +- common/common/src/test/java/com/microsoft/identity/common/internal/net/HttpClientTest.java — new test class +``` + +**Bad** (vague, agent has to guess): +``` +Files to Modify/Create: +- HTTP client module +- Flight definitions +- Tests +``` + +If the research didn't identify specific files for a task, state that explicitly: +``` +Files to Modify/Create: +- Exact paths not identified during research — agent should search for [specific class/pattern] + starting in [module/directory] +``` + +This gives the agent a starting point even when exact paths aren't known. + +### Quality Checklist + +Before finalizing each work item: +- [ ] Could someone unfamiliar implement it from the description alone? +- [ ] Does it explain WHY, not just WHAT? (rationale for every change) +- [ ] Is the scope clear with explicit exclusions? 
+- [ ] Are acceptance criteria concrete and testable? +- [ ] Is it right-sized? (1-3 files = ideal, >6 files = split it) +- [ ] Does "Files to Modify/Create" list specific paths from research? +- [ ] Are API signatures from the design spec included verbatim (not summarized to prose)? +- [ ] Are breaking side effects documented? (e.g., dependency bump breaks existing code) +- [ ] For third-party API wrapping: are exact method signatures and return types included? + +### Step 6: Present Plan for Review + +Use this **exact output format** — the `pbi-creator` skill depends on it. + +**IMPORTANT**: Do NOT use HTML tags (`
`, ``, etc.) — VS Code chat +renders markdown only. HTML tags appear as raw text. + +#### Output Format + +**1. Header:** + +```markdown +## Feature Plan: [Feature Name] + +**Feature flag**: `[flag_name]` (or "N/A") +**Design spec**: [path] (or "N/A") +**Total work items**: [N] +``` + +**2. Dependency graph:** + +```markdown +### Dependency Graph + +WI-1 (common) → WI-2 (service) + WI-3 (client) [parallel after WI-1] +``` + +**3. Summary table:** + +```markdown +### Summary Table + +| # | Title | Repo | Module | Priority | Depends On | +|---|-------|------|--------|----------|------------| +| WI-1 | [title] | common | shared | P1 | None | +| WI-2 | [title] | service | backend | P1 | WI-1 | +``` + +**4. Dispatch order:** + +```markdown +### Dispatch Order + +1. Dispatch **WI-1** first (no blockers) +2. After WI-1 merges → dispatch **WI-2** and **WI-3** in parallel +``` + +**5. Work item details:** + +```markdown +--- + +#### WI-1: [Title] + +| Field | Value | +|-------|-------| +| **Repo** | `[org/repo-name]` | +| **Module** | `[module]` | +| **Priority** | P[1-3] | +| **Depends on** | None / WI-X | +| **Tags** | `ai-generated; copilot-agent-ready; [feature-tag]` | + +##### Description + +[Full description in PLAIN MARKDOWN with: Objective, Context, Technical Requirements, +Acceptance Criteria, Files to Modify, Testing] +``` + +**6. Next step:** + +```markdown +### Next Step + +> Plan approved? Say **"create the PBIs"** to create work items in your tracking system. +``` + +## Common Patterns + +### Single-Repo Feature +One work item. Most bug fixes and small enhancements. + +### Two-Repo Feature (Shared + Consumer) +1. WI-1: Add shared logic/contract +2. WI-2: Consume from client or service + +### Multi-Repo Feature +1. WI-1: Shared contract/data model +2. WI-2: Service-side processing (depends on WI-1) +3. WI-3: Client-side API (depends on WI-1) +4. 
WI-4: (optional) Integration tests + +### Feature Flag Convention +All work items for a feature should use the **same feature flag name** across repos. +Include the flag name in each description. diff --git a/feature-orchestrator-plugin/skills/feature-planner/references/pbi-template.md b/feature-orchestrator-plugin/skills/feature-planner/references/pbi-template.md new file mode 100644 index 00000000..460e19dd --- /dev/null +++ b/feature-orchestrator-plugin/skills/feature-planner/references/pbi-template.md @@ -0,0 +1,71 @@ +# Work Item Template + +Use this structure for every work item description. The description must be +**self-contained** — the coding agent only has this text plus the target repo. + +## Objective + +[1-2 sentences: What to implement and in which module/repo] + +## Context + +[Why this change is needed. How it fits into the larger feature. +Include enough background that someone unfamiliar could understand the motivation.] + +## Technical Requirements + +### What to Build + +[Specific implementation details. Include:] + +- Classes/functions to create or modify +- Method signatures and data structures +- Error handling approach +- Threading/concurrency model (if relevant) +- Integration points with other modules + +### Code Patterns to Follow + +[Reference existing patterns in the repo. Include actual code examples +or file paths within the TARGET REPO (not other repos).] 
+ +``` +// Example of the pattern to follow: +class ExistingPattern { + // show the convention +} +``` + +### What NOT to Do + +[Explicit exclusions to prevent scope creep:] +- Do NOT modify [specific files/features outside scope] +- Do NOT add [unnecessary abstractions] +- This work item does NOT cover [related but separate concern] + +## Acceptance Criteria + +- [ ] [Concrete, testable criterion 1] +- [ ] [Concrete, testable criterion 2] +- [ ] [Concrete, testable criterion 3] +- [ ] All existing tests pass +- [ ] New unit tests cover the happy path and error cases +- [ ] Code follows project conventions (from copilot-instructions.md) + +## Files to Modify + +| File | Change | +|------|--------| +| `path/to/file.ext` | [What to change] | +| `path/to/new-file.ext` | [New file — what it contains] | + +## Dependencies + +- **Depends on**: [WI-N / AB#ID — what must be merged first and why] +- **Depended on by**: [WI-N — who is waiting for this] + +## Testing + +- Unit tests for [specific logic] +- Integration tests for [cross-component interaction] (if applicable) +- Test file location: `path/to/tests/` diff --git a/feature-orchestrator-plugin/skills/pbi-creator/SKILL.md b/feature-orchestrator-plugin/skills/pbi-creator/SKILL.md new file mode 100644 index 00000000..efe1edec --- /dev/null +++ b/feature-orchestrator-plugin/skills/pbi-creator/SKILL.md @@ -0,0 +1,265 @@ +--- +name: pbi-creator +description: Create work items in Azure DevOps from a feature plan. Handles ADO metadata discovery (area path, iteration, assignee), work item creation, and dependency linking. Triggers include "create the PBIs", "create work items", "push PBIs to ADO". +--- + +# PBI Creator + +Create Azure DevOps work items from a feature plan produced by the `feature-planner` skill. 
+ +## Configuration + +Read `.github/orchestrator-config.json` for: +- `ado.project` — ADO project name (e.g., "Engineering") +- `ado.org` — ADO organization name (e.g., "IdentityDivision") +- `ado.workItemType` — work item type (default: "Product Backlog Item") +- `ado.iterationDepth` — depth for iteration discovery (default: 6) + +### ⚠️ ADO Org/Project Parsing + +The `ado.org` and `ado.project` fields should contain **plain names only**, not full URLs. +If the config contains a URL, extract the relevant part: +- `https://dev.azure.com/IdentityDivision/Engineering/_workitems/edit/123` → org: `IdentityDivision`, project: `Engineering` +- `https://msazure.visualstudio.com/One/_git/repo` → org: `msazure`, project: `One` +- `IdentityDivision` → use as-is + +When calling MCP tools, pass only the **org name** (e.g., `IdentityDivision`) and +**project name** (e.g., `Engineering`), never a full URL with `https:`. URLs with colons +cause ADO API errors: "A potentially dangerous Request.Path value was detected." + +## Prerequisites + +- **ADO MCP Server** must be running (configured in `.mcp.json`) +- A **feature plan** in the current chat context (from `feature-planner` skill) + +## Workflow + +### Step 1: Parse the Feature Plan + +Read the plan from chat context. Extract for each work item: +- **Title** — from `#### WI-N: [Title]` header +- **Repo** — from metadata table +- **Module** — from metadata table +- **Priority** — P1→1, P2→2, P3→3 +- **Depends on** — WI-N references +- **Tags** — from metadata table +- **Description** — from `##### Description` section. **Convert to HTML** for ADO: + - `## Heading` → `
<h2>Heading</h2>`
+  - `**bold**` → `<b>bold</b>`
+  - `- item` → `<ul><li>item</li></ul>`
+  - Or wrap in `<pre>` tags if conversion is complex
+
+If no plan found, ask: "Run the `feature-planner` skill first, or paste PBI details."
+
+### Step 2: Discover ADO Defaults
+
+**Do this BEFORE asking the developer.** This ensures valid options.
+
+1. Call `mcp_ado_wit_my_work_items` to get recent work items
+2. Call `mcp_ado_wit_get_work_items_batch_by_ids` on 3-5 recent items
+3. Extract:
+   - `System.AreaPath` — all unique paths with frequency counts
+   - `System.IterationPath` — note the pattern
+   - `System.AssignedTo` — default assignee
+4. Call `mcp_ado_work_list_iterations` with `depth` from config (default 6)
+5. **Filter iterations to current month or future only** — discard past iterations
+
+### Step 3: Present Options for Confirmation
+
+## ⛔ HARD STOP — DO NOT SKIP THIS STEP
+
+**You MUST complete Step 2 and Step 3 BEFORE creating any work items.**
+Do NOT proceed to Step 4 until the developer has answered ALL four questions.
+This is not optional. This is not a suggestion. **STOP HERE and ask.**
+
+If you skip this step and auto-select defaults, the work items will be created
+in the wrong area path, wrong iteration, or wrong assignee — and the developer
+will have to manually fix every single one.
+
+**Batch ALL questions into a SINGLE `askQuestion` call:**
+
+```
+askQuestion({
+  questions: [
+    {
+      header: "Area Path",
+      question: "Which area path?",
+      options: [
+        { label: "<most-frequent-area-path>", description: "From your recent work items", recommended: true },
+        { label: "<second-most-frequent-area-path>" }
+      ],
+      allowFreeformInput: true
+    },
+    {
+      header: "Iteration",
+      question: "Which iteration? (Current date: <today's date>)",
+      options: [
+        { label: "<current-or-next-iteration>", description: "<iteration date range>", recommended: true },
+        { label: "<following-iteration>" }
+      ],
+      allowFreeformInput: true
+    },
+    {
+      header: "Assignee",
+      question: "Who should be assigned?",
+      options: [
+        { label: "<assignee-from-recent-items>", description: "From recent work items", recommended: true }
+      ],
+      allowFreeformInput: true
+    },
+    {
+      header: "Parent",
+      question: "Link to a parent Feature work item?",
+      options: [
+        { label: "Create new Feature", description: "New Feature titled '<feature name>'" },
+        { label: "No parent", description: "Standalone PBIs" }
+      ],
+      allowFreeformInput: true
+    }
+  ]
+})
+```
+
+Wait for ALL answers before proceeding.
+
+### Step 4: Create Work Items
+
+Use `mcp_ado_wit_create_work_item` for each item in **dependency order**.
+
+**CRITICAL parameters** (read project from config):
+```json
+{
+  "project": "<ado.project from config>",
+  "workItemType": "<ado.workItemType from config>",
+  "fields": [
+    {"name": "System.Title", "value": "[title]"},
+    {"name": "System.Description", "value": "[HTML description]", "format": "Html"},
+    {"name": "System.AreaPath", "value": "[confirmed path]"},
+    {"name": "System.IterationPath", "value": "[confirmed iteration]"},
+    {"name": "System.AssignedTo", "value": "[confirmed assignee]"},
+    {"name": "Microsoft.VSTS.Common.Priority", "value": "[number]"},
+    {"name": "System.Tags", "value": "[semicolon-separated tags]"}
+  ]
+}
+```
+
+**Common mistakes to avoid:**
+- Do NOT use top-level `title`, `description`, `areaPath` — they don't exist
+- The param is `workItemType`, NOT `type`
+- Description must be **HTML** with `"format": "Html"`
+- Tags are semicolon-separated
+- Area/iteration paths use backslashes
+- **Never hardcode paths** — use developer-confirmed values
+- **MUST include Area Path AND Iteration Path** — these come from Step 3 confirmations.
+  If you don't have them, you skipped Step 3. Go back.
+
+### ⚠️ Title Sanitization
+
+**Remove colons (`:`) from work item titles.** The ADO REST API encodes titles in the
+URL path, and colons trigger an HTTP 400 error: "A potentially dangerous Request.Path
+value was detected from the client (:)."
+
+Instead of: `WI-1: Add feature flag and ECS flight`
+Use: `WI-1 — Add feature flag and ECS flight` (em-dash) or just `Add feature flag and ECS flight`
+
+Also avoid these characters in titles: `<`, `>`, `#`, `%`, `{`, `}`, `|`, `\`, `^`, `~`, `[`, `]`, `` ` ``
+
+### ⚠️ NEVER Create Work Items With Minimal Descriptions
+
+**Every work item MUST include the FULL description from the feature plan.** This is the
+entire point of the orchestrator — the coding agent implements from the PBI description alone.
+
+If `mcp_ado_wit_create_work_item` fails:
+1. **Check the error** — is it a title character issue? Sanitize and retry.
+2. **Retry the same tool** with corrected input.
+3. **If the tool keeps failing**, report the error to the developer and ask them to help
+   troubleshoot the MCP server.
+
+**NEVER fall back to a different tool that creates work items without the full description.**
+**NEVER tell the user "descriptions are summaries" or suggest they update them manually.**
+If you can't create work items with full descriptions, STOP and report the failure.
+A PBI without a proper description is worse than no PBI at all.
+
+After each creation, record the returned `id` and map WI-N → AB#[id].
+
+### Step 5: Resolve Dependencies + Parent Links
+
+1. **Update descriptions**: Replace WI-N references with AB#[id] in each description
+2. **Link dependencies**: Use `mcp_ado_wit_work_items_link`:
+   ```json
+   {"updates": [{"id": [dependent], "linkToId": [dependency], "type": "predecessor"}]}
+   ```
+3. **Parent to Feature** (if created): Use `mcp_ado_wit_add_child_work_items`
+
+### Step 5.5: Mark as Committed
+
+Update all work items to **Committed** state:
+```json
+{"id": [id], "fields": [{"name": "System.State", "value": "Committed"}]}
+```
+
+### Step 6: Report Summary
+
+```markdown
+## Work Items Created: [Feature Name]
+
+| # | AB# | Title | Repo | Depends On | State | Link |
+|---|-----|-------|------|------------|-------|------|
+| WI-1 | AB#12345 | [title] | common | — | Committed | [link] |
+| WI-2 | AB#12346 | [title] | service | AB#12345 | Committed | [link] |
+
+### Settings Used
+- **Parent Feature**: AB#12340 (or "None")
+- **Area Path**: `[path]`
+- **Iteration**: `[path]`
+- **Assigned to**: `[assignee]`
+
+### Dispatch Order
+1. Dispatch **AB#12345** first
+2. After merge → dispatch **AB#12346** (and any other now-unblocked items) in parallel
+
+### Next Step
+> Say **"dispatch"** to send the first work item to Copilot coding agent.
+```
+
+## MCP Server Recovery
+
+If ADO MCP tools fail mid-workflow:
+1. Restart: Command Palette → `MCP: Restart Server` → `ado`
+2. If still broken, try a **new chat session**
+3. **Preserve progress**: Note which items were created (AB# IDs) so the new
+   session can continue without duplicating work items
+4. In the new session, the developer can say:
+   > "Continue creating PBIs for [feature]. WI-1 already created as AB#12345. Create WI-2 onwards."
+
+## Edge Cases
+
+### Plan has a single PBI
+Skip dependency linking. Create one work item and report.
+
+### Developer wants different area paths per PBI
+If PBIs target different teams or modules, ask if they want different area paths.
+Present discovered options for each PBI individually.
+
+### Developer modifies the plan before approving
+If the developer asks for changes (add/remove PBIs, change descriptions), defer back
+to the `feature-planner` skill to regenerate, then return here for creation.
+
+### Creating a Parent Feature Work Item
+
+If the developer wants a parent Feature, create it first:
+```json
+{
+  "project": "<ado.project from config>",
+  "workItemType": "Feature",
+  "fields": [
+    {"name": "System.Title", "value": "[Feature Name]"},
+    {"name": "System.Description", "value": "<div>[Brief description]</div>
", "format": "Html"}, + {"name": "System.AreaPath", "value": "[confirmed path]"}, + {"name": "System.IterationPath", "value": "[confirmed iteration]"}, + {"name": "System.AssignedTo", "value": "[confirmed assignee]"}, + {"name": "System.Tags", "value": "ai-generated"} + ] +} +``` +Record the Feature ID for parenting PBIs. diff --git a/feature-orchestrator-plugin/skills/pbi-dispatcher-ado-swe/SKILL.md b/feature-orchestrator-plugin/skills/pbi-dispatcher-ado-swe/SKILL.md new file mode 100644 index 00000000..042521b5 --- /dev/null +++ b/feature-orchestrator-plugin/skills/pbi-dispatcher-ado-swe/SKILL.md @@ -0,0 +1,185 @@ +--- +name: pbi-dispatcher-ado-swe +description: Dispatch work items to Copilot SWE agent for ADO-hosted repos. Tags the work item with the target repo and assigns to GitHub Copilot, which creates a draft PR automatically. +--- + +# PBI Dispatcher — ADO (Copilot SWE) + +Dispatch work items to the Copilot SWE agent for ADO-hosted repos. The agent is triggered +by tagging the work item with the target repo and assigning it to **GitHub Copilot**. +**This skill is for ADO repos only.** For GitHub repos, use `pbi-dispatcher-github`. + +## Configuration + +Read `.github/orchestrator-config.json` for: +- `modules` — module-to-repo mapping (each module has a `repo` key) +- `repositories` — repo details: slug (`org/project/repo`), baseBranch, host +- `ado.org` — ADO organization name +- `ado.project` — ADO project name + +To resolve a module to dispatch details: +1. Look up `modules..repo` → get the repo key +2. Look up `repositories.` → get `slug`, `baseBranch` +3. Parse the slug to extract org, project, and repo name + +## Prerequisites + +- **ADO MCP Server** running — for updating work items +- Copilot SWE agent enabled/onboarded for the target ADO repository +- Work items with clear descriptions (from the `pbi-creator` skill) + +## Workflow + +### 1. Read Work Items + +Read PBI details from ADO (via MCP) or from chat context. 
Need: +- AB# ID (work item ID) +- Target repo module +- Full description should already be on the work item (set by `pbi-creator`) + +### 2. Check Dependencies + +For each work item, check if dependencies (other AB# IDs) have merged PRs. Skip blocked items. + +### 2a. Gather Cross-PBI Context for Dependencies + +For each work item that HAS dependencies on already-merged PBIs, enrich the work item +description with context about what those dependencies changed. This helps the Copilot +SWE agent understand what preceding PBIs introduced. + +For each merged dependency, query the linked PR (check the work item's links or search): +``` +Use mcp_ado_wit_get_work_item to read the dependency work item and check for linked PRs. +``` + +If a merged PR is found, **append** to the work item description (via `mcp_ado_wit_update_work_item`): +``` +## Dependency Context + +This work item depends on already-merged changes: + +### AB#: (PR merged) +Key changes introduced: [summary from PR title and description] +Build on these changes. Do NOT duplicate or re-implement what the dependency already added. +``` + +If no linked PR is found, skip — the PBI description is still self-contained. + +### 3. 
Tag Work Item with Target Repository + +Add a tag to the work item using the format: +``` +copilot:repo=//@ +``` + +Use `mcp_ado_wit_update_work_item` to add the tag: + +```json +{ + "id": , + "fields": [ + { + "name": "System.Tags", + "value": "; copilot:repo=//@" + } + ] +} +``` + +**Building the tag value** from config: +- The repo slug in config is `org/project/repo` format +- The base branch comes from `repositories..baseBranch` +- Example: slug `msazure/One/AD-MFA-phonefactor-phoneApp-android`, branch `working` + → tag: `copilot:repo=msazure/One/AD-MFA-phonefactor-phoneApp-android@working` + +**⚠️ IMPORTANT:** +- Use only ONE linking method per work item — the tag OR an artifact link, not both +- Only one repository can be linked per work item +- The branch after `@` is required — use the base branch from config +- **Append** the new tag to existing tags (semicolon-separated), don't overwrite them. + Read existing tags first via `mcp_ado_wit_get_work_item`, then append. + +### 4. Assign to GitHub Copilot + +Use `mcp_ado_wit_update_work_item` to assign the work item to **GitHub Copilot**: + +```json +{ + "id": , + "fields": [ + { + "name": "System.AssignedTo", + "value": "GitHub Copilot" + } + ] +} +``` + +**Note**: The display name is `GitHub Copilot`. If this doesn't work, the identity may +be registered differently in the org. Check with the user. + +### 5. What Happens Next + +After assignment, the Copilot SWE agent will automatically: +1. Create a **draft/WIP PR** in the target repo +2. Add a **comment to the work item** with the PR link +3. Link the PR to the work item +4. Begin implementing the solution from the work item description + +The agent uses `.github/copilot-instructions.md` in the target repo for coding conventions. + +### 6. 
Update Orchestrator State
+
+```powershell
+$su = Join-Path $HOME ".feature-orchestrator" "state-utils.js"
+node $su set-step "<feature>" monitoring
+```
+
+Note: The PR URL won't be available immediately — the agent takes a few minutes to create
+the draft PR. The user can check status later via the Monitor phase.
+
+### 7. Report Summary
+
+```markdown
+## Dispatch Summary
+
+| AB# | Repo | Method | Status |
+|-----|------|--------|--------|
+| AB#12345 | org/project/repo | Copilot SWE | ✅ Tagged & assigned to GitHub Copilot |
+| AB#12346 | org/project/repo | Copilot SWE | ⏸ Blocked (waiting on AB#12345) |
+
+### What to Expect
+- The Copilot SWE agent will create a **draft PR** in a few minutes
+- It will add a comment on the work item with the PR link
+- Once the PR is published, review the changes and add comments to iterate
+- Tag `@GitHub Copilot` in PR comments to request changes
+
+### Next Step
+> Check back in a few minutes and say **"status"** to see if the PR has been created.
+> Or open the work item in ADO to see the agent's comment with the PR link.
+```
+
+## Iterating on the PR
+
+After the agent creates the PR:
+- Add comments at the PR level or on specific files
+- **Tag `@GitHub Copilot`** in PR comments (the agent won't act without the explicit tag)
+- If ADO doesn't auto-complete the @-mention, type the literal text `@`
+- The agent will create a new iteration with updates
+
+## Error Handling
+
+### "Repository is not yet onboarded"
+The target repo needs to be onboarded to the Copilot SWE pilot program.
+Guide the user to follow their org's onboarding process.
+
+### Assignment fails
+The `GitHub Copilot` identity may not be available in the org. Check:
+- Is Copilot SWE enabled for this ADO organization?
+- Is the identity name different? 
(Try searching for "Copilot" in the assignee field)
+
+### Tag format errors
+Ensure the tag follows exactly: `copilot:repo=<org>/<project>/<repo>@<branch>`
+- No spaces around `=` or `@`
+- Branch name is required
+- Org/project/repo must match exactly what's in ADO
diff --git a/feature-orchestrator-plugin/skills/pbi-dispatcher-ado/SKILL.md b/feature-orchestrator-plugin/skills/pbi-dispatcher-ado/SKILL.md
new file mode 100644
index 00000000..6b9fe62a
--- /dev/null
+++ b/feature-orchestrator-plugin/skills/pbi-dispatcher-ado/SKILL.md
@@ -0,0 +1,134 @@
+---
+name: pbi-dispatcher-ado
+description: Dispatch work items to ADO Agency for ADO-hosted repos. Uses the Agency REST API to create coding agent jobs that produce draft PRs.
+---
+
+# PBI Dispatcher — ADO (Agency)
+
+Dispatch work items to ADO Agency for ADO-hosted repos. Agency generates a solution
+as a draft pull request in Azure DevOps.
+**This skill is for ADO repos only.** For GitHub repos, use `pbi-dispatcher-github`.
+
+## Configuration
+
+Read `.github/orchestrator-config.json` for:
+- `modules` — module-to-repo mapping (each module has a `repo` key)
+- `repositories` — repo details: slug (`org/project/repo`), baseBranch, host
+- `ado.org` — ADO organization name
+- `ado.project` — ADO project name
+
+To resolve a module to dispatch details:
+1. Look up `modules.<module>.repo` → get the repo key
+2. Look up `repositories.<repo>` → get `slug`, `baseBranch`, `host`
+3. Parse the slug to extract org, project, and repo name
+
+## Prerequisites
+
+- **Azure CLI** (`az`) authenticated — needed to acquire the Agency API token
+- Work items in ADO with tag `copilot-agent-ready`
+- Agency enabled for the target ADO organization/project
+
+## Workflow
+
+### 1. Read Work Items
+
+Read PBI details from ADO (via MCP) or from the chat context. Need:
+- AB# ID
+- Full description (Objective, Technical Requirements, Acceptance Criteria)
+- Target repo module
+
+### 2. Check Dependencies
+
+For each work item, check if dependencies (other AB# IDs) have merged PRs. 
Skip blocked items.
+
+### 3. Acquire Agency API Token
+
+Use Azure CLI to get a bearer token for the Agency API:
+
+```powershell
+$token = az account get-access-token --resource "api://81bbac67-d541-4a6d-a48b-b1c0f9a57888" --query accessToken -o tsv
+```
+
+If this fails:
+- Check `az account show` — user may not be authenticated
+- Guide: `az login`
+- If `az` is not installed, tell the user Agency dispatch requires Azure CLI
+
+### 4. Dispatch to Agency
+
+For each ready work item, call the Agency REST API:
+
+```powershell
+$body = @{
+    organization = "<org>"
+    project = "<project>"
+    repository = "<repo>"
+    targetBranch = "<baseBranch>"
+    prompt = @"
+<Full PBI description. Include 'Fixes AB#<ID>'.>
+"@
+    options = @{
+        pullRequest = @{
+            create = $true
+            publish = $true
+        }
+    }
+} | ConvertTo-Json -Depth 4
+
+$response = Invoke-RestMethod `
+    -Uri "https://copilotswe.app.prod.gitops.startclean.microsoft.com/api/agency/jobs" `
+    -Method Post `
+    -Headers @{ Authorization = "Bearer $token"; "Content-Type" = "application/json" } `
+    -Body $body
+
+Write-Host "Agency job created: $($response | ConvertTo-Json)"
+```
+
+**Parsing the repo slug** for Agency API parameters:
+- Slug format is `org/project/repo` (from config)
+- `organization` = first segment (e.g., `msazure`)
+- `project` = second segment (e.g., `One`)
+- `repository` = third segment (e.g., `AD-MFA-phonefactor-phoneApp-android`)
+
+**IMPORTANT for prompt content:**
+- Include the FULL PBI description (not truncated)
+- Include `Fixes AB#<ID>` so the PR links to the ADO work item
+- Do NOT include local file paths — the agent can't access them
+
+### 5. Update ADO State
+
+Mark the ADO work item as `Active`, add tag `agent-dispatched`.
+
+### 6. Report Summary
+
+```markdown
+## Dispatch Summary
+
+| AB# | Repo | Method | Status |
+|-----|------|--------|--------|
+| AB#12345 | org/project/repo | ADO Agency | ✅ Dispatched |
+| AB#12346 | org/project/repo | ADO Agency | ⏸ Blocked (waiting on AB#12345) |
+
+### Next Step
+> Say **"status"** to check agent PR progress. 
+> Agency will create a draft PR in ADO when implementation is ready. +``` + +## Error Handling + +### Token acquisition fails +``` +az account get-access-token --resource "api://81bbac67-d541-4a6d-a48b-b1c0f9a57888" +``` +If this returns an error: +- "AADSTS..." → user may not have access to Agency. They need to request access. +- "Please run 'az login'" → guide the user to authenticate + +### Agency API returns 403 +The user's account may not have Agency enabled for the target repo/org. +Tell the user to check their Agency access at their org's Agency administration page. + +### Agency API returns 400 +Check the request body — ensure org, project, and repository match exactly what's in ADO. +The repository name must match the ADO repo name, not a slug or URL. diff --git a/feature-orchestrator-plugin/skills/pbi-dispatcher-github/SKILL.md b/feature-orchestrator-plugin/skills/pbi-dispatcher-github/SKILL.md new file mode 100644 index 00000000..0798771e --- /dev/null +++ b/feature-orchestrator-plugin/skills/pbi-dispatcher-github/SKILL.md @@ -0,0 +1,199 @@ +--- +name: pbi-dispatcher-github +description: Dispatch work items to GitHub Copilot coding agent for GitHub-hosted repos. Uses `gh agent-task create` to create agent tasks. +--- + +# PBI Dispatcher — GitHub + +Dispatch work items to GitHub Copilot coding agent by creating agent tasks in GitHub-hosted repos. +**This skill is for GitHub repos only.** For ADO repos, use `pbi-dispatcher-ado`. 
+ +## Configuration + +Read `.github/orchestrator-config.json` for: +- `modules` — module-to-repo mapping (each module has a `repo` key) +- `repositories` — repo details: slug, baseBranch, host +- `github.configFile` — per-developer config path (default: `.github/developer-local.json`) + +Read the developer-local config file for GitHub account mapping: +```json +// .github/developer-local.json +{ + "github_accounts": { + "org/common-repo": "johndoe", + "enterprise-org/service-repo": "johndoe_microsoft" + } +} +``` + +To resolve a module to dispatch details: +1. Look up `modules..repo` → get the repo key +2. Look up `repositories.` → get `slug`, `baseBranch`, `host` +3. Look up `developer-local.github_accounts.` → get the GitHub username +4. Run `gh auth switch --user ` before dispatching + +## Prerequisites + +- **GitHub CLI** (`gh`) authenticated +- Work items in ADO with tag `copilot-agent-ready` +- Copilot coding agent enabled on target repos + +## GitHub Account Discovery + +**CRITICAL**: Determine which `gh` CLI accounts to use. **Never hardcode usernames.** + +### Discovery Sequence (stop at first success) + +**Step 0: Verify `gh` CLI is installed:** +```powershell +gh --version +``` +If not found, offer to install: +- Windows: `winget install --id GitHub.cli -e` +- macOS: `brew install gh` + +**Step 1: Check developer config file** (from `github.configFile` in config): +```powershell +$config = Get-Content "" -Raw -ErrorAction SilentlyContinue | ConvertFrom-Json +``` + +**Step 2: Discover from `gh auth status`:** +```powershell +$ghStatus = gh auth status 2>&1 +``` +Map accounts to types: +- Non-EMU account (no `_` suffix) → `public` repos +- EMU account (`_microsoft` suffix) → `emu` repos + +**Step 3: Prompt the developer** (fallback): +> "I need your GitHub usernames: +> 1. **Public GitHub** (for public org repos): ___ +> 2. **GitHub EMU** (for enterprise repos, if applicable): ___" + +Offer to save to the developer config file. 
+ +**Step 4: Not signed in at all:** +> "Please run: `gh auth login --hostname github.com`" + +## Repo Routing + +Use `modules` → `repositories` → `developer-local.json` to resolve dispatch details: + +```json +// orchestrator-config.json (committed, shared): +"repositories": { + "common-repo": { "slug": "org/common-repo", "host": "github", "baseBranch": "main" }, + "service-repo": { "slug": "enterprise-org/service-repo", "host": "github", "baseBranch": "dev" } +}, +"modules": { + "core": { "repo": "common-repo" }, + "service": { "repo": "service-repo" } +} + +// developer-local.json (per-developer, gitignored): +"github_accounts": { + "org/common-repo": "johndoe", + "enterprise-org/service-repo": "johndoe_microsoft" +} + +// Resolution: module "core" → repo "common-repo" → slug "org/common-repo" +// → gh account "johndoe" (from developer-local) → gh auth switch --user johndoe +``` + +## Workflow + +### 1. Read Work Items + +Read PBI details from ADO (via MCP) or from the chat context. Need: +- AB# ID +- Full description (Objective, Technical Requirements, Acceptance Criteria) +- Target repo module + +### 2. Check Dependencies + +For each work item, check if dependencies (other AB# IDs) have merged PRs. Skip blocked items. + +### 2a. Gather Cross-PBI Context for Dependencies + +For each work item that HAS dependencies on already-merged PBIs, enrich the dispatch +prompt with context about what those dependencies changed. This is critical — the coding +agent implementing PBI #3 needs to know what PBI #1 introduced. 
+ +For each merged dependency: +```powershell +gh pr list --repo "" --search "AB#" --state merged --json number,title,files --jq '.[0]' +``` + +If a merged PR is found, add this block to the dispatch prompt: +``` +## Dependency Context + +This work item depends on already-merged changes: + +### AB#: (PR #, merged) +Files changed: +- (added/modified) +- (added/modified) + +Key APIs introduced: [extract from PR title/files — e.g., new classes, interfaces] +Build on these changes. Do NOT duplicate or re-implement what the dependency PR already added. +``` + +If the PR can't be found (no AB# match), skip gracefully — the PBI description is still self-contained. + +### 3. Switch Account + Dispatch + +For each ready work item: + +**Switch to correct account** (based on repo's `accountType` from config): +```powershell +gh auth switch --user +``` + +**Dispatch via `gh agent-task create`** (preferred, requires gh v2.80+): + +Write the full PBI description to a temp file to avoid shell escaping issues: +```powershell +$prompt = @" + +"@ +$prompt | Set-Content -Path "$env:TEMP\pbi-prompt.txt" +gh agent-task create (Get-Content "$env:TEMP\pbi-prompt.txt" -Raw) --repo "" --base +``` + +**IMPORTANT prompt content:** +- Include FULL PBI description (not truncated) +- Include `Fixes AB#` so PR links to ADO +- Include `Follow .github/copilot-instructions.md strictly` +- Do NOT include local file paths — agent can't access them + +**Fallback** (if `gh agent-task create` fails): Create a GitHub Issue and assign to Copilot. + +### 4. Update ADO State + +Mark the ADO work item as `Active`, add tag `agent-dispatched`. + +### 5. 
Report Summary
+
+```markdown
+## Dispatch Summary
+
+| AB# | Repo | Method | Status |
+|-----|------|--------|--------|
+| AB#12345 | org/common-repo | agent-task | ✅ Dispatched |
+| AB#12346 | org/service-repo | agent-task | ✅ Dispatched |
+| AB#12347 | org/client-repo | agent-task | ⏸ Blocked (waiting on AB#12345) |
+
+### Next Step
+> Say **"status"** to check agent PR progress.
+> Use `@copilot` in PR comments to iterate with the coding agent.
+```
+
+## Review Feedback Loop
+
+After PRs are created, use `@copilot` in PR comments to iterate:
+```
+@copilot Please add unit tests for the error case.
+@copilot Use the Logger class instead of direct logging.
+```
diff --git a/feature-orchestrator-plugin/skills/pr-validator/SKILL.md b/feature-orchestrator-plugin/skills/pr-validator/SKILL.md
new file mode 100644
index 00000000..d6a02791
--- /dev/null
+++ b/feature-orchestrator-plugin/skills/pr-validator/SKILL.md
@@ -0,0 +1,151 @@
+---
+name: pr-validator
+description: Validate an agent-created PR against its PBI acceptance criteria. Use during the Monitor phase to check whether a PR satisfies what was requested before human review. Triggers include "validate PR", "check PR quality", "does this PR match the spec".
+---
+
+# PR Validator
+
+Validate whether an agent-created PR satisfies its originating PBI's acceptance criteria
+and follows project conventions. This runs during the Monitor phase — after the coding
+agent creates a PR but before the human reviews it.
+
+## Purpose
+
+Save human review time by catching obvious gaps:
+- Missing acceptance criteria
+- Missing tests
+- Convention violations that the agent should have followed
+- Scope creep (changes beyond what was requested)
+
+**This is NOT a full code review.** It's a structured checklist that flags what to look at. 
+ +## Inputs + +- PR number and repo slug +- The PBI that originated the PR (AB# ID or from feature state) + +## Process + +### Step 1: Gather PR Data + +```powershell +gh pr view --repo "" --json title,body,files,additions,deletions,commits,reviews,statusCheckRollup +``` + +Also get the diff stat: +```powershell +gh pr diff --repo "" --stat +``` + +### Step 2: Gather PBI Data + +Read the originating PBI's description from feature state or ADO: + +```powershell +$su = Join-Path $HOME ".feature-orchestrator" "state-utils.js" +node $su get-feature "" +``` + +Find the PBI that matches this PR (by repo + AB# reference in PR title/body). +Extract: +- **Acceptance Criteria** — the checklist from the PBI description +- **Files to Modify/Create** — expected file paths +- **Technical Requirements** — specific implementation guidance +- **Testing** — expected test coverage + +### Step 3: Acceptance Criteria Check + +For each acceptance criterion in the PBI: +1. Search the PR diff for evidence that it's addressed +2. 
Mark as: ✅ Addressed | ⚠️ Partially | ❌ Not found | ❓ Can't determine + +**How to check:** +- If the criterion mentions a specific behavior → look for code implementing it +- If it mentions a specific file → check if that file is in the PR's changed files +- If it mentions tests → check if test files are included +- If it's too abstract to verify from diff alone → mark ❓ + +### Step 4: File Coverage Check + +Compare the PBI's "Files to Modify/Create" against the PR's actual changed files: +- **Expected but not changed** → flag as potential gap +- **Changed but not expected** → flag as potential scope creep (may be fine — dependencies, imports) +- **New files created** → check naming conventions match the repo's patterns + +### Step 5: Convention Check + +Based on the repo's `.github/copilot-instructions.md` (which the agent should have followed), +spot-check: +- **Tests included?** If the PBI specified tests and no test files are in the diff → flag +- **Telemetry?** If the PBI mentioned telemetry/spans and no span-related code is visible → flag +- **Feature flag?** If the PBI mentioned a feature flag and none is visible → flag +- **License headers?** If new files were created, check for headers (don't read every file — just note if new files exist) + +**Do NOT** do a full code review. Don't check variable naming, code style, or logic correctness. +The human reviewer does that. Focus only on structural completeness. 
+ +### Step 6: CI Status Check + +```powershell +gh pr checks --repo "" +``` + +Report: +- All passing → ✅ +- Some failing → list which checks failed +- Pending → note that CI is still running + +### Step 7: Present Report + +```markdown +## 🔍 PR Validation: # + +**PBI**: AB# +**Repo**: <slug> +**Changes**: +<additions> -<deletions> across <N> files + +### Acceptance Criteria + +| # | Criterion | Status | Evidence | +|---|-----------|--------|----------| +| 1 | [criterion text] | ✅ Addressed | [file or code reference] | +| 2 | [criterion text] | ⚠️ Partial | [what's missing] | +| 3 | [criterion text] | ❌ Not found | — | + +### File Coverage + +| Expected (from PBI) | In PR? | Notes | +|---------------------|--------|-------| +| path/to/File.java | ✅ | Modified | +| path/to/Test.java | ❌ | Not in diff — tests may be missing | + +**Unexpected changes**: [list files changed that weren't in the PBI, if any] + +### Convention Checks + +| Check | Status | +|-------|--------| +| Tests included | ✅ / ❌ | +| Telemetry spans | ✅ / ❌ / N/A | +| Feature flag gating | ✅ / ❌ / N/A | +| CI status | ✅ All passing / ❌ [failures] | + +### Summary + +**Overall**: 🟢 Looks good / 🟡 Review these gaps / 🔴 Significant gaps + +[1-2 sentence summary: what the human reviewer should focus on] +``` + +## When to Run + +- **Automatically**: When the Monitor phase detects a new PR from the coding agent +- **Manually**: When the user says "validate PR" or "check this PR" +- **On refresh**: When the dashboard refreshes PR status and a new open PR is found + +## Important Guidelines + +- **Speed over depth**: This should take <30 seconds. Don't read every line of code. +- **No false confidence**: If you can't verify a criterion from the diff, say ❓ not ✅ +- **Actionable output**: Every ❌ or ⚠️ should tell the human what to look for +- **Don't block**: This is informational. 
Even if gaps exist, the human decides whether to approve From d4b96da6e473798770df069dcec5fbfe1f3e3bc9 Mon Sep 17 00:00:00 2001 From: Shahzaib <shahzaib.jameel@microsoft.com> Date: Mon, 16 Mar 2026 16:33:24 -0700 Subject: [PATCH 3/4] Feature Orchestrator Plugin: Agents (3/5) Add specialized subagent definitions: - feature-orchestrator: Main coordinator agent for the full pipeline - codebase-researcher: Research agent with context-file-first approach - design-writer: Design spec author following team template - feature-planner: PBI decomposition with quality mandates - pbi-creator: ADO work item creation agent - agent-dispatcher: Routes dispatch to GitHub or ADO based on repo host --- .../agents/agent-dispatcher.agent.md | 34 +++ .../agents/codebase-researcher.agent.md | 34 +++ .../agents/design-writer.agent.md | 32 ++ .../agents/feature-orchestrator.agent.md | 285 ++++++++++++++++++ .../agents/feature-planner.agent.md | 25 ++ .../agents/pbi-creator.agent.md | 38 +++ 6 files changed, 448 insertions(+) create mode 100644 feature-orchestrator-plugin/agents/agent-dispatcher.agent.md create mode 100644 feature-orchestrator-plugin/agents/codebase-researcher.agent.md create mode 100644 feature-orchestrator-plugin/agents/design-writer.agent.md create mode 100644 feature-orchestrator-plugin/agents/feature-orchestrator.agent.md create mode 100644 feature-orchestrator-plugin/agents/feature-planner.agent.md create mode 100644 feature-orchestrator-plugin/agents/pbi-creator.agent.md diff --git a/feature-orchestrator-plugin/agents/agent-dispatcher.agent.md b/feature-orchestrator-plugin/agents/agent-dispatcher.agent.md new file mode 100644 index 00000000..189e3a5c --- /dev/null +++ b/feature-orchestrator-plugin/agents/agent-dispatcher.agent.md @@ -0,0 +1,34 @@ +--- +name: agent-dispatcher +description: Dispatch work items to coding agents. Routes to GitHub Copilot agent or ADO Copilot SWE based on repo host. 
+user-invocable: false +--- + +# Agent Dispatcher + +You dispatch work items to coding agents for autonomous implementation. + +## Routing — Choose the Right Dispatcher + +Read `.github/orchestrator-config.json` to determine each repo's `host` field: + +- **If host is `github`** → Use the `pbi-dispatcher-github` skill + (dispatches via `gh agent-task create`) +- **If host is `ado`** → Use the `pbi-dispatcher-ado-swe` skill + (tags the work item with the target repo and assigns to GitHub Copilot in ADO) + +Look up the module → repo → host chain: +1. `modules.<module>.repo` → get the repo key +2. `repositories.<repo>.host` → `"github"` or `"ado"` +3. Use the corresponding skill + +## Key Rules + +- Read `.github/orchestrator-config.json` for repo slugs, base branches, and host types +- **Route to the correct dispatcher** based on repo host — never use GitHub dispatch for ADO repos or vice versa +- Include `Fixes AB#<ID>` in every prompt so the PR links to ADO +- Include `Follow .github/copilot-instructions.md strictly` as a reminder +- Do NOT include local file paths in prompts — the agent can't access them +- Check dependencies before dispatching — skip blocked items +- Update ADO state after dispatching (Active + agent-dispatched tag) +- Report dispatch summary with status for each work item diff --git a/feature-orchestrator-plugin/agents/codebase-researcher.agent.md b/feature-orchestrator-plugin/agents/codebase-researcher.agent.md new file mode 100644 index 00000000..c6cba5fc --- /dev/null +++ b/feature-orchestrator-plugin/agents/codebase-researcher.agent.md @@ -0,0 +1,34 @@ +--- +name: codebase-researcher +description: Research the codebase to understand existing implementations, patterns, and architecture. +user-invocable: false +--- + +# Codebase Researcher + +You research the codebase to find implementations, patterns, and architecture. + +## Instructions + +Read the skill file at the `codebase-researcher` skill and follow its workflow. 
+ +## Key Context Files + +The orchestrator's research prompt will instruct you to read project context files +(copilot-instructions, config, codebase-context). Follow those instructions — they +contain critical project knowledge for effective research. + +## Key Rules + +- Search across ALL modules/directories in the workspace +- Read specific line ranges, not entire files +- Report findings with file paths and line numbers +- Rate confidence: HIGH / MEDIUM / LOW for each finding +- **CRITICAL: Return COMPREHENSIVE, DETAILED output** — your findings are the primary + context for subsequent steps (design writing, PBI planning). Include: + - Specific file paths with line numbers + - Class names, method signatures, key code snippets + - Architectural observations (how components connect) + - Existing patterns to follow (feature flags, error handling, etc.) + - Test patterns in the affected areas + Do NOT return a brief summary. Be thorough — the design-writer relies entirely on your output. diff --git a/feature-orchestrator-plugin/agents/design-writer.agent.md b/feature-orchestrator-plugin/agents/design-writer.agent.md new file mode 100644 index 00000000..e507a172 --- /dev/null +++ b/feature-orchestrator-plugin/agents/design-writer.agent.md @@ -0,0 +1,32 @@ +--- +name: design-writer +description: Write detailed design specs for features following project conventions. +user-invocable: false +--- + +# Design Writer + +You write detailed design specs for features. + +## Instructions + +Read the `design-author` skill and follow its workflow for writing the spec. + +## Key Rules + +- Read `.github/orchestrator-config.json` for `design.docsPath` and `design.templatePath` +- If a template exists, follow it. Otherwise use the built-in template from the skill. 
+- Include: Problem description, Requirements, 2+ Solution Options with pseudo code and pros/cons, + Recommended Solution, API surface, Data flow, Testing strategy +- Save the spec to the configured docs path +- **After writing the spec, STOP and present choices** using `askQuestion`: + 1. Review locally — open the file in editor + 2. Approve and skip PR — move to PBI planning + 3. Approve and open draft PR + 4. Approve and publish PR + 5. Request changes + **Use `askQuestion` — do NOT present options as plain text.** + **Do NOT auto-create a PR. Do NOT auto-proceed. Wait for explicit choice.** +- **Branch naming**: Discover alias from `git config user.email` (strip @domain) +- **PR description**: Use actual line breaks, NOT literal `\n` escape sequences +- Return a summary of the design including the recommended solution and file path diff --git a/feature-orchestrator-plugin/agents/feature-orchestrator.agent.md b/feature-orchestrator-plugin/agents/feature-orchestrator.agent.md new file mode 100644 index 00000000..1186161c --- /dev/null +++ b/feature-orchestrator-plugin/agents/feature-orchestrator.agent.md @@ -0,0 +1,285 @@ +--- +description: AI-driven feature development orchestrator. Design → Plan → Backlog → Dispatch → Monitor. +agents: + - codebase-researcher + - design-writer + - feature-planner + - pbi-creator + - agent-dispatcher +--- + +# Feature Orchestrator + +You are the coordinator for AI-driven feature development. +You orchestrate the full pipeline: **Design → Plan → Backlog → Dispatch → Monitor**. + +## Configuration + +This plugin uses `.github/orchestrator-config.json` in the workspace for project-specific settings. +**Read it at the start of every session** to discover: +- Repository slug mapping (`repositories`) +- ADO project/org (`ado`) +- Design doc locations (`design`) +- Codebase structure (`codebase`) + +If the config file doesn't exist, tell the user: +> "No configuration found. 
Run `/feature-orchestrator-plugin:setup` to configure this project." + +## How You Work + +You coordinate by delegating to specialized subagents. Keep your own context clean. + +1. **Research** → `codebase-researcher` subagent — produce **detailed, comprehensive output** +2. **Design** → `design-writer` subagent — pass full research output +3. **Plan** → `feature-planner` subagent — pass design spec content +4. **Backlog** → `pbi-creator` subagent — discover ADO defaults, create work items +5. **Dispatch** → `agent-dispatcher` subagent — send PBIs to Copilot coding agent + +### Critical: Subagent Output Quality + +Subagents return only a summary. If thin, subsequent steps lack context. +**Always instruct subagents to produce rich, detailed output:** + +> "Return COMPREHENSIVE findings. Your output is the primary context for the next step. +> Include: specific file paths with line numbers, class/method names, code snippets of +> key patterns, architectural observations, test patterns. Do NOT summarize briefly." + +### Context Handoff + +**Every subagent starts with a clean context.** Pass the right information: + +| Handoff | What to pass | +|---------|-------------| +| → codebase-researcher | Feature description + areas to investigate | +| → design-writer | Feature description + FULL research output (verbatim) | +| → feature-planner | FULL research + design spec content (read from disk) | +| → pbi-creator | FULL plan output (summary table + all PBI details) | +| → agent-dispatcher | AB# IDs + target repos from creation step | + +**NEVER re-summarize** subagent output. Pass **verbatim**. 
+ +## Important Instructions + +- Read `.github/copilot-instructions.md` for project conventions +- Read `.github/orchestrator-config.json` for configuration +- Use subagents for heavy work — keep your context clean +- **Wait for user approval between phases** — never auto-proceed +- **Use `askQuestion`** for ALL user choices (clickable UI, not plain text) +- **Stage transitions**: Use `askQuestion` to gate each next step +- **Stage headers**: **ALWAYS** start each phase with a header in this exact format: + ``` + ## 🚀 Feature Orchestration: <Phase Name> + **Pipeline**: ✅ Done → ✅ Done → 📋 **Current** → ○ Next → ○ Later + ``` + The rocket emoji and "Feature Orchestration:" prefix are mandatory. Never skip them. + +## Commands (detected from user prompt) + +- New feature → **Design** phase +- "approved", "plan", "break into PBIs" → **Plan** phase +- "create the PBIs", "backlog" → **Backlog** phase +- "dispatch", "send to agent" → **Dispatch** phase +- "status", "check", "monitor" → **Monitor** phase + +### Full Flow (new feature) + +**Step 0: Read config + Register feature**: +```powershell +cat .github/orchestrator-config.json +$su = Join-Path $HOME ".feature-orchestrator" "state-utils.js" +node $su add-feature '{"name": "<feature>", "step": "designing"}' +``` + +```markdown +## 🚀 Feature Orchestration Started +**Feature**: [description] +Pipeline: **Design** → Plan → Backlog → Dispatch → Monitor +``` + +1. **Run `codebase-researcher` subagent** with a detailed prompt: + ``` + Research [feature description] in this codebase. Return COMPREHENSIVE + findings — your output is the primary context for writing the design spec. + + BEFORE searching, read these files in order to understand the project: + 1. .github/copilot-instructions.md — project conventions and repo structure + 2. .github/orchestrator-config.json — module-to-repo mapping + 3. 
.github/codebase-context.md — architecture, key classes, patterns, search tips + Use the knowledge from these files to guide your research. + + Then search for: + - Existing implementations related to this feature across all modules + - Patterns to follow (feature flags, error handling, telemetry) + - Related design docs (if design.docsPath is configured) + - Key source files and their architecture + + Include in your response: specific file paths with line numbers, class/method names, + code snippets of key patterns, architectural observations, and test patterns. + Be thorough — the design-writer relies entirely on your findings. + ``` + +2. **Pass the FULL research output** to the `design-writer` subagent: + ``` + Write a design spec for: [feature description] + + Here are the comprehensive research findings from the codebase: + [paste the ENTIRE research subagent output here — do NOT summarize or truncate] + ``` + +3. Design-writer writes the spec and presents 5 choices to the developer +4. Present the design-writer's summary and wait for user approval + +### Planning Phase + +Output: +```markdown +## 🚀 Feature Orchestration: Plan +**Pipeline**: ✅ Design → 📋 **Plan** → ○ Backlog → ○ Dispatch → ○ Monitor +``` + +1. **Read the approved design spec** from the configured `design.docsPath` +2. **Pass BOTH the research findings AND the design spec** to `feature-planner`: + ``` + Decompose this feature into repo-targeted work items. + + ## Research Findings + [paste the FULL codebase-researcher output from earlier — verbatim] + + ## Design Spec + [paste the FULL design spec content — requirements, solution decision, + cross-repo impact, files to modify, feature flag, telemetry, testing strategy. + Read it from disk if needed.] + ``` + The planner needs BOTH — research tells it what exists, the design tells it what to change. +3. Planner produces Summary Table + PBI Details +4. 
**Present and STOP** — wait for developer approval + +After presenting, use `askQuestion`: +``` +askQuestion({ + question: "PBI plan is ready for review. What next?", + options: [ + { label: "✅ Backlog in ADO", description: "Create these PBIs as work items in Azure DevOps" }, + { label: "✏️ Revise Plan", description: "Adjust the PBI breakdown before creating" } + ] +}) +``` + +### Backlog Phase + +Output: +```markdown +## 🚀 Feature Orchestration: Backlog +**Pipeline**: ✅ Design → ✅ Plan → 📝 **Backlog** → ○ Dispatch → ○ Monitor +``` + +1. **Pass the FULL plan** to `pbi-creator`: + ``` + Create these PBIs in Azure DevOps. + + ## Feature Plan + [paste the FULL feature-planner output — summary table, dependency graph, + dispatch order, AND all PBI details with their complete descriptions. + Do NOT truncate or summarize.] + ``` + The pbi-creator needs every PBI's title, repo, module, priority, + dependencies, tags, and full description to create the work items. +2. pbi-creator discovers ADO defaults, confirms settings, creates work items +3. Present AB# IDs + +After presenting, use `askQuestion`: +``` +askQuestion({ + question: "PBIs are backlogged in ADO. What next?", + options: [ + { label: "🚀 Dispatch to Copilot Agent", description: "Send first PBI to Copilot coding agent" }, + { label: "⏸ Pause", description: "I'll dispatch later" } + ] +}) +``` + +### Dispatch Phase + +Output: +```markdown +## 🚀 Feature Orchestration: Dispatch +**Pipeline**: ✅ Design → ✅ Plan → ✅ Backlog → 🚀 **Dispatch** → ○ Monitor +``` + +Run `agent-dispatcher` subagent. Update state after dispatch: +```powershell +$su = Join-Path $HOME ".feature-orchestrator" "state-utils.js" +node $su set-step "<feature>" monitoring +node $su add-agent-pr "<feature>" '{"repo":"...","prNumber":N,"prUrl":"...","status":"open"}' +``` + +### Monitor Phase + +Output: +```markdown +## 🚀 Feature Orchestration: Monitor +**Pipeline**: ✅ Design → ✅ Plan → ✅ Backlog → ✅ Dispatch → 📡 **Monitor** +``` + +1. 
**Read feature state** — get the tracked PRs:
   ```powershell
   $su = Join-Path $HOME ".feature-orchestrator" "state-utils.js"
   node $su get-feature "<feature>"
   ```
   This returns `artifacts.agentPrs` with repo, PR number, URL, and status.
   **Only check PRs listed in state — do NOT scan all repos.**

2. **Read repo slugs** from `.github/orchestrator-config.json`

3. **Check each tracked PR** via `gh`:
   ```powershell
   gh pr view <prNumber> --repo "<slug from config>" --json state,title,url,statusCheckRollup,additions,deletions,changedFiles,isDraft
   ```

4. **Present results** as a table:
   | PR | Repo | Title | Status | Checks | +/- Lines |
   |----|------|-------|--------|--------|-----------|

5. **Validate open PRs** against their PBI acceptance criteria:
   For each PR that is `open` (not merged/closed), run the `pr-validator` skill
   (from `feature-orchestrator-plugin/skills/pr-validator/SKILL.md`).
   This produces a validation report showing which acceptance criteria are met,
   which files are missing, and whether tests/telemetry/flags are included.
   Present the validation report after the status table.

6. **Update state** with latest PR statuses:
   ```powershell
   node $su add-agent-pr "<feature>" '{"repo":"...","prNumber":N,"prUrl":"...","status":"<open|merged|closed>"}'
   ```

7. End with: "Use `@copilot` in PR comments to iterate with the coding agent."

## State Tracking

The state CLI lives at `~/.feature-orchestrator/state-utils.js` (installed during setup).
Use **PowerShell single quotes** around JSON arguments. 
+ +Shorthand for commands: +```powershell +$su = Join-Path $HOME ".feature-orchestrator" "state-utils.js" +node $su <command> <args> +``` + +| When | Command | +|------|--------| +| Feature start | `node $su add-feature '{"name": "...", "step": "designing"}'` | +| Design done | `node $su set-step "<name>" design_review` | +| | `node $su set-design "<name>" '{"docPath":"<path>","status":"approved"}'` | +| Plan done | `node $su set-step "<name>" plan_review` | +| Backlog done | `node $su set-step "<name>" backlog_review` | +| Each PBI | `node $su add-pbi "<name>" '{"adoId":N,"title":"...","module":"...","status":"Committed","dependsOn":[N]}'` | +| Dispatch done | `node $su set-step "<name>" monitoring` | +| Each PR | `node $su add-agent-pr "<name>" '{"repo":"...","prNumber":N,"prUrl":"...","status":"open","title":"..."}'` | + +**Resilience**: If state commands fail, log silently and continue. Core pipeline must never block. + +## File Path Handling + +Design docs and specs may use brackets and spaces in folder names (e.g., `[Android] Feature Name/`). +When working with these paths in PowerShell, use `-LiteralPath` instead of `-Path` to avoid +glob interpretation issues. diff --git a/feature-orchestrator-plugin/agents/feature-planner.agent.md b/feature-orchestrator-plugin/agents/feature-planner.agent.md new file mode 100644 index 00000000..5864ba2c --- /dev/null +++ b/feature-orchestrator-plugin/agents/feature-planner.agent.md @@ -0,0 +1,25 @@ +--- +name: feature-planner +description: Decompose features into repo-targeted work items. Produces a structured plan for developer review. +user-invocable: false +--- + +# Feature Planner + +You decompose approved designs into detailed, repo-targeted work items. + +## Instructions + +Read the `feature-planner` skill and follow its workflow. 
+ +## Key Rules + +- Read `.github/orchestrator-config.json` for repository routing +- Read the approved design spec first (from the configured `design.docsPath`) +- One work item per repo — never span multiple repos +- Descriptions must be self-contained — no local file paths, no references to design docs +- Use the PBI template at the `feature-planner` skill's `references/pbi-template.md` +- Follow the **exact output format** defined in the skill (Summary Table + WI Details) +- Use `WI-1`, `WI-2` etc. for dependency references (not AB# IDs) +- **Do NOT create ADO work items** — that's handled by `pbi-creator` after approval +- Return the full structured plan for developer review diff --git a/feature-orchestrator-plugin/agents/pbi-creator.agent.md b/feature-orchestrator-plugin/agents/pbi-creator.agent.md new file mode 100644 index 00000000..a5c5bfbd --- /dev/null +++ b/feature-orchestrator-plugin/agents/pbi-creator.agent.md @@ -0,0 +1,38 @@ +--- +name: pbi-creator +description: Create Azure DevOps work items from an approved feature plan. +user-invocable: false +--- + +# PBI Creator + +You create Azure DevOps work items from an approved feature plan. + +## Instructions + +Read the `pbi-creator` skill and follow its workflow. + +## Key Rules + +- Read `.github/orchestrator-config.json` for ADO project, org, and work item type +- **Parse the feature plan** from the chat context — extract titles, repos, priorities, + dependencies, tags, and descriptions +- **Discover ADO defaults first** — use MCP tools to find area paths, iterations, assignee + from the developer's recent work items +- **Never hardcode area/iteration paths** — always discover from existing work items +- **⛔ MANDATORY CONFIRMATIONS — HARD STOP** — you MUST ask the developer ALL four + questions via `askQuestion` (batched into one call) and WAIT for answers BEFORE creating + any work items. Do NOT skip this. Do NOT auto-select defaults: + 1. Area path + 2. Iteration (current month or future only) + 3. 
Assignee + 4. Parent Feature work item +- **Sanitize titles** — remove colons (`:`) and other special characters that break the + ADO REST API. Use em-dash (`—`) instead of colon. +- Create work items in dependency order +- Convert markdown descriptions to HTML for ADO +- **NEVER create work items with minimal/summary descriptions** — always include the FULL + description from the feature plan. If the MCP tool fails, retry with sanitized input. + Do NOT fall back to a tool that drops the description. +- Link dependencies and mark as Committed +- Return AB# IDs, titles, repos, and dispatch instructions From 5d33bea477bf6b070337c9723eb3e9a71af50b3c Mon Sep 17 00:00:00 2001 From: Shahzaib <shahzaib.jameel@microsoft.com> Date: Mon, 16 Mar 2026 16:33:44 -0700 Subject: [PATCH 4/4] Feature Orchestrator Plugin: Commands (4/5) Add prompt/command files for each pipeline stage: - feature-design: Start new feature with design spec - feature-plan: Decompose design into PBIs - feature-backlog: Create work items in ADO - feature-dispatch: Send PBIs to Copilot coding agent - feature-status: Monitor agent PR progress - feature-continue: Resume a feature from any stage - feature-pr-iterate: Review and iterate on agent PRs - setup: Interactive project configuration wizard with multi-source design doc support (git + SharePoint/OneDrive via WorkIQ) --- .../commands/feature-backlog.md | 21 + .../commands/feature-continue.md | 25 + .../commands/feature-design.md | 47 + .../commands/feature-dispatch.md | 28 + .../commands/feature-plan.md | 31 + .../commands/feature-pr-iterate.md | 34 + .../commands/feature-status.md | 28 + feature-orchestrator-plugin/commands/setup.md | 804 ++++++++++++++++++ 8 files changed, 1018 insertions(+) create mode 100644 feature-orchestrator-plugin/commands/feature-backlog.md create mode 100644 feature-orchestrator-plugin/commands/feature-continue.md create mode 100644 feature-orchestrator-plugin/commands/feature-design.md create mode 100644 
feature-orchestrator-plugin/commands/feature-dispatch.md create mode 100644 feature-orchestrator-plugin/commands/feature-plan.md create mode 100644 feature-orchestrator-plugin/commands/feature-pr-iterate.md create mode 100644 feature-orchestrator-plugin/commands/feature-status.md create mode 100644 feature-orchestrator-plugin/commands/setup.md diff --git a/feature-orchestrator-plugin/commands/feature-backlog.md b/feature-orchestrator-plugin/commands/feature-backlog.md new file mode 100644 index 00000000..26be94a3 --- /dev/null +++ b/feature-orchestrator-plugin/commands/feature-backlog.md @@ -0,0 +1,21 @@ +--- +agent: feature-orchestrator-plugin:feature-orchestrator.agent +description: "Create work items in Azure DevOps from an approved plan" +--- + +# Backlog Phase + +You are in the **Backlog** phase. Create work items in ADO from the approved plan. + +**First**: Read `.github/orchestrator-config.json` for ADO project, org, and work item type. + +Use the `pbi-creator` skill to: +1. Parse the approved plan from the previous phase +2. Discover ADO defaults (area path, iteration, assignee) from your recent work items +3. Present ALL settings for confirmation via `askQuestion` — batch into one call +4. Create all work items in dependency order +5. Link dependencies and parent to Feature work item +6. Mark all as Committed +7. Report AB# IDs with dispatch order + +**Pipeline**: ✅ Design → ✅ Plan → 📝 **Backlog** → ○ Dispatch → ○ Monitor diff --git a/feature-orchestrator-plugin/commands/feature-continue.md b/feature-orchestrator-plugin/commands/feature-continue.md new file mode 100644 index 00000000..a2423ce8 --- /dev/null +++ b/feature-orchestrator-plugin/commands/feature-continue.md @@ -0,0 +1,25 @@ +--- +agent: feature-orchestrator-plugin:feature-orchestrator.agent +description: "Resume working on a feature from its current step" +--- + +# Continue Feature + +Resume working on a feature from its current step. + +1. 
Set state util path: `$su = Join-Path $HOME ".feature-orchestrator" "state-utils.js"`
2. Read feature state: `node $su list-features`
3. If multiple features exist, use `askQuestion` to let the user pick one
4. Read the selected feature: `node $su get-feature "<name>"`
5. Determine the current step and show pipeline progress:

   | Step | Next Action |
   |------|-------------|
   | `designing` | Continue writing the design spec |
   | `design_review` | Design is written — ask if approved or needs revision |
   | `plan_review` | Plan is ready — ask if approved or needs revision |
   | `backlog_review` | PBIs created — ask about dispatching |
   | `monitoring` | Check PR statuses |
   | `completed` | Feature is done! Show summary |

6. Resume from the appropriate phase
diff --git a/feature-orchestrator-plugin/commands/feature-design.md b/feature-orchestrator-plugin/commands/feature-design.md
new file mode 100644
index 00000000..5f7b4aec
--- /dev/null
+++ b/feature-orchestrator-plugin/commands/feature-design.md
@@ -0,0 +1,47 @@
---
agent: feature-orchestrator-plugin:feature-orchestrator.agent
description: "Start a new feature: research the codebase and create a design spec"
---

# Design Phase

You are in the **Design** phase. The user will describe a feature below.

**First**: Read `.github/orchestrator-config.json` for project configuration.

**Step 0**: Register the feature in state:
```powershell
$su = Join-Path $HOME ".feature-orchestrator" "state-utils.js"
node $su add-feature '{"name": "<short feature name>", "step": "designing"}'
```

**Step 1**: Use the `codebase-researcher` skill to understand existing patterns.
Instruct it to return **comprehensive, detailed output** — your design depends on its findings. 
+ +**Step 2**: Write a design spec covering: +- Problem description and business context +- Requirements (functional + non-functional) +- Solution options (at least 2) with pseudo code and pros/cons +- Recommended solution with reasoning +- API surface changes (if applicable) +- Data flow across components +- Feature flag strategy +- Testing strategy +- Cross-repo impact + +Save to the configured `design.docsPath` location. + +**Step 3**: Present the design using `askQuestion`: +``` +askQuestion({ + question: "Design spec is ready. What would you like to do?", + options: [ + { label: "📖 Review locally", description: "Open in editor for inline review" }, + { label: "✅ Approve & plan PBIs", description: "Move to work item planning" }, + { label: "📋 Open draft PR", description: "Push as draft for team review" }, + { label: "✏️ Revise design", description: "Make changes first" } + ] +}) +``` + +**Pipeline**: 📝 **Design** → ○ Plan → ○ Backlog → ○ Dispatch → ○ Monitor diff --git a/feature-orchestrator-plugin/commands/feature-dispatch.md b/feature-orchestrator-plugin/commands/feature-dispatch.md new file mode 100644 index 00000000..988af556 --- /dev/null +++ b/feature-orchestrator-plugin/commands/feature-dispatch.md @@ -0,0 +1,28 @@ +--- +agent: feature-orchestrator-plugin:feature-orchestrator.agent +description: "Dispatch work items to GitHub Copilot coding agent for implementation" +--- + +# Dispatch Phase + +You are in the **Dispatch** phase. Send work items to Copilot coding agent. + +**First**: Read `.github/orchestrator-config.json` for repo slugs, base branches, and account types. + +Use the `pbi-dispatcher` skill to: +1. Discover GitHub accounts (from developer config file or `gh auth status`) +2. Read work item details from ADO +3. Check dependencies — skip blocked items +4. 
For each ready item: + - Switch to correct `gh` account (based on repo's `accountType` from config) + - Dispatch via `gh agent-task create` with the full PBI description as prompt + - Include `Fixes AB#ID` in the prompt +5. Update ADO state (Active + agent-dispatched tag) +6. Update orchestrator state: + ```powershell + $su = Join-Path $HOME ".feature-orchestrator" "state-utils.js" + node $su set-step "<feature>" monitoring + node $su add-agent-pr "<feature>" '{"repo":"...","prNumber":N,"prUrl":"...","status":"open"}' + ``` + +**Pipeline**: ✅ Design → ✅ Plan → ✅ Backlog → 🚀 **Dispatch** → ○ Monitor diff --git a/feature-orchestrator-plugin/commands/feature-plan.md b/feature-orchestrator-plugin/commands/feature-plan.md new file mode 100644 index 00000000..737fa1be --- /dev/null +++ b/feature-orchestrator-plugin/commands/feature-plan.md @@ -0,0 +1,31 @@ +--- +agent: feature-orchestrator-plugin:feature-orchestrator.agent +description: "Decompose an approved design into repo-targeted work items" +--- + +# Plan Phase + +You are in the **Plan** phase. Decompose the approved design into work items. + +**First**: Read `.github/orchestrator-config.json` for repository routing and module definitions. + +**Step 1**: Read the approved design spec (from the design phase or from the configured docs path). + +**Step 2**: Use the `feature-planner` skill to break it into right-sized, self-contained work items: +- One per repo/module (use repo mapping from config) +- Each must be implementable from its description alone — the coding agent has no access to design docs +- Include: objective, context, technical requirements, acceptance criteria, files to modify +- Reference existing code patterns discovered during research + +**Step 3**: Present the plan using `askQuestion`: +``` +askQuestion({ + question: "Work item plan is ready. 
What next?", + options: [ + { label: "✅ Create in ADO", description: "Create work items in Azure DevOps" }, + { label: "✏️ Revise plan", description: "Adjust the breakdown first" } + ] +}) +``` + +**Pipeline**: ✅ Design → 📋 **Plan** → ○ Backlog → ○ Dispatch → ○ Monitor diff --git a/feature-orchestrator-plugin/commands/feature-pr-iterate.md b/feature-orchestrator-plugin/commands/feature-pr-iterate.md new file mode 100644 index 00000000..50d7db63 --- /dev/null +++ b/feature-orchestrator-plugin/commands/feature-pr-iterate.md @@ -0,0 +1,34 @@ +--- +agent: feature-orchestrator-plugin:feature-orchestrator.agent +description: "Review a PR and iterate with Copilot coding agent" +--- + +# PR Iteration + +Help review and iterate on a pull request from the Copilot coding agent. + +**First**: Read `.github/orchestrator-config.json` for repo slug mapping. + +1. Ask which PR to review (or detect from context/state) +2. Fetch PR details: + ```powershell + gh pr view <number> --repo "<slug>" --json title,body,url,state,reviews,comments + gh pr diff <number> --repo "<slug>" + ``` +3. Fetch all review comments: + ```powershell + gh api "/repos/<slug>/pulls/<number>/comments" --jq '.[].body' + ``` +4. Present findings and use `askQuestion`: + ``` + askQuestion({ + question: "How would you like to handle this PR?", + options: [ + { label: "🤖 Delegate to Copilot", description: "Post @copilot comment with feedback" }, + { label: "📋 Analyze comments", description: "Show review comments with proposed resolutions" }, + { label: "✅ Approve", description: "Approve the PR" }, + { label: "🔄 Request changes", description: "Request specific changes" } + ] + }) + ``` +5. 
Execute the chosen action
diff --git a/feature-orchestrator-plugin/commands/feature-status.md b/feature-orchestrator-plugin/commands/feature-status.md
new file mode 100644
index 00000000..3d892f9b
--- /dev/null
+++ b/feature-orchestrator-plugin/commands/feature-status.md
@@ -0,0 +1,28 @@
---
agent: feature-orchestrator-plugin:feature-orchestrator.agent
description: "Check the status of agent-created pull requests"
---

# Monitor Phase

You are in the **Monitor** phase. Check PR status only.

**Do NOT** ask about creating PBIs, planning, or other phases. Just report status.

**First**: Read `.github/orchestrator-config.json` for repo slug mapping.

1. Set state util path: `$su = Join-Path $HOME ".feature-orchestrator" "state-utils.js"`
2. Read feature state: `node $su get-feature "<feature>"`
3. For each tracked PR in `artifacts.agentPrs`:
   ```powershell
   gh pr view <prNumber> --repo "<slug from config>" --json state,title,url,statusCheckRollup,additions,deletions,changedFiles,isDraft
   ```
4. Present results in a table:

   | PR | Repo | Title | Status | Checks | +/- Lines |
   |----|------|-------|--------|--------|-----------|

5. Update state with latest PR statuses
6. Suggest: "Use `@copilot` in PR comments to iterate with the coding agent."

**Pipeline**: ✅ Design → ✅ Plan → ✅ Backlog → ✅ Dispatch → 📡 **Monitor**
diff --git a/feature-orchestrator-plugin/commands/setup.md b/feature-orchestrator-plugin/commands/setup.md
new file mode 100644
index 00000000..1db1ae4c
--- /dev/null
+++ b/feature-orchestrator-plugin/commands/setup.md
@@ -0,0 +1,804 @@
---
agent: feature-orchestrator-plugin:feature-orchestrator.agent
description: "Configure the Feature Orchestrator for this project (first-time setup)"
---

# Setup — Feature Orchestrator Configuration

Guide the user through setting up `.github/orchestrator-config.json` for this project.

**Check first**: Does `.github/orchestrator-config.json` already exist? 
+- If yes, read it and ask: "Configuration already exists. Would you like to update it or start fresh?" +- If no, proceed with setup. + +## General Rules + +- Output each step heading as a **separate markdown line** before any question UI. +- **askQuestion rule**: When using `allowFreeformInput: true`, do NOT also include an + option like "Enter a different answer" or "Type custom value" — the `allowFreeformInput` + already adds a freeform text field automatically. Adding an explicit option for it creates + a confusing duplicate where one is clickable-but-not-typeable and the other is typeable. +- Follow the steps **sequentially** — do not skip ahead or reference later steps. + +--- + +### Step 1: Check Prerequisites + +Tell the user: +> "Let me check that the required tools are installed before we begin. This ensures +> we don't go through the full setup only to find out something is missing." + +```powershell +node --version # Node.js (for state-utils) — REQUIRED +gh --version # GitHub CLI — only needed if any repos are on GitHub +az --version # Azure CLI — only needed if using ADO work items +``` + +**Node.js** is always required (for state tracking). If missing: +> "Node.js is required for the orchestrator. Install: +> `winget install OpenJS.NodeJS.LTS` (Windows) or `brew install node` (macOS) or https://nodejs.org" +**This is a blocker** — do not proceed until Node.js is available. + +**GitHub CLI** — note whether installed. Don't check repos yet (discovered in Step 2). +- If installed → note version, proceed +- If not installed → note it; will revisit in Step 6 if GitHub repos are found + +**Azure CLI** — note whether installed. +- If installed → note version, proceed +- If not installed → note it's optional: + > "Azure CLI is optional but recommended for live PBI status updates. 
+ > Install: `winget install Microsoft.AzureCLI` (Windows) or `brew install azure-cli` (macOS)" + +Present a quick summary: +```markdown +### Prerequisites +- **Node.js**: v24.14.0 ✅ +- **GitHub CLI**: v2.87.3 ✅ +- **Azure CLI**: v2.82.0 ✅ (or ⚠️ Not installed — optional) +``` + +--- + +### Step 2: Discover Repositories + +Tell the user: +> "Now let me discover your project's repositories by checking git remotes." + +Find all distinct git remotes in the workspace: + +1. Check if the workspace root has subdirectories with their own `.git` (submodules/sub-repos): + ```powershell + Get-ChildItem -Directory | ForEach-Object { git -C $_.Name remote get-url origin 2>$null } + ``` +2. Also check the workspace root's own remote: `git remote get-url origin` +3. Deduplicate — multiple directories may share the same remote (same repo). + +**Parse each remote URL into a slug:** +- GitHub: `https://github.com/org/repo.git` → `org/repo` +- GitHub SSH: `git@github.com:org/repo.git` → `org/repo` +- ADO: `https://org@dev.azure.com/org/project/_git/repo` → `org/project/repo` +- ADO: `https://org.visualstudio.com/.../project/_git/repo` → `org/project/repo` + (Both `dev.azure.com` and `visualstudio.com` URLs are ADO — treat them identically. + **Do NOT label either as "legacy" or "new"** when presenting to the user — just show `ADO`.) + +**Auto-detect base branch** for each repo: +```powershell +git -C <path> symbolic-ref refs/remotes/origin/HEAD 2>$null +# Falls back to checking git branch -r for origin/main or origin/dev +``` + +--- + +### Step 3: Discover Modules + +Tell the user: +> "Now let me check for modules inside each repository." + +For each repo, discover the logical modules inside it. 
Detection is language-agnostic — +look for common project structure signals: + +| Signal | What it means | +|--------|---------------| +| `settings.gradle` / `build.gradle` with `include` | Android/JVM multi-module (Gradle) | +| `pom.xml` with `<modules>` | Java multi-module (Maven) | +| `package.json` in subdirectories / `workspaces` field | Node.js/JS monorepo | +| `go.work` or multiple `go.mod` files | Go multi-module workspace | +| `Cargo.toml` with `[workspace]` members | Rust workspace | +| `*.csproj` / `*.sln` with multiple projects | .NET solution | +| `pyproject.toml` / `setup.py` in subdirectories | Python multi-package | +| Directories with their own `src/` or `lib/` | Generic convention | +| Single `src/` at root, no sub-projects | Single-module repo | + +If a repo has only one module, the module name defaults to the repo name. +If a repo has multiple modules, list each with its path relative to the repo root. + +Also read project metadata for Step 4 (project info): +- `README.md` — first `#` heading and first paragraph +- `package.json` — `name` and `description` fields +- `build.gradle` / `settings.gradle` — `rootProject.name` +- `pom.xml` — `<name>` and `<description>` +- `Cargo.toml` — `[package]` name +- Workspace directory name as fallback + +--- + +### Step 4: Project Info + +Tell the user: +> "I need some basic info about your project. This is used in work item titles, +> design spec headers, and dashboard labels." + +**Always provide a recommended name and description** from metadata discovered in Step 3. +Use the best source available (README heading, package.json, build.gradle, directory name +as last resort). 
+ +``` +askQuestion({ + questions: [ + { + header: "Project Name", + question: "What's the short name for this project?", + options: [ + { label: "<discovered-name>", description: "From README / build config", recommended: true } + ], + allowFreeformInput: true + }, + { + header: "Project Description", + question: "One-line description of the project:", + options: [ + { label: "<discovered-description>", description: "From README / build config", recommended: true } + ], + allowFreeformInput: true + } + ] +}) +``` + +--- + +### Step 5: Confirm Repos & Modules + +Tell the user: +> "Here's what I found. Please confirm it's correct — you can ask me to make +> corrections if anything looks wrong." + +Present the results in a **readable list format** (tables get squeezed in chat +when columns have long content — use a list instead): + +```markdown +## Detected Repositories & Modules + +### 1. org/common-repo +- **Host**: GitHub | **Branch**: dev +- **Modules**: core, api, shared-utils + +### 2. org/service-repo +- **Host**: ADO | **Branch**: main +- **Modules**: service, worker + +### 3. org/client-repo +- **Host**: GitHub | **Branch**: dev +- **Modules**: client +``` + +If a repo has many modules (>10), group or summarize them: +> "**Modules** (25 detected): app, CoreLibrary, SharedUtils, ... and 22 more. +> See full list below." + +Then use `get_confirmation`: +``` +get_confirmation({ + message: "Does this repository and module mapping look correct?", + confirmLabel: "Looks good", + denyLabel: "I need to make corrections" +}) +``` + +If denied, ask what to change (add/remove repos, fix slugs, rename modules, etc.), +rebuild the list, and confirm again. Repeat until confirmed. + +**Concepts:** +- **Repositories**: Where code lives (GitHub or ADO (Azure DevOps)). +- **Modules**: Logical components within a repo. A single repo can have multiple modules. 
+ +The config stores repos and modules separately: +```json +"repositories": { + "common-repo": { "slug": "org/common-repo", "host": "github", "baseBranch": "dev" }, + "service-repo": { "slug": "org/project/service-repo", "host": "ado", "baseBranch": "main" } +}, +"modules": { + "core": { "repo": "common-repo", "path": "core/", "purpose": "Shared utilities" }, + "api": { "repo": "common-repo", "path": "api/", "purpose": "Public API surface" }, + "service": { "repo": "service-repo", "purpose": "Backend processing" } +} +``` + +Work items reference a **module name** → lookup `modules.<name>.repo` → lookup +`repositories.<repo>` for slug, branch, and account type. + +--- + +### Step 6: Discover & Configure Accounts + +Tell the user: +> "Now I'll check your authentication. The orchestrator needs to know which accounts +> to use when dispatching work to each repo and creating work items." + +**Check which sub-steps are needed** based on confirmed repos from Step 5: +- If ALL repos are on ADO (Azure DevOps) → **skip GitHub account discovery entirely** +- If ALL repos are on GitHub → **skip ADO account discovery entirely** +- If repos are mixed → do both + +#### GitHub Account Discovery (skip if no GitHub repos) + +**If `gh` CLI is not installed** (from Step 1), tell the user: +> "Skipping GitHub account setup — `gh` CLI is not installed. Install it later to +> enable dispatch and PR monitoring for your GitHub repos." +Then offer to install: +- Windows: `winget install --id GitHub.cli -e` +- macOS: `brew install gh` +- If user declines: warn that dispatch won't work for GitHub repos. + +**If `gh` is installed**, determine how many accounts are likely needed: + +1. **Collect the distinct GitHub orgs** from confirmed repos (e.g., `AzureAD`, `microsoft`) +2. 
**Estimate minimum accounts needed**: + - Same org for all repos → likely 1 account + - Multiple orgs → different orgs often mean different accounts + +Discover logged-in accounts: +```powershell +$ghStatus = gh auth status 2>&1 +``` + +**If accounts are found**, present them and proceed to mapping. + +**If NO accounts are found**, guide login based on repo orgs: + +- **Same org** (likely 1 account): + > "You're not signed in to GitHub CLI. Let's sign in with the account that has + > access to `<org>/*` repos." + Guide: `gh auth login --hostname github.com --git-protocol https --web` + +- **Multiple orgs** (likely multiple accounts): + > "Your repos span multiple GitHub organizations (`<org1>`, `<org2>`), so you'll + > likely need separate accounts. Let's start with `<org1>/*`." + Guide: `gh auth login --hostname github.com --git-protocol https --web` + After first login, ask: + ``` + askQuestion({ + question: "Do you need a different account for <org2>/* repos?", + options: [ + { label: "Yes, sign in with another account", description: "I use a separate account for <org2>" }, + { label: "No, same account", description: "My account has access to all orgs" } + ] + }) + ``` + If yes, guide another `gh auth login`. + +After all logins, re-run `gh auth status` and present discovered accounts: +```markdown +## GitHub Accounts Found +1. `johndoe` (github.com) +2. `johndoe_microsoft` (github.com) +``` + +#### Map GitHub Accounts to Repositories + +Tell the user: +> "Each GitHub repository needs to be assigned to a specific account. The orchestrator +> uses `gh auth switch --user <username>` before running commands against that repo." + +**If only ONE GitHub account** — auto-assign to all GitHub repos. Confirm: +``` +get_confirmation({ + message: "Only one GitHub account found (<username>). 
Use it for all GitHub repos?", + confirmLabel: "Yes", + denyLabel: "No, I need to add another account" +}) +``` +If denied, guide `gh auth login` for an additional account, then do per-repo mapping. + +**If MULTIPLE GitHub accounts** — ask per-repo. +**IMPORTANT**: Include the repo slug clearly in each question so the user knows which repo: + +``` +askQuestion({ + questions: [ + { + header: "Account for: microsoft/VerifiableCredential-SDK-Android", + question: "Which GitHub account should be used for microsoft/VerifiableCredential-SDK-Android?", + options: [ + { label: "johndoe", description: "github.com" }, + { label: "johndoe_microsoft", description: "github.com (EMU)" } + ] + }, + { + header: "Account for: microsoft/entra-verifiedid-wallet-library-android", + question: "Which GitHub account should be used for microsoft/entra-verifiedid-wallet-library-android?", + options: [ + { label: "johndoe", description: "github.com" }, + { label: "johndoe_microsoft", description: "github.com (EMU)" } + ] + } + ] +}) +``` + +Save to **`developer-local.json` ONLY** (per-developer, gitignored): +```json +{ + "github_accounts": { + "org/common-repo": "johndoe", + "org/service-repo": "johndoe_microsoft", + "other-org/client-repo": "johndoe-alt" + } +} +``` + +**⚠️ NEVER put GitHub usernames in `orchestrator-config.json`** — that file is committed +and shared with the team. Usernames belong only in `developer-local.json`. The shared +config should only contain: +```json +"github": { + "configFile": ".github/developer-local.json" +} +``` + +Tell the user to add `.github/developer-local.json` to `.gitignore` if not already there. + +#### Azure DevOps Account Discovery (skip if no ADO config) + +**Skip if** there are no ADO-hosted repos AND no ADO work item configuration needed. + +Tell the user: +> "Checking Azure CLI authentication. This enables the ADO MCP Server to create work items +> and query iterations." 
+ +```powershell +az --version +az account show --only-show-errors -o none 2>&1 +``` + +If `az` is installed: +1. Check `azure-devops` extension: + ```powershell + az extension list -o json | ConvertFrom-Json | Where-Object { $_.name -eq "azure-devops" } + ``` + If missing: `az extension add --name azure-devops` + +2. Check authentication: + ```powershell + az account show -o json + ``` + If not authenticated: `az login` + +3. Set defaults (ADO org/project parsed from repo URLs in Step 2): + ```powershell + az devops configure --defaults organization=https://dev.azure.com/<org> project=<project> + ``` + +If `az` is not installed: +> "Azure CLI is optional but recommended for live PBI status updates. +> Install: `winget install Microsoft.AzureCLI` (Windows) or `brew install azure-cli` (macOS)" + +--- + +### Step 7: Azure DevOps Work Item Configuration + +Tell the user: +> "The orchestrator creates work items (PBIs/User Stories) in Azure DevOps to track +> implementation progress. This step configures which ADO project to use for **work items** +> (your backlog/board). This may be different from the ADO project that hosts your repos." + +**Auto-detect from repo URLs**: If any ADO-hosted repos exist, parse the org and project +from their URL as a starting suggestion. + +**Important**: The ADO project for **work items** (boards, sprints) may differ from the +project that hosts the **repo**. Always let the user override. + +**⚠️ URL Normalization**: If the user provides a full URL (e.g., +`https://dev.azure.com/IdentityDivision/Engineering/_workitems/edit/123`), extract only +the **org name** and **project name** — never store the full URL in the config. Colons in +URLs cause ADO API errors. Store only: `"org": "IdentityDivision"`, `"project": "Engineering"`. 
+ +``` +askQuestion({ + questions: [ + { + header: "ADO Organization", + question: "Azure DevOps organization for work items:", + options: [ + { label: "<detected-org>", description: "Detected from your ADO repo URL", recommended: true } + ], + allowFreeformInput: true + }, + { + header: "ADO Project (for work items / board)", + question: "Which ADO project holds your backlog and sprints? (This may differ from your repo project)", + options: [ + { label: "<detected-project>", description: "Detected from repo URL — confirm this is where your PBIs live", recommended: true } + ], + allowFreeformInput: true + }, + { + header: "Work Item Type", + question: "Default work item type:", + options: [ + { label: "Product Backlog Item", recommended: true }, + { label: "User Story" }, + { label: "Task" } + ], + allowFreeformInput: true + } + ] +}) +``` + +--- + +### Step 8: Design Docs + +Tell the user: +> "Before coding, the orchestrator writes a design spec for team review. I need to know +> where to save these specs. If your team has a design doc template, I'll follow it — +> otherwise I'll use a built-in template covering problem, solution options, and trade-offs." 
+ +``` +askQuestion({ + questions: [ + { + header: "Design Docs Path", + question: "Where should design specs be saved?", + options: [ + { label: "docs/designs/", description: "Standard docs folder" }, + { label: "design-docs/", description: "Dedicated design docs folder" } + ], + allowFreeformInput: true + }, + { + header: "Design Template", + question: "Do you have a design spec template?", + options: [ + { label: "No template", description: "Use the built-in template", recommended: true }, + { label: "Custom template", description: "I'll provide a path" } + ], + allowFreeformInput: true + } + ] +}) +``` + +--- + +### Step 9: Generate Codebase Context + +Tell the user: +> "I can do a deep scan of your codebase to generate a context file that helps the AI +> understand your architecture, key classes, and patterns. This significantly improves +> research and design quality for every future feature." + +Ask whether to proceed: +``` +get_confirmation({ + message: "Generate .github/codebase-context.md? This takes a few minutes for large repos but significantly improves AI research quality.", + confirmLabel: "Yes, scan now", + denyLabel: "Skip — I'll add it later" +}) +``` + +**If skipped**: Create a minimal placeholder `.github/codebase-context.md`: +```markdown +# Codebase Context + +<!-- This file helps the AI understand your codebase. --> +<!-- Run /feature-orchestrator-plugin:setup again and choose "scan" to auto-generate, --> +<!-- or fill in manually. --> + +## Architecture +TODO: Describe your high-level architecture. + +## Modules +TODO: Describe your key modules and their responsibilities. + +## Key Classes & Patterns +TODO: List important classes, interfaces, and patterns. +``` + +**If confirmed**: Perform a deep automated scan. + +#### Scan Strategy + +Use a combination of broad search (Explore subagent if available, or grep/file search) +and deep analysis (main model) to discover: + +**1. 
Architecture Pattern** +- Check directory structures for common patterns: + - `controllers/`, `services/`, `repositories/` → Layered architecture + - `commands/`, `handlers/`, `queries/` → CQRS + - `features/` or `modules/` with self-contained sub-dirs → Feature-based + - Multiple repos with IPC/API boundaries → Distributed / multi-service +- Check for key framework indicators: + - `@SpringBootApplication`, `@RestController` → Spring Boot + - `Activity`, `Fragment`, `ViewModel` → Android + - `express()`, `app.listen` → Node.js/Express + - `func main()`, `http.ListenAndServe` → Go +- Read `README.md` files in each module for architecture descriptions + +**2. Module Deep-dive** (for each module discovered in Step 3) +- Read the module's README if it exists +- Find the main entry point class/file +- List the key public interfaces/classes (look for `public class`, `export`, `interface`) +- Identify the module's dependencies on other modules (import statements, build.gradle dependencies) + +**3. Key Classes by Domain** +- Find entry points: `main()`, `Application` classes, exported services +- Find core abstractions: interfaces with multiple implementations +- Find data models: classes in `model/`, `dto/`, `entity/` directories +- Find configuration: files in `config/`, `configuration/` directories +- Find test patterns: test base classes, test utilities, mock factories + +**4. Common Patterns & Conventions** +- Error handling: search for custom exception classes, error handling middleware +- Logging: which logger (custom Logger class, SLF4J, Log4j, android.util.Log) +- Feature flags: search for flag/flight/experiment patterns +- Configuration: how config is loaded (env vars, config files, DI) +- Testing: framework (JUnit, pytest, Jest), naming conventions, mock patterns + +**5. 
Dependency Graph**
- For each module, identify which other modules it depends on
- Build a simplified dependency graph

#### Output Format

Generate `.github/codebase-context.md` with this structure:

````markdown
# Codebase Context

> Auto-generated by Feature Orchestrator setup on <date>.
> This file helps the AI understand your codebase for better research and design.
> Feel free to edit and enrich — the more detail, the better the AI performs.

## Architecture

[Architecture pattern description]

```
[ASCII diagram of component flow, e.g.:]
Client App → SDK (msal) → IPC Layer (common) → Service (broker) → Backend (eSTS)
```

## Modules

### <module-name>
- **Purpose**: [from README or inferred]
- **Path**: `<path>/`
- **Key entry points**: `ClassName`, `ClassName2`
- **Depends on**: module-a, module-b
- **Test location**: `<path>/src/test/`

[Repeat for each module]

## Key Classes & Interfaces

### Entry Points
- `ClassName` in `module` — [brief purpose]

### Core Abstractions
- `InterfaceName` in `module` — [brief purpose, N implementations found]

### Data Models
- `ModelClass` in `module` — [brief purpose]

## Patterns & Conventions

### Error Handling
[How errors are handled in this codebase]

### Logging
[Which logger, any custom wrapper]

### Feature Flags
[How features are gated]

### Testing
[Framework, patterns, where tests live]

## Search Tips

When researching this codebase:
- To find operations: `file_search(**/*Operation*.kt)`
- To find controllers: `grep_search("class.*Controller")`
- [More project-specific search guidance]
````

Tell the user when done:
> "Codebase context generated at `.github/codebase-context.md`. You can review and
> enrich it over time — the more detail, the better the AI performs during research
> and design."

---

### Step 10: Finalize — Write Config & Install State CLI

Tell the user:
> "Great — I have everything I need. 
Let me write the configuration and set up the
> state tracking."

#### Write Config

Save `.github/orchestrator-config.json`:

```json
{
  "project": {
    "name": "<name>",
    "description": "<description>"
  },
  "repositories": {
    "<repo-key>": {
      "slug": "<org/repo>",
      "host": "github",
      "baseBranch": "dev"
    }
  },
  "modules": {
    "<module-name>": {
      "repo": "<repo-key>",
      "path": "<path-within-repo>/",
      "purpose": "<brief description>"
    }
  },
  "github": {
    "configFile": ".github/developer-local.json"
  },
  "ado": {
    "org": "<org>",
    "project": "<project>",
    "workItemType": "Product Backlog Item",
    "iterationDepth": 6
  },
  "design": {
    "docsPath": "<path>",
    "templatePath": null,
    "folderPattern": "[{platform}] {featureName}",
    "reviewRepo": null
  }
}
```

> "This file should be committed to your repo so your whole team shares the same settings."

#### Install State CLI

Tell the user:
> "Installing the state tracking script to `~/.feature-orchestrator/`. This keeps track
> of which features are in progress, their current pipeline stage, and associated work items
> and PRs. It's shared across all your projects."

Copy `state-utils.js` to the fixed global location `~/.feature-orchestrator/`:
```powershell
$stateDir = Join-Path $HOME ".feature-orchestrator"
if (-not (Test-Path $stateDir)) { New-Item -ItemType Directory -Path $stateDir -Force | Out-Null }

# Find state-utils.js from the plugin installation
# (nested Join-Path calls: three positional arguments require PowerShell 6+,
#  so this form also works on Windows PowerShell 5.1)
$pluginStateUtils = Join-Path (Join-Path (Split-Path (Split-Path $PSScriptRoot)) "hooks") "state-utils.js"
if (Test-Path $pluginStateUtils) {
    Copy-Item $pluginStateUtils (Join-Path $stateDir "state-utils.js") -Force
} else {
    # Fallback: read the content from the plugin's hooks/state-utils.js and write it
    Write-Host "Please manually copy state-utils.js to $stateDir"
}
```

**Important**: If the above doesn't find the file automatically, the agent should:
1. 
Read the `state-utils.js` content from the plugin's `hooks/state-utils.js`
2. Write it to `~/.feature-orchestrator/state-utils.js`

Verify it works:
```powershell
node (Join-Path (Join-Path $HOME ".feature-orchestrator") "state-utils.js") get
```

#### Configure ADO MCP Server (if ADO is used)

**Skip if** there is no ADO configuration (Step 7 was skipped).

Tell the user:
> "Setting up the ADO MCP Server so Copilot can create and manage work items.
> This writes a `.vscode/mcp.json` file in your workspace."

Check if `.vscode/mcp.json` already exists in the workspace:
- If it exists, read it and check if an `ado` server is already configured.
  If so, verify the org matches — if different, ask whether to update.
- If it doesn't exist, create it.

Write `.vscode/mcp.json` with the org from Step 7 **hardcoded** (no `${input}` prompt):

```json
{
  "servers": {
    "ado": {
      "type": "stdio",
      "command": "npx",
      "args": [
        "-y",
        "@azure-devops/mcp",
        "<org-name-from-step-7>",
        "-d",
        "core",
        "work",
        "work-items",
        "repositories",
        "pipelines"
      ]
    }
  }
}
```

**⚠️ IMPORTANT**: The org argument must be a **plain org name** (e.g., `IdentityDivision`),
NOT a URL. If the user provided a URL in Step 7, it was already normalized to just the
org name in the config — use that normalized value.

If `.vscode/mcp.json` already exists with other servers, **merge** the `ado` server entry
into the existing file — do not overwrite other servers.

Tell the user:
> "ADO MCP Server configured in `.vscode/mcp.json`. You may need to restart the MCP server
> (Command Palette → `MCP: Restart Server` → `ado`) or reload VS Code for it to take effect."

---

### Done!

**Always end with this exact summary format** — it gives the user a clear overview of
everything that was configured:

```markdown
## ✅ Feature Orchestrator Configured! 
+ +**Config saved**: `.github/orchestrator-config.json` +**State directory**: `~/.feature-orchestrator/` +**Developer config**: `.github/developer-local.json` (add to `.gitignore`) + +### Detected Setup + +| Component | Status | +|-----------|--------| +| **Repos** | N repos (X ADO, Y GitHub) | +| **Modules** | N modules mapped | +| **ADO** | <org> / <project> (PBI) | +| **Design docs** | <docsPath>/ | +| **Node.js** | vX.Y.Z ✅ | +| **GitHub CLI** | vX.Y.Z ✅ (N accounts: user1, user2) | +| **Azure CLI** | vX.Y.Z ✅ (user@domain) | +| **State CLI** | Working ✅ (N features tracked) | + +For any component that is missing or not configured, show ⚠️ or ❌ with guidance: +- Not installed: `❌ Not installed — run: <install command>` +- Not authenticated: `⚠️ Not authenticated — run: <auth command>` +- Not applicable: `— (not needed)` (e.g., GitHub CLI when all repos are ADO) + +### Available Commands + +| Command | Description | +|---------|-------------| +| `feature-design` | Start a new feature with design | +| `feature-plan` | Decompose design into work items | +| `feature-backlog` | Create work items in ADO | +| `feature-dispatch` | Send to Copilot coding agent | +| `feature-status` | Check PR status | +| `feature-continue` | Resume a feature | +| `feature-pr-iterate` | Review and iterate on PRs | + +### Quick Start + +Describe a feature to get started: +> "I want to add retry logic with exponential backoff to the API client" + +Or use `/feature-orchestrator-plugin:feature-design` followed by your feature description. +```