Auto-generated by workflow-generator itself. After reviewer passes,
committer creates a feature branch, stages workflow files, commits
with a meaningful message, and pushes to origin.
小橘 🍊(NEKO Team)
1308 lines
41 KiB
TypeScript
import { existsSync, readFileSync, unlinkSync, writeFileSync } from "node:fs";
|
|
import { tmpdir } from "node:os";
|
|
import { join } from "node:path";
|
|
import type {
|
|
RoleResult,
|
|
StartStep,
|
|
WorkflowDefinition,
|
|
WorkflowMessage,
|
|
} from "@uncaged/nerve-core";
|
|
import { END, parseNerveConfig } from "@uncaged/nerve-core";
|
|
import type { SpawnError } from "@uncaged/nerve-workflow-utils";
|
|
import {
|
|
cursorAgent,
|
|
isDryRun,
|
|
llmExtract,
|
|
nerveAgentContext,
|
|
readNerveYaml,
|
|
spawnSafe,
|
|
} from "@uncaged/nerve-workflow-utils";
|
|
import { z } from "zod";
|
|
|
|
// Home directory of the service account; falls back to the Azure VM
// default when $HOME is unset.
const HOME = process.env.HOME ?? "/home/azureuser";
// Root of the nerve checkout; nerve.yaml and .git live here.
const NERVE_ROOT = join(HOME, ".uncaged-nerve");
// Directory holding one package per generated workflow.
const WORKFLOWS_DIR = join(NERVE_ROOT, "workflows");
|
|
|
|
function getNerveYaml(): string {
|
|
const result = readNerveYaml({ nerveRoot: NERVE_ROOT });
|
|
return result.ok ? result.value : "# nerve.yaml unavailable";
|
|
}
|
|
|
|
async function cfgGet(key: string): Promise<string | null> {
|
|
const result = await spawnSafe("cfg", ["get", key], {
|
|
cwd: NERVE_ROOT,
|
|
env: null,
|
|
timeoutMs: 10_000,
|
|
});
|
|
if (!result.ok) {
|
|
return null;
|
|
}
|
|
return result.value.stdout.trim() || null;
|
|
}
|
|
|
|
async function resolveDashScopeProvider(): Promise<{
|
|
baseUrl: string;
|
|
apiKey: string;
|
|
model: string;
|
|
} | null> {
|
|
const apiKey = process.env.DASHSCOPE_API_KEY ?? (await cfgGet("DASHSCOPE_API_KEY"));
|
|
const baseUrl = process.env.DASHSCOPE_BASE_URL ?? (await cfgGet("DASHSCOPE_BASE_URL"));
|
|
const model = process.env.DASHSCOPE_MODEL ?? (await cfgGet("DASHSCOPE_MODEL")) ?? "qwen-plus";
|
|
if (!apiKey || !baseUrl) {
|
|
return null;
|
|
}
|
|
return { apiKey, baseUrl, model };
|
|
}
|
|
|
|
function formatSpawnFailure(error: SpawnError): string {
|
|
if (error.kind === "spawn_failed") {
|
|
return error.message;
|
|
}
|
|
if (error.kind === "timeout") {
|
|
return `timeout (stdout=${error.stdout.slice(0, 200)})`;
|
|
}
|
|
return `exit ${error.exitCode} stderr=${error.stderr.slice(0, 400)}`;
|
|
}
|
|
|
|
function spawnErrorStreams(error: SpawnError): { stdout: string; stderr: string } {
|
|
if (error.kind === "spawn_failed") {
|
|
return { stdout: "", stderr: "" };
|
|
}
|
|
return { stdout: error.stdout, stderr: error.stderr };
|
|
}
|
|
|
|
function buildSenseGeneratorReference(): string {
|
|
const ref = join(WORKFLOWS_DIR, "sense-generator", "index.ts");
|
|
if (!existsSync(ref)) {
|
|
return "(reference file workflows/sense-generator/index.ts not found)";
|
|
}
|
|
return readFileSync(ref, "utf-8");
|
|
}
|
|
|
|
function lastMetaForRole<M>(messages: WorkflowMessage[], role: string): M | null {
|
|
for (let i = messages.length - 1; i >= 0; i--) {
|
|
if (messages[i].role === role) {
|
|
return messages[i].meta as M;
|
|
}
|
|
}
|
|
return null;
|
|
}
|
|
|
|
const roleEntrySchema = z
|
|
.object({
|
|
name: z.string().default("").describe("Role key / identifier in kebab-case or short snake name"),
|
|
description: z.string().default("").describe("What this role does in one or two sentences"),
|
|
responsibilities: z
|
|
.string()
|
|
.default("")
|
|
.describe("Concrete responsibilities, inputs, and outputs for this role"),
|
|
})
|
|
.describe("One role in the generated workflow");
|
|
|
|
const analystExtractSchema = z
|
|
.object({
|
|
workflowName: z
|
|
.string()
|
|
.default("")
|
|
.describe("kebab-case package directory name under workflows/, e.g. 'ticket-triage'"),
|
|
roles: z.array(roleEntrySchema).default([]).describe("Planned roles for the new workflow"),
|
|
moderatorFlow: z
|
|
.string()
|
|
.default("")
|
|
.describe("How the moderator should route between roles; start and exit conditions"),
|
|
externalDeps: z
|
|
.string()
|
|
.default("")
|
|
.describe("External tools, CLIs, HTTP APIs, or services the workflow must integrate with"),
|
|
dataFlow: z
|
|
.string()
|
|
.default("")
|
|
.describe("How data moves between roles: what each step consumes and produces in content/meta"),
|
|
})
|
|
.describe("Structured workflow specification extracted from the analysis");
|
|
|
|
/** One planned role as stored in the analyst's message meta. */
type AnalystMetaItem = {
  // Role key / identifier (kebab-case or short snake name).
  name: string;
  // One- or two-sentence summary of what the role does.
  description: string;
  // Concrete responsibilities, inputs, and outputs for the role.
  responsibilities: string;
};
|
|
|
|
/**
 * Meta payloads recorded by each role of this workflow. Later roles
 * read earlier entries back via lastMetaForRole, so the field sets
 * here are the contract between steps.
 */
type WorkflowMeta = {
  // Analyst: the original user request plus the structured spec
  // extracted (via llmExtract) from the free-form analysis.
  analyst: {
    userPrompt: string;
    analysis: string;
    workflowName: string;
    roles: AnalystMetaItem[];
    moderatorFlow: string;
    externalDeps: string;
    dataFlow: string;
  };
  // Architect: the chosen package name and the design document text.
  architect: { workflowName: string; design: string };
  // Coder: which of the three required files exist on disk after the
  // agent ran, plus the raw agent output.
  coder: {
    workflowName: string;
    files: { indexTs: boolean; packageJson: boolean; tsconfigJson: boolean };
    cursorOutput: string;
  };
  // Reviewer: validation verdict; `attempt` counts how many reviewer
  // runs (including this one) appear in the history.
  reviewer: {
    passed: boolean;
    workflowName: string;
    reason: string;
    attempt: number;
    validationLog: string;
  };
  // Committer: git outcome. Nullable fields are null when the
  // corresponding git step did not run (e.g. the whole commit was
  // skipped because no .git directory was found).
  committer: {
    branch: string | null;
    commitHash: string | null;
    pushed: boolean | null;
    skipped: boolean;
    error: string | null;
    stagedPaths: string[];
  };
};
|
|
|
|
const emptyAnalystMeta = (userContent: string): WorkflowMeta["analyst"] => ({
|
|
userPrompt: userContent,
|
|
analysis: "",
|
|
workflowName: "",
|
|
roles: [],
|
|
moderatorFlow: "",
|
|
externalDeps: "",
|
|
dataFlow: "",
|
|
});
|
|
|
|
function verifyNerveWorkflowEntry(workflowName: string): { ok: true } | { ok: false; reason: string } {
|
|
const readResult = readNerveYaml({ nerveRoot: NERVE_ROOT });
|
|
if (!readResult.ok) {
|
|
return { ok: false, reason: `readNerveYaml: ${readResult.error.message}` };
|
|
}
|
|
const parsed = parseNerveConfig(readResult.value);
|
|
if (!parsed.ok) {
|
|
return { ok: false, reason: `parseNerveConfig: ${parsed.error.message}` };
|
|
}
|
|
if (parsed.value.workflows[workflowName] === undefined) {
|
|
return { ok: false, reason: `nerve.yaml has no workflows.${workflowName} entry` };
|
|
}
|
|
return { ok: true };
|
|
}
|
|
|
|
function scanGeneratedCodePitfalls(source: string): string[] {
|
|
const issues: string[] = [];
|
|
if (/\bawait\s+import\s*\(/.test(source)) {
|
|
issues.push(
|
|
"Uses the await keyword with a parenthesized import() call — only allowed in sense-runtime / workflow-worker with a documented comment",
|
|
);
|
|
}
|
|
if (/\bimport\s*\(\s*["'`]/.test(source) && !source.includes("Dynamic import required")) {
|
|
issues.push("Dynamic import() without documented exception comment");
|
|
}
|
|
if (/\bexport\s+default\s+/.test(source) === false) {
|
|
issues.push("Missing default export of WorkflowDefinition (engine loads the default export)");
|
|
}
|
|
return issues;
|
|
}
|
|
|
|
async function runReviewerValidation(
|
|
workflowDir: string,
|
|
workflowName: string,
|
|
dry: boolean,
|
|
): Promise<{ ok: true; log: string } | { ok: false; log: string; reason: string }> {
|
|
const logParts: string[] = [];
|
|
|
|
const indexPath = join(workflowDir, "index.ts");
|
|
const pkgPath = join(workflowDir, "package.json");
|
|
const tsconfigPath = join(workflowDir, "tsconfig.json");
|
|
if (!existsSync(indexPath) || !existsSync(pkgPath) || !existsSync(tsconfigPath)) {
|
|
const miss: string[] = [];
|
|
if (!existsSync(indexPath)) miss.push("index.ts");
|
|
if (!existsSync(pkgPath)) miss.push("package.json");
|
|
if (!existsSync(tsconfigPath)) miss.push("tsconfig.json");
|
|
return { ok: false, log: "", reason: `Missing required file(s): ${miss.join(", ")}` };
|
|
}
|
|
|
|
const source = readFileSync(indexPath, "utf-8");
|
|
const pitfalls = scanGeneratedCodePitfalls(source);
|
|
if (pitfalls.length > 0) {
|
|
const pitfallText = pitfalls.join("\n");
|
|
logParts.push(`=== static checks ===\n${pitfallText}`);
|
|
return { ok: false, log: logParts.join("\n\n"), reason: pitfallText };
|
|
}
|
|
|
|
const tsc = await spawnSafe("npx", ["tsc", "--noEmit"], {
|
|
cwd: workflowDir,
|
|
env: null,
|
|
timeoutMs: 300_000,
|
|
dryRun: dry,
|
|
});
|
|
if (!tsc.ok) {
|
|
const msg = formatSpawnFailure(tsc.error);
|
|
logParts.push(`=== npx tsc --noEmit ===\n${msg}`);
|
|
return { ok: false, log: logParts.join("\n\n"), reason: `Typecheck failed: ${msg}` };
|
|
}
|
|
const tscOut = tsc.value.stderr.trim() || tsc.value.stdout.trim() || "(no output)";
|
|
logParts.push(`=== npx tsc --noEmit ===\n${tscOut}`);
|
|
|
|
const nerveCheck = verifyNerveWorkflowEntry(workflowName);
|
|
if (!nerveCheck.ok) {
|
|
logParts.push(`=== nerve.yaml ===\n${nerveCheck.reason}`);
|
|
return {
|
|
ok: false,
|
|
log: logParts.join("\n\n"),
|
|
reason: `nerve.yaml: ${nerveCheck.reason}`,
|
|
};
|
|
}
|
|
logParts.push(`=== nerve.yaml ===\nworkflows.${workflowName} is present.`);
|
|
|
|
const importLines = source.split("\n").filter((l) => /^\s*import\s/.test(l));
|
|
logParts.push(`=== import lines ===\n${importLines.join("\n")}`);
|
|
|
|
return { ok: true, log: logParts.join("\n\n") };
|
|
}
|
|
|
|
function summarizeText(s: string, maxLen: number): string {
|
|
const one = s.replace(/\s+/g, " ").trim();
|
|
if (one.length <= maxLen) {
|
|
return one;
|
|
}
|
|
return `${one.slice(0, maxLen - 3)}...`;
|
|
}
|
|
|
|
function sanitizeBranchSegment(name: string): string {
|
|
const t = name
|
|
.trim()
|
|
.replace(/[^a-zA-Z0-9_-]+/g, "-")
|
|
.replace(/^-+|-+$/g, "");
|
|
return t.length > 0 ? t : "workflow";
|
|
}
|
|
|
|
function resolveWorkflowNameForCommitter(messages: WorkflowMessage[]): string {
|
|
const rev = lastMetaForRole<WorkflowMeta["reviewer"]>(messages, "reviewer");
|
|
if (rev !== null && rev.workflowName.trim().length > 0) {
|
|
return rev.workflowName.trim();
|
|
}
|
|
const coder = lastMetaForRole<WorkflowMeta["coder"]>(messages, "coder");
|
|
if (coder !== null && coder.workflowName.trim().length > 0) {
|
|
return coder.workflowName.trim();
|
|
}
|
|
const analyst = lastMetaForRole<WorkflowMeta["analyst"]>(messages, "analyst");
|
|
if (analyst !== null && analyst.workflowName.trim().length > 0) {
|
|
return analyst.workflowName.trim();
|
|
}
|
|
return "";
|
|
}
|
|
|
|
function buildCoreStagePaths(
|
|
workflowName: string,
|
|
files: WorkflowMeta["coder"]["files"],
|
|
includeNerveYaml: boolean,
|
|
): string[] {
|
|
const base = `workflows/${workflowName}`;
|
|
const paths: string[] = [];
|
|
if (files.indexTs) {
|
|
paths.push(`${base}/index.ts`);
|
|
}
|
|
if (files.packageJson) {
|
|
paths.push(`${base}/package.json`);
|
|
const lockRel = `${base}/pnpm-lock.yaml`;
|
|
if (existsSync(join(NERVE_ROOT, lockRel))) {
|
|
paths.push(lockRel);
|
|
}
|
|
}
|
|
if (files.tsconfigJson) {
|
|
paths.push(`${base}/tsconfig.json`);
|
|
}
|
|
if (includeNerveYaml) {
|
|
paths.push("nerve.yaml");
|
|
}
|
|
return paths;
|
|
}
|
|
|
|
async function nerveYamlShouldBeStaged(workflowName: string, start: StartStep): Promise<boolean> {
|
|
const dry = isDryRun(start);
|
|
if (dry) {
|
|
return verifyNerveWorkflowEntry(workflowName).ok;
|
|
}
|
|
const st = await spawnSafe("git", ["status", "--porcelain", "--", "nerve.yaml"], {
|
|
cwd: NERVE_ROOT,
|
|
env: null,
|
|
timeoutMs: 30_000,
|
|
dryRun: false,
|
|
});
|
|
if (st.ok && st.value.stdout.trim().length > 0) {
|
|
return true;
|
|
}
|
|
const d1 = await spawnSafe("git", ["diff", "--name-only", "--", "nerve.yaml"], {
|
|
cwd: NERVE_ROOT,
|
|
env: null,
|
|
timeoutMs: 30_000,
|
|
dryRun: false,
|
|
});
|
|
if (d1.ok && d1.value.stdout.trim().length > 0) {
|
|
return true;
|
|
}
|
|
const d2 = await spawnSafe("git", ["diff", "--cached", "--name-only", "--", "nerve.yaml"], {
|
|
cwd: NERVE_ROOT,
|
|
env: null,
|
|
timeoutMs: 30_000,
|
|
dryRun: false,
|
|
});
|
|
return d2.ok && d2.value.stdout.trim().length > 0;
|
|
}
|
|
|
|
async function listUntrackedUnderWorkflowDir(workflowName: string, start: StartStep): Promise<string[]> {
|
|
const dry = isDryRun(start);
|
|
if (dry) {
|
|
return [];
|
|
}
|
|
const prefix = `workflows/${workflowName}/`;
|
|
const r = await spawnSafe("git", ["status", "--porcelain", "-u", "--", prefix], {
|
|
cwd: NERVE_ROOT,
|
|
env: null,
|
|
timeoutMs: 30_000,
|
|
dryRun: false,
|
|
});
|
|
if (!r.ok) {
|
|
return [];
|
|
}
|
|
const out: string[] = [];
|
|
for (const line of r.value.stdout.split("\n")) {
|
|
const t = line.trimEnd();
|
|
if (t.startsWith("?? ")) {
|
|
const p = t.slice(3).trim();
|
|
if (p.startsWith(prefix)) {
|
|
out.push(p);
|
|
}
|
|
}
|
|
}
|
|
return out;
|
|
}
|
|
|
|
/**
 * Determine the repository's default branch name, trying progressively
 * weaker sources in order:
 *   1. `git symbolic-ref refs/remotes/origin/HEAD` (authoritative);
 *   2. in dry-run mode, stop here and assume "main";
 *   3. `git rev-parse --abbrev-ref origin/HEAD`;
 *   4. probe whether origin/main or origin/master exists;
 *   5. fall back to "main".
 * Each decision is appended to `logLines` for the committer's report.
 * The statement order is load-bearing: every fallback only runs when
 * the previous source produced nothing usable.
 */
async function resolveDefaultBranchName(start: StartStep, logLines: string[]): Promise<string> {
  const dry = isDryRun(start);
  const sym = await spawnSafe("git", ["symbolic-ref", "refs/remotes/origin/HEAD"], {
    cwd: NERVE_ROOT,
    env: null,
    timeoutMs: 10_000,
    dryRun: dry,
  });
  if (sym.ok) {
    const out = sym.value.stdout.trim();
    // NOTE(review): the "[dryRun]" guard presumably skips spawnSafe's
    // dry-run stub output — confirm against spawnSafe's dryRun behavior.
    if (out.length > 0 && !out.includes("[dryRun]")) {
      const m = out.match(/refs\/remotes\/origin\/(.+)$/);
      if (m !== null && m[1] !== undefined && m[1].length > 0) {
        logLines.push(`[branch] default via symbolic-ref: ${m[1]}`);
        return m[1];
      }
    }
  }
  // In dry-run, none of the remaining git probes would return real
  // data, so assume the common default.
  if (dry) {
    logLines.push("[branch] dry-run: assuming default branch name `main`");
    return "main";
  }
  const abbrev = await spawnSafe("git", ["rev-parse", "--abbrev-ref", "origin/HEAD"], {
    cwd: NERVE_ROOT,
    env: null,
    timeoutMs: 10_000,
    dryRun: dry,
  });
  if (abbrev.ok) {
    const line = abbrev.value.stdout.trim();
    // Output looks like "origin/<branch>"; take the last path segment.
    if (line.length > 0 && line.includes("/")) {
      const parts = line.split("/");
      const last = parts[parts.length - 1];
      if (last !== undefined && last.length > 0) {
        logLines.push(`[branch] default via origin/HEAD: ${last}`);
        return last;
      }
    }
  }
  // Last resort: check which conventional remote branch exists.
  for (const b of ["main", "master"] as const) {
    const v = await spawnSafe("git", ["rev-parse", "--verify", `refs/remotes/origin/${b}`], {
      cwd: NERVE_ROOT,
      env: null,
      timeoutMs: 10_000,
      dryRun: dry,
    });
    if (v.ok) {
      logLines.push(`[branch] default fallback: origin/${b} exists`);
      return b;
    }
  }
  logLines.push("[branch] default fallback: main (no origin/* resolved)");
  return "main";
}
|
|
|
|
function appendIoSnippet(logLines: string[], label: string, stdout: string, stderr: string): void {
|
|
const so = stdout.trim().slice(0, 500);
|
|
const se = stderr.trim().slice(0, 500);
|
|
if (so.length > 0) {
|
|
logLines.push(`${label} stdout (truncated): ${so}`);
|
|
}
|
|
if (se.length > 0) {
|
|
logLines.push(`${label} stderr (truncated): ${se}`);
|
|
}
|
|
}
|
|
|
|
const workflow: WorkflowDefinition<WorkflowMeta> = {
|
|
name: "workflow-generator",
|
|
|
|
roles: {
|
|
async analyst(
|
|
start: StartStep,
|
|
_messages: WorkflowMessage[],
|
|
): Promise<RoleResult<WorkflowMeta["analyst"]>> {
|
|
const dry = isDryRun(start);
|
|
const userInput = start.content;
|
|
const empty = emptyAnalystMeta(userInput);
|
|
|
|
const provider = await resolveDashScopeProvider();
|
|
if (provider === null) {
|
|
return {
|
|
content:
|
|
"Cannot run analyst: set DASHSCOPE_API_KEY and DASHSCOPE_BASE_URL (or configure via `cfg get`), " +
|
|
"and optionally DASHSCOPE_MODEL.",
|
|
meta: empty,
|
|
};
|
|
}
|
|
|
|
const askPrompt = `You are analyzing a user request to build a new Nerve **workflow** (multi-role automaton with a moderator).
|
|
|
|
${nerveAgentContext}
|
|
|
|
User's natural language description:
|
|
${userInput}
|
|
|
|
Nerve root: ${NERVE_ROOT}
|
|
Target workflows live under: ${WORKFLOWS_DIR}/<workflow-name>/
|
|
|
|
## Your task
|
|
- Clarify the goal, constraints, and success criteria.
|
|
- Identify a good kebab-case workflow package name.
|
|
- Propose a role breakdown: what each role should do, in order.
|
|
- Describe how a moderator should route between roles and when to end.
|
|
- List external tools/APIs and how data should flow in \`content\` vs \`meta\` between roles.
|
|
|
|
Current nerve.yaml (for context only; do not edit here):
|
|
\`\`\`yaml
|
|
${getNerveYaml()}
|
|
\`\`\`
|
|
|
|
For reference, here is a complete existing workflow (patterns to mirror, not to copy literally):
|
|
\`\`\`ts
|
|
${buildSenseGeneratorReference().slice(0, 18_000)}
|
|
\`\`\`
|
|
|
|
Output a thorough analysis in markdown. Do not write final implementation code.`;
|
|
|
|
const planResult = await cursorAgent({
|
|
prompt: askPrompt,
|
|
mode: "ask",
|
|
cwd: NERVE_ROOT,
|
|
env: null,
|
|
timeoutMs: null,
|
|
dryRun: dry,
|
|
});
|
|
if (!planResult.ok) {
|
|
return {
|
|
content: `cursor-agent failed: ${formatSpawnFailure(planResult.error)}`,
|
|
meta: { ...empty, analysis: "" },
|
|
};
|
|
}
|
|
const analysis = planResult.value;
|
|
|
|
const extracted = await llmExtract({
|
|
text: analysis,
|
|
schema: analystExtractSchema,
|
|
provider,
|
|
dryRun: dry,
|
|
});
|
|
if (dry) {
|
|
return {
|
|
content: "[dry-run] analyst complete",
|
|
meta: {
|
|
...empty,
|
|
analysis: analysis || "(dry-run)",
|
|
workflowName: "dry-run-test",
|
|
roles: [{ name: "placeholder", description: "dry-run role", responsibilities: "n/a" }],
|
|
moderatorFlow: "placeholder → END",
|
|
externalDeps: "none",
|
|
dataFlow: "n/a",
|
|
},
|
|
};
|
|
}
|
|
if (!extracted.ok) {
|
|
return {
|
|
content: `${analysis}\n\n[llmExtract error] ${JSON.stringify(extracted.error)}`,
|
|
meta: {
|
|
userPrompt: userInput,
|
|
analysis,
|
|
workflowName: "",
|
|
roles: [],
|
|
moderatorFlow: "",
|
|
externalDeps: "",
|
|
dataFlow: "",
|
|
},
|
|
};
|
|
}
|
|
|
|
const e = extracted.value;
|
|
const summary =
|
|
`## Analysis\n\n${analysis}\n\n` +
|
|
`## Structured spec\n\n` +
|
|
`**workflowName:** ${e.workflowName}\n\n` +
|
|
`**moderatorFlow:**\n${e.moderatorFlow}\n\n` +
|
|
`**externalDeps:**\n${e.externalDeps}\n\n` +
|
|
`**dataFlow:**\n${e.dataFlow}\n\n` +
|
|
`**roles:**\n` +
|
|
e.roles
|
|
.map(
|
|
(r, i) =>
|
|
`${i + 1}. **${r.name}** — ${r.description}\n - ${r.responsibilities}`,
|
|
)
|
|
.join("\n\n");
|
|
|
|
return {
|
|
content: summary,
|
|
meta: {
|
|
userPrompt: userInput,
|
|
analysis,
|
|
workflowName: e.workflowName,
|
|
roles: e.roles,
|
|
moderatorFlow: e.moderatorFlow,
|
|
externalDeps: e.externalDeps,
|
|
dataFlow: e.dataFlow,
|
|
},
|
|
};
|
|
},
|
|
|
|
async architect(
|
|
start: StartStep,
|
|
messages: WorkflowMessage[],
|
|
): Promise<RoleResult<WorkflowMeta["architect"]>> {
|
|
const dry = isDryRun(start);
|
|
if (dry) {
|
|
return {
|
|
content: "[dry-run] architect complete",
|
|
meta: { workflowName: "dry-run-test", design: "(dry-run design)" },
|
|
};
|
|
}
|
|
const spec = lastMetaForRole<WorkflowMeta["analyst"]>(messages, "analyst");
|
|
if (spec === null) {
|
|
return {
|
|
content: "Architect skipped — no analyst output in message history.",
|
|
meta: { workflowName: "", design: "" },
|
|
};
|
|
}
|
|
const wfName = spec.workflowName.trim();
|
|
|
|
if (wfName.length === 0) {
|
|
return {
|
|
content: "Architect skipped — analyst did not produce a workflow name.",
|
|
meta: { workflowName: "", design: "" },
|
|
};
|
|
}
|
|
|
|
const rolesText = spec.roles
|
|
.map(
|
|
(r) =>
|
|
`### ${r.name}\n- **description:** ${r.description}\n- **responsibilities:** ${r.responsibilities}`,
|
|
)
|
|
.join("\n\n");
|
|
|
|
const designPrompt = `You are the architect for a new Nerve **workflow** (multi-role state machine with a \`WorkflowDefinition\` and moderator).
|
|
|
|
${nerveAgentContext}
|
|
|
|
Target package directory: ${WORKFLOWS_DIR}/${wfName}/
|
|
|
|
## Analyst output
|
|
|
|
**User prompt:**
|
|
${spec.userPrompt}
|
|
|
|
**Moderator / routing (from analyst):**
|
|
${spec.moderatorFlow}
|
|
|
|
**External dependencies:**
|
|
${spec.externalDeps}
|
|
|
|
**Data flow:**
|
|
${spec.dataFlow}
|
|
|
|
**Roles (planned):**
|
|
${rolesText}
|
|
|
|
## Your task (design document only, no file contents)
|
|
|
|
Produce an implementation-ready design in markdown:
|
|
|
|
1. **Meta type (TypeScript)**
|
|
- A concrete \`type WorkflowMeta = { ... }\` using \`type\` (not interface), no optional \`?:\` — use \`T | null\` for nullable fields.
|
|
- One entry per role with the exact fields each role will put in \`RoleResult\` meta.
|
|
|
|
2. **Role functions**
|
|
- For each role: parameters (\`StartStep\`, \`WorkflowMessage[]\`), return \`RoleResult<…>\`, what to read from \`start\` / prior messages, what to put in \`content\` vs \`meta\`.
|
|
|
|
3. **Moderator**
|
|
- Pseudocode for \`moderator(context)\` using \`END\` from \`@uncaged/nerve-core\`, edge conditions, and error paths (routed in moderator, not via process exit).
|
|
|
|
4. **Error handling**
|
|
- How each role reports recoverable failure (content + meta) and how the moderator steers the thread.
|
|
|
|
5. **Imports**
|
|
- List required imports from \`@uncaged/nerve-core\` and \`@uncaged/nerve-workflow-utils\` only as needed by the final code.
|
|
|
|
6. **Files the coder will write**
|
|
- \`${WORKFLOWS_DIR}/${wfName}/index.ts\` — \`export default\` a \`WorkflowDefinition<YourMeta>\`
|
|
- \`${WORKFLOWS_DIR}/${wfName}/package.json\` with \`"type": "module"\` and dependencies (include \`zod\` if the workflow parses structured data)
|
|
- \`${WORKFLOWS_DIR}/${wfName}/tsconfig.json\` — if \`${NERVE_ROOT}/tsconfig.workflow.base.json\` exists, extend it; else a strict NodeNext \`noEmit\` project
|
|
|
|
7. **nerve.yaml**
|
|
- The coder must add a \`workflows:${wfName}\` block to \`${NERVE_ROOT}/nerve.yaml\` (concurrency, overflow) without removing existing keys.
|
|
|
|
8. **Nerve code rules to preserve in the generated \`index.ts\`**
|
|
- No dynamic \`import()\` in the generated workflow (except documented exceptions in engine loaders).
|
|
- \`type\` over \`interface\`, \`function\` over \`class\` for the workflow’s own code.
|
|
|
|
## Reference (meta-workflow style)
|
|
\`\`\`ts
|
|
${buildSenseGeneratorReference().slice(0, 22_000)}
|
|
\`\`\`
|
|
|
|
Current nerve.yaml:
|
|
\`\`\`yaml
|
|
${getNerveYaml()}
|
|
\`\`\`
|
|
|
|
Output ONLY the design markdown.`;
|
|
|
|
const planResult = await cursorAgent({
|
|
prompt: designPrompt,
|
|
mode: "ask",
|
|
cwd: NERVE_ROOT,
|
|
env: null,
|
|
timeoutMs: null,
|
|
dryRun: dry,
|
|
});
|
|
if (!planResult.ok) {
|
|
return {
|
|
content: `cursor-agent failed: ${formatSpawnFailure(planResult.error)}`,
|
|
meta: { workflowName: wfName, design: "" },
|
|
};
|
|
}
|
|
|
|
return {
|
|
content: planResult.value,
|
|
meta: { workflowName: wfName, design: planResult.value },
|
|
};
|
|
},
|
|
|
|
async coder(
|
|
start: StartStep,
|
|
messages: WorkflowMessage[],
|
|
): Promise<RoleResult<WorkflowMeta["coder"]>> {
|
|
const dry = isDryRun(start);
|
|
if (dry) {
|
|
return {
|
|
content: "[dry-run] coder complete",
|
|
meta: {
|
|
workflowName: "dry-run-test",
|
|
files: { indexTs: false, packageJson: false, tsconfigJson: false },
|
|
cursorOutput: "(dry-run)",
|
|
},
|
|
};
|
|
}
|
|
const analystMeta = lastMetaForRole<WorkflowMeta["analyst"]>(messages, "analyst");
|
|
const architectMeta = lastMetaForRole<WorkflowMeta["architect"]>(messages, "architect");
|
|
const priorReviewer = lastMetaForRole<WorkflowMeta["reviewer"]>(messages, "reviewer");
|
|
|
|
if (analystMeta === null || architectMeta === null) {
|
|
return {
|
|
content: "coder: missing analyst or architect message in history",
|
|
meta: {
|
|
workflowName: "",
|
|
files: { indexTs: false, packageJson: false, tsconfigJson: false },
|
|
cursorOutput: "",
|
|
},
|
|
};
|
|
}
|
|
|
|
const wfName = analystMeta.workflowName.trim();
|
|
if (wfName.length === 0) {
|
|
return {
|
|
content: "coder: empty workflow name",
|
|
meta: {
|
|
workflowName: "",
|
|
files: { indexTs: false, packageJson: false, tsconfigJson: false },
|
|
cursorOutput: "",
|
|
},
|
|
};
|
|
}
|
|
|
|
const fixSection =
|
|
priorReviewer !== null && priorReviewer.passed === false
|
|
? `\n\n## Previous review (address these before anything else)\n${priorReviewer.reason}\n\nFull validation log:\n${priorReviewer.validationLog}\n`
|
|
: "";
|
|
|
|
const codePrompt = `You are implementing a new Nerve workflow package at ${WORKFLOWS_DIR}/${wfName}/.
|
|
|
|
## Architect design (authoritative for structure)
|
|
${architectMeta.design}
|
|
|
|
## Analyst structured fields
|
|
${JSON.stringify(
|
|
{
|
|
workflowName: analystMeta.workflowName,
|
|
userPrompt: analystMeta.userPrompt,
|
|
roles: analystMeta.roles,
|
|
moderatorFlow: analystMeta.moderatorFlow,
|
|
externalDeps: analystMeta.externalDeps,
|
|
dataFlow: analystMeta.dataFlow,
|
|
},
|
|
null,
|
|
2,
|
|
)}
|
|
${fixSection}
|
|
|
|
## Files to create or update
|
|
1. \`${WORKFLOWS_DIR}/${wfName}/index.ts\` — \`export default\` a \`WorkflowDefinition\` (same style as sense-generator: named imports, default export at end).
|
|
2. \`${WORKFLOWS_DIR}/${wfName}/package.json\` — \`"type": "module"\`, dependencies on \`@uncaged/nerve-core\`, \`@uncaged/nerve-workflow-utils\`, \`zod\` if used; add \`typescript\` in devDependencies so \`npx tsc --noEmit\` works in that directory.
|
|
3. \`${WORKFLOWS_DIR}/${wfName}/tsconfig.json\` — strict, \`module\`/\`moduleResolution\` NodeNext, \`noEmit: true\`, include all \`.ts\` in the folder.
|
|
|
|
4. **Register the workflow** — merge a new block into the existing \`${NERVE_ROOT}/nerve.yaml\` under the top-level \`workflows:\` key:
|
|
\`\`\`yaml
|
|
${wfName}:
|
|
concurrency: 1
|
|
overflow: drop
|
|
\`\`\`
|
|
Do not remove or overwrite unrelated senses, reflexes, or other workflow entries. Preserve valid YAML.
|
|
|
|
## Implementation patterns (when applicable)
|
|
- \`resolveDashScopeProvider\`, \`nerveAgentContext\`, \`readNerveYaml\`, \`cursorAgent\`, \`llmExtract\`, \`spawnSafe\`, \`formatSpawnFailure\` from \`@uncaged/nerve-workflow-utils\` as in sense-generator.
|
|
- No dynamic \`import()\` in the new workflow code.
|
|
- **Every zod schema passed to \`llmExtract\` MUST have \`.default()\` on all fields** (especially arrays and objects). This ensures \`dryRun\` mode returns structurally valid data instead of empty primitives. Example: \`z.array(roleSchema).default([])\`, \`z.string().default("unknown")\`.
|
|
|
|
## Reference workflow
|
|
\`\`\`ts
|
|
${buildSenseGeneratorReference().slice(0, 20_000)}
|
|
\`\`\`
|
|
|
|
Current nerve.yaml (merge carefully; keep all existing content):
|
|
\`\`\`yaml
|
|
${getNerveYaml()}
|
|
\`\`\`
|
|
|
|
Implement now.`;
|
|
|
|
const agentResult = await cursorAgent({
|
|
prompt: codePrompt,
|
|
mode: "default",
|
|
cwd: NERVE_ROOT,
|
|
env: null,
|
|
timeoutMs: null,
|
|
dryRun: dry,
|
|
});
|
|
|
|
const workflowDir = join(WORKFLOWS_DIR, wfName);
|
|
const files = {
|
|
indexTs: existsSync(join(workflowDir, "index.ts")),
|
|
packageJson: existsSync(join(workflowDir, "package.json")),
|
|
tsconfigJson: existsSync(join(workflowDir, "tsconfig.json")),
|
|
};
|
|
|
|
if (!agentResult.ok) {
|
|
const errText = `cursor-agent failed: ${formatSpawnFailure(agentResult.error)}`;
|
|
return {
|
|
content: errText,
|
|
meta: { workflowName: wfName, files, cursorOutput: errText },
|
|
};
|
|
}
|
|
|
|
return {
|
|
content: agentResult.value,
|
|
meta: { workflowName: wfName, files, cursorOutput: agentResult.value },
|
|
};
|
|
},
|
|
|
|
async reviewer(
|
|
start: StartStep,
|
|
messages: WorkflowMessage[],
|
|
): Promise<RoleResult<WorkflowMeta["reviewer"]>> {
|
|
const dry = isDryRun(start);
|
|
if (dry) {
|
|
const attempt = messages.filter((m) => m.role === "reviewer").length + 1;
|
|
return {
|
|
content: "[dry-run] reviewer complete — validation skipped; treating as PASS",
|
|
meta: {
|
|
passed: true,
|
|
workflowName: "dry-run-test",
|
|
reason: "Dry-run: reviewer validation not executed",
|
|
attempt,
|
|
validationLog: "(dry-run)",
|
|
},
|
|
};
|
|
}
|
|
const coderEntry = lastMetaForRole<WorkflowMeta["coder"]>(messages, "coder");
|
|
const attempt = messages.filter((m) => m.role === "reviewer").length + 1;
|
|
|
|
if (coderEntry === null) {
|
|
return {
|
|
content: "FAIL — no coder message in history",
|
|
meta: {
|
|
passed: false,
|
|
workflowName: "",
|
|
reason: "Reviewer could not find a prior coder step",
|
|
attempt,
|
|
validationLog: "",
|
|
},
|
|
};
|
|
}
|
|
|
|
const { workflowName, files } = coderEntry;
|
|
|
|
const missing: string[] = [];
|
|
if (!files.indexTs) missing.push("index.ts");
|
|
if (!files.packageJson) missing.push("package.json");
|
|
if (!files.tsconfigJson) missing.push("tsconfig.json");
|
|
if (missing.length > 0) {
|
|
return {
|
|
content: `FAIL — missing: ${missing.join(", ")}`,
|
|
meta: {
|
|
passed: false,
|
|
workflowName,
|
|
reason: `Missing required file(s): ${missing.join(", ")}`,
|
|
attempt,
|
|
validationLog: "",
|
|
},
|
|
};
|
|
}
|
|
|
|
const name = workflowName.trim();
|
|
if (name.length === 0) {
|
|
return {
|
|
content: "FAIL — empty workflow name in coder meta",
|
|
meta: {
|
|
passed: false,
|
|
workflowName: "",
|
|
reason: "Coder meta had empty workflowName",
|
|
attempt,
|
|
validationLog: "",
|
|
},
|
|
};
|
|
}
|
|
|
|
const workflowDir = join(WORKFLOWS_DIR, name);
|
|
const checks = await runReviewerValidation(workflowDir, name, dry);
|
|
|
|
if (!checks.ok) {
|
|
return {
|
|
content: `FAIL — ${checks.reason}`,
|
|
meta: {
|
|
passed: false,
|
|
workflowName: name,
|
|
reason: checks.reason,
|
|
attempt,
|
|
validationLog: checks.log,
|
|
},
|
|
};
|
|
}
|
|
|
|
return {
|
|
content: `PASS — typecheck and nerve.yaml check OK.\n\n${checks.log.slice(0, 8000)}`,
|
|
meta: {
|
|
passed: true,
|
|
workflowName: name,
|
|
reason: "npx tsc --noEmit passed and nerve.yaml contains the workflow entry",
|
|
attempt,
|
|
validationLog: checks.log,
|
|
},
|
|
};
|
|
},
|
|
|
|
async committer(
|
|
start: StartStep,
|
|
messages: WorkflowMessage[],
|
|
): Promise<RoleResult<WorkflowMeta["committer"]>> {
|
|
const dry = isDryRun(start);
|
|
const logLines: string[] = [];
|
|
const gitDir = join(NERVE_ROOT, ".git");
|
|
const nullMeta = (): WorkflowMeta["committer"] => ({
|
|
branch: null,
|
|
commitHash: null,
|
|
pushed: null,
|
|
skipped: true,
|
|
error: null,
|
|
stagedPaths: [],
|
|
});
|
|
|
|
logLines.push("[1] Check `.git` at NERVE_ROOT");
|
|
if (!existsSync(gitDir)) {
|
|
logLines.push(`Result: no .git at ${NERVE_ROOT} — skipping all git operations.`);
|
|
return {
|
|
content: logLines.join("\n"),
|
|
meta: nullMeta(),
|
|
};
|
|
}
|
|
|
|
const analystMeta = lastMetaForRole<WorkflowMeta["analyst"]>(messages, "analyst");
|
|
const userPrompt = analystMeta?.userPrompt ?? "";
|
|
const reviewerMeta = lastMetaForRole<WorkflowMeta["reviewer"]>(messages, "reviewer");
|
|
const coderMeta = lastMetaForRole<WorkflowMeta["coder"]>(messages, "coder");
|
|
const files =
|
|
coderMeta !== null
|
|
? coderMeta.files
|
|
: { indexTs: false, packageJson: false, tsconfigJson: false };
|
|
|
|
const wfName = resolveWorkflowNameForCommitter(messages);
|
|
if (wfName.length === 0) {
|
|
logLines.push("ERROR: could not resolve workflowName from analyst/coder/reviewer meta.");
|
|
return {
|
|
content: logLines.join("\n"),
|
|
meta: {
|
|
branch: null,
|
|
commitHash: null,
|
|
pushed: null,
|
|
skipped: false,
|
|
error: "Empty workflowName — cannot infer paths to stage",
|
|
stagedPaths: [],
|
|
},
|
|
};
|
|
}
|
|
|
|
const includeNerve = await nerveYamlShouldBeStaged(wfName, start);
|
|
const untracked = await listUntrackedUnderWorkflowDir(wfName, start);
|
|
const corePaths = buildCoreStagePaths(wfName, files, includeNerve);
|
|
const plannedSet = new Set<string>(corePaths);
|
|
for (const u of untracked) {
|
|
plannedSet.add(u);
|
|
}
|
|
const plannedPaths = [...plannedSet].filter(
|
|
(p) => p === "nerve.yaml" || existsSync(join(NERVE_ROOT, p)),
|
|
);
|
|
|
|
const dryPlanPaths =
|
|
plannedPaths.length > 0
|
|
? plannedPaths
|
|
: buildCoreStagePaths(
|
|
wfName,
|
|
{ indexTs: true, packageJson: true, tsconfigJson: true },
|
|
verifyNerveWorkflowEntry(wfName).ok,
|
|
);
|
|
|
|
if (plannedPaths.length === 0 && !dry) {
|
|
logLines.push("No candidate paths to `git add` (all file flags false and nerve.yaml not staged).");
|
|
return {
|
|
content: logLines.join("\n"),
|
|
meta: {
|
|
branch: null,
|
|
commitHash: null,
|
|
pushed: null,
|
|
skipped: false,
|
|
error: "Nothing to stage for this workflow",
|
|
stagedPaths: [],
|
|
},
|
|
};
|
|
}
|
|
|
|
const defaultBranch = await resolveDefaultBranchName(start, logLines);
|
|
const shortSuffix = Date.now().toString(36);
|
|
const newBranch = `wf/${sanitizeBranchSegment(wfName)}-${shortSuffix}`;
|
|
|
|
if (dry) {
|
|
logLines.push("[dry-run] Would run: `git checkout <default>` then `git checkout -b " + newBranch + "`");
|
|
logLines.push(`[dry-run] Would run: \`git add -- ${dryPlanPaths.join(" ")}\``);
|
|
logLines.push("[dry-run] Would run: `git commit` with message summarizing workflow + user prompt + reviewer");
|
|
logLines.push(`[dry-run] Would run: \`git push -u origin ${newBranch}\``);
|
|
return {
|
|
content: logLines.join("\n"),
|
|
meta: {
|
|
branch: newBranch,
|
|
commitHash: null,
|
|
pushed: null,
|
|
skipped: false,
|
|
error: null,
|
|
stagedPaths: dryPlanPaths,
|
|
},
|
|
};
|
|
}
|
|
|
|
logLines.push(`[2] git checkout ${defaultBranch}`);
|
|
const checkoutBase = await spawnSafe("git", ["checkout", defaultBranch], {
|
|
cwd: NERVE_ROOT,
|
|
env: null,
|
|
timeoutMs: 120_000,
|
|
dryRun: false,
|
|
});
|
|
if (!checkoutBase.ok) {
|
|
const err = formatSpawnFailure(checkoutBase.error);
|
|
const io = spawnErrorStreams(checkoutBase.error);
|
|
appendIoSnippet(logLines, "checkout", io.stdout, io.stderr);
|
|
logLines.push(`ERROR: ${err}`);
|
|
return {
|
|
content: logLines.join("\n"),
|
|
meta: {
|
|
branch: null,
|
|
commitHash: null,
|
|
pushed: null,
|
|
skipped: false,
|
|
error: `git checkout ${defaultBranch}: ${err}`,
|
|
stagedPaths: [],
|
|
},
|
|
};
|
|
}
|
|
appendIoSnippet(logLines, "checkout", checkoutBase.value.stdout, checkoutBase.value.stderr);
|
|
|
|
logLines.push(`[3] git checkout -b ${newBranch}`);
|
|
const checkoutNew = await spawnSafe("git", ["checkout", "-b", newBranch], {
|
|
cwd: NERVE_ROOT,
|
|
env: null,
|
|
timeoutMs: 120_000,
|
|
dryRun: false,
|
|
});
|
|
if (!checkoutNew.ok) {
|
|
const err = formatSpawnFailure(checkoutNew.error);
|
|
const ioNb = spawnErrorStreams(checkoutNew.error);
|
|
appendIoSnippet(logLines, "checkout -b", ioNb.stdout, ioNb.stderr);
|
|
logLines.push(`ERROR: ${err}`);
|
|
return {
|
|
content: logLines.join("\n"),
|
|
meta: {
|
|
branch: null,
|
|
commitHash: null,
|
|
pushed: null,
|
|
skipped: false,
|
|
error: `git checkout -b: ${err}`,
|
|
stagedPaths: [],
|
|
},
|
|
};
|
|
}
|
|
appendIoSnippet(logLines, "checkout -b", checkoutNew.value.stdout, checkoutNew.value.stderr);
|
|
|
|
logLines.push(`[4] git add -- ${plannedPaths.join(" ")}`);
|
|
const addArgs = ["add", "--", ...plannedPaths];
|
|
const addR = await spawnSafe("git", addArgs, {
|
|
cwd: NERVE_ROOT,
|
|
env: null,
|
|
timeoutMs: 120_000,
|
|
dryRun: false,
|
|
});
|
|
if (!addR.ok) {
|
|
const err = formatSpawnFailure(addR.error);
|
|
const ioAdd = spawnErrorStreams(addR.error);
|
|
appendIoSnippet(logLines, "git add", ioAdd.stdout, ioAdd.stderr);
|
|
logLines.push(`ERROR: ${err}`);
|
|
return {
|
|
content: logLines.join("\n"),
|
|
meta: {
|
|
branch: newBranch,
|
|
commitHash: null,
|
|
pushed: null,
|
|
skipped: false,
|
|
error: `git add: ${err}`,
|
|
stagedPaths: [],
|
|
},
|
|
};
|
|
}
|
|
appendIoSnippet(logLines, "git add", addR.value.stdout, addR.value.stderr);
|
|
|
|
const stagedR = await spawnSafe("git", ["diff", "--cached", "--name-only"], {
|
|
cwd: NERVE_ROOT,
|
|
env: null,
|
|
timeoutMs: 30_000,
|
|
dryRun: false,
|
|
});
|
|
const stagedPaths = stagedR.ok
|
|
? stagedR.value.stdout
|
|
.split("\n")
|
|
.map((s) => s.trim())
|
|
.filter((s) => s.length > 0)
|
|
: [];
|
|
|
|
const userBrief = summarizeText(userPrompt, 200);
|
|
const reasonBrief =
|
|
reviewerMeta !== null ? summarizeText(reviewerMeta.reason, 240) : "(no reviewer reason)";
|
|
const subject = `workflow: ${wfName}`;
|
|
const body =
|
|
`Workflow: ${wfName}\n` +
|
|
`User request (summary): ${userBrief}\n` +
|
|
`Reviewer (summary): ${reasonBrief}\n` +
|
|
`Staged paths:\n${stagedPaths.map((p) => `- ${p}`).join("\n") || "(none)"}\n`;
|
|
const commitMessage = `${subject}\n\n${body}`;
|
|
|
|
const msgPath = join(tmpdir(), `nerve-workflow-generator-commit-${Date.now()}.txt`);
|
|
logLines.push(`[5] git commit -F ${msgPath}`);
|
|
let commitHash: string | null = null;
|
|
let commitErr: string | null = null;
|
|
try {
|
|
writeFileSync(msgPath, commitMessage, "utf-8");
|
|
const commitR = await spawnSafe("git", ["commit", "-F", msgPath], {
|
|
cwd: NERVE_ROOT,
|
|
env: null,
|
|
timeoutMs: 120_000,
|
|
dryRun: false,
|
|
});
|
|
if (!commitR.ok) {
|
|
commitErr = `git commit: ${formatSpawnFailure(commitR.error)}`;
|
|
const ioCommit = spawnErrorStreams(commitR.error);
|
|
appendIoSnippet(logLines, "git commit", ioCommit.stdout, ioCommit.stderr);
|
|
logLines.push(`ERROR: ${commitErr}`);
|
|
} else {
|
|
appendIoSnippet(logLines, "git commit", commitR.value.stdout, commitR.value.stderr);
|
|
const revR = await spawnSafe("git", ["rev-parse", "HEAD"], {
|
|
cwd: NERVE_ROOT,
|
|
env: null,
|
|
timeoutMs: 10_000,
|
|
dryRun: false,
|
|
});
|
|
if (!revR.ok) {
|
|
commitErr = `git rev-parse HEAD: ${formatSpawnFailure(revR.error)}`;
|
|
logLines.push(`ERROR: ${commitErr}`);
|
|
} else {
|
|
commitHash = revR.value.stdout.trim() || null;
|
|
logLines.push(`[6] commit hash: ${commitHash ?? "(empty)"}`);
|
|
}
|
|
}
|
|
} finally {
|
|
try {
|
|
unlinkSync(msgPath);
|
|
} catch {
|
|
/* ignore */
|
|
}
|
|
}
|
|
|
|
if (commitErr !== null) {
|
|
return {
|
|
content: logLines.filter(Boolean).join("\n"),
|
|
meta: {
|
|
branch: newBranch,
|
|
commitHash,
|
|
pushed: null,
|
|
skipped: false,
|
|
error: commitErr,
|
|
stagedPaths,
|
|
},
|
|
};
|
|
}
|
|
|
|
logLines.push("[7] git remote get-url origin");
|
|
const urlR = await spawnSafe("git", ["remote", "get-url", "origin"], {
|
|
cwd: NERVE_ROOT,
|
|
env: null,
|
|
timeoutMs: 10_000,
|
|
dryRun: false,
|
|
});
|
|
if (!urlR.ok) {
|
|
logLines.push(`No origin remote (skip push): ${formatSpawnFailure(urlR.error)}`);
|
|
return {
|
|
content: logLines.filter(Boolean).join("\n"),
|
|
meta: {
|
|
branch: newBranch,
|
|
commitHash,
|
|
pushed: null,
|
|
skipped: false,
|
|
error: null,
|
|
stagedPaths,
|
|
},
|
|
};
|
|
}
|
|
const originUrl = urlR.value.stdout.trim();
|
|
if (originUrl.length === 0) {
|
|
logLines.push("origin URL empty — skip push");
|
|
return {
|
|
content: logLines.filter(Boolean).join("\n"),
|
|
meta: {
|
|
branch: newBranch,
|
|
commitHash,
|
|
pushed: null,
|
|
skipped: false,
|
|
error: null,
|
|
stagedPaths,
|
|
},
|
|
};
|
|
}
|
|
logLines.push(`origin: ${originUrl}`);
|
|
|
|
logLines.push(`[8] git push -u origin ${newBranch}`);
|
|
const pushR = await spawnSafe("git", ["push", "-u", "origin", newBranch], {
|
|
cwd: NERVE_ROOT,
|
|
env: null,
|
|
timeoutMs: 300_000,
|
|
dryRun: false,
|
|
});
|
|
if (!pushR.ok) {
|
|
const pe = `git push: ${formatSpawnFailure(pushR.error)}`;
|
|
const ioPush = spawnErrorStreams(pushR.error);
|
|
appendIoSnippet(logLines, "git push", ioPush.stdout, ioPush.stderr);
|
|
logLines.push(`ERROR: ${pe}`);
|
|
return {
|
|
content: logLines.filter(Boolean).join("\n"),
|
|
meta: {
|
|
branch: newBranch,
|
|
commitHash,
|
|
pushed: false,
|
|
skipped: false,
|
|
error: pe,
|
|
stagedPaths,
|
|
},
|
|
};
|
|
}
|
|
appendIoSnippet(logLines, "git push", pushR.value.stdout, pushR.value.stderr);
|
|
|
|
return {
|
|
content: logLines.filter(Boolean).join("\n"),
|
|
meta: {
|
|
branch: newBranch,
|
|
commitHash,
|
|
pushed: true,
|
|
skipped: false,
|
|
error: null,
|
|
stagedPaths,
|
|
},
|
|
};
|
|
},
|
|
},
|
|
|
|
/**
 * Decides which role runs next based on the most recent step.
 *
 * Pipeline order: analyst -> architect -> coder -> reviewer -> committer.
 * A failed review sends control back to the coder for up to three attempts;
 * any missing required meta (workflow name / design) ends the run early.
 */
moderator(context) {
  // Nothing has run yet: the analyst always opens the pipeline.
  if (context.steps.length === 0) {
    return "analyst";
  }

  const latest = context.steps[context.steps.length - 1];

  switch (latest.role) {
    case "analyst":
      // Without a workflow name there is nothing for the architect to design.
      return latest.meta.workflowName.trim().length === 0 ? END : "architect";

    case "architect": {
      const hasName = latest.meta.workflowName.trim().length > 0;
      const hasDesign = latest.meta.design.trim().length > 0;
      // Both a name and a design are required before coding starts.
      return hasName && hasDesign ? "coder" : END;
    }

    case "coder":
      // Every coder pass goes straight to review.
      return "reviewer";

    case "reviewer":
      if (latest.meta.passed === true) {
        return "committer";
      }
      // Retry the coder on an explicit failure, capped at 3 attempts.
      if (latest.meta.passed === false && latest.meta.attempt < 3) {
        return "coder";
      }
      return END;

    default:
      // committer (or any unexpected role) terminates the workflow.
      return END;
  }
},
|
|
};
|
|
|
|
// Expose the workflow definition as this module's default export for the runner.
export default workflow;