Compare commits
No commits in common. "master" and "feat/210-sense-generator-factories" have entirely different histories.
master ... feat/210-sense-generator-factories
2  .gitignore  vendored
@@ -4,5 +4,3 @@ logs/
nerve.pid
nerve.sock
false/
*.db
dist/
154  CONVENTIONS.md
@@ -1,154 +0,0 @@
# Nerve Workspace Conventions

This document defines coding and workflow conventions for the nerve-workspace (`~/.uncaged-nerve`).
All roles (planner, coder, reviewer, tester) should reference this file.

## Language & Paradigm

### Functional-first

Use `function` + `type`, not `class` + `interface`.

```typescript
// ✅ Good
type Signal = { senseId: string; value: unknown; ts: number };
function createSignal(senseId: string, value: unknown): Signal { ... }

// ❌ Bad
class Signal implements ISignal { ... }
```

### Rules

| Rule | Description |
|------|-------------|
| `type` over `interface` | All type definitions use `type` |
| `function` over `class` | Pure functions + closures, no classes |
| No `this` | Functions must not depend on `this` context |
| No inheritance | No `extends`, `implements`, `abstract` |
| Composition over inheritance | Use function composition |
| No optional properties | Use `T \| null` instead of `?:` |
| No dynamic `import()` | Always static top-level `import` |
| `async/await` only | Never `.then()` chains |

### Exceptions

Classes are allowed when required by a library (e.g. Drizzle `sqliteTable`) or for `Error` subclasses.
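For instance, a minimal sketch of the kind of `Error` subclass this exception covers (the class name and fields are illustrative, not taken from the codebase):

```typescript
// Hypothetical example: an Error subclass is one of the few permitted uses of `class`.
export class SenseTimeoutError extends Error {
  constructor(
    public readonly senseId: string,
    public readonly timeoutMs: number,
  ) {
    super(`sense ${senseId} timed out after ${timeoutMs}ms`);
    this.name = "SenseTimeoutError";
  }
}
```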
## Naming

| Type | Style | Example |
|------|-------|---------|
| Files | kebab-case | `signal-bus.ts` |
| Types | PascalCase | `SignalBus` |
| Functions/variables | camelCase | `createSignalBus` |
| Constants | UPPER_SNAKE | `MAX_RETRY_COUNT` |

## Error Handling

- Use `Result<T, E>` for expected failures
- `throw` only for unrecoverable bugs
- No try-catch for flow control

```typescript
type Result<T, E = Error> = { ok: true; value: T } | { ok: false; error: E };
```
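A short sketch of how a function might return and consume this `Result` type (the function and variable names are illustrative, not from the workspace):

```typescript
// Hypothetical usage of the Result convention above.
function parsePort(raw: string): Result<number, Error> {
  const n = Number.parseInt(raw, 10);
  if (!Number.isFinite(n) || n <= 0 || n > 65535) {
    return { ok: false, error: new Error(`invalid port: ${raw}`) };
  }
  return { ok: true, value: n };
}

const port = parsePort(process.env.PORT ?? "8080");
if (!port.ok) {
  // Expected failure: handled explicitly, no try/catch for flow control.
  console.error(port.error.message);
} else {
  console.log(`listening on ${port.value}`);
}
```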
## Workflow Structure

Each workflow follows the multi-file pattern:

```
workflows/<name>/
  index.ts      — WorkflowDefinition default export (thin entry point)
  build.ts      — factory function with dependency injection
  moderator.ts  — moderator function + WorkflowMeta type
  roles/
    <role>/
      index.ts  — build function + meta schema
      prompt.ts — prompt pure function (string template)
  package.json  — with esbuild build script
  tsconfig.json
```

### Role Implementation Patterns

| Pattern | When to use | Example |
|---------|-------------|---------|
| `createCursorRole` | Needs file system access (code generation, planning) | planner, coder |
| `createHermesRole` | Needs shell + tools (testing, reviewing) | tester, reviewer |
| `createLlmRole` | Pure LLM reasoning, no tools | analysis roles |
| `createRole(hermesAdapter, …)` | Agent role with LLM + shell (branch/commit/push from thread context) | solve-issue committer, publish |
| Direct `Role<Meta>` | No LLM needed, scripted logic | thin wrappers only |

### Meta Convention

Meta is a **routing signal only** — one boolean per role:
- `{ ready: boolean }` — planner
- `{ done: boolean }` — coder
- `{ approved: boolean }` — reviewer
- `{ passed: boolean }` — tester
- `{ committed: boolean }` — committer (solve-issue: branch created, pushed)
- `{ success: boolean }` — publish (PR opened)

### Standard Flow

```
planner → coder → reviewer → tester → committer → END
```

- Reviewer rejection → back to coder (within MAX_CODER_ITERATIONS)
- Tester failure → back to coder (within MAX_CODER_ITERATIONS)
- Committer failure → back to coder (within MAX_CODER_ITERATIONS; see the moderator sketch below)
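A minimal sketch of how a moderator might route on these per-role booleans, assuming the `Moderator` and `END` shapes from `@uncaged/nerve-core` that the extract-knowledge moderator later in this diff also uses (the meta map and iteration limit are illustrative, not the actual solve-issue moderator):

```typescript
import { END } from "@uncaged/nerve-core";
import type { Moderator } from "@uncaged/nerve-core";

// Illustrative meta map following the one-boolean-per-role convention above.
type WorkflowMeta = {
  planner: { ready: boolean };
  coder: { done: boolean };
  reviewer: { approved: boolean };
  tester: { passed: boolean };
  committer: { committed: boolean };
};

const MAX_CODER_ITERATIONS = 3;

export const moderator: Moderator<WorkflowMeta> = (context) => {
  const { steps } = context;
  if (steps.length === 0) return "planner";

  const last = steps[steps.length - 1];
  const coderRuns = steps.filter((s) => s.role === "coder").length;
  // Failed checks loop back to the coder until the iteration budget is spent.
  const retryCoder = coderRuns < MAX_CODER_ITERATIONS ? "coder" : END;

  if (last.role === "planner") return (last.meta as { ready: boolean }).ready ? "coder" : END;
  if (last.role === "coder") return (last.meta as { done: boolean }).done ? "reviewer" : retryCoder;
  if (last.role === "reviewer") return (last.meta as { approved: boolean }).approved ? "tester" : retryCoder;
  if (last.role === "tester") return (last.meta as { passed: boolean }).passed ? "committer" : retryCoder;
  if (last.role === "committer") return (last.meta as { committed: boolean }).committed ? END : retryCoder;
  return END;
};
```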
## Sense Structure

```
senses/<name>/
  src/
    index.ts  — compute() function + schema
    schema.ts — Drizzle table definition
  migrations/ — SQLite migrations
  package.json — with esbuild build script
```
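A minimal sketch of what a sense looks like under this layout (table and column names are made up; in the real layout the table lives in `src/schema.ts` and `compute()` in `src/index.ts`, as the senses added later in this diff do):

```typescript
// Illustrative sense module; names are hypothetical.
import { loadavg } from "node:os";
import { integer, real, sqliteTable } from "drizzle-orm/sqlite-core";

// Drizzle table definition (normally src/schema.ts).
export const heartbeat = sqliteTable("heartbeat", {
  id: integer("id").primaryKey({ autoIncrement: true }),
  ts: integer("ts").notNull(),
  load1m: real("load_1m").notNull(),
});

// Minimal structural type for the injected Drizzle handle (an assumption, not the daemon's real type).
type Db = { insert: (table: unknown) => { values: (row: unknown) => Promise<unknown> } };

// compute() persists a snapshot row and returns the signal payload (normally src/index.ts).
export async function compute(db: Db, _peers: unknown) {
  const [load1m] = loadavg();
  const row = { ts: Date.now(), load1m };
  await db.insert(heartbeat).values(row); // write one row per invocation
  return row;                             // returned object is the emitted signal
}
```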
## Toolchain

| Tool | Purpose |
|------|---------|
| **pnpm** | Package manager (workspace mode) |
| **TypeScript** | Type checking |
| **esbuild** | Bundling (each workflow/sense bundles independently) |

### Commands

```bash
pnpm build                           # build all packages
pnpm -r build                        # same, explicit recursive
cd workflows/<name> && pnpm build    # build one workflow
```

## Git & Commit Convention

```
<type>(<scope>): <description>

type: feat | fix | refactor | docs | chore | test
scope: workflow | sense | core | ...
```
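For example, commit messages under this convention might look like the following (illustrative messages, not taken from the repository history):

```
feat(sense): add hermes-gateway-health HTTP probe
fix(workflow): route tester failures back to coder
docs(core): document the Result<T, E> convention
```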
### What NOT to commit

- `node_modules/`
- `dist/` (build outputs, generated by esbuild)
- `.DS_Store`
- pnpm cache artifacts (e.g. `false/` directories from `--no-cache` misuse)
- Secrets, API keys, tokens
- Unrelated file changes outside the task scope

## Dependencies

Shared packages from the nerve monorepo:
- `@uncaged/nerve-core` — types, END constant, WorkflowDefinition
- `@uncaged/nerve-workflow-utils` — role factories, spawnSafe, llmExtract, cursorAgent
- `zod` — schema definitions for meta extraction
50  nerve.yaml
@@ -1,26 +1,48 @@
# nerve.yaml — Nerve workspace configuration

extract:
  provider: dashscope
  model: qwen-plus

senses:
  linux-system-health:
    group: system
    throttle: 10s
    timeout: 15s
    grace_period: null
  hermes-gateway-health:
    group: system
    interval: 2m
    throttle: 30s
    timeout: 30s
    grace_period: null
  hermes-session-message-stats:
    group: hermes
    throttle: 30s
    timeout: 60s
    grace_period: null

workflows:
  develop-sense:
  sense-generator:
    concurrency: 1
    overflow: queue
  develop-workflow:
    overflow: drop
  workflow-generator:
    concurrency: 1
    overflow: queue
  solve-issue:
    overflow: drop
  pr-summarizer:
    concurrency: 1
    overflow: queue
  extract-knowledge:
    overflow: drop
  pr-code-reviewer:
    concurrency: 1
    overflow: queue
    overflow: drop
  hello-world:
    concurrency: 1
    overflow: drop
  gitea-issue-solver:
    concurrency: 1
    overflow: drop

reflexes:
  - kind: sense
    sense: linux-system-health
    interval: 30s
  - kind: sense
    sense: hermes-gateway-health
    interval: 2m
  - kind: sense
    sense: hermes-session-message-stats
    interval: 15m
23  package.json
@@ -3,39 +3,24 @@
  "version": "0.0.1",
  "private": true,
  "type": "module",
  "scripts": {
    "build": "node scripts/build.mjs"
  },
  "dependencies": {
    "@uncaged/nerve-adapter-cursor": "link:../repos/nerve/packages/adapter-cursor",
    "@uncaged/nerve-adapter-hermes": "link:../repos/nerve/packages/adapter-hermes",
    "@uncaged/nerve-core": "latest",
    "@uncaged/nerve-daemon": "link:../repos/nerve/packages/daemon",
    "@uncaged/nerve-role-committer": "link:../repos/nerve/packages/role-committer",
    "@uncaged/nerve-role-reviewer": "link:../repos/nerve/packages/role-reviewer",
    "@uncaged/nerve-workflow-meta": "link:../repos/nerve/packages/workflow-meta",
    "@uncaged/nerve-workflow-utils": "link:../repos/nerve/packages/workflow-utils",
    "@uncaged/nerve-daemon": "latest",
    "@uncaged/nerve-workflow-utils": "latest",
    "drizzle-orm": "latest",
    "zod": "^4.3.6"
  },
  "devDependencies": {
    "@types/node": "^22.0.0",
    "drizzle-kit": "latest",
    "esbuild": "^0.27.0",
    "typescript": "^5.7.0"
    "drizzle-kit": "latest"
  },
  "pnpm": {
    "onlyBuiltDependencies": [
      "esbuild"
    ],
    "overrides": {
      "@uncaged/nerve-adapter-cursor": "link:../repos/nerve/packages/adapter-cursor",
      "@uncaged/nerve-adapter-hermes": "link:../repos/nerve/packages/adapter-hermes",
      "@uncaged/nerve-daemon": "link:../repos/nerve/packages/daemon",
      "@uncaged/nerve-core": "link:../repos/nerve/packages/core",
      "@uncaged/nerve-workflow-utils": "link:../repos/nerve/packages/workflow-utils",
      "@uncaged/nerve-role-committer": "link:../repos/nerve/packages/role-committer",
      "@uncaged/nerve-workflow-meta": "link:../repos/nerve/packages/workflow-meta"
      "@uncaged/nerve-workflow-utils": "link:../repos/nerve/packages/workflow-utils"
    }
  }
}
225  pnpm-lock.yaml  generated
@ -5,39 +5,20 @@ settings:
|
||||
excludeLinksFromLockfile: false
|
||||
|
||||
overrides:
|
||||
'@uncaged/nerve-adapter-cursor': link:../repos/nerve/packages/adapter-cursor
|
||||
'@uncaged/nerve-adapter-hermes': link:../repos/nerve/packages/adapter-hermes
|
||||
'@uncaged/nerve-daemon': link:../repos/nerve/packages/daemon
|
||||
'@uncaged/nerve-core': link:../repos/nerve/packages/core
|
||||
'@uncaged/nerve-workflow-utils': link:../repos/nerve/packages/workflow-utils
|
||||
'@uncaged/nerve-role-committer': link:../repos/nerve/packages/role-committer
|
||||
'@uncaged/nerve-workflow-meta': link:../repos/nerve/packages/workflow-meta
|
||||
|
||||
importers:
|
||||
|
||||
.:
|
||||
dependencies:
|
||||
'@uncaged/nerve-adapter-cursor':
|
||||
specifier: link:../repos/nerve/packages/adapter-cursor
|
||||
version: link:../repos/nerve/packages/adapter-cursor
|
||||
'@uncaged/nerve-adapter-hermes':
|
||||
specifier: link:../repos/nerve/packages/adapter-hermes
|
||||
version: link:../repos/nerve/packages/adapter-hermes
|
||||
'@uncaged/nerve-core':
|
||||
specifier: link:../repos/nerve/packages/core
|
||||
version: link:../repos/nerve/packages/core
|
||||
'@uncaged/nerve-daemon':
|
||||
specifier: link:../repos/nerve/packages/daemon
|
||||
version: link:../repos/nerve/packages/daemon
|
||||
'@uncaged/nerve-role-committer':
|
||||
specifier: link:../repos/nerve/packages/role-committer
|
||||
version: link:../repos/nerve/packages/role-committer
|
||||
'@uncaged/nerve-role-reviewer':
|
||||
specifier: link:../repos/nerve/packages/role-reviewer
|
||||
version: link:../repos/nerve/packages/role-reviewer
|
||||
'@uncaged/nerve-workflow-meta':
|
||||
specifier: link:../repos/nerve/packages/workflow-meta
|
||||
version: link:../repos/nerve/packages/workflow-meta
|
||||
'@uncaged/nerve-workflow-utils':
|
||||
specifier: link:../repos/nerve/packages/workflow-utils
|
||||
version: link:../repos/nerve/packages/workflow-utils
|
||||
@ -48,196 +29,9 @@ importers:
|
||||
specifier: ^4.3.6
|
||||
version: 4.3.6
|
||||
devDependencies:
|
||||
'@types/node':
|
||||
specifier: ^22.0.0
|
||||
version: 22.19.17
|
||||
drizzle-kit:
|
||||
specifier: latest
|
||||
version: 0.31.10
|
||||
esbuild:
|
||||
specifier: ^0.27.0
|
||||
version: 0.27.7
|
||||
typescript:
|
||||
specifier: ^5.7.0
|
||||
version: 5.9.3
|
||||
|
||||
senses/git-workspace-status:
|
||||
devDependencies:
|
||||
'@types/node':
|
||||
specifier: ^22.0.0
|
||||
version: 22.19.17
|
||||
esbuild:
|
||||
specifier: ^0.27.0
|
||||
version: 0.27.7
|
||||
typescript:
|
||||
specifier: ^5.7.0
|
||||
version: 5.9.3
|
||||
|
||||
senses/hermes-gateway-health:
|
||||
devDependencies:
|
||||
'@types/node':
|
||||
specifier: ^22.0.0
|
||||
version: 22.19.17
|
||||
esbuild:
|
||||
specifier: ^0.27.0
|
||||
version: 0.27.7
|
||||
typescript:
|
||||
specifier: ^5.7.0
|
||||
version: 5.9.3
|
||||
|
||||
senses/hermes-session-message-stats:
|
||||
devDependencies:
|
||||
'@types/node':
|
||||
specifier: ^22.0.0
|
||||
version: 22.19.17
|
||||
esbuild:
|
||||
specifier: ^0.27.0
|
||||
version: 0.27.7
|
||||
typescript:
|
||||
specifier: ^5.7.0
|
||||
version: 5.9.3
|
||||
|
||||
senses/linux-system-health:
|
||||
devDependencies:
|
||||
'@types/node':
|
||||
specifier: ^22.0.0
|
||||
version: 22.19.17
|
||||
esbuild:
|
||||
specifier: ^0.27.0
|
||||
version: 0.27.7
|
||||
typescript:
|
||||
specifier: ^5.7.0
|
||||
version: 5.9.3
|
||||
|
||||
senses/worker-process-metrics:
|
||||
devDependencies:
|
||||
'@types/node':
|
||||
specifier: ^22.0.0
|
||||
version: 22.19.17
|
||||
esbuild:
|
||||
specifier: ^0.27.0
|
||||
version: 0.27.7
|
||||
typescript:
|
||||
specifier: ^5.7.0
|
||||
version: 5.9.3
|
||||
|
||||
workflows/develop-sense:
|
||||
dependencies:
|
||||
'@uncaged/nerve-adapter-cursor':
|
||||
specifier: link:../../../repos/nerve/packages/adapter-cursor
|
||||
version: link:../../../repos/nerve/packages/adapter-cursor
|
||||
'@uncaged/nerve-adapter-hermes':
|
||||
specifier: link:../../../repos/nerve/packages/adapter-hermes
|
||||
version: link:../../../repos/nerve/packages/adapter-hermes
|
||||
'@uncaged/nerve-core':
|
||||
specifier: link:../../../repos/nerve/packages/core
|
||||
version: link:../../../repos/nerve/packages/core
|
||||
'@uncaged/nerve-workflow-meta':
|
||||
specifier: link:../../../repos/nerve/packages/workflow-meta
|
||||
version: link:../../../repos/nerve/packages/workflow-meta
|
||||
'@uncaged/nerve-workflow-utils':
|
||||
specifier: link:../../../repos/nerve/packages/workflow-utils
|
||||
version: link:../../../repos/nerve/packages/workflow-utils
|
||||
zod:
|
||||
specifier: ^4.3.6
|
||||
version: 4.3.6
|
||||
devDependencies:
|
||||
'@types/node':
|
||||
specifier: ^22.0.0
|
||||
version: 22.19.17
|
||||
esbuild:
|
||||
specifier: ^0.27.0
|
||||
version: 0.27.7
|
||||
typescript:
|
||||
specifier: ^5.7.0
|
||||
version: 5.9.3
|
||||
|
||||
workflows/develop-workflow:
|
||||
dependencies:
|
||||
'@uncaged/nerve-adapter-cursor':
|
||||
specifier: link:../../../repos/nerve/packages/adapter-cursor
|
||||
version: link:../../../repos/nerve/packages/adapter-cursor
|
||||
'@uncaged/nerve-adapter-hermes':
|
||||
specifier: link:../../../repos/nerve/packages/adapter-hermes
|
||||
version: link:../../../repos/nerve/packages/adapter-hermes
|
||||
'@uncaged/nerve-core':
|
||||
specifier: link:../../../repos/nerve/packages/core
|
||||
version: link:../../../repos/nerve/packages/core
|
||||
'@uncaged/nerve-workflow-meta':
|
||||
specifier: link:../../../repos/nerve/packages/workflow-meta
|
||||
version: link:../../../repos/nerve/packages/workflow-meta
|
||||
'@uncaged/nerve-workflow-utils':
|
||||
specifier: link:../../../repos/nerve/packages/workflow-utils
|
||||
version: link:../../../repos/nerve/packages/workflow-utils
|
||||
zod:
|
||||
specifier: ^4.3.6
|
||||
version: 4.3.6
|
||||
devDependencies:
|
||||
'@types/node':
|
||||
specifier: ^22.0.0
|
||||
version: 22.19.17
|
||||
esbuild:
|
||||
specifier: ^0.27.0
|
||||
version: 0.27.7
|
||||
typescript:
|
||||
specifier: ^5.7.0
|
||||
version: 5.9.3
|
||||
|
||||
workflows/extract-knowledge:
|
||||
dependencies:
|
||||
'@uncaged/nerve-adapter-cursor':
|
||||
specifier: link:../../../repos/nerve/packages/adapter-cursor
|
||||
version: link:../../../repos/nerve/packages/adapter-cursor
|
||||
'@uncaged/nerve-adapter-hermes':
|
||||
specifier: link:../../../repos/nerve/packages/adapter-hermes
|
||||
version: link:../../../repos/nerve/packages/adapter-hermes
|
||||
'@uncaged/nerve-core':
|
||||
specifier: link:../../../repos/nerve/packages/core
|
||||
version: link:../../../repos/nerve/packages/core
|
||||
'@uncaged/nerve-workflow-utils':
|
||||
specifier: link:../../../repos/nerve/packages/workflow-utils
|
||||
version: link:../../../repos/nerve/packages/workflow-utils
|
||||
zod:
|
||||
specifier: ^4.3.6
|
||||
version: 4.3.6
|
||||
devDependencies:
|
||||
'@types/node':
|
||||
specifier: ^22.0.0
|
||||
version: 22.19.17
|
||||
esbuild:
|
||||
specifier: ^0.27.0
|
||||
version: 0.27.7
|
||||
typescript:
|
||||
specifier: ^5.7.0
|
||||
version: 5.9.3
|
||||
|
||||
workflows/solve-issue:
|
||||
dependencies:
|
||||
'@uncaged/nerve-adapter-cursor':
|
||||
specifier: link:../../../repos/nerve/packages/adapter-cursor
|
||||
version: link:../../../repos/nerve/packages/adapter-cursor
|
||||
'@uncaged/nerve-adapter-hermes':
|
||||
specifier: link:../../../repos/nerve/packages/adapter-hermes
|
||||
version: link:../../../repos/nerve/packages/adapter-hermes
|
||||
'@uncaged/nerve-core':
|
||||
specifier: link:../../../repos/nerve/packages/core
|
||||
version: link:../../../repos/nerve/packages/core
|
||||
'@uncaged/nerve-workflow-utils':
|
||||
specifier: link:../../../repos/nerve/packages/workflow-utils
|
||||
version: link:../../../repos/nerve/packages/workflow-utils
|
||||
zod:
|
||||
specifier: ^4.3.6
|
||||
version: 4.3.6
|
||||
devDependencies:
|
||||
'@types/node':
|
||||
specifier: ^22.0.0
|
||||
version: 22.19.17
|
||||
esbuild:
|
||||
specifier: ^0.27.0
|
||||
version: 0.27.7
|
||||
typescript:
|
||||
specifier: ^5.7.0
|
||||
version: 5.9.3
|
||||
|
||||
packages:
|
||||
|
||||
@ -696,9 +490,6 @@ packages:
|
||||
cpu: [x64]
|
||||
os: [win32]
|
||||
|
||||
'@types/node@22.19.17':
|
||||
resolution: {integrity: sha512-wGdMcf+vPYM6jikpS/qhg6WiqSV/OhG+jeeHT/KlVqxYfD40iYJf9/AE1uQxVWFvU7MipKRkRv8NSHiCGgPr8Q==}
|
||||
|
||||
base64-js@1.5.1:
|
||||
resolution: {integrity: sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==}
|
||||
|
||||
@ -959,14 +750,6 @@ packages:
|
||||
tunnel-agent@0.6.0:
|
||||
resolution: {integrity: sha512-McnNiV1l8RYeY8tBgEpuodCC1mLUdbSN+CYBL7kJsJNInOP8UjDDEwdk6Mw60vdLLrr5NHKZhMAOSrR2NZuQ+w==}
|
||||
|
||||
typescript@5.9.3:
|
||||
resolution: {integrity: sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw==}
|
||||
engines: {node: '>=14.17'}
|
||||
hasBin: true
|
||||
|
||||
undici-types@6.21.0:
|
||||
resolution: {integrity: sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ==}
|
||||
|
||||
util-deprecate@1.0.2:
|
||||
resolution: {integrity: sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==}
|
||||
|
||||
@ -1212,10 +995,6 @@ snapshots:
|
||||
'@esbuild/win32-x64@0.27.7':
|
||||
optional: true
|
||||
|
||||
'@types/node@22.19.17':
|
||||
dependencies:
|
||||
undici-types: 6.21.0
|
||||
|
||||
base64-js@1.5.1:
|
||||
optional: true
|
||||
|
||||
@ -1507,10 +1286,6 @@ snapshots:
|
||||
safe-buffer: 5.2.1
|
||||
optional: true
|
||||
|
||||
typescript@5.9.3: {}
|
||||
|
||||
undici-types@6.21.0: {}
|
||||
|
||||
util-deprecate@1.0.2:
|
||||
optional: true
|
||||
|
||||
|
||||
@@ -1,46 +0,0 @@
import * as esbuild from "esbuild";
import fs from "node:fs";
import path from "node:path";
import { fileURLToPath } from "node:url";

const root = path.join(path.dirname(fileURLToPath(import.meta.url)), "..");
const dist = path.join(root, "dist");

const opts = {
  bundle: true,
  platform: "node",
  format: "esm",
  packages: "external",
};

function listDirs(dir) {
  if (!fs.existsSync(dir)) return [];
  return fs
    .readdirSync(dir)
    .filter((name) => !name.startsWith(".") && !name.startsWith("_"))
    .map((name) => ({ name, full: path.join(dir, name) }))
    .filter(({ full }) => fs.statSync(full).isDirectory());
}

async function main() {
  // Clean dist/
  fs.rmSync(dist, { recursive: true, force: true });

  for (const { name, full } of listDirs(path.join(root, "senses"))) {
    const entry = path.join(full, "src", "index.ts");
    if (!fs.existsSync(entry)) continue;
    const outfile = path.join(dist, "senses", name, "index.js");
    fs.mkdirSync(path.dirname(outfile), { recursive: true });
    await esbuild.build({ ...opts, entryPoints: [entry], outfile });
  }

  for (const { name, full } of listDirs(path.join(root, "workflows"))) {
    const entry = path.join(full, "index.ts");
    if (!fs.existsSync(entry)) continue;
    const outfile = path.join(dist, "workflows", name, "index.js");
    fs.mkdirSync(path.dirname(outfile), { recursive: true });
    await esbuild.build({ ...opts, entryPoints: [entry], outfile });
  }
}

await main();
@ -1,90 +1,14 @@
|
||||
import { execFile } from "node:child_process";
|
||||
import { hermesGatewayHealth } from "./schema.ts";
|
||||
|
||||
/** Keep subprocess deadlines slightly under typical sense timeout (30s). */
|
||||
const EXEC_TIMEOUT_MS = 25_000;
|
||||
|
||||
/** HTTP probe stays below EXEC_TIMEOUT_MS and sense timeout (30s). */
|
||||
const HTTP_TIMEOUT_MS = Math.min(23_000, EXEC_TIMEOUT_MS - 2000);
|
||||
|
||||
const HTTP_ERROR_MAX_LEN = 256;
|
||||
|
||||
/** How many consecutive failures before triggering a restart. */
|
||||
const FAILURE_THRESHOLD = 3;
|
||||
|
||||
type SenseState = {
|
||||
consecutiveFailures: number;
|
||||
lastRestartTs: number;
|
||||
/** Minimum ms between restart attempts to avoid restart loops. */
|
||||
restartCooldownMs: number;
|
||||
};
|
||||
|
||||
export const initialState: SenseState = {
|
||||
consecutiveFailures: 0,
|
||||
lastRestartTs: 0,
|
||||
restartCooldownMs: 300_000, // 5 minutes
|
||||
};
|
||||
|
||||
function gatewayProbeUrl(): string {
|
||||
const u =
|
||||
process.env.HERMES_GATEWAY_HEALTH_URL ??
|
||||
process.env.NERVE_HERMES_GATEWAY_URL ??
|
||||
"";
|
||||
return String(u).trim();
|
||||
}
|
||||
|
||||
function truncateHttpError(err: unknown): string {
|
||||
const raw =
|
||||
err && typeof err === "object" && "code" in err && (err as { code: unknown }).code
|
||||
? String((err as { code: unknown }).code)
|
||||
: String((err as { message?: unknown } | null)?.message ?? err ?? "error");
|
||||
const s = raw.trim() || "error";
|
||||
return s.length > HTTP_ERROR_MAX_LEN ? s.slice(0, HTTP_ERROR_MAX_LEN) : s;
|
||||
}
|
||||
|
||||
type HttpProbeResult = {
|
||||
httpOk: number;
|
||||
httpStatusCode: number;
|
||||
httpLatencyMs: number;
|
||||
httpError: string;
|
||||
};
|
||||
|
||||
async function probeGatewayHttp(url: string): Promise<HttpProbeResult> {
|
||||
if (!url) {
|
||||
return {
|
||||
httpOk: 0,
|
||||
httpStatusCode: 0,
|
||||
httpLatencyMs: 0,
|
||||
httpError: "missing_url",
|
||||
};
|
||||
}
|
||||
const t0 = Date.now();
|
||||
try {
|
||||
const signal = AbortSignal.timeout(HTTP_TIMEOUT_MS);
|
||||
const res = await fetch(url, {
|
||||
method: "GET",
|
||||
signal,
|
||||
redirect: "follow",
|
||||
});
|
||||
const httpLatencyMs = Date.now() - t0;
|
||||
const code = res.status;
|
||||
const ok = code >= 200 && code < 400;
|
||||
return {
|
||||
httpOk: ok ? 1 : 0,
|
||||
httpStatusCode: code,
|
||||
httpLatencyMs,
|
||||
httpError: ok ? "" : truncateHttpError({ message: `HTTP ${code}` }),
|
||||
};
|
||||
} catch (err) {
|
||||
const httpLatencyMs = Date.now() - t0;
|
||||
return {
|
||||
httpOk: 0,
|
||||
httpStatusCode: 0,
|
||||
httpLatencyMs,
|
||||
httpError: truncateHttpError(err),
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
function etimeToSeconds(etime: string): number {
|
||||
/**
|
||||
* When `ps` lacks `etimes` (wall-clock seconds since start), parse `etime`
|
||||
* ([[dd-]hh:]mm:ss) into seconds. See ps(1) `etime` field description.
|
||||
*/
|
||||
function etimeToSeconds(etime) {
|
||||
let s = String(etime).trim();
|
||||
if (!s) return 0;
|
||||
let days = 0;
|
||||
@ -108,14 +32,7 @@ function etimeToSeconds(etime: string): number {
|
||||
return 0;
|
||||
}
|
||||
|
||||
type ExecResult = {
|
||||
exitCode: number;
|
||||
errCode: string | undefined;
|
||||
stdout: string;
|
||||
stderr: string;
|
||||
};
|
||||
|
||||
function execFileUtf8(file: string, args: string[], opts: Record<string, unknown> = {}): Promise<ExecResult> {
|
||||
function execFileUtf8(file, args, opts = {}) {
|
||||
return new Promise((resolve) => {
|
||||
execFile(
|
||||
file,
|
||||
@ -125,15 +42,13 @@ function execFileUtf8(file: string, args: string[], opts: Record<string, unknown
|
||||
maxBuffer: 8 * 1024 * 1024,
|
||||
timeout: EXEC_TIMEOUT_MS,
|
||||
...opts,
|
||||
} as Parameters<typeof execFile>[2],
|
||||
},
|
||||
(err, stdout, stderr) => {
|
||||
const exitCode =
|
||||
err && typeof (err as NodeJS.ErrnoException).status === "number"
|
||||
? (err as NodeJS.ErrnoException & { status: number }).status
|
||||
: err ? -1 : 0;
|
||||
err && typeof err.status === "number" ? err.status : err ? -1 : 0;
|
||||
resolve({
|
||||
exitCode,
|
||||
errCode: (err as NodeJS.ErrnoException | null)?.code,
|
||||
errCode: err?.code,
|
||||
stdout: String(stdout ?? ""),
|
||||
stderr: String(stderr ?? ""),
|
||||
});
|
||||
@ -142,12 +57,12 @@ function execFileUtf8(file: string, args: string[], opts: Record<string, unknown
|
||||
});
|
||||
}
|
||||
|
||||
function parseMainPidFromStatus(text: string): number {
|
||||
function parseMainPidFromStatus(text) {
|
||||
const m = text.match(/Main PID:\s*(\d+)/i);
|
||||
return m ? Math.trunc(Number.parseInt(m[1], 10)) || 0 : 0;
|
||||
}
|
||||
|
||||
function parseActiveLineFromStatus(text: string): { active: boolean; subRunning: boolean } {
|
||||
function parseActiveLineFromStatus(text) {
|
||||
for (const line of text.split("\n")) {
|
||||
if (/^\s*Active:/i.test(line)) {
|
||||
const m = line.match(/Active:\s*(\S+)\s*\(([^)]*)\)/i);
|
||||
@ -162,7 +77,7 @@ function parseActiveLineFromStatus(text: string): { active: boolean; subRunning:
|
||||
return { active: false, subRunning: false };
|
||||
}
|
||||
|
||||
function parseSystemctlShow(text: string): { mainPid: number; active: boolean; subRunning: boolean } {
|
||||
function parseSystemctlShow(text) {
|
||||
let mainPid = 0;
|
||||
let active = false;
|
||||
let subRunning = false;
|
||||
@ -179,7 +94,7 @@ function parseSystemctlShow(text: string): { mainPid: number; active: boolean; s
|
||||
return { mainPid, active, subRunning };
|
||||
}
|
||||
|
||||
async function readSystemdState(): Promise<{ mainPid: number; systemdActiveRunning: boolean }> {
|
||||
async function readSystemdState() {
|
||||
const status = await execFileUtf8("systemctl", [
|
||||
"--user",
|
||||
"--no-pager",
|
||||
@ -190,7 +105,8 @@ async function readSystemdState(): Promise<{ mainPid: number; systemdActiveRunni
|
||||
let mainPid = parseMainPidFromStatus(combined);
|
||||
let { active, subRunning } = parseActiveLineFromStatus(combined);
|
||||
|
||||
const needShow = mainPid <= 0 || !active || !subRunning;
|
||||
const needShow =
|
||||
mainPid <= 0 || !active || !subRunning;
|
||||
|
||||
if (needShow) {
|
||||
const show = await execFileUtf8("systemctl", [
|
||||
@ -215,20 +131,14 @@ async function readSystemdState(): Promise<{ mainPid: number; systemdActiveRunni
|
||||
return { mainPid, systemdActiveRunning: active && subRunning };
|
||||
}
|
||||
|
||||
async function processExists(mainPid: number): Promise<boolean> {
|
||||
async function processExists(mainPid) {
|
||||
if (mainPid <= 0) return false;
|
||||
const r = await execFileUtf8("ps", ["-p", String(mainPid), "-o", "pid="]);
|
||||
if (r.errCode === "ENOENT") return false;
|
||||
return r.stdout.trim().length > 0;
|
||||
}
|
||||
|
||||
type PsMetrics = {
|
||||
rssBytes: number;
|
||||
cpuPercent: number;
|
||||
uptimeSec: number;
|
||||
};
|
||||
|
||||
async function readPsMetrics(mainPid: number): Promise<PsMetrics> {
|
||||
async function readPsMetrics(mainPid) {
|
||||
if (mainPid <= 0) {
|
||||
return { rssBytes: 0, cpuPercent: 0, uptimeSec: 0 };
|
||||
}
|
||||
@ -258,8 +168,12 @@ async function readPsMetrics(mainPid: number): Promise<PsMetrics> {
|
||||
const rssKiB = Number(parts[0]);
|
||||
const cpu = Number(parts[1]);
|
||||
const uptimeSec = etimeToSeconds(parts.slice(2).join(" "));
|
||||
const rssBytes = Number.isFinite(rssKiB) ? Math.trunc(rssKiB * 1024) : 0;
|
||||
const cpuPercent = Number.isFinite(cpu) ? Math.round(cpu * 100) / 100 : 0;
|
||||
const rssBytes = Number.isFinite(rssKiB)
|
||||
? Math.trunc(rssKiB * 1024)
|
||||
: 0;
|
||||
const cpuPercent = Number.isFinite(cpu)
|
||||
? Math.round(cpu * 100) / 100
|
||||
: 0;
|
||||
return { rssBytes, cpuPercent, uptimeSec };
|
||||
}
|
||||
const rssKiB = Number(parts[0]);
|
||||
@ -267,16 +181,67 @@ async function readPsMetrics(mainPid: number): Promise<PsMetrics> {
|
||||
const etimes = Number(parts[2]);
|
||||
const rssBytes = Number.isFinite(rssKiB) ? Math.trunc(rssKiB * 1024) : 0;
|
||||
const cpuPercent = Number.isFinite(cpu) ? Math.round(cpu * 100) / 100 : 0;
|
||||
const uptimeSec = Number.isFinite(etimes) ? Math.trunc(etimes) : 0;
|
||||
const uptimeSec = Number.isFinite(etimes)
|
||||
? Math.trunc(etimes)
|
||||
: 0;
|
||||
return { rssBytes, cpuPercent, uptimeSec };
|
||||
}
|
||||
|
||||
export async function compute(prevState: SenseState) {
|
||||
const now = Date.now();
|
||||
function parseActiveSessionsFromHermesStats(text) {
|
||||
const src = String(text);
|
||||
const patterns = [
|
||||
/^\s*Active\s+sessions?:\s*(\d+)/gim,
|
||||
/^\s*active\s+sessions?:\s*(\d+)/gim,
|
||||
/^\s*Total\s+sessions?:\s*(\d+)/gim,
|
||||
];
|
||||
for (const re of patterns) {
|
||||
re.lastIndex = 0;
|
||||
const m = re.exec(src);
|
||||
if (m) {
|
||||
const n = Math.trunc(Number.parseInt(m[1], 10));
|
||||
return Number.isFinite(n) ? n : 0;
|
||||
}
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
|
||||
async function readActiveSessions() {
|
||||
try {
|
||||
const r = await execFileUtf8("hermes", ["sessions", "stats"]);
|
||||
if (r.errCode === "ENOENT") return 0;
|
||||
return parseActiveSessionsFromHermesStats(`${r.stdout}\n${r.stderr}`);
|
||||
} catch {
|
||||
return 0;
|
||||
}
|
||||
}
|
||||
|
||||
async function countDirectChildren(mainPid) {
|
||||
if (mainPid <= 0) return 0;
|
||||
try {
|
||||
const r = await execFileUtf8("ps", [
|
||||
"--no-headers",
|
||||
"-o",
|
||||
"pid",
|
||||
"--ppid",
|
||||
String(mainPid),
|
||||
]);
|
||||
if (r.errCode === "ENOENT") return 0;
|
||||
const lines = r.stdout
|
||||
.split("\n")
|
||||
.map((l) => l.trim())
|
||||
.filter(Boolean);
|
||||
return lines.length;
|
||||
} catch {
|
||||
return 0;
|
||||
}
|
||||
}
|
||||
|
||||
export async function compute(db, _peers) {
|
||||
const ts = Date.now();
|
||||
|
||||
// --- probe gateway ---
|
||||
let mainPid = 0;
|
||||
let systemdActiveRunning = false;
|
||||
|
||||
try {
|
||||
const st = await readSystemdState();
|
||||
mainPid = st.mainPid;
|
||||
@ -309,61 +274,48 @@ export async function compute(prevState: SenseState) {
|
||||
}
|
||||
}
|
||||
|
||||
const alive = systemdActiveRunning && mainPid > 0 && psOk ? 1 : 0;
|
||||
const alive =
|
||||
systemdActiveRunning && mainPid > 0 && psOk ? 1 : 0;
|
||||
|
||||
let httpOk = 0;
|
||||
let httpStatusCode = 0;
|
||||
let httpLatencyMs = 0;
|
||||
let httpError = "";
|
||||
let activeSessions = 0;
|
||||
try {
|
||||
const h = await probeGatewayHttp(gatewayProbeUrl());
|
||||
httpOk = h.httpOk;
|
||||
httpStatusCode = h.httpStatusCode;
|
||||
httpLatencyMs = h.httpLatencyMs;
|
||||
httpError = h.httpError;
|
||||
activeSessions = await readActiveSessions();
|
||||
} catch {
|
||||
httpOk = 0;
|
||||
httpStatusCode = 0;
|
||||
httpLatencyMs = 0;
|
||||
httpError = "probe_failed";
|
||||
activeSessions = 0;
|
||||
}
|
||||
|
||||
// --- decide health ---
|
||||
const healthy = alive === 1 && httpOk === 1;
|
||||
let childProcessCount = 0;
|
||||
if (alive && mainPid > 0) {
|
||||
try {
|
||||
childProcessCount = await countDirectChildren(mainPid);
|
||||
} catch {
|
||||
childProcessCount = 0;
|
||||
}
|
||||
}
|
||||
|
||||
// --- state machine: track consecutive failures ---
|
||||
const consecutiveFailures = healthy ? 0 : prevState.consecutiveFailures + 1;
|
||||
const lastRestartTs = prevState.lastRestartTs;
|
||||
const cooldown = prevState.restartCooldownMs;
|
||||
const cooldownElapsed = now - lastRestartTs >= cooldown;
|
||||
const storedMainPid = mainPid > 0 ? mainPid : 0;
|
||||
|
||||
// --- trigger restart? ---
|
||||
const shouldRestart =
|
||||
consecutiveFailures >= FAILURE_THRESHOLD && cooldownElapsed;
|
||||
|
||||
const nextState: SenseState = {
|
||||
consecutiveFailures,
|
||||
lastRestartTs: shouldRestart ? now : lastRestartTs,
|
||||
restartCooldownMs: cooldown,
|
||||
};
|
||||
|
||||
const signal = {
|
||||
ts: now,
|
||||
const row = {
|
||||
ts,
|
||||
alive,
|
||||
mainPid: mainPid > 0 ? mainPid : 0,
|
||||
mainPid: storedMainPid,
|
||||
rssBytes: alive ? rssBytes : 0,
|
||||
cpuPercent: alive ? cpuPercent : 0,
|
||||
uptimeSec: alive ? uptimeSec : 0,
|
||||
httpOk,
|
||||
httpStatusCode,
|
||||
httpLatencyMs,
|
||||
httpError,
|
||||
consecutiveFailures,
|
||||
activeSessions,
|
||||
childProcessCount: alive ? childProcessCount : 0,
|
||||
};
|
||||
|
||||
const trigger = shouldRestart
|
||||
? { command: "systemctl --user restart hermes-gateway" }
|
||||
: null;
|
||||
await db.insert(hermesGatewayHealth).values(row);
|
||||
|
||||
return { state: nextState, signal, trigger };
|
||||
return {
|
||||
ts: row.ts,
|
||||
alive: row.alive,
|
||||
mainPid: row.mainPid,
|
||||
rssBytes: row.rssBytes,
|
||||
cpuPercent: row.cpuPercent,
|
||||
uptimeSec: row.uptimeSec,
|
||||
activeSessions: row.activeSessions,
|
||||
childProcessCount: row.childProcessCount,
|
||||
};
|
||||
}
|
||||
14  senses/hermes-gateway-health/migrations/0001_init.sql  Normal file
@ -0,0 +1,14 @@
|
||||
-- Migration: 0001_init
|
||||
-- Creates the hermes_gateway_health table for hermes-gateway-health sense.
|
||||
|
||||
CREATE TABLE IF NOT EXISTS hermes_gateway_health (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
ts INTEGER NOT NULL,
|
||||
alive INTEGER NOT NULL,
|
||||
main_pid INTEGER NOT NULL,
|
||||
rss_bytes INTEGER NOT NULL,
|
||||
cpu_percent REAL NOT NULL,
|
||||
uptime_sec INTEGER NOT NULL,
|
||||
active_sessions INTEGER NOT NULL,
|
||||
child_process_count INTEGER NOT NULL
|
||||
);
|
||||
13  senses/hermes-gateway-health/schema.ts  Normal file
@ -0,0 +1,13 @@
|
||||
import { integer, real, sqliteTable } from "drizzle-orm/sqlite-core";
|
||||
|
||||
export const hermesGatewayHealth = sqliteTable("hermes_gateway_health", {
|
||||
id: integer("id").primaryKey({ autoIncrement: true }),
|
||||
ts: integer("ts").notNull(),
|
||||
alive: integer("alive").notNull(),
|
||||
mainPid: integer("main_pid").notNull(),
|
||||
rssBytes: integer("rss_bytes").notNull(),
|
||||
cpuPercent: real("cpu_percent").notNull(),
|
||||
uptimeSec: integer("uptime_sec").notNull(),
|
||||
activeSessions: integer("active_sessions").notNull(),
|
||||
childProcessCount: integer("child_process_count").notNull(),
|
||||
});
|
||||
121  senses/hermes-session-message-stats/index.js  Normal file
@ -0,0 +1,121 @@
|
||||
import { createReadStream } from "node:fs";
|
||||
import { readdir } from "node:fs/promises";
|
||||
import { homedir } from "node:os";
|
||||
import { join } from "node:path";
|
||||
import { createInterface } from "node:readline";
|
||||
import { hermesSessionMessageStats } from "./schema.ts";
|
||||
|
||||
const MEASUREMENT_WINDOW_MS = 900_000;
|
||||
const MEASUREMENT_WINDOW_SECONDS = 900;
|
||||
|
||||
/**
|
||||
* @param {string} filePath
|
||||
* @param {number} cutoffMs
|
||||
* @param {number} nowMs
|
||||
* @returns {Promise<{ user: number; assistant: number; tool: number; fileHadActivity: boolean }>}
|
||||
*/
|
||||
async function aggregateJsonlFile(filePath, cutoffMs, nowMs) {
|
||||
let user = 0;
|
||||
let assistant = 0;
|
||||
let tool = 0;
|
||||
let fileHadActivity = false;
|
||||
|
||||
const input = createReadStream(filePath, { encoding: "utf8" });
|
||||
const rl = createInterface({ input, crlfDelay: Infinity });
|
||||
try {
|
||||
for await (const line of rl) {
|
||||
const trimmed = line.trim();
|
||||
if (!trimmed) continue;
|
||||
let obj;
|
||||
try {
|
||||
obj = JSON.parse(trimmed);
|
||||
} catch {
|
||||
continue;
|
||||
}
|
||||
if (typeof obj.role !== "string" || typeof obj.timestamp !== "string") {
|
||||
continue;
|
||||
}
|
||||
const t = Date.parse(obj.timestamp);
|
||||
if (!Number.isFinite(t) || t < cutoffMs || t > nowMs) continue;
|
||||
|
||||
const roleNorm = obj.role.trim().toLowerCase();
|
||||
if (roleNorm === "user") {
|
||||
user++;
|
||||
fileHadActivity = true;
|
||||
} else if (roleNorm === "assistant") {
|
||||
assistant++;
|
||||
fileHadActivity = true;
|
||||
} else if (roleNorm === "tool") {
|
||||
tool++;
|
||||
fileHadActivity = true;
|
||||
}
|
||||
}
|
||||
} finally {
|
||||
rl.close();
|
||||
}
|
||||
|
||||
return { user, assistant, tool, fileHadActivity };
|
||||
}
|
||||
|
||||
export async function compute(db, _peers) {
|
||||
const nowMs = Date.now();
|
||||
const cutoffMs = nowMs - MEASUREMENT_WINDOW_MS;
|
||||
const ts = nowMs;
|
||||
|
||||
let totalUserMessages = 0;
|
||||
let totalAssistantMessages = 0;
|
||||
let totalToolMessages = 0;
|
||||
let activeSessions = 0;
|
||||
|
||||
const sessionsDir = join(homedir(), ".hermes", "sessions");
|
||||
let files = [];
|
||||
try {
|
||||
const entries = await readdir(sessionsDir, { withFileTypes: true });
|
||||
files = entries
|
||||
.filter((e) => e.isFile() && e.name.endsWith(".jsonl"))
|
||||
.map((e) => join(sessionsDir, e.name));
|
||||
} catch (err) {
|
||||
if (err && typeof err === "object" && "code" in err && err.code === "ENOENT") {
|
||||
files = [];
|
||||
} else {
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
|
||||
for (const filePath of files) {
|
||||
const { user, assistant, tool, fileHadActivity } = await aggregateJsonlFile(
|
||||
filePath,
|
||||
cutoffMs,
|
||||
nowMs,
|
||||
);
|
||||
totalUserMessages += user;
|
||||
totalAssistantMessages += assistant;
|
||||
totalToolMessages += tool;
|
||||
if (fileHadActivity) activeSessions++;
|
||||
}
|
||||
|
||||
const totalMessages =
|
||||
totalUserMessages + totalAssistantMessages + totalToolMessages;
|
||||
|
||||
const row = {
|
||||
ts,
|
||||
totalUserMessages,
|
||||
totalAssistantMessages,
|
||||
totalToolMessages,
|
||||
totalMessages,
|
||||
activeSessions,
|
||||
measurementWindowSeconds: MEASUREMENT_WINDOW_SECONDS,
|
||||
};
|
||||
|
||||
await db.insert(hermesSessionMessageStats).values(row);
|
||||
|
||||
return {
|
||||
ts: row.ts,
|
||||
totalUserMessages: row.totalUserMessages,
|
||||
totalAssistantMessages: row.totalAssistantMessages,
|
||||
totalToolMessages: row.totalToolMessages,
|
||||
totalMessages: row.totalMessages,
|
||||
activeSessions: row.activeSessions,
|
||||
measurementWindowSeconds: row.measurementWindowSeconds,
|
||||
};
|
||||
}
|
||||
13  senses/hermes-session-message-stats/migrations/0001_init.sql  Normal file
@ -0,0 +1,13 @@
|
||||
-- Migration: 0001_init
|
||||
-- Creates the hermes_session_message_stats table for hermes-session-message-stats sense.
|
||||
|
||||
CREATE TABLE IF NOT EXISTS hermes_session_message_stats (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
ts INTEGER NOT NULL,
|
||||
total_user_messages INTEGER NOT NULL,
|
||||
total_assistant_messages INTEGER NOT NULL,
|
||||
total_tool_messages INTEGER NOT NULL,
|
||||
total_messages INTEGER NOT NULL,
|
||||
active_sessions INTEGER NOT NULL,
|
||||
measurement_window_seconds INTEGER NOT NULL
|
||||
);
|
||||
12  senses/hermes-session-message-stats/schema.ts  Normal file
@ -0,0 +1,12 @@
|
||||
import { integer, sqliteTable } from "drizzle-orm/sqlite-core";
|
||||
|
||||
export const hermesSessionMessageStats = sqliteTable("hermes_session_message_stats", {
|
||||
id: integer("id").primaryKey({ autoIncrement: true }),
|
||||
ts: integer("ts").notNull(),
|
||||
totalUserMessages: integer("total_user_messages").notNull(),
|
||||
totalAssistantMessages: integer("total_assistant_messages").notNull(),
|
||||
totalToolMessages: integer("total_tool_messages").notNull(),
|
||||
totalMessages: integer("total_messages").notNull(),
|
||||
activeSessions: integer("active_sessions").notNull(),
|
||||
measurementWindowSeconds: integer("measurement_window_seconds").notNull(),
|
||||
});
|
||||
87  senses/linux-system-health/index.js  Normal file
@ -0,0 +1,87 @@
|
||||
import { loadavg, totalmem, freemem, uptime } from "node:os";
|
||||
import { execSync } from "node:child_process";
|
||||
import { readFile } from "node:fs/promises";
|
||||
import { snapshots } from "./schema.ts";
|
||||
|
||||
const SOCKSTAT_PATH = "/proc/net/sockstat";
|
||||
|
||||
function parseSockstat(content) {
|
||||
let socketsUsed = 0, tcpInuse = 0, tcpOrphan = 0, tcpTw = 0, tcpAlloc = 0, tcpMemPages = 0;
|
||||
|
||||
for (const line of content.split("\n")) {
|
||||
const trimmed = line.trim();
|
||||
if (trimmed.startsWith("sockets:")) {
|
||||
const parts = trimmed.split(/\s+/);
|
||||
const idx = parts.indexOf("used");
|
||||
if (idx !== -1 && idx + 1 < parts.length) {
|
||||
socketsUsed = Number.parseInt(parts[idx + 1], 10) || 0;
|
||||
}
|
||||
} else if (trimmed.startsWith("TCP:")) {
|
||||
const parts = trimmed.split(/\s+/);
|
||||
const map = {};
|
||||
for (let i = 1; i + 1 < parts.length; i += 2) {
|
||||
map[parts[i]] = Number.parseInt(parts[i + 1], 10) || 0;
|
||||
}
|
||||
tcpInuse = map.inuse ?? 0;
|
||||
tcpOrphan = map.orphan ?? 0;
|
||||
tcpTw = map.tw ?? 0;
|
||||
tcpAlloc = map.alloc ?? 0;
|
||||
tcpMemPages = map.mem ?? 0;
|
||||
}
|
||||
}
|
||||
|
||||
return { socketsUsed, tcpInuse, tcpOrphan, tcpTw, tcpAlloc, tcpMemPages };
|
||||
}
|
||||
|
||||
export async function compute(db, _peers) {
|
||||
const [load1, load5, load15] = loadavg();
|
||||
|
||||
const memTotal = totalmem();
|
||||
const memFree = freemem();
|
||||
const memUsed = memTotal - memFree;
|
||||
const memTotalMB = Math.round(memTotal / 1024 / 1024);
|
||||
const memUsedMB = Math.round(memUsed / 1024 / 1024);
|
||||
const memUsedPct = Math.round((memUsed / memTotal) * 10000) / 100;
|
||||
|
||||
let diskTotalGB = 0, diskUsedGB = 0, diskUsedPct = 0;
|
||||
try {
|
||||
const df = execSync("df -B1 / | tail -1", { encoding: "utf-8" }).trim();
|
||||
const parts = df.split(/\s+/);
|
||||
const total = Number(parts[1]);
|
||||
const used = Number(parts[2]);
|
||||
diskTotalGB = Math.round(total / 1024 / 1024 / 1024 * 100) / 100;
|
||||
diskUsedGB = Math.round(used / 1024 / 1024 / 1024 * 100) / 100;
|
||||
diskUsedPct = total > 0 ? Math.round((used / total) * 10000) / 100 : 0;
|
||||
} catch {}
|
||||
|
||||
// TCP socket stats
|
||||
let tcp = { socketsUsed: 0, tcpInuse: 0, tcpOrphan: 0, tcpTw: 0, tcpAlloc: 0, tcpMemPages: 0 };
|
||||
try {
|
||||
const content = await readFile(SOCKSTAT_PATH, "utf8");
|
||||
tcp = parseSockstat(content);
|
||||
} catch {}
|
||||
|
||||
const ts = Date.now();
|
||||
const uptimeSec = Math.round(uptime());
|
||||
|
||||
await db.insert(snapshots).values({
|
||||
ts, cpuLoad1m: load1, cpuLoad5m: load5, cpuLoad15m: load15,
|
||||
memTotalMB, memUsedMB, memUsedPct,
|
||||
diskTotalGB, diskUsedGB, diskUsedPct,
|
||||
uptimeSec,
|
||||
socketsUsed: tcp.socketsUsed,
|
||||
tcpInuse: tcp.tcpInuse,
|
||||
tcpOrphan: tcp.tcpOrphan,
|
||||
tcpTw: tcp.tcpTw,
|
||||
tcpAlloc: tcp.tcpAlloc,
|
||||
tcpMemPages: tcp.tcpMemPages,
|
||||
});
|
||||
|
||||
return {
|
||||
cpu: { load1m: load1, load5m: load5, load15m: load15 },
|
||||
memory: { totalMB: memTotalMB, usedMB: memUsedMB, usedPct: memUsedPct },
|
||||
disk: { totalGB: diskTotalGB, usedGB: diskUsedGB, usedPct: diskUsedPct },
|
||||
tcp: { socketsUsed: tcp.socketsUsed, inuse: tcp.tcpInuse, orphan: tcp.tcpOrphan, tw: tcp.tcpTw, alloc: tcp.tcpAlloc, memPages: tcp.tcpMemPages },
|
||||
uptimeSec,
|
||||
};
|
||||
}
|
||||
16  senses/linux-system-health/migrations/0001_init.sql  Normal file
@ -0,0 +1,16 @@
|
||||
-- Migration: 0001_init
|
||||
-- Creates the snapshots table for linux-system-health sense.
|
||||
|
||||
CREATE TABLE IF NOT EXISTS snapshots (
|
||||
ts INTEGER PRIMARY KEY,
|
||||
cpu_load_1m REAL NOT NULL,
|
||||
cpu_load_5m REAL NOT NULL,
|
||||
cpu_load_15m REAL NOT NULL,
|
||||
mem_total_mb INTEGER NOT NULL,
|
||||
mem_used_mb INTEGER NOT NULL,
|
||||
mem_used_pct REAL NOT NULL,
|
||||
disk_total_gb REAL NOT NULL,
|
||||
disk_used_gb REAL NOT NULL,
|
||||
disk_used_pct REAL NOT NULL,
|
||||
uptime_sec INTEGER NOT NULL
|
||||
);
|
||||
@ -0,0 +1,6 @@
|
||||
ALTER TABLE snapshots ADD COLUMN sockets_used INTEGER;
|
||||
ALTER TABLE snapshots ADD COLUMN tcp_inuse INTEGER;
|
||||
ALTER TABLE snapshots ADD COLUMN tcp_orphan INTEGER;
|
||||
ALTER TABLE snapshots ADD COLUMN tcp_tw INTEGER;
|
||||
ALTER TABLE snapshots ADD COLUMN tcp_alloc INTEGER;
|
||||
ALTER TABLE snapshots ADD COLUMN tcp_mem_pages INTEGER;
|
||||
22  senses/linux-system-health/schema.ts  Normal file
@ -0,0 +1,22 @@
|
||||
import { integer, real, sqliteTable, text } from "drizzle-orm/sqlite-core";
|
||||
|
||||
export const snapshots = sqliteTable("snapshots", {
|
||||
ts: integer("ts").primaryKey(),
|
||||
cpuLoad1m: real("cpu_load_1m").notNull(),
|
||||
cpuLoad5m: real("cpu_load_5m").notNull(),
|
||||
cpuLoad15m: real("cpu_load_15m").notNull(),
|
||||
memTotalMB: integer("mem_total_mb").notNull(),
|
||||
memUsedMB: integer("mem_used_mb").notNull(),
|
||||
memUsedPct: real("mem_used_pct").notNull(),
|
||||
diskTotalGB: real("disk_total_gb").notNull(),
|
||||
diskUsedGB: real("disk_used_gb").notNull(),
|
||||
diskUsedPct: real("disk_used_pct").notNull(),
|
||||
uptimeSec: integer("uptime_sec").notNull(),
|
||||
// TCP socket stats (merged from linux-tcp-socket-stats)
|
||||
socketsUsed: integer("sockets_used"),
|
||||
tcpInuse: integer("tcp_inuse"),
|
||||
tcpOrphan: integer("tcp_orphan"),
|
||||
tcpTw: integer("tcp_tw"),
|
||||
tcpAlloc: integer("tcp_alloc"),
|
||||
tcpMemPages: integer("tcp_mem_pages"),
|
||||
});
|
||||
@ -1,33 +0,0 @@
|
||||
import { join } from "node:path";
|
||||
import { createCursorAdapter, cursorAdapter } from "@uncaged/nerve-adapter-cursor";
|
||||
import { hermesAdapter } from "@uncaged/nerve-adapter-hermes";
|
||||
import { createDevelopSenseWorkflow } from "@uncaged/nerve-workflow-meta";
|
||||
|
||||
const HOME = process.env.HOME ?? "/home/azureuser";
|
||||
const NERVE_ROOT = join(HOME, ".uncaged-nerve");
|
||||
|
||||
const apiKey = process.env.DASHSCOPE_API_KEY;
|
||||
const baseUrl = process.env.DASHSCOPE_BASE_URL;
|
||||
const model = process.env.DASHSCOPE_MODEL ?? "qwen-plus";
|
||||
if (!apiKey || !baseUrl) {
|
||||
throw new Error("Set DASHSCOPE_API_KEY and DASHSCOPE_BASE_URL");
|
||||
}
|
||||
|
||||
const CURSOR_TIMEOUT_MS = 300_000;
|
||||
|
||||
const workflow = createDevelopSenseWorkflow({
|
||||
defaultAdapter: hermesAdapter,
|
||||
adapters: {
|
||||
planner: createCursorAdapter({
|
||||
type: "cursor",
|
||||
mode: "ask",
|
||||
model: "auto",
|
||||
timeout: CURSOR_TIMEOUT_MS,
|
||||
}),
|
||||
coder: cursorAdapter,
|
||||
},
|
||||
extract: { provider: { apiKey, baseUrl, model } },
|
||||
cwd: NERVE_ROOT,
|
||||
});
|
||||
|
||||
export default workflow;
|
||||
@ -1,34 +0,0 @@
|
||||
import { join } from "node:path";
|
||||
import { createCursorAdapter, cursorAdapter } from "@uncaged/nerve-adapter-cursor";
|
||||
import { hermesAdapter } from "@uncaged/nerve-adapter-hermes";
|
||||
import { createDevelopWorkflowWorkflow } from "@uncaged/nerve-workflow-meta";
|
||||
|
||||
const HOME = process.env.HOME ?? "/home/azureuser";
|
||||
const NERVE_ROOT = join(HOME, ".uncaged-nerve");
|
||||
|
||||
const apiKey = process.env.DASHSCOPE_API_KEY;
|
||||
const baseUrl = process.env.DASHSCOPE_BASE_URL;
|
||||
const model = process.env.DASHSCOPE_MODEL ?? "qwen-plus";
|
||||
|
||||
if (!apiKey || !baseUrl) {
|
||||
throw new Error("Set DASHSCOPE_API_KEY and DASHSCOPE_BASE_URL");
|
||||
}
|
||||
|
||||
const CURSOR_TIMEOUT_MS = 300_000;
|
||||
|
||||
const workflow = createDevelopWorkflowWorkflow({
|
||||
defaultAdapter: hermesAdapter,
|
||||
adapters: {
|
||||
planner: createCursorAdapter({
|
||||
type: "cursor",
|
||||
mode: "ask",
|
||||
model: "auto",
|
||||
timeout: CURSOR_TIMEOUT_MS,
|
||||
}),
|
||||
coder: cursorAdapter,
|
||||
},
|
||||
extract: { provider: { apiKey, baseUrl, model } },
|
||||
nerveRoot: NERVE_ROOT,
|
||||
});
|
||||
|
||||
export default workflow;
|
||||
@ -1,33 +0,0 @@
|
||||
import type { AgentFn, WorkflowDefinition } from "@uncaged/nerve-core";
|
||||
import type { LlmExtractorConfig } from "@uncaged/nerve-workflow-utils";
|
||||
import { createLlmAdapter } from "@uncaged/nerve-workflow-utils";
|
||||
|
||||
import { moderator } from "./moderator.js";
|
||||
import type { WorkflowMeta } from "./moderator.js";
|
||||
import { createAnswererRole } from "./roles/answerer.js";
|
||||
import { createExplorerRole } from "./roles/explorer.js";
|
||||
import { createQuestionerRole } from "./roles/questioner.js";
|
||||
|
||||
export type CreateKnowledgeExtractionDeps = {
|
||||
defaultAdapter: AgentFn;
|
||||
adapters?: Partial<Record<keyof WorkflowMeta, AgentFn>>;
|
||||
extract: LlmExtractorConfig;
|
||||
};
|
||||
|
||||
export function createKnowledgeExtractionWorkflow({
|
||||
defaultAdapter,
|
||||
adapters,
|
||||
extract,
|
||||
}: CreateKnowledgeExtractionDeps): WorkflowDefinition<WorkflowMeta> {
|
||||
const a = (role: keyof WorkflowMeta) => adapters?.[role] ?? defaultAdapter;
|
||||
const llmAdapter = createLlmAdapter(extract.provider);
|
||||
return {
|
||||
name: "extract-knowledge",
|
||||
roles: {
|
||||
questioner: createQuestionerRole(adapters?.questioner ?? llmAdapter, { extract }),
|
||||
answerer: createAnswererRole(adapters?.answerer ?? llmAdapter, { extract }),
|
||||
explorer: createExplorerRole(a("explorer"), { extract }),
|
||||
},
|
||||
moderator,
|
||||
};
|
||||
}
|
||||
@ -1,30 +0,0 @@
|
||||
import { join } from "node:path";
|
||||
import { createCursorAdapter } from "@uncaged/nerve-adapter-cursor";
|
||||
import { hermesAdapter } from "@uncaged/nerve-adapter-hermes";
|
||||
import { createKnowledgeExtractionWorkflow } from "./build.js";
|
||||
import { resolveDashScopeProvider } from "../solve-issue/lib/provider.js";
|
||||
|
||||
const HOME = process.env.HOME ?? "/home/azureuser";
|
||||
const NERVE_ROOT = join(HOME, ".uncaged-nerve");
|
||||
|
||||
const provider = await resolveDashScopeProvider(NERVE_ROOT);
|
||||
|
||||
if (provider === null) {
|
||||
throw new Error("Set DASHSCOPE_API_KEY and DASHSCOPE_BASE_URL (or cfg get equivalents)");
|
||||
}
|
||||
|
||||
const CURSOR_TIMEOUT_MS = 300_000;
|
||||
|
||||
const workflow = createKnowledgeExtractionWorkflow({
|
||||
defaultAdapter: hermesAdapter,
|
||||
adapters: {
|
||||
explorer: createCursorAdapter({
|
||||
type: "cursor",
|
||||
model: "claude-sonnet-4",
|
||||
timeout: CURSOR_TIMEOUT_MS,
|
||||
}),
|
||||
},
|
||||
extract: { provider },
|
||||
});
|
||||
|
||||
export default workflow;
|
||||
@ -1,74 +0,0 @@
|
||||
import type { Dirent } from "node:fs";
|
||||
import { readdir } from "node:fs/promises";
|
||||
import { join } from "node:path";
|
||||
|
||||
import type { StartStep, WorkflowMessage } from "@uncaged/nerve-core";
|
||||
|
||||
import type { ExplorerMeta } from "../roles/explorer.js";
|
||||
import type { QuestionerMeta } from "../roles/questioner.js";
|
||||
|
||||
async function walkMarkdownFiles(rootDir: string, base: string): Promise<string[]> {
|
||||
const out: string[] = [];
|
||||
let entries: Dirent[];
|
||||
try {
|
||||
entries = (await readdir(rootDir, { withFileTypes: true })) as Dirent[];
|
||||
} catch {
|
||||
return out;
|
||||
}
|
||||
for (const e of entries) {
|
||||
const name = e.name;
|
||||
const rel = base ? `${base}/${name}` : name;
|
||||
const full = join(rootDir, name);
|
||||
if (e.isDirectory()) {
|
||||
out.push(...(await walkMarkdownFiles(full, rel)));
|
||||
} else if (e.isFile() && name.endsWith(".md")) {
|
||||
out.push(rel.replace(/\\/g, "/"));
|
||||
}
|
||||
}
|
||||
return out;
|
||||
}
|
||||
|
||||
/** Enumerate all markdown files under `.knowledge/` as repo-relative paths; seed line first if present. */
|
||||
export async function bootstrapKnowledgeQueue(cwd: string, startContent: string): Promise<string[]> {
|
||||
const knowledgeDir = join(cwd, ".knowledge");
|
||||
const relFiles = await walkMarkdownFiles(knowledgeDir, "");
|
||||
const paths = relFiles.map((f) => `.knowledge/${f}`);
|
||||
const seed = startContent.trim().split(/\r?\n/u)[0]?.trim() ?? "";
|
||||
if (paths.length === 0 && seed.length > 0) {
|
||||
return [seed];
|
||||
}
|
||||
if (seed.length > 0 && paths.includes(seed)) {
|
||||
return [seed, ...paths.filter((p) => p !== seed)];
|
||||
}
|
||||
if (seed.length > 0 && !paths.includes(seed)) {
|
||||
return [seed, ...paths];
|
||||
}
|
||||
return [...paths].sort();
|
||||
}
|
||||
|
||||
function lastIndexOfRole(messages: WorkflowMessage[], role: string): number {
|
||||
for (let i = messages.length - 1; i >= 0; i--) {
|
||||
if (messages[i].role === role) return i;
|
||||
}
|
||||
return -1;
|
||||
}
|
||||
|
||||
/** Next queue for questioner: bootstrap, or continue after answerer / explorer. */
|
||||
export async function resolveQueueForQuestioner(
|
||||
start: StartStep,
|
||||
messages: WorkflowMessage[],
|
||||
cwd: string,
|
||||
): Promise<string[]> {
|
||||
const lastQi = lastIndexOfRole(messages, "questioner");
|
||||
if (lastQi === -1) {
|
||||
return bootstrapKnowledgeQueue(cwd, start.content);
|
||||
}
|
||||
const qMeta = messages[lastQi].meta as QuestionerMeta;
|
||||
const tail = messages.slice(lastQi + 1);
|
||||
const explorerMsg = tail.find((m) => m.role === "explorer");
|
||||
if (explorerMsg) {
|
||||
const eMeta = explorerMsg.meta as ExplorerMeta;
|
||||
return [...qMeta.remaining_queue, ...eMeta.new_cards];
|
||||
}
|
||||
return qMeta.remaining_queue;
|
||||
}
|
||||
@ -1,21 +0,0 @@
|
||||
import type { StartStep } from "@uncaged/nerve-core";
|
||||
|
||||
type StartMetaWithWorkdir = StartStep["meta"] & { workdir?: string | null };
|
||||
|
||||
/**
|
||||
* Resolve the target repo working directory.
|
||||
* Priority: start.meta.workdir → prompt second line (if absolute path) → cwd.
|
||||
*/
|
||||
export function resolveWorkdir(start: StartStep): string {
|
||||
const m = start.meta as StartMetaWithWorkdir;
|
||||
if (m.workdir) return m.workdir;
|
||||
|
||||
// Allow prompt to carry workdir on the second line: "seed\n/abs/path"
|
||||
const lines = start.content.split(/\r?\n/);
|
||||
if (lines.length >= 2) {
|
||||
const candidate = lines[1]!.trim();
|
||||
if (candidate.startsWith("/")) return candidate;
|
||||
}
|
||||
|
||||
return process.cwd();
|
||||
}
|
||||
@ -1,84 +0,0 @@
import { END } from "@uncaged/nerve-core";
import type { Moderator, ThreadContext } from "@uncaged/nerve-core";

import type { AnswererMeta } from "./roles/answerer.js";
import type { ExplorerMeta } from "./roles/explorer.js";
import type { QuestionerMeta } from "./roles/questioner.js";

export type WorkflowMeta = {
  questioner: QuestionerMeta;
  answerer: AnswererMeta;
  explorer: ExplorerMeta;
};

type Steps = ThreadContext<WorkflowMeta>["steps"];

function lastQuestionerRemaining(steps: Steps): QuestionerMeta | undefined {
  for (let i = steps.length - 1; i >= 0; i--) {
    const s = steps[i];
    if (s.role === "questioner") return s.meta;
  }
  return undefined;
}

/** End when the last two explorer invocations both added no new cards (issue #266 stagnation rule). */
function lastTwoExplorerRunsBothEmpty(steps: Steps): boolean {
  const explorerSteps = steps.filter((s) => s.role === "explorer");
  if (explorerSteps.length < 2) return false;
  const e1 = explorerSteps[explorerSteps.length - 1].meta as ExplorerMeta;
  const e2 = explorerSteps[explorerSteps.length - 2].meta as ExplorerMeta;
  return e1.new_cards.length === 0 && e2.new_cards.length === 0;
}

function queueAfterSkippedExplorer(steps: Steps): string[] {
  const q = lastQuestionerRemaining(steps);
  return q?.remaining_queue ?? [];
}

function queueAfterExplorerStep(steps: Steps): string[] {
  const last = steps[steps.length - 1];
  if (!last || last.role !== "explorer") return [];
  const q = lastQuestionerRemaining(steps);
  if (!q) return [];
  const e = last.meta as ExplorerMeta;
  return [...q.remaining_queue, ...e.new_cards];
}

export const moderator: Moderator<WorkflowMeta> = (context) => {
  const { steps } = context;

  if (steps.length === 0) {
    return "questioner";
  }

  const last = steps[steps.length - 1];

  if (last.role === "questioner") {
    return "answerer";
  }

  if (last.role === "answerer") {
    const am = last.meta as AnswererMeta;
    if (am.has_unanswered) {
      return "explorer";
    }
    const q = queueAfterSkippedExplorer(steps);
    if (q.length === 0) {
      return END;
    }
    return "questioner";
  }

  if (last.role === "explorer") {
    if (lastTwoExplorerRunsBothEmpty(steps)) {
      return END;
    }
    const q = queueAfterExplorerStep(steps);
    if (q.length === 0) {
      return END;
    }
    return "questioner";
  }

  return END;
};
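A rough sketch (not from the diff) of how the issue #266 stagnation rule plays out: two consecutive explorer steps with empty new_cards end the run even though cards remain queued. The step literals, the "./moderator.js" import path, and the context cast are assumptions made only for illustration:

```typescript
import { END } from "@uncaged/nerve-core";
import { moderator } from "./moderator.js";

const steps = [
  { role: "questioner", meta: { card: "a.md", questions: [], remaining_queue: ["b.md"] } },
  { role: "answerer", meta: { results: [], has_unanswered: true } },
  { role: "explorer", meta: { patches: [], new_cards: [] } },
  { role: "questioner", meta: { card: "b.md", questions: [], remaining_queue: ["c.md"] } },
  { role: "answerer", meta: { results: [], has_unanswered: true } },
  { role: "explorer", meta: { patches: [], new_cards: [] } },
];

// lastTwoExplorerRunsBothEmpty(steps) is true, so the moderator returns END here.
const next = moderator({ steps } as unknown as Parameters<typeof moderator>[0]);
console.log(next === END); // true
```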
@ -1,102 +0,0 @@
import type { AgentFn, Role, ThreadContext, WorkflowMessage } from "@uncaged/nerve-core";
import type { LlmExtractorConfig } from "@uncaged/nerve-workflow-utils";
import { createRole, nerveCommandEnv, spawnSafe } from "@uncaged/nerve-workflow-utils";
import { z } from "zod";

import { resolveWorkdir } from "../lib/workdir.js";

import type { QuestionerMeta } from "./questioner.js";

export const answererMetaSchema = z.object({
  results: z.array(
    z.object({
      id: z.string(),
      found: z.boolean(),
      source: z.string(),
      note: z.string(),
    }),
  ),
  has_unanswered: z.boolean(),
});

export type AnswererMeta = z.infer<typeof answererMetaSchema>;

export type CreateAnswererRoleDeps = {
  extract: LlmExtractorConfig;
};

function lastQuestionerMeta(messages: WorkflowMessage[]): QuestionerMeta | undefined {
  for (let i = messages.length - 1; i >= 0; i--) {
    if (messages[i].role === "questioner") {
      return messages[i].meta as QuestionerMeta;
    }
  }
  return undefined;
}

export async function answererPrompt(ctx: ThreadContext): Promise<string> {
  const messages = ctx.steps as unknown as WorkflowMessage[];
  const cwd = resolveWorkdir(ctx.start);
  const qm = lastQuestionerMeta(messages);
  if (!qm || qm.questions.length === 0) {
    throw new Error("answerer: prompt invoked without questioner questions — wrapped role should short-circuit");
  }

  const blocks: string[] = [];
  for (const q of qm.questions) {
    if ((ctx.start.meta as Record<string, unknown>).dryRun) {
      blocks.push(`### ${q.id}\n[dryRun] skipped nerve knowledge query\n`);
      continue;
    }
    const res = await spawnSafe(
      "nerve",
      ["knowledge", "query", q.question],
      {
        cwd,
        env: nerveCommandEnv(),
        timeoutMs: 120_000,
        dryRun: false,
        abortSignal: null,
      },
    );
    if (res.ok) {
      blocks.push(`### ${q.id} (${q.domain})\nQuestion: ${q.question}\n---\n${res.value.stdout}\n`);
    } else {
      const err = res.error;
      const detail =
        err.kind === "non_zero_exit"
          ? `exit ${err.exitCode}\n${err.stderr}`
          : err.kind === "timeout"
            ? `timeout\n${err.stderr}`
            : err.kind === "spawn_failed"
              ? err.message
              : "aborted";
      blocks.push(`### ${q.id}\nnerve knowledge query failed: ${detail}\n`);
    }
  }

  return [
    "You are the **answerer**. You MUST NOT read repository source code — only the CLI retrieval excerpts below.",
    "For each question id, decide whether the knowledge base already answers it.",
    "Set found=true only when the excerpt supports a confident answer; otherwise found=false.",
    "Set has_unanswered=true if any question remains unanswered by the knowledge base.",
    "",
    ...blocks,
  ].join("\n");
}

export function createAnswererRole(adapter: AgentFn, { extract }: CreateAnswererRoleDeps): Role<AnswererMeta> {
  const inner = createRole(adapter, answererPrompt, answererMetaSchema, extract);

  return async (ctx: ThreadContext) => {
    const messages = ctx.steps as unknown as WorkflowMessage[];
    const qm = lastQuestionerMeta(messages);
    if (!qm || qm.questions.length === 0) {
      return {
        content: "answerer: no questions from questioner; skipping CLI lookup.",
        meta: { results: [], has_unanswered: false },
      };
    }
    return inner(ctx);
  };
}
@ -1,93 +0,0 @@
import type { AgentFn, Role, ThreadContext, WorkflowMessage } from "@uncaged/nerve-core";
import type { LlmExtractorConfig } from "@uncaged/nerve-workflow-utils";
import { createRole } from "@uncaged/nerve-workflow-utils";
import { z } from "zod";

import { resolveWorkdir } from "../lib/workdir.js";

import type { AnswererMeta } from "./answerer.js";
import type { QuestionerMeta } from "./questioner.js";

export const explorerMetaSchema = z.object({
  patches: z.array(
    z.object({
      card: z.string(),
      section: z.string(),
    }),
  ),
  new_cards: z.array(z.string()),
});

export type ExplorerMeta = z.infer<typeof explorerMetaSchema>;

export type CreateExplorerRoleDeps = {
  extract: LlmExtractorConfig;
};

function lastMeta<M>(messages: WorkflowMessage[], role: string): M | undefined {
  for (let i = messages.length - 1; i >= 0; i--) {
    if (messages[i].role === role) {
      return messages[i].meta as M;
    }
  }
  return undefined;
}

export function explorerPrompt(ctx: ThreadContext): string {
  const messages = ctx.steps as unknown as WorkflowMessage[];
  const threadId = ctx.start.meta.threadId;
  const qm = lastMeta<QuestionerMeta>(messages, "questioner");
  const am = lastMeta<AnswererMeta>(messages, "answerer");
  const cwd = resolveWorkdir(ctx.start);

  const unanswered =
    am?.results.filter((r) => !r.found).map((r) => r.id) ?? [];

  return `You are the **explorer** in an extract-knowledge workflow.

## Context

- Thread: \`nerve thread ${threadId}\`
- Working directory (repo root for paths): ${cwd}
- Current knowledge card (questioner): ${qm?.card ?? "(unknown)"}

## Unanswered question ids

${JSON.stringify(unanswered)}

Use the prior answerer results in the thread to map ids to full question text when you read messages above.

## Task

For each unanswered question, **read the codebase** as needed, then either:

- Add a new markdown file under \`.knowledge/\`, or
- Patch an existing card (prefer updating the card listed above when appropriate).

After any write or patch to \`.knowledge\`, run:

\`\`\`bash
nerve knowledge sync
\`\`\`

from this repo root (${cwd}), and fix failures until sync succeeds.

## Output meta

Report \`patches\` as { card, section } entries for cards you edited (section is a short heading or path hint).
Report \`new_cards\` as repo-relative paths for brand-new files you created (e.g. \`.knowledge/new-topic.md\`).

Do not claim work you did not perform.`;
}

export function createExplorerRole(
  adapter: AgentFn,
  { extract }: CreateExplorerRoleDeps,
): Role<ExplorerMeta> {
  return createRole(
    adapter,
    async (ctx: ThreadContext) => explorerPrompt(ctx),
    explorerMetaSchema,
    extract,
  );
}
@ -1,108 +0,0 @@
import { readFile } from "node:fs/promises";
import { join } from "node:path";

import type { AgentFn, Role, ThreadContext, WorkflowMessage } from "@uncaged/nerve-core";
import type { LlmExtractorConfig } from "@uncaged/nerve-workflow-utils";
import { createRole } from "@uncaged/nerve-workflow-utils";
import { z } from "zod";

import { resolveQueueForQuestioner } from "../lib/knowledge-queue.js";
import { resolveWorkdir } from "../lib/workdir.js";

const questionerExtractSchema = z.object({
  questions: z
    .array(
      z.object({
        id: z.string(),
        question: z.string(),
        domain: z.string(),
      }),
    )
    .length(5),
});

export type QuestionerMeta = {
  /** Empty when no .knowledge cards and no work to do. */
  card: string;
  questions: { id: string; question: string; domain: string }[];
  remaining_queue: string[];
};

export type CreateQuestionerRoleDeps = {
  extract: LlmExtractorConfig;
};

function questionerSystem(): string {
  return `You are the **questioner** in an extract-knowledge workflow.

Read the given markdown knowledge card. Propose exactly **five** technical questions that are **not** already answered or covered by that card.

Rules:
- Questions must be concrete and technical.
- Each question needs a stable string id (e.g. q1, q2, q3, q4, q5), a short domain label (e.g. routing, storage), and the question text.
- Do not assume access to other files or tools — reason only from the card content shown.`;
}

function questionerUser(card: string, cardBody: string, remainingHint: string[]): string {
  return `Current card path: ${card}

Remaining queue after this card (paths, may be empty): ${JSON.stringify(remainingHint)}

--- Card content ---

${cardBody}`;
}

export async function questionerPrompt(ctx: ThreadContext): Promise<string> {
  const messages = ctx.steps as unknown as WorkflowMessage[];
  const cwd = resolveWorkdir(ctx.start);
  const queue = await resolveQueueForQuestioner(ctx.start, messages, cwd);
  if (queue.length === 0) {
    throw new Error(
      "questioner: prompt invoked with empty queue — wrapped role should short-circuit before LLM",
    );
  }
  const card = queue[0]!;
  const remaining_queue = queue.slice(1);
  let cardBody: string;
  try {
    cardBody = await readFile(join(cwd, card), "utf8");
  } catch (e) {
    const msg = e instanceof Error ? e.message : String(e);
    throw new Error(`questioner: failed to read ${card}: ${msg}`);
  }
  return `${questionerSystem()}\n\n${questionerUser(card, cardBody, remaining_queue)}`;
}

export function createQuestionerRole(adapter: AgentFn, { extract }: CreateQuestionerRoleDeps): Role<QuestionerMeta> {
  const inner = createRole(adapter, questionerPrompt, questionerExtractSchema, extract);

  return async (ctx: ThreadContext) => {
    const messages = ctx.steps as unknown as WorkflowMessage[];
    const cwd = resolveWorkdir(ctx.start);
    const queue = await resolveQueueForQuestioner(ctx.start, messages, cwd);
    if (queue.length === 0) {
      return {
        content:
          "questioner: no `.knowledge` markdown files found and no seed path in the trigger prompt; queue is empty.",
        meta: {
          card: "",
          questions: [],
          remaining_queue: [],
        },
      };
    }

    const card = queue[0]!;
    const remaining_queue = queue.slice(1);
    const r = await inner(ctx);
    return {
      content: r.content,
      meta: {
        card,
        questions: r.meta.questions,
        remaining_queue,
      },
    };
  };
}
workflows/gitea-issue-solver/index.ts (new file, 1069 lines): diff suppressed because it is too large
workflows/gitea-issue-solver/package.json (new file, 22 lines)
@ -0,0 +1,22 @@
|
||||
{
|
||||
"name": "gitea-issue-solver-workflow",
|
||||
"version": "0.0.1",
|
||||
"private": true,
|
||||
"type": "module",
|
||||
"dependencies": {
|
||||
"@uncaged/nerve-core": "latest",
|
||||
"@uncaged/nerve-workflow-utils": "latest",
|
||||
"zod": "^4.3.6"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/node": "^22.0.0",
|
||||
"typescript": "^5.7.0"
|
||||
},
|
||||
"pnpm": {
|
||||
"overrides": {
|
||||
"@uncaged/nerve-daemon": "link:../../../repos/nerve/packages/daemon",
|
||||
"@uncaged/nerve-core": "link:../../../repos/nerve/packages/core",
|
||||
"@uncaged/nerve-workflow-utils": "link:../../../repos/nerve/packages/workflow-utils"
|
||||
}
|
||||
}
|
||||
}
|
||||
workflows/gitea-issue-solver/pnpm-lock.yaml (generated, new file, 59 lines)
@ -0,0 +1,59 @@
|
||||
lockfileVersion: '9.0'
|
||||
|
||||
settings:
|
||||
autoInstallPeers: true
|
||||
excludeLinksFromLockfile: false
|
||||
|
||||
overrides:
|
||||
'@uncaged/nerve-daemon': link:../../../repos/nerve/packages/daemon
|
||||
'@uncaged/nerve-core': link:../../../repos/nerve/packages/core
|
||||
'@uncaged/nerve-workflow-utils': link:../../../repos/nerve/packages/workflow-utils
|
||||
|
||||
importers:
|
||||
|
||||
.:
|
||||
dependencies:
|
||||
'@uncaged/nerve-core':
|
||||
specifier: link:../../../repos/nerve/packages/core
|
||||
version: link:../../../repos/nerve/packages/core
|
||||
'@uncaged/nerve-workflow-utils':
|
||||
specifier: link:../../../repos/nerve/packages/workflow-utils
|
||||
version: link:../../../repos/nerve/packages/workflow-utils
|
||||
zod:
|
||||
specifier: ^4.3.6
|
||||
version: 4.3.6
|
||||
devDependencies:
|
||||
'@types/node':
|
||||
specifier: ^22.0.0
|
||||
version: 22.19.17
|
||||
typescript:
|
||||
specifier: ^5.7.0
|
||||
version: 5.9.3
|
||||
|
||||
packages:
|
||||
|
||||
'@types/node@22.19.17':
|
||||
resolution: {integrity: sha512-wGdMcf+vPYM6jikpS/qhg6WiqSV/OhG+jeeHT/KlVqxYfD40iYJf9/AE1uQxVWFvU7MipKRkRv8NSHiCGgPr8Q==}
|
||||
|
||||
typescript@5.9.3:
|
||||
resolution: {integrity: sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw==}
|
||||
engines: {node: '>=14.17'}
|
||||
hasBin: true
|
||||
|
||||
undici-types@6.21.0:
|
||||
resolution: {integrity: sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ==}
|
||||
|
||||
zod@4.3.6:
|
||||
resolution: {integrity: sha512-rftlrkhHZOcjDwkGlnUtZZkvaPHCsDATp4pGpuOOMDaTdDDXF91wuVDJoWoPsKX/3YPQ5fHuF3STjcYyKr+Qhg==}
|
||||
|
||||
snapshots:
|
||||
|
||||
'@types/node@22.19.17':
|
||||
dependencies:
|
||||
undici-types: 6.21.0
|
||||
|
||||
typescript@5.9.3: {}
|
||||
|
||||
undici-types@6.21.0: {}
|
||||
|
||||
zod@4.3.6: {}
|
||||
@ -7,13 +7,7 @@
     "strict": true,
     "skipLibCheck": true,
     "noEmit": true,
-    "allowImportingTsExtensions": true,
     "types": ["node"]
   },
-  "include": [
-    "senses/**/*.ts",
-    "workflows/**/*.ts",
-    "scripts/**/*.ts",
-    "workflows/_shared/**/*.ts"
-  ]
+  "include": ["./**/*.ts"]
 }
workflows/hello-world/index.ts (new file, 86 lines)
@ -0,0 +1,86 @@
import type {
  ModeratorContext,
  RoleResult,
  StartStep,
  WorkflowDefinition,
  WorkflowMessage,
} from "@uncaged/nerve-core";
import { END } from "@uncaged/nerve-core";

type WorkflowMeta = {
  greeter: {
    name: string;
    error: string | null;
  };
};

const DEFAULT_NAME = "friend";

function resolveNameFromContent(content: string): { name: string; error: string | null } {
  const trimmed = content.trim();
  if (trimmed === "") {
    return { name: DEFAULT_NAME, error: "empty_input" };
  }

  let jsonParsed: unknown;
  let parseOk: boolean;
  try {
    jsonParsed = JSON.parse(trimmed);
    parseOk = true;
  } catch {
    parseOk = false;
  }

  if (parseOk) {
    if (jsonParsed !== null && typeof jsonParsed === "object" && !Array.isArray(jsonParsed)) {
      const nameField = (jsonParsed as Record<string, unknown>).name;
      if (typeof nameField === "string") {
        const n = nameField.trim();
        if (n !== "") {
          return { name: n, error: null };
        }
        return { name: DEFAULT_NAME, error: "name_empty" };
      }
      return { name: DEFAULT_NAME, error: "missing_name" };
    }
    return { name: DEFAULT_NAME, error: "invalid_json_shape" };
  }

  return { name: trimmed, error: null };
}

async function greeter(
  start: StartStep,
  _messages: WorkflowMessage[],
): Promise<RoleResult<WorkflowMeta["greeter"]>> {
  try {
    const { name, error } = resolveNameFromContent(start.content);
    return {
      content: `Hello, ${name}!`,
      meta: { name, error },
    };
  } catch (unhandled) {
    const msg = unhandled instanceof Error ? unhandled.message : String(unhandled);
    return {
      content: `Hello, ${DEFAULT_NAME}!`,
      meta: { name: DEFAULT_NAME, error: `internal_error: ${msg}` },
    };
  }
}

const workflow: WorkflowDefinition<WorkflowMeta> = {
  name: "hello-world",
  roles: { greeter },
  moderator(context: ModeratorContext<WorkflowMeta>) {
    if (context.steps.length === 0) {
      return "greeter";
    }
    const last = context.steps[context.steps.length - 1];
    if (last.role === "greeter") {
      return END;
    }
    return END;
  },
};

export default workflow;
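A quick sketch (illustration only, not part of the diff) of how the greeter resolves names from different trigger payloads; the StartStep literals and casts are hypothetical shapes assumed for this example:

```typescript
import type { StartStep } from "@uncaged/nerve-core";
import workflow from "./index.js";

// JSON payload with a usable name field.
const jsonStart = { content: '{"name":"Ada"}', meta: {} } as StartStep;
const r1 = await workflow.roles.greeter(jsonStart, []);
console.log(r1.content); // "Hello, Ada!" with meta.error === null

// Plain text is not valid JSON, so it is used verbatim as the name.
const textStart = { content: "Grace", meta: {} } as StartStep;
const r2 = await workflow.roles.greeter(textStart, []);
console.log(r2.content); // "Hello, Grace!"

// An empty prompt falls back to DEFAULT_NAME with meta.error === "empty_input".
```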
workflows/hello-world/package.json (new file, 21 lines)
@ -0,0 +1,21 @@
|
||||
{
|
||||
"name": "hello-world-workflow",
|
||||
"version": "0.0.1",
|
||||
"private": true,
|
||||
"type": "module",
|
||||
"dependencies": {
|
||||
"@uncaged/nerve-core": "latest",
|
||||
"@uncaged/nerve-workflow-utils": "latest"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/node": "^22.0.0",
|
||||
"typescript": "^5.7.0"
|
||||
},
|
||||
"pnpm": {
|
||||
"overrides": {
|
||||
"@uncaged/nerve-daemon": "link:../../../repos/nerve/packages/daemon",
|
||||
"@uncaged/nerve-core": "link:../../../repos/nerve/packages/core",
|
||||
"@uncaged/nerve-workflow-utils": "link:../../../repos/nerve/packages/workflow-utils"
|
||||
}
|
||||
}
|
||||
}
|
||||
workflows/hello-world/pnpm-lock.yaml (generated, new file, 51 lines)
@ -0,0 +1,51 @@
|
||||
lockfileVersion: '9.0'
|
||||
|
||||
settings:
|
||||
autoInstallPeers: true
|
||||
excludeLinksFromLockfile: false
|
||||
|
||||
overrides:
|
||||
'@uncaged/nerve-daemon': link:../../../repos/nerve/packages/daemon
|
||||
'@uncaged/nerve-core': link:../../../repos/nerve/packages/core
|
||||
'@uncaged/nerve-workflow-utils': link:../../../repos/nerve/packages/workflow-utils
|
||||
|
||||
importers:
|
||||
|
||||
.:
|
||||
dependencies:
|
||||
'@uncaged/nerve-core':
|
||||
specifier: link:../../../repos/nerve/packages/core
|
||||
version: link:../../../repos/nerve/packages/core
|
||||
'@uncaged/nerve-workflow-utils':
|
||||
specifier: link:../../../repos/nerve/packages/workflow-utils
|
||||
version: link:../../../repos/nerve/packages/workflow-utils
|
||||
devDependencies:
|
||||
'@types/node':
|
||||
specifier: ^22.0.0
|
||||
version: 22.19.17
|
||||
typescript:
|
||||
specifier: ^5.7.0
|
||||
version: 5.9.3
|
||||
|
||||
packages:
|
||||
|
||||
'@types/node@22.19.17':
|
||||
resolution: {integrity: sha512-wGdMcf+vPYM6jikpS/qhg6WiqSV/OhG+jeeHT/KlVqxYfD40iYJf9/AE1uQxVWFvU7MipKRkRv8NSHiCGgPr8Q==}
|
||||
|
||||
typescript@5.9.3:
|
||||
resolution: {integrity: sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw==}
|
||||
engines: {node: '>=14.17'}
|
||||
hasBin: true
|
||||
|
||||
undici-types@6.21.0:
|
||||
resolution: {integrity: sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ==}
|
||||
|
||||
snapshots:
|
||||
|
||||
'@types/node@22.19.17':
|
||||
dependencies:
|
||||
undici-types: 6.21.0
|
||||
|
||||
typescript@5.9.3: {}
|
||||
|
||||
undici-types@6.21.0: {}
|
||||
workflows/hello-world/tsconfig.json (new file, 13 lines)
@ -0,0 +1,13 @@
|
||||
{
|
||||
"compilerOptions": {
|
||||
"target": "ES2022",
|
||||
"lib": ["ES2022"],
|
||||
"module": "NodeNext",
|
||||
"moduleResolution": "NodeNext",
|
||||
"strict": true,
|
||||
"skipLibCheck": true,
|
||||
"noEmit": true,
|
||||
"types": ["node"]
|
||||
},
|
||||
"include": ["./**/*.ts"]
|
||||
}
|
||||
workflows/pr-code-reviewer/index.ts (new file, 1263 lines): diff suppressed because it is too large
workflows/pr-code-reviewer/package.json (new file, 22 lines)
@ -0,0 +1,22 @@
|
||||
{
|
||||
"name": "pr-code-reviewer-workflow",
|
||||
"version": "0.0.1",
|
||||
"private": true,
|
||||
"type": "module",
|
||||
"dependencies": {
|
||||
"@uncaged/nerve-core": "latest",
|
||||
"@uncaged/nerve-workflow-utils": "latest",
|
||||
"zod": "^4.3.6"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/node": "^22.0.0",
|
||||
"typescript": "^5.7.0"
|
||||
},
|
||||
"pnpm": {
|
||||
"overrides": {
|
||||
"@uncaged/nerve-daemon": "link:../../../repos/nerve/packages/daemon",
|
||||
"@uncaged/nerve-core": "link:../../../repos/nerve/packages/core",
|
||||
"@uncaged/nerve-workflow-utils": "link:../../../repos/nerve/packages/workflow-utils"
|
||||
}
|
||||
}
|
||||
}
|
||||
workflows/pr-code-reviewer/pnpm-lock.yaml (generated, new file, 59 lines)
@ -0,0 +1,59 @@
|
||||
lockfileVersion: '9.0'
|
||||
|
||||
settings:
|
||||
autoInstallPeers: true
|
||||
excludeLinksFromLockfile: false
|
||||
|
||||
overrides:
|
||||
'@uncaged/nerve-daemon': link:../../../repos/nerve/packages/daemon
|
||||
'@uncaged/nerve-core': link:../../../repos/nerve/packages/core
|
||||
'@uncaged/nerve-workflow-utils': link:../../../repos/nerve/packages/workflow-utils
|
||||
|
||||
importers:
|
||||
|
||||
.:
|
||||
dependencies:
|
||||
'@uncaged/nerve-core':
|
||||
specifier: link:../../../repos/nerve/packages/core
|
||||
version: link:../../../repos/nerve/packages/core
|
||||
'@uncaged/nerve-workflow-utils':
|
||||
specifier: link:../../../repos/nerve/packages/workflow-utils
|
||||
version: link:../../../repos/nerve/packages/workflow-utils
|
||||
zod:
|
||||
specifier: ^4.3.6
|
||||
version: 4.3.6
|
||||
devDependencies:
|
||||
'@types/node':
|
||||
specifier: ^22.0.0
|
||||
version: 22.19.17
|
||||
typescript:
|
||||
specifier: ^5.7.0
|
||||
version: 5.9.3
|
||||
|
||||
packages:
|
||||
|
||||
'@types/node@22.19.17':
|
||||
resolution: {integrity: sha512-wGdMcf+vPYM6jikpS/qhg6WiqSV/OhG+jeeHT/KlVqxYfD40iYJf9/AE1uQxVWFvU7MipKRkRv8NSHiCGgPr8Q==}
|
||||
|
||||
typescript@5.9.3:
|
||||
resolution: {integrity: sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw==}
|
||||
engines: {node: '>=14.17'}
|
||||
hasBin: true
|
||||
|
||||
undici-types@6.21.0:
|
||||
resolution: {integrity: sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ==}
|
||||
|
||||
zod@4.3.6:
|
||||
resolution: {integrity: sha512-rftlrkhHZOcjDwkGlnUtZZkvaPHCsDATp4pGpuOOMDaTdDDXF91wuVDJoWoPsKX/3YPQ5fHuF3STjcYyKr+Qhg==}
|
||||
|
||||
snapshots:
|
||||
|
||||
'@types/node@22.19.17':
|
||||
dependencies:
|
||||
undici-types: 6.21.0
|
||||
|
||||
typescript@5.9.3: {}
|
||||
|
||||
undici-types@6.21.0: {}
|
||||
|
||||
zod@4.3.6: {}
|
||||
workflows/pr-code-reviewer/tsconfig.json (new file, 13 lines)
@ -0,0 +1,13 @@
|
||||
{
|
||||
"compilerOptions": {
|
||||
"target": "ES2022",
|
||||
"lib": ["ES2022"],
|
||||
"module": "NodeNext",
|
||||
"moduleResolution": "NodeNext",
|
||||
"strict": true,
|
||||
"skipLibCheck": true,
|
||||
"noEmit": true,
|
||||
"types": ["node"]
|
||||
},
|
||||
"include": ["./**/*.ts"]
|
||||
}
|
||||
workflows/pr-summarizer/index.ts (new file, 575 lines)
@ -0,0 +1,575 @@
/**
 * PR summarizer workflow: pulls the PR and its diff from Gitea, optionally runs LLM
 * analysis, and emits a Chinese-language Markdown summary.
 * The host must register workflows.pr-summarizer in nerve.yaml; trigger example:
 *   nerve workflow trigger pr-summarizer --payload '{"prompt":"<PR URL or JSON>"}'
 * A sense may return a workflow directive: `pr-summarizer|50|<prompt>` (see parseSenseWorkflowDirective).
 */
import type {
|
||||
ModeratorContext,
|
||||
RoleResult,
|
||||
StartStep,
|
||||
WorkflowDefinition,
|
||||
WorkflowMessage,
|
||||
} from "@uncaged/nerve-core";
|
||||
import { END } from "@uncaged/nerve-core";
|
||||
import {
|
||||
isDryRun,
|
||||
llmExtract,
|
||||
nerveAgentContext,
|
||||
readNerveYaml,
|
||||
spawnSafe,
|
||||
} from "@uncaged/nerve-workflow-utils";
|
||||
import { join } from "node:path";
|
||||
import { z } from "zod";
|
||||
|
||||
const HOME = process.env.HOME ?? "/home/azureuser";
|
||||
const NERVE_ROOT = join(HOME, ".uncaged-nerve");
|
||||
|
||||
/** unified diff 写入 meta 前的最大字符数(超出则截断并在 content 中说明) */
|
||||
const DIFF_TEXT_MAX_CHARS = 1_500_000;
|
||||
/** 送给分析模型的 diff 前缀长度上限 */
|
||||
const DIFF_LLM_MAX_CHARS = 100_000;
|
||||
|
||||
type PrSummarizerMeta = {
|
||||
fetcher: {
|
||||
prUrl: string | null;
|
||||
owner: string | null;
|
||||
repo: string | null;
|
||||
prIndex: number | null;
|
||||
giteaBaseUrl: string | null;
|
||||
title: string | null;
|
||||
state: string | null;
|
||||
diffText: string | null;
|
||||
diffByteLength: number | null;
|
||||
httpStatus: number | null;
|
||||
errorMessage: string | null;
|
||||
};
|
||||
analyzer: {
|
||||
analysisMarkdown: string | null;
|
||||
providerModel: string | null;
|
||||
errorMessage: string | null;
|
||||
};
|
||||
writer: {
|
||||
summaryZhMarkdown: string | null;
|
||||
errorMessage: string | null;
|
||||
};
|
||||
};
|
||||
|
||||
const jsonPromptSchema = z.object({
|
||||
prUrl: z.string().nullish(),
|
||||
owner: z.string().nullish(),
|
||||
repo: z.string().nullish(),
|
||||
index: z.number().int().positive().nullish(),
|
||||
baseUrl: z.string().nullish(),
|
||||
});
|
||||
|
||||
const analysisExtractSchema = z
|
||||
.object({
|
||||
analysisMarkdown: z.string().describe("Technical PR analysis in Markdown (can be English)."),
|
||||
})
|
||||
.describe("Structured PR analysis from the diff.");
|
||||
|
||||
const summaryExtractSchema = z
|
||||
.object({
|
||||
summaryZhMarkdown: z
|
||||
.string()
|
||||
.describe(
|
||||
"Final deliverable: Chinese Markdown with title, key changes, risks, and test suggestions.",
|
||||
),
|
||||
})
|
||||
.describe("Chinese Markdown PR summary.");
|
||||
|
||||
function getNerveYaml(): string {
|
||||
const result = readNerveYaml({ nerveRoot: NERVE_ROOT });
|
||||
return result.ok ? result.value : "# nerve.yaml unavailable";
|
||||
}
|
||||
|
||||
async function cfgGet(key: string): Promise<string | null> {
|
||||
const result = await spawnSafe("cfg", ["get", key], {
|
||||
cwd: NERVE_ROOT,
|
||||
env: null,
|
||||
timeoutMs: 10_000,
|
||||
});
|
||||
if (!result.ok) {
|
||||
return null;
|
||||
}
|
||||
return result.value.stdout.trim() || null;
|
||||
}
|
||||
|
||||
async function resolveDashScopeProvider(): Promise<{
|
||||
baseUrl: string;
|
||||
apiKey: string;
|
||||
model: string;
|
||||
} | null> {
|
||||
const apiKey = process.env.DASHSCOPE_API_KEY ?? (await cfgGet("DASHSCOPE_API_KEY"));
|
||||
const baseUrl = process.env.DASHSCOPE_BASE_URL ?? (await cfgGet("DASHSCOPE_BASE_URL"));
|
||||
const model =
|
||||
process.env.DASHSCOPE_MODEL ?? (await cfgGet("DASHSCOPE_MODEL")) ?? "qwen-plus";
|
||||
if (!apiKey || !baseUrl) {
|
||||
return null;
|
||||
}
|
||||
return { apiKey, baseUrl, model };
|
||||
}
|
||||
|
||||
function parseGiteaPullUrl(raw: string): {
|
||||
giteaBaseUrl: string;
|
||||
owner: string;
|
||||
repo: string;
|
||||
prIndex: number;
|
||||
prUrl: string;
|
||||
} | null {
|
||||
let u: URL;
|
||||
try {
|
||||
u = new URL(raw.trim());
|
||||
} catch {
|
||||
return null;
|
||||
}
|
||||
if (u.protocol !== "http:" && u.protocol !== "https:") {
|
||||
return null;
|
||||
}
|
||||
const parts = u.pathname.replace(/\/+$/, "").split("/").filter(Boolean);
|
||||
const pullsAt = parts.indexOf("pulls");
|
||||
if (pullsAt < 2 || pullsAt + 1 >= parts.length) {
|
||||
return null;
|
||||
}
|
||||
const indexStr = parts[pullsAt + 1];
|
||||
if (!indexStr || !/^\d+$/.test(indexStr)) {
|
||||
return null;
|
||||
}
|
||||
const owner = parts[pullsAt - 2];
|
||||
const repo = parts[pullsAt - 1];
|
||||
if (!owner || !repo) {
|
||||
return null;
|
||||
}
|
||||
const prIndex = Number.parseInt(indexStr, 10);
|
||||
if (!Number.isFinite(prIndex) || prIndex < 1) {
|
||||
return null;
|
||||
}
|
||||
const giteaBaseUrl = `${u.protocol}//${u.host}`;
|
||||
return { giteaBaseUrl, owner, repo, prIndex, prUrl: raw.trim() };
|
||||
}
|
||||
|
||||
type ResolvedPr = {
|
||||
prUrl: string | null;
|
||||
owner: string | null;
|
||||
repo: string | null;
|
||||
prIndex: number | null;
|
||||
giteaBaseUrl: string | null;
|
||||
parseError: string | null;
|
||||
};
|
||||
|
||||
function resolvePrFromContent(content: string): ResolvedPr {
|
||||
const empty: ResolvedPr = {
|
||||
prUrl: null,
|
||||
owner: null,
|
||||
repo: null,
|
||||
prIndex: null,
|
||||
giteaBaseUrl: null,
|
||||
parseError: null,
|
||||
};
|
||||
const trimmed = content.trim();
|
||||
if (!trimmed) {
|
||||
return { ...empty, parseError: "Empty prompt" };
|
||||
}
|
||||
|
||||
if (trimmed.startsWith("{")) {
|
||||
let parsed: unknown;
|
||||
try {
|
||||
parsed = JSON.parse(trimmed) as unknown;
|
||||
} catch {
|
||||
return { ...empty, parseError: "Invalid JSON in prompt" };
|
||||
}
|
||||
const row = jsonPromptSchema.safeParse(parsed);
|
||||
if (!row.success) {
|
||||
return { ...empty, parseError: `JSON validation failed: ${row.error.message}` };
|
||||
}
|
||||
const j = row.data;
|
||||
let owner: string | null = j.owner ?? null;
|
||||
let repo: string | null = j.repo ?? null;
|
||||
let prIndex: number | null = j.index ?? null;
|
||||
let giteaBaseUrl: string | null = j.baseUrl ?? null;
|
||||
let prUrl: string | null = j.prUrl ?? null;
|
||||
|
||||
if (j.prUrl) {
|
||||
const p = parseGiteaPullUrl(j.prUrl);
|
||||
if (p) {
|
||||
owner = owner ?? p.owner;
|
||||
repo = repo ?? p.repo;
|
||||
prIndex = prIndex ?? p.prIndex;
|
||||
giteaBaseUrl = giteaBaseUrl ?? p.giteaBaseUrl;
|
||||
prUrl = prUrl ?? p.prUrl;
|
||||
}
|
||||
}
|
||||
|
||||
if (owner && repo && prIndex !== null && giteaBaseUrl) {
|
||||
const normalizedBase = giteaBaseUrl.replace(/\/+$/, "");
|
||||
const builtUrl = `${normalizedBase}/${owner}/${repo}/pulls/${prIndex}`;
|
||||
return {
|
||||
prUrl: prUrl ?? builtUrl,
|
||||
owner,
|
||||
repo,
|
||||
prIndex,
|
||||
giteaBaseUrl: normalizedBase,
|
||||
parseError: null,
|
||||
};
|
||||
}
|
||||
return {
|
||||
...empty,
|
||||
parseError: "JSON prompt must include resolvable owner, repo, pr index, and baseUrl (or prUrl)",
|
||||
};
|
||||
}
|
||||
|
||||
const p = parseGiteaPullUrl(trimmed);
|
||||
if (!p) {
|
||||
return {
|
||||
...empty,
|
||||
parseError: "Not a valid Gitea PR URL (expected https://host/owner/repo/pulls/NUMBER)",
|
||||
};
|
||||
}
|
||||
return {
|
||||
prUrl: p.prUrl,
|
||||
owner: p.owner,
|
||||
repo: p.repo,
|
||||
prIndex: p.prIndex,
|
||||
giteaBaseUrl: p.giteaBaseUrl.replace(/\/+$/, ""),
|
||||
parseError: null,
|
||||
};
|
||||
}
|
||||
|
||||
function emptyFetcherMeta(): PrSummarizerMeta["fetcher"] {
|
||||
return {
|
||||
prUrl: null,
|
||||
owner: null,
|
||||
repo: null,
|
||||
prIndex: null,
|
||||
giteaBaseUrl: null,
|
||||
title: null,
|
||||
state: null,
|
||||
diffText: null,
|
||||
diffByteLength: null,
|
||||
httpStatus: null,
|
||||
errorMessage: null,
|
||||
};
|
||||
}
|
||||
|
||||
const workflow: WorkflowDefinition<PrSummarizerMeta> = {
|
||||
name: "pr-summarizer",
|
||||
|
||||
roles: {
|
||||
async fetcher(start: StartStep): Promise<RoleResult<PrSummarizerMeta["fetcher"]>> {
|
||||
const resolved = resolvePrFromContent(start.content);
|
||||
if (resolved.parseError !== null) {
|
||||
const meta: PrSummarizerMeta["fetcher"] = {
|
||||
...emptyFetcherMeta(),
|
||||
errorMessage: resolved.parseError,
|
||||
};
|
||||
return { content: `Fetcher: parse error — ${resolved.parseError}`, meta };
|
||||
}
|
||||
|
||||
const token = process.env.GITEA_TOKEN ?? null;
|
||||
if (!token || token.trim() === "") {
|
||||
const meta: PrSummarizerMeta["fetcher"] = {
|
||||
...emptyFetcherMeta(),
|
||||
prUrl: resolved.prUrl,
|
||||
owner: resolved.owner,
|
||||
repo: resolved.repo,
|
||||
prIndex: resolved.prIndex,
|
||||
giteaBaseUrl: resolved.giteaBaseUrl,
|
||||
errorMessage: "GITEA_TOKEN is not set",
|
||||
};
|
||||
return { content: "Fetcher: missing GITEA_TOKEN (set env before running).", meta };
|
||||
}
|
||||
|
||||
const apiRoot = `${resolved.giteaBaseUrl}/api/v1`;
|
||||
const pullJsonUrl = `${apiRoot}/repos/${resolved.owner}/${resolved.repo}/pulls/${resolved.prIndex}`;
|
||||
const pullDiffUrl = `${pullJsonUrl}.diff`;
|
||||
|
||||
const headersJson: Record<string, string> = {
|
||||
Authorization: `token ${token}`,
|
||||
Accept: "application/json",
|
||||
};
|
||||
|
||||
let title: string | null = null;
|
||||
let state: string | null = null;
|
||||
let httpStatus: number | null = null;
|
||||
let jsonError: string | null = null;
|
||||
|
||||
try {
|
||||
const prRes = await fetch(pullJsonUrl, { headers: headersJson });
|
||||
httpStatus = prRes.status;
|
||||
const bodyText = await prRes.text();
|
||||
if (!prRes.ok) {
|
||||
jsonError = `GET PR JSON failed: HTTP ${prRes.status} ${bodyText.slice(0, 500)}`;
|
||||
} else {
|
||||
const data = JSON.parse(bodyText) as Record<string, unknown>;
|
||||
const t = data.title;
|
||||
const s = data.state;
|
||||
title = typeof t === "string" ? t : null;
|
||||
state = typeof s === "string" ? s : null;
|
||||
}
|
||||
} catch (e) {
|
||||
jsonError = e instanceof Error ? e.message : String(e);
|
||||
}
|
||||
|
||||
let diffText: string | null = null;
|
||||
let diffByteLength: number | null = null;
|
||||
let diffError: string | null = jsonError;
|
||||
let diffCharTruncated = false;
|
||||
|
||||
if (jsonError === null) {
|
||||
try {
|
||||
const diffRes = await fetch(pullDiffUrl, {
|
||||
headers: {
|
||||
Authorization: `token ${token}`,
|
||||
Accept: "text/plain",
|
||||
},
|
||||
});
|
||||
httpStatus = diffRes.status;
|
||||
const rawDiff = await diffRes.text();
|
||||
if (!diffRes.ok) {
|
||||
diffError = `GET PR diff failed: HTTP ${diffRes.status} ${rawDiff.slice(0, 500)}`;
|
||||
} else {
|
||||
diffByteLength = Buffer.byteLength(rawDiff, "utf8");
|
||||
if (rawDiff.length > DIFF_TEXT_MAX_CHARS) {
|
||||
diffText = rawDiff.slice(0, DIFF_TEXT_MAX_CHARS);
|
||||
diffCharTruncated = true;
|
||||
diffError = null;
|
||||
} else {
|
||||
diffText = rawDiff;
|
||||
}
|
||||
}
|
||||
} catch (e) {
|
||||
diffError = e instanceof Error ? e.message : String(e);
|
||||
}
|
||||
}
|
||||
|
||||
const truncatedNote =
|
||||
diffCharTruncated && diffByteLength !== null
|
||||
? ` (diff truncated in meta to ${DIFF_TEXT_MAX_CHARS} chars; full byte length ${diffByteLength})`
|
||||
: "";
|
||||
|
||||
const meta: PrSummarizerMeta["fetcher"] = {
|
||||
prUrl: resolved.prUrl,
|
||||
owner: resolved.owner,
|
||||
repo: resolved.repo,
|
||||
prIndex: resolved.prIndex,
|
||||
giteaBaseUrl: resolved.giteaBaseUrl,
|
||||
title,
|
||||
state,
|
||||
diffText,
|
||||
diffByteLength,
|
||||
httpStatus,
|
||||
errorMessage: diffError,
|
||||
};
|
||||
|
||||
const content =
|
||||
diffError !== null
|
||||
? `Fetcher: ${resolved.owner}/${resolved.repo}#${resolved.prIndex} — failed. ${diffError}`
|
||||
: `Fetcher: ${resolved.owner}/${resolved.repo}#${resolved.prIndex} — ${title ?? "(no title)"} [${state ?? "?"}] diff bytes=${diffByteLength ?? 0} HTTP=${httpStatus ?? "?"}${truncatedNote}`;
|
||||
|
||||
return { content, meta };
|
||||
},
|
||||
|
||||
async analyzer(
|
||||
start: StartStep,
|
||||
messages: WorkflowMessage[],
|
||||
): Promise<RoleResult<PrSummarizerMeta["analyzer"]>> {
|
||||
const last = messages[messages.length - 1];
|
||||
const fm = last.meta as PrSummarizerMeta["fetcher"];
|
||||
|
||||
const skip = (reason: string): RoleResult<PrSummarizerMeta["analyzer"]> => ({
|
||||
content: `Analyzer skipped: ${reason}\n\n${reason}`,
|
||||
meta: {
|
||||
analysisMarkdown: `## 无法分析\n\n${reason}`,
|
||||
providerModel: null,
|
||||
errorMessage: reason,
|
||||
},
|
||||
});
|
||||
|
||||
if (last.role !== "fetcher") {
|
||||
return skip("上一则消息不是 fetcher 输出");
|
||||
}
|
||||
|
||||
if (fm.errorMessage !== null) {
|
||||
return skip(`拉取阶段失败: ${fm.errorMessage}`);
|
||||
}
|
||||
|
||||
const diff = fm.diffText;
|
||||
if (diff === null || diff.length === 0) {
|
||||
return skip("diff 为空,无法分析");
|
||||
}
|
||||
|
||||
if (isDryRun(start)) {
|
||||
return {
|
||||
content: "[dryRun] Analyzer skipped real LLM call.",
|
||||
meta: {
|
||||
analysisMarkdown: "## dryRun\n\n未调用模型。",
|
||||
providerModel: null,
|
||||
errorMessage: null,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
const provider = await resolveDashScopeProvider();
|
||||
if (provider === null) {
|
||||
const excerpt = diff.split("\n").slice(0, 80).join("\n");
|
||||
const analysisMarkdown =
|
||||
`## 静态摘要(无 LLM 凭据)\n\n` +
|
||||
`- 仓库: ${fm.owner}/${fm.repo} PR #${fm.prIndex}\n` +
|
||||
`- 标题: ${fm.title ?? "(null)"}\n` +
|
||||
`- diff 行数(近似): ${diff.split("\n").length}\n\n` +
|
||||
`### Diff 开头\n\n\`\`\`diff\n${excerpt}\n\`\`\`\n`;
|
||||
return {
|
||||
content: analysisMarkdown,
|
||||
meta: {
|
||||
analysisMarkdown,
|
||||
providerModel: null,
|
||||
errorMessage: null,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
const diffForModel = diff.length > DIFF_LLM_MAX_CHARS ? diff.slice(0, DIFF_LLM_MAX_CHARS) : diff;
|
||||
const truncated = diff.length > DIFF_LLM_MAX_CHARS;
|
||||
|
||||
const bundle =
|
||||
`Repository: ${fm.owner}/${fm.repo} PR index ${fm.prIndex}\n` +
|
||||
`Title: ${fm.title ?? ""}\n` +
|
||||
`State: ${fm.state ?? ""}\n` +
|
||||
(truncated ? `\n(diff truncated for model input to ${DIFF_LLM_MAX_CHARS} chars)\n` : "") +
|
||||
`\n--- unified diff ---\n${diffForModel}`;
|
||||
|
||||
const extractPrompt =
|
||||
`${nerveAgentContext}\n\n` +
|
||||
`You are a senior reviewer. Analyze this Gitea pull request diff.\n` +
|
||||
`Output structured findings as Markdown: scope, files touched, behavior change, risks, test ideas.\n\n` +
|
||||
`Optional nerve.yaml context:\n\`\`\`yaml\n${getNerveYaml().slice(0, 4000)}\n\`\`\`\n\n` +
|
||||
`---\n${bundle}`;
|
||||
|
||||
const extracted = await llmExtract({
|
||||
text: extractPrompt,
|
||||
schema: analysisExtractSchema,
|
||||
provider,
|
||||
dryRun: false,
|
||||
});
|
||||
|
||||
if (!extracted.ok) {
|
||||
const errText = JSON.stringify(extracted.error);
|
||||
return {
|
||||
content: `Analyzer LLM error: ${errText}`,
|
||||
meta: {
|
||||
analysisMarkdown: null,
|
||||
providerModel: provider.model,
|
||||
errorMessage: errText,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
const analysisMarkdown = extracted.value.analysisMarkdown;
|
||||
return {
|
||||
content: analysisMarkdown,
|
||||
meta: {
|
||||
analysisMarkdown,
|
||||
providerModel: provider.model,
|
||||
errorMessage: null,
|
||||
},
|
||||
};
|
||||
},
|
||||
|
||||
async writer(
|
||||
start: StartStep,
|
||||
messages: WorkflowMessage[],
|
||||
): Promise<RoleResult<PrSummarizerMeta["writer"]>> {
|
||||
const last = messages[messages.length - 1];
|
||||
const am = last.meta as PrSummarizerMeta["analyzer"];
|
||||
|
||||
const errOut = (msg: string): RoleResult<PrSummarizerMeta["writer"]> => ({
|
||||
content: `## 错误\n\n${msg}`,
|
||||
meta: {
|
||||
summaryZhMarkdown: `## 错误\n\n${msg}`,
|
||||
errorMessage: msg,
|
||||
},
|
||||
});
|
||||
|
||||
if (last.role !== "analyzer") {
|
||||
return errOut("上一则消息不是 analyzer 输出,无法生成总结。");
|
||||
}
|
||||
|
||||
if (am.errorMessage !== null) {
|
||||
return errOut(`分析阶段失败,未生成臆造总结:${am.errorMessage}`);
|
||||
}
|
||||
|
||||
const analysis = am.analysisMarkdown;
|
||||
if (analysis === null || analysis.trim() === "") {
|
||||
return errOut("分析正文为空,无法生成中文总结。");
|
||||
}
|
||||
|
||||
if (isDryRun(start)) {
|
||||
const stub = "## dryRun\n\n未调用模型生成中文总结。";
|
||||
return {
|
||||
content: stub,
|
||||
meta: { summaryZhMarkdown: stub, errorMessage: null },
|
||||
};
|
||||
}
|
||||
|
||||
const provider = await resolveDashScopeProvider();
|
||||
if (provider === null) {
|
||||
const stub =
|
||||
`## 中文摘要(无 LLM)\n\n` +
|
||||
`以下为上游分析原文摘录,请配置 DASHSCOPE 相关凭据以生成压缩中文总结。\n\n${analysis.slice(0, 8000)}`;
|
||||
return {
|
||||
content: stub,
|
||||
meta: { summaryZhMarkdown: stub, errorMessage: null },
|
||||
};
|
||||
}
|
||||
|
||||
const writerPrompt =
|
||||
`将下列 PR 技术分析改写为**中文 Markdown**交付物,包含:\n` +
|
||||
`- 标题(含仓库与 PR 编号)\n` +
|
||||
`- 变更要点(条列)\n` +
|
||||
`- 风险与注意事项\n` +
|
||||
`- 测试建议\n\n` +
|
||||
`---\n${analysis}`;
|
||||
|
||||
const extracted = await llmExtract({
|
||||
text: writerPrompt,
|
||||
schema: summaryExtractSchema,
|
||||
provider,
|
||||
dryRun: false,
|
||||
});
|
||||
|
||||
if (!extracted.ok) {
|
||||
const msg = JSON.stringify(extracted.error);
|
||||
return errOut(`Writer LLM 失败: ${msg}`);
|
||||
}
|
||||
|
||||
const summaryZhMarkdown = extracted.value.summaryZhMarkdown;
|
||||
return {
|
||||
content: summaryZhMarkdown,
|
||||
meta: {
|
||||
summaryZhMarkdown,
|
||||
errorMessage: null,
|
||||
},
|
||||
};
|
||||
},
|
||||
},
|
||||
|
||||
moderator(context: ModeratorContext<PrSummarizerMeta>) {
|
||||
if (context.steps.length === 0) {
|
||||
return "fetcher";
|
||||
}
|
||||
const signal = context.steps[context.steps.length - 1];
|
||||
if (signal.role === "fetcher") {
|
||||
return "analyzer";
|
||||
}
|
||||
if (signal.role === "analyzer") {
|
||||
return "writer";
|
||||
}
|
||||
if (signal.role === "writer") {
|
||||
return END;
|
||||
}
|
||||
return END;
|
||||
},
|
||||
};
|
||||
|
||||
export default workflow;
|
||||
workflows/pr-summarizer/package.json (new file, 21 lines)
@ -0,0 +1,21 @@
|
||||
{
|
||||
"name": "pr-summarizer-workflow",
|
||||
"version": "0.0.1",
|
||||
"private": true,
|
||||
"type": "module",
|
||||
"dependencies": {
|
||||
"@uncaged/nerve-core": "latest",
|
||||
"@uncaged/nerve-workflow-utils": "latest",
|
||||
"zod": "^4.3.6"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/node": "^22.0.0"
|
||||
},
|
||||
"pnpm": {
|
||||
"overrides": {
|
||||
"@uncaged/nerve-daemon": "link:../../../repos/nerve/packages/daemon",
|
||||
"@uncaged/nerve-core": "link:../../../repos/nerve/packages/core",
|
||||
"@uncaged/nerve-workflow-utils": "link:../../../repos/nerve/packages/workflow-utils"
|
||||
}
|
||||
}
|
||||
}
|
||||
workflows/pr-summarizer/pnpm-lock.yaml (generated, new file, 49 lines)
@ -0,0 +1,49 @@
|
||||
lockfileVersion: '9.0'
|
||||
|
||||
settings:
|
||||
autoInstallPeers: true
|
||||
excludeLinksFromLockfile: false
|
||||
|
||||
overrides:
|
||||
'@uncaged/nerve-daemon': link:../../../repos/nerve/packages/daemon
|
||||
'@uncaged/nerve-core': link:../../../repos/nerve/packages/core
|
||||
'@uncaged/nerve-workflow-utils': link:../../../repos/nerve/packages/workflow-utils
|
||||
|
||||
importers:
|
||||
|
||||
.:
|
||||
dependencies:
|
||||
'@uncaged/nerve-core':
|
||||
specifier: link:../../../repos/nerve/packages/core
|
||||
version: link:../../../repos/nerve/packages/core
|
||||
'@uncaged/nerve-workflow-utils':
|
||||
specifier: link:../../../repos/nerve/packages/workflow-utils
|
||||
version: link:../../../repos/nerve/packages/workflow-utils
|
||||
zod:
|
||||
specifier: ^4.3.6
|
||||
version: 4.3.6
|
||||
devDependencies:
|
||||
'@types/node':
|
||||
specifier: ^22.0.0
|
||||
version: 22.19.17
|
||||
|
||||
packages:
|
||||
|
||||
'@types/node@22.19.17':
|
||||
resolution: {integrity: sha512-wGdMcf+vPYM6jikpS/qhg6WiqSV/OhG+jeeHT/KlVqxYfD40iYJf9/AE1uQxVWFvU7MipKRkRv8NSHiCGgPr8Q==}
|
||||
|
||||
undici-types@6.21.0:
|
||||
resolution: {integrity: sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ==}
|
||||
|
||||
zod@4.3.6:
|
||||
resolution: {integrity: sha512-rftlrkhHZOcjDwkGlnUtZZkvaPHCsDATp4pGpuOOMDaTdDDXF91wuVDJoWoPsKX/3YPQ5fHuF3STjcYyKr+Qhg==}
|
||||
|
||||
snapshots:
|
||||
|
||||
'@types/node@22.19.17':
|
||||
dependencies:
|
||||
undici-types: 6.21.0
|
||||
|
||||
undici-types@6.21.0: {}
|
||||
|
||||
zod@4.3.6: {}
|
||||
workflows/pr-summarizer/tsconfig.json (new file, 13 lines)
@ -0,0 +1,13 @@
|
||||
{
|
||||
"compilerOptions": {
|
||||
"target": "ES2022",
|
||||
"lib": ["ES2022"],
|
||||
"module": "NodeNext",
|
||||
"moduleResolution": "NodeNext",
|
||||
"strict": true,
|
||||
"skipLibCheck": true,
|
||||
"noEmit": true,
|
||||
"types": ["node"]
|
||||
},
|
||||
"include": ["./**/*.ts"]
|
||||
}
|
||||
workflows/sense-generator/index.ts (new file, 31 lines)
@ -0,0 +1,31 @@
import type { WorkflowDefinition } from "@uncaged/nerve-core";
import { END } from "@uncaged/nerve-core";
import { buildPlannerRole } from "./roles/planner/index.js";
import { buildCoderRole } from "./roles/coder/index.js";
import { tester } from "./roles/tester/index.js";

import type { SenseMeta } from "./roles/types.js";

async function buildWorkflow(): Promise<WorkflowDefinition<SenseMeta>> {
  const planner = await buildPlannerRole();
  const coder = await buildCoderRole();

  return {
    name: "sense-generator",
    roles: { planner, coder, tester },
    moderator(context) {
      if (context.steps.length === 0) return "planner";
      const last = context.steps[context.steps.length - 1];
      if (last.role === "planner") return "coder";
      if (last.role === "coder") return "tester";
      if (last.role === "tester") {
        if (last.meta.passed) return END;
        return last.meta.attempt < 3 ? "coder" : END;
      }
      return END;
    },
  };
}

const workflow = await buildWorkflow();
export default workflow;
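In plain terms, the moderator above retries the coder after each failed test run until the tester's attempt counter reaches 3. A small sketch of that decision (the step literals and the context cast are assumptions for illustration, not from the diff):

```typescript
import workflow from "./index.js";

// A failed second attempt sends control back to the coder (attempt < 3)...
const retry = workflow.moderator(
  { steps: [{ role: "tester", meta: { passed: false, attempt: 2 } }] } as unknown as Parameters<
    typeof workflow.moderator
  >[0],
);
console.log(retry); // "coder"

// ...while a third failure ends the run (END).
const giveUp = workflow.moderator(
  { steps: [{ role: "tester", meta: { passed: false, attempt: 3 } }] } as unknown as Parameters<
    typeof workflow.moderator
  >[0],
);
```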
workflows/sense-generator/package.json (new file, 22 lines)
@ -0,0 +1,22 @@
|
||||
{
|
||||
"name": "sense-generator-workflow",
|
||||
"version": "0.0.1",
|
||||
"private": true,
|
||||
"type": "module",
|
||||
"dependencies": {
|
||||
"@uncaged/nerve-core": "latest",
|
||||
"@uncaged/nerve-workflow-utils": "latest",
|
||||
"zod": "^4.3.6"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/node": "^22.0.0",
|
||||
"typescript": "^5.7.0"
|
||||
},
|
||||
"pnpm": {
|
||||
"overrides": {
|
||||
"@uncaged/nerve-daemon": "link:../../../repos/nerve/packages/daemon",
|
||||
"@uncaged/nerve-core": "link:../../../repos/nerve/packages/core",
|
||||
"@uncaged/nerve-workflow-utils": "link:../../../repos/nerve/packages/workflow-utils"
|
||||
}
|
||||
}
|
||||
}
|
||||
workflows/sense-generator/pnpm-lock.yaml (generated, new file, 59 lines)
@ -0,0 +1,59 @@
|
||||
lockfileVersion: '9.0'
|
||||
|
||||
settings:
|
||||
autoInstallPeers: true
|
||||
excludeLinksFromLockfile: false
|
||||
|
||||
overrides:
|
||||
'@uncaged/nerve-daemon': link:../../../repos/nerve/packages/daemon
|
||||
'@uncaged/nerve-core': link:../../../repos/nerve/packages/core
|
||||
'@uncaged/nerve-workflow-utils': link:../../../repos/nerve/packages/workflow-utils
|
||||
|
||||
importers:
|
||||
|
||||
.:
|
||||
dependencies:
|
||||
'@uncaged/nerve-core':
|
||||
specifier: link:../../../repos/nerve/packages/core
|
||||
version: link:../../../repos/nerve/packages/core
|
||||
'@uncaged/nerve-workflow-utils':
|
||||
specifier: link:../../../repos/nerve/packages/workflow-utils
|
||||
version: link:../../../repos/nerve/packages/workflow-utils
|
||||
zod:
|
||||
specifier: ^4.3.6
|
||||
version: 4.3.6
|
||||
devDependencies:
|
||||
'@types/node':
|
||||
specifier: ^22.0.0
|
||||
version: 22.19.17
|
||||
typescript:
|
||||
specifier: ^5.7.0
|
||||
version: 5.9.3
|
||||
|
||||
packages:
|
||||
|
||||
'@types/node@22.19.17':
|
||||
resolution: {integrity: sha512-wGdMcf+vPYM6jikpS/qhg6WiqSV/OhG+jeeHT/KlVqxYfD40iYJf9/AE1uQxVWFvU7MipKRkRv8NSHiCGgPr8Q==}
|
||||
|
||||
typescript@5.9.3:
|
||||
resolution: {integrity: sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw==}
|
||||
engines: {node: '>=14.17'}
|
||||
hasBin: true
|
||||
|
||||
undici-types@6.21.0:
|
||||
resolution: {integrity: sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ==}
|
||||
|
||||
zod@4.3.6:
|
||||
resolution: {integrity: sha512-rftlrkhHZOcjDwkGlnUtZZkvaPHCsDATp4pGpuOOMDaTdDDXF91wuVDJoWoPsKX/3YPQ5fHuF3STjcYyKr+Qhg==}
|
||||
|
||||
snapshots:
|
||||
|
||||
'@types/node@22.19.17':
|
||||
dependencies:
|
||||
undici-types: 6.21.0
|
||||
|
||||
typescript@5.9.3: {}
|
||||
|
||||
undici-types@6.21.0: {}
|
||||
|
||||
zod@4.3.6: {}
|
||||
workflows/sense-generator/roles/coder/index.ts (new file, 33 lines)
@ -0,0 +1,33 @@
import { createCursorRole } from "@uncaged/nerve-workflow-utils";
import { z } from "zod";
import { resolveDashScopeProvider, NERVE_ROOT, SENSES_DIR } from "../shared.js";

import type { SenseMeta } from "../types.js";

export async function buildCoderRole() {
  const provider = await resolveDashScopeProvider();
  if (provider === null) {
    throw new Error("Cannot create coder: set DASHSCOPE_API_KEY and DASHSCOPE_BASE_URL");
  }
  return createCursorRole<SenseMeta["coder"]>({
    cwd: NERVE_ROOT,
    mode: "default",
    prompt: async (threadId) =>
      `Read the workflow thread for the planner's sense design: \`nerve thread ${threadId}\`

Implement the sense. Create exactly:
1. The sense directory under ${SENSES_DIR}/<sense-name>/
2. index.js — export async function compute(db, _peers), import schema from "./schema.ts"
3. schema.ts — drizzle-orm/sqlite-core
4. migrations/0001_init.sql — must match schema.ts
5. Update ${NERVE_ROOT}/nerve.yaml — add sense config + reflex entry

Follow the patterns from existing senses. Create all files now.`,
    extract: {
      provider,
      schema: z.object({
        filesCreated: z.boolean().describe("true if the sense files were created"),
      }),
    },
  });
}
workflows/sense-generator/roles/planner/index.ts (new file, 48 lines)
@ -0,0 +1,48 @@
|
||||
import { createCursorRole } from "@uncaged/nerve-workflow-utils";
|
||||
import { readFileSync } from "node:fs";
|
||||
import { join } from "node:path";
|
||||
import { z } from "zod";
|
||||
import { resolveDashScopeProvider, buildSenseExamples, getNerveYaml, NERVE_ROOT } from "../shared.js";
|
||||
import type { SenseMeta } from "../types.js";
|
||||
|
||||
const senseExamples = buildSenseExamples();
|
||||
const nerveYaml = getNerveYaml();
|
||||
|
||||
export async function buildPlannerRole() {
|
||||
const provider = await resolveDashScopeProvider();
|
||||
if (provider === null) {
|
||||
throw new Error("Cannot create planner: set DASHSCOPE_API_KEY and DASHSCOPE_BASE_URL");
|
||||
}
|
||||
return createCursorRole<SenseMeta["planner"]>({
|
||||
cwd: NERVE_ROOT,
|
||||
mode: "ask",
|
||||
prompt: async (threadId) =>
|
||||
`You are planning a new Nerve sense.
|
||||
|
||||
Read the workflow thread for the user's request: \`nerve thread ${threadId}\`
|
||||
|
||||
Pick a good kebab-case name for this sense. Produce a PLAN (not code) in markdown:
|
||||
|
||||
## Sense Design
|
||||
### Name — kebab-case
|
||||
### Fields — name, type (integer/real/text), description
|
||||
### Compute Logic — step-by-step, specific Node.js APIs or shell commands
|
||||
### Trigger Config — group, interval, throttle, timeout
|
||||
|
||||
Reference senses:
|
||||
${senseExamples}
|
||||
|
||||
Current nerve.yaml:
|
||||
\`\`\`yaml
|
||||
${nerveYaml}
|
||||
\`\`\`
|
||||
|
||||
Output ONLY the plan. Be precise and implementation-ready.`,
|
||||
extract: {
|
||||
provider,
|
||||
schema: z.object({
|
||||
senseName: z.string().describe("kebab-case sense name from the plan"),
|
||||
}),
|
||||
},
|
||||
});
|
||||
}
|
||||
workflows/sense-generator/roles/shared.ts (new file, 63 lines)
@ -0,0 +1,63 @@
|
||||
import { spawnSafe } from "@uncaged/nerve-workflow-utils";
|
||||
import { existsSync, readFileSync } from "node:fs";
|
||||
import { join } from "node:path";
|
||||
|
||||
export const HOME = process.env.HOME ?? "/home/azureuser";
|
||||
export const NERVE_ROOT = join(HOME, ".uncaged-nerve");
|
||||
export const SENSES_DIR = join(NERVE_ROOT, "senses");
|
||||
|
||||
export async function cfgGet(key: string): Promise<string | null> {
|
||||
const result = await spawnSafe("cfg", ["get", key], {
|
||||
cwd: NERVE_ROOT,
|
||||
env: null,
|
||||
timeoutMs: 10_000,
|
||||
});
|
||||
if (!result.ok) return null;
|
||||
return result.value.stdout.trim() || null;
|
||||
}
|
||||
|
||||
export async function resolveDashScopeProvider(): Promise<{
|
||||
baseUrl: string;
|
||||
apiKey: string;
|
||||
model: string;
|
||||
} | null> {
|
||||
const apiKey = process.env.DASHSCOPE_API_KEY ?? (await cfgGet("DASHSCOPE_API_KEY"));
|
||||
const baseUrl = process.env.DASHSCOPE_BASE_URL ?? (await cfgGet("DASHSCOPE_BASE_URL"));
|
||||
const model = process.env.DASHSCOPE_MODEL ?? (await cfgGet("DASHSCOPE_MODEL")) ?? "qwen-plus";
|
||||
if (!apiKey || !baseUrl) return null;
|
||||
return { apiKey, baseUrl, model };
|
||||
}
|
||||
|
||||
export function getNerveYaml(): string {
|
||||
try {
|
||||
return readFileSync(join(NERVE_ROOT, "nerve.yaml"), "utf-8");
|
||||
} catch {
|
||||
return "# nerve.yaml unavailable";
|
||||
}
|
||||
}
|
||||
|
||||
export function buildSenseExamples(): string {
|
||||
const examples: string[] = [];
|
||||
for (const name of ["cpu-usage", "linux-system-health"]) {
|
||||
const dir = join(SENSES_DIR, name);
|
||||
if (!existsSync(dir)) continue;
|
||||
const indexFile = existsSync(join(dir, "index.js"))
|
||||
? readFileSync(join(dir, "index.js"), "utf-8")
|
||||
: "";
|
||||
const schema = existsSync(join(dir, "schema.ts"))
|
||||
? readFileSync(join(dir, "schema.ts"), "utf-8")
|
||||
: "";
|
||||
const migrationDir = join(dir, "migrations");
|
||||
let migration = "";
|
||||
if (existsSync(join(migrationDir, "0001_init.sql"))) {
|
||||
migration = readFileSync(join(migrationDir, "0001_init.sql"), "utf-8");
|
||||
}
|
||||
examples.push(
|
||||
`### Example sense: ${name}\n\n` +
|
||||
`**index.js:**\n\`\`\`js\n${indexFile}\n\`\`\`\n\n` +
|
||||
`**schema.ts:**\n\`\`\`ts\n${schema}\n\`\`\`\n\n` +
|
||||
`**migrations/0001_init.sql:**\n\`\`\`sql\n${migration}\n\`\`\``,
|
||||
);
|
||||
}
|
||||
return examples.join("\n\n---\n\n");
|
||||
}
|
||||
workflows/sense-generator/roles/tester/index.ts (new file, 122 lines)
@ -0,0 +1,122 @@
|
||||
import type { RoleResult, StartStep, WorkflowMessage } from "@uncaged/nerve-core";
|
||||
import { spawnSafe } from "@uncaged/nerve-workflow-utils";
|
||||
import type { SpawnError } from "@uncaged/nerve-workflow-utils";
|
||||
import { existsSync } from "node:fs";
|
||||
import { join } from "node:path";
|
||||
import { NERVE_ROOT, SENSES_DIR } from "../shared.js";
|
||||
|
||||
import type { SenseMeta } from "../types.js";
|
||||
|
||||
function formatSpawnFailure(error: SpawnError): string {
|
||||
if (error.kind === "spawn_failed") return error.message;
|
||||
if (error.kind === "timeout") return `timeout (stdout=${error.stdout.slice(0, 200)})`;
|
||||
return `exit ${error.exitCode} stderr=${error.stderr.slice(0, 400)}`;
|
||||
}
|
||||
|
||||
async function runSenseSmokeTest(
|
||||
senseName: string,
|
||||
): Promise<{ ok: boolean; log: string; reason: string }> {
|
||||
const logParts: string[] = [];
|
||||
|
||||
const runNerve = async (
|
||||
args: string[],
|
||||
): Promise<{ ok: true; out: string } | { ok: false; err: string }> => {
|
||||
const result = await spawnSafe("nerve", args, {
|
||||
cwd: NERVE_ROOT,
|
||||
env: null,
|
||||
timeoutMs: 300_000,
|
||||
});
|
||||
if (!result.ok) return { ok: false, err: formatSpawnFailure(result.error) };
|
||||
return { ok: true, out: result.value.stdout };
|
||||
};
|
||||
|
||||
const statusRun = await runNerve(["status"]);
|
||||
if (!statusRun.ok) {
|
||||
return {
|
||||
ok: false,
|
||||
log: `=== nerve status ===\nERROR: ${statusRun.err}`,
|
||||
reason: `Smoke test command failed: ${statusRun.err}`,
|
||||
};
|
||||
}
|
||||
logParts.push("=== nerve status ===\n" + statusRun.out);
|
||||
if (!statusRun.out.includes(senseName)) {
|
||||
return {
|
||||
ok: false,
|
||||
log: logParts.join("\n\n"),
|
||||
reason: `Sense "${senseName}" not listed in \`nerve status\` output`,
|
||||
};
|
||||
}
|
||||
|
||||
const triggerRun = await runNerve(["sense", "trigger", senseName]);
|
||||
if (!triggerRun.ok) {
|
||||
logParts.push(`=== nerve sense trigger ===\nERROR: ${triggerRun.err}`);
|
||||
return { ok: false, log: logParts.join("\n\n"), reason: `Trigger failed: ${triggerRun.err}` };
|
||||
}
|
||||
logParts.push("=== nerve sense trigger ===\n" + triggerRun.out);
|
||||
|
||||
let lastQuery = "";
|
||||
for (let i = 0; i < 25; i++) {
|
||||
await new Promise((r) => setTimeout(r, 1000));
|
||||
const queryRun = await runNerve(["sense", "query", senseName]);
|
||||
if (!queryRun.ok) {
|
||||
logParts.push(`=== query attempt ${i + 1} ===\nERROR: ${queryRun.err}`);
|
||||
} else {
|
||||
lastQuery = queryRun.out;
|
||||
logParts.push(`=== query attempt ${i + 1} ===\n${lastQuery}`);
|
||||
if (!lastQuery.includes("(0 rows)")) {
|
||||
return {
|
||||
ok: true,
|
||||
log: logParts.join("\n\n"),
|
||||
reason: "Trigger succeeded and query returned at least one row",
|
||||
};
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
ok: false,
|
||||
log: logParts.join("\n\n"),
|
||||
reason: lastQuery.includes("(0 rows)")
|
||||
? "Query still returned 0 rows after trigger"
|
||||
: "Timed out waiting for successful sense query",
|
||||
};
|
||||
}
|
||||
|
||||
export async function tester(
|
||||
_start: StartStep,
|
||||
messages: WorkflowMessage[],
|
||||
): Promise<RoleResult<SenseMeta["tester"]>> {
|
||||
const attempt = messages.filter((m) => m.role === "tester").length + 1;
|
||||
|
||||
const plannerStep = messages.find((m) => m.role === "planner");
|
||||
const senseName = plannerStep
|
||||
? (plannerStep.meta as SenseMeta["planner"]).senseName
|
||||
: "";
|
||||
|
||||
if (senseName.length === 0) {
|
||||
return {
|
||||
content: "FAIL — no senseName from planner",
|
||||
meta: { passed: false, attempt },
|
||||
};
|
||||
}
|
||||
|
||||
const senseDir = join(SENSES_DIR, senseName);
|
||||
const missing = [
|
||||
existsSync(join(senseDir, "index.js")) ? null : "index.js",
|
||||
existsSync(join(senseDir, "schema.ts")) ? null : "schema.ts",
|
||||
existsSync(join(senseDir, "migrations", "0001_init.sql")) ? null : "migrations/0001_init.sql",
|
||||
].filter((x) => x !== null);
|
||||
|
||||
if (missing.length > 0) {
|
||||
return {
|
||||
content: `FAIL — missing files: ${missing.join(", ")}`,
|
||||
meta: { passed: false, attempt },
|
||||
};
|
||||
}
|
||||
|
||||
const smoke = await runSenseSmokeTest(senseName);
|
||||
return {
|
||||
content: `${smoke.ok ? "PASS" : "FAIL"} — ${smoke.reason}`,
|
||||
meta: { passed: smoke.ok, attempt },
|
||||
};
|
||||
}
|
||||
5
workflows/sense-generator/roles/types.ts
Normal file
5
workflows/sense-generator/roles/types.ts
Normal file
@ -0,0 +1,5 @@
export type SenseMeta = {
  planner: { senseName: string };
  coder: { filesCreated: boolean };
  tester: { passed: boolean; attempt: number };
};
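For orientation only: a minimal sketch of how a moderator could route on these per-role meta types, mirroring the retry pattern of the solve-issue moderator later in this diff. The `Moderator` signature, `END` sentinel, and the `MAX_TESTER_ATTEMPTS` budget are assumptions for illustration; the actual sense-generator moderator is not shown in this section.

```typescript
// Sketch — assumes Moderator/END from @uncaged/nerve-core behave as used elsewhere in this diff.
import { END } from "@uncaged/nerve-core";
import type { Moderator } from "@uncaged/nerve-core";
import type { SenseMeta } from "./types.js";

const MAX_TESTER_ATTEMPTS = 3; // hypothetical retry budget

export const moderator: Moderator<SenseMeta> = (context) => {
  if (context.steps.length === 0) return "planner";
  const last = context.steps[context.steps.length - 1];
  if (last.role === "planner") return "coder";
  if (last.role === "coder") return "tester";
  if (last.role === "tester") {
    // Loop back to the coder while the smoke test fails and attempts remain.
    if (last.meta.passed) return END;
    return last.meta.attempt < MAX_TESTER_ATTEMPTS ? "coder" : END;
  }
  return END;
};
```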
13
workflows/sense-generator/tsconfig.json
Normal file
13
workflows/sense-generator/tsconfig.json
Normal file
@ -0,0 +1,13 @@
{
  "compilerOptions": {
    "target": "ES2022",
    "lib": ["ES2022"],
    "module": "NodeNext",
    "moduleResolution": "NodeNext",
    "strict": true,
    "skipLibCheck": true,
    "noEmit": true,
    "types": ["node"]
  },
  "include": ["./**/*.ts"]
}
@ -1,43 +0,0 @@
|
||||
import type { AgentFn, WorkflowDefinition } from "@uncaged/nerve-core";
|
||||
import type { LlmExtractorConfig } from "@uncaged/nerve-workflow-utils";
|
||||
|
||||
import { moderator } from "./moderator.js";
|
||||
import type { WorkflowMeta } from "./moderator.js";
|
||||
import { createCommitterRole } from "./roles/committer.js";
|
||||
import { createImplementRole } from "./roles/implement.js";
|
||||
import { createPlanRole } from "./roles/plan.js";
|
||||
import { createPrepareRole } from "./roles/prepare.js";
|
||||
import { createPublishRole } from "./roles/publish.js";
|
||||
import { createReadIssueRole } from "./roles/read-issue.js";
|
||||
import { createReviewRole } from "./roles/review.js";
|
||||
import { createTestRole } from "./roles/test.js";
|
||||
|
||||
export type CreateSolveIssueDeps = {
|
||||
defaultAdapter: AgentFn;
|
||||
adapters?: Partial<Record<keyof WorkflowMeta, AgentFn>>;
|
||||
nerveRoot: string;
|
||||
extract: LlmExtractorConfig;
|
||||
};
|
||||
|
||||
export function createSolveIssueWorkflow({
|
||||
defaultAdapter,
|
||||
adapters,
|
||||
nerveRoot,
|
||||
extract,
|
||||
}: CreateSolveIssueDeps): WorkflowDefinition<WorkflowMeta> {
|
||||
const a = (role: keyof WorkflowMeta) => adapters?.[role] ?? defaultAdapter;
|
||||
return {
|
||||
name: "solve-issue",
|
||||
roles: {
|
||||
"read-issue": createReadIssueRole(a("read-issue"), extract),
|
||||
prepare: createPrepareRole(a("prepare"), extract),
|
||||
plan: createPlanRole(a("plan"), { extract, nerveRoot }),
|
||||
implement: createImplementRole(a("implement"), { extract, nerveRoot }),
|
||||
committer: createCommitterRole(a("committer"), extract),
|
||||
review: createReviewRole(a("review"), extract, nerveRoot),
|
||||
test: createTestRole(a("test"), extract),
|
||||
publish: createPublishRole(a("publish"), { extract, nerveRoot }),
|
||||
},
|
||||
moderator,
|
||||
};
|
||||
}
|
||||
@ -1,37 +0,0 @@
|
||||
import { join } from "node:path";
|
||||
import { createCursorAdapter } from "@uncaged/nerve-adapter-cursor";
|
||||
import { hermesAdapter } from "@uncaged/nerve-adapter-hermes";
|
||||
import { createSolveIssueWorkflow } from "./build.js";
|
||||
import { resolveDashScopeProvider } from "./lib/provider.js";
|
||||
|
||||
const HOME = process.env.HOME ?? "/home/azureuser";
|
||||
const NERVE_ROOT = join(HOME, ".uncaged-nerve");
|
||||
|
||||
const provider = await resolveDashScopeProvider(NERVE_ROOT);
|
||||
|
||||
if (provider === null) {
|
||||
throw new Error("Set DASHSCOPE_API_KEY and DASHSCOPE_BASE_URL (or cfg get equivalents)");
|
||||
}
|
||||
|
||||
const CURSOR_TIMEOUT_MS = 300_000;
|
||||
|
||||
const workflow = createSolveIssueWorkflow({
|
||||
defaultAdapter: hermesAdapter,
|
||||
adapters: {
|
||||
plan: createCursorAdapter({
|
||||
type: "cursor",
|
||||
mode: "ask",
|
||||
model: "auto",
|
||||
timeout: CURSOR_TIMEOUT_MS,
|
||||
}),
|
||||
implement: createCursorAdapter({
|
||||
type: "cursor",
|
||||
model: "auto",
|
||||
timeout: CURSOR_TIMEOUT_MS,
|
||||
}),
|
||||
},
|
||||
nerveRoot: NERVE_ROOT,
|
||||
extract: { provider },
|
||||
});
|
||||
|
||||
export default workflow;
|
||||
@ -1,26 +0,0 @@
import type { LlmProvider } from "@uncaged/nerve-workflow-utils";
import { spawnSafe } from "@uncaged/nerve-workflow-utils";

export async function cfgGet(nerveRoot: string, key: string): Promise<string | null> {
  const result = await spawnSafe("cfg", ["get", key], {
    cwd: nerveRoot,
    env: null,
    timeoutMs: 10_000,
    abortSignal: null,
  });
  if (!result.ok) {
    return null;
  }
  const value = result.value.stdout.trim();
  return value.length > 0 ? value : null;
}

export async function resolveDashScopeProvider(nerveRoot: string): Promise<LlmProvider | null> {
  const apiKey = process.env.DASHSCOPE_API_KEY ?? (await cfgGet(nerveRoot, "DASHSCOPE_API_KEY"));
  const baseUrl = process.env.DASHSCOPE_BASE_URL ?? (await cfgGet(nerveRoot, "DASHSCOPE_BASE_URL"));
  const model = process.env.DASHSCOPE_MODEL ?? (await cfgGet(nerveRoot, "DASHSCOPE_MODEL")) ?? "qwen-plus";
  if (!apiKey || !baseUrl) {
    return null;
  }
  return { apiKey, baseUrl, model };
}
@ -1,86 +0,0 @@
|
||||
import { join } from "node:path";
|
||||
import type { RoleStep, WorkflowMessage } from "@uncaged/nerve-core";
|
||||
|
||||
type SolveIssueParse = {
|
||||
host: string;
|
||||
owner: string;
|
||||
repo: string;
|
||||
number: number;
|
||||
};
|
||||
|
||||
type SolveIssueRepo = {
|
||||
path: string;
|
||||
defaultBranch: string;
|
||||
packageManager: string;
|
||||
};
|
||||
|
||||
const HOME = process.env.HOME ?? "/home/azureuser";
|
||||
|
||||
function extractMarkedSection(text: string, marker: string): Record<string, string> | null {
|
||||
const escaped = marker.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
|
||||
const re = new RegExp(`---${escaped}---\\s*([\\s\\S]*?)(?:\\n---|$)`);
|
||||
const m = text.match(re);
|
||||
if (m === null) {
|
||||
return null;
|
||||
}
|
||||
const rec: Record<string, string> = {};
|
||||
for (const line of m[1].split("\n")) {
|
||||
const kv = line.match(/^([a-zA-Z]+):\s*(.+)$/);
|
||||
if (kv !== null) {
|
||||
rec[kv[1]] = kv[2].trim();
|
||||
}
|
||||
}
|
||||
return Object.keys(rec).length > 0 ? rec : null;
|
||||
}
|
||||
|
||||
function parseSolveIssueParse(text: string): SolveIssueParse | null {
|
||||
const rec = extractMarkedSection(text, "SOLVE_ISSUE_PARSE");
|
||||
if (rec === null) {
|
||||
return null;
|
||||
}
|
||||
const host = rec.host ?? "";
|
||||
const owner = rec.owner ?? "";
|
||||
const repo = rec.repo ?? "";
|
||||
const num = Number(rec.number ?? "");
|
||||
if (host.length === 0 || owner.length === 0 || repo.length === 0 || !Number.isFinite(num) || num <= 0) {
|
||||
return null;
|
||||
}
|
||||
return { host, owner, repo, number: num };
|
||||
}
|
||||
|
||||
function parseSolveIssueRepo(text: string): SolveIssueRepo | null {
|
||||
const rec = extractMarkedSection(text, "SOLVE_ISSUE_REPO");
|
||||
if (rec === null) {
|
||||
return null;
|
||||
}
|
||||
const path = rec.path ?? "";
|
||||
if (path.length === 0) {
|
||||
return null;
|
||||
}
|
||||
return {
|
||||
path,
|
||||
defaultBranch: rec.defaultBranch ?? "main",
|
||||
packageManager: rec.packageManager ?? "pnpm",
|
||||
};
|
||||
}
|
||||
|
||||
/** Prefer explicit prepare marker; else ~/Code/<owner>/<repo> from read-issue parse block. */
|
||||
export function resolveRepoCwd(messages: WorkflowMessage[]): string | null {
|
||||
for (let i = messages.length - 1; i >= 0; i--) {
|
||||
if (messages[i].role === "prepare") {
|
||||
const repo = parseSolveIssueRepo(messages[i].content);
|
||||
if (repo !== null) {
|
||||
return repo.path;
|
||||
}
|
||||
}
|
||||
}
|
||||
for (let i = messages.length - 1; i >= 0; i--) {
|
||||
if (messages[i].role === "read-issue") {
|
||||
const parsed = parseSolveIssueParse(messages[i].content);
|
||||
if (parsed !== null) {
|
||||
return join(HOME, "Code", parsed.owner, parsed.repo);
|
||||
}
|
||||
}
|
||||
}
|
||||
return null;
|
||||
}
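To make the marker protocol concrete, here is a small hypothetical usage of `resolveRepoCwd`. The message bodies below are invented for illustration; `WorkflowMessage` is assumed to carry at least `{ role, content, meta }`, since those are the only fields this helper reads.

```typescript
// Illustrative only — not taken from a real run.
import type { WorkflowMessage } from "@uncaged/nerve-core";
import { resolveRepoCwd } from "./lib/repo-context.js";

const messages = [
  {
    role: "prepare",
    content:
      "---SOLVE_ISSUE_REPO---\npath: /home/azureuser/Code/acme/api\ndefaultBranch: main\npackageManager: pnpm\n---",
    meta: { ready: true },
  },
] as unknown as WorkflowMessage[];

// The explicit prepare marker wins: the resolved cwd is the path from the block.
resolveRepoCwd(messages); // "/home/azureuser/Code/acme/api"

// If only a read-issue ---SOLVE_ISSUE_PARSE--- block (host/owner/repo/number) is present,
// the helper falls back to join(HOME, "Code", "<owner>", "<repo>").
```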
|
||||
@ -1,99 +0,0 @@
|
||||
import { END } from "@uncaged/nerve-core";
|
||||
import type { Moderator } from "@uncaged/nerve-core";
|
||||
import type { ReadIssueMeta } from "./roles/read-issue.js";
|
||||
import type { PrepareMeta } from "./roles/prepare.js";
|
||||
import type { PlanMeta } from "./roles/plan.js";
|
||||
import type { ImplementMeta } from "./roles/implement.js";
|
||||
import type { CommitterMeta } from "./roles/committer.js";
|
||||
import type { ReviewMeta } from "./roles/review.js";
|
||||
import type { TestMeta } from "./roles/test.js";
|
||||
import type { PublishMeta } from "./roles/publish.js";
|
||||
|
||||
export type WorkflowMeta = {
|
||||
"read-issue": ReadIssueMeta;
|
||||
prepare: PrepareMeta;
|
||||
plan: PlanMeta;
|
||||
implement: ImplementMeta;
|
||||
committer: CommitterMeta;
|
||||
review: ReviewMeta;
|
||||
test: TestMeta;
|
||||
publish: PublishMeta;
|
||||
};
|
||||
|
||||
const MAX_IMPLEMENT_ROUNDS = 20;
|
||||
const MAX_TOTAL_REJECTIONS = 10;
|
||||
|
||||
function implementRounds(steps: { role: string }[]): number {
|
||||
return steps.filter((s) => s.role === "implement").length;
|
||||
}
|
||||
|
||||
function totalRejections(steps: { role: string; meta: unknown }[]): number {
|
||||
return steps.filter((s) => {
|
||||
if (s.role === "review") return !(s.meta as Record<string, boolean>).approved;
|
||||
if (s.role === "test") return !(s.meta as Record<string, boolean>).passed;
|
||||
if (s.role === "committer") return !(s.meta as Record<string, boolean>).committed;
|
||||
if (s.role === "publish") return !(s.meta as Record<string, boolean>).success;
|
||||
return false;
|
||||
}).length;
|
||||
}
|
||||
|
||||
function canRetryImplement(steps: { role: string; meta: unknown }[]): boolean {
|
||||
return implementRounds(steps) < MAX_IMPLEMENT_ROUNDS && totalRejections(steps) < MAX_TOTAL_REJECTIONS;
|
||||
}
|
||||
|
||||
export const moderator: Moderator<WorkflowMeta> = (context) => {
|
||||
if (context.steps.length === 0) {
|
||||
return "read-issue";
|
||||
}
|
||||
|
||||
const last = context.steps[context.steps.length - 1];
|
||||
|
||||
if (last.role === "read-issue") {
|
||||
return last.meta.ready ? "prepare" : END;
|
||||
}
|
||||
|
||||
if (last.role === "prepare") {
|
||||
return last.meta.ready ? "plan" : END;
|
||||
}
|
||||
|
||||
if (last.role === "plan") {
|
||||
return last.meta.ready ? "implement" : END;
|
||||
}
|
||||
|
||||
if (last.role === "implement") {
|
||||
if (last.meta.done) {
|
||||
return "committer";
|
||||
}
|
||||
return canRetryImplement(context.steps) ? "implement" : END;
|
||||
}
|
||||
|
||||
if (last.role === "committer") {
|
||||
if (last.meta.committed) {
|
||||
return "review";
|
||||
}
|
||||
return canRetryImplement(context.steps) ? "implement" : END;
|
||||
}
|
||||
|
||||
if (last.role === "review") {
|
||||
if (last.meta.approved) {
|
||||
return "test";
|
||||
}
|
||||
return canRetryImplement(context.steps) ? "implement" : END;
|
||||
}
|
||||
|
||||
if (last.role === "test") {
|
||||
if (last.meta.passed) {
|
||||
return "publish";
|
||||
}
|
||||
return canRetryImplement(context.steps) ? "implement" : END;
|
||||
}
|
||||
|
||||
if (last.role === "publish") {
|
||||
if (last.meta.success) {
|
||||
return END;
|
||||
}
|
||||
return canRetryImplement(context.steps) ? "implement" : END;
|
||||
}
|
||||
|
||||
return END;
|
||||
};
|
||||
@ -1,57 +0,0 @@
|
||||
import type { AgentFn, Role, ThreadContext } from "@uncaged/nerve-core";
|
||||
import type { LlmExtractorConfig } from "@uncaged/nerve-workflow-utils";
|
||||
import { createRole, decorateRole, withDryRun, onFail } from "@uncaged/nerve-workflow-utils";
|
||||
import { z } from "zod";
|
||||
|
||||
function committerPrompt({ threadId }: { threadId: string }): string {
|
||||
return `You are the committer agent. The **implement** step finished with a passing build; your job is to branch, commit, and push.
|
||||
|
||||
1. Read the workflow thread: \`nerve thread show ${threadId}\` — understand what was planned, implemented, and reviewed.
|
||||
2. In the thread, locate \`---SOLVE_ISSUE_PARSE---\` and \`---SOLVE_ISSUE_REPO---\`. From them you need issue **number**, **title** (for the branch slug), repo **path**, and **defaultBranch**.
|
||||
3. \`cd\` to the repo **path** from the markers. Optionally read \`CONVENTIONS.md\` in that repo root if present.
|
||||
4. Run \`git rev-parse --abbrev-ref HEAD\` and compare with **defaultBranch** from the markers. Implement leaves changes uncommitted on the default branch — you should be on that branch with a dirty working tree. If you are not on the default branch, or the tree is clean when you expected changes, set **committed** to false and explain.
|
||||
5. Run \`git status\`. If there is nothing to commit, set **committed** to false and explain.
|
||||
6. Create a feature branch (do not commit directly on the default branch if it would mix unrelated work):
|
||||
- Name: \`fix/<number>-<short-slug>\` for fixes, or \`feat/<number>-<short-slug>\` if the issue is clearly a feature.
|
||||
- **slug**: lowercase, hyphens only, short (from issue title words).
|
||||
- Example: \`git checkout -b fix/42-auth-timeout\`
|
||||
7. \`git add -A\`
|
||||
8. Write a **conventional commit** message describing what changed and why, using the thread context.
|
||||
9. \`git commit -m "<message>"\` — do NOT pass \`--author\`, use repo git config.
|
||||
10. \`git push -u origin <branch-name>\`
|
||||
|
||||
**committed=true** only if branch was created, commit succeeded, and **push** succeeded.
|
||||
|
||||
End your reply with a JSON line:
|
||||
\`\`\`json
|
||||
{ "committed": true }
|
||||
\`\`\`
|
||||
or
|
||||
\`\`\`json
|
||||
{ "committed": false }
|
||||
\`\`\``;
|
||||
}
|
||||
|
||||
export const committerMetaSchema = z.object({
|
||||
committed: z
|
||||
.boolean()
|
||||
.describe("true if branch created, changes committed, and pushed successfully"),
|
||||
});
|
||||
export type CommitterMeta = z.infer<typeof committerMetaSchema>;
|
||||
|
||||
export function createCommitterRole(
|
||||
adapter: AgentFn,
|
||||
extract: LlmExtractorConfig,
|
||||
): Role<CommitterMeta> {
|
||||
const inner = createRole(
|
||||
adapter,
|
||||
async (ctx: ThreadContext) => committerPrompt({ threadId: ctx.start.meta.threadId }),
|
||||
committerMetaSchema,
|
||||
extract,
|
||||
);
|
||||
|
||||
return decorateRole(inner, [
|
||||
withDryRun({ label: "committer", meta: { committed: true } as CommitterMeta }),
|
||||
onFail({ label: "committer", meta: { committed: false } as CommitterMeta }),
|
||||
]) as Role<CommitterMeta>;
|
||||
}
|
||||
@ -1,86 +0,0 @@
|
||||
import type { AgentFn, Role, RoleResult, ThreadContext, WorkflowMessage } from "@uncaged/nerve-core";
|
||||
import type { LlmExtractorConfig } from "@uncaged/nerve-workflow-utils";
|
||||
import { createRole } from "@uncaged/nerve-workflow-utils";
|
||||
import { z } from "zod";
|
||||
|
||||
import { resolveRepoCwd } from "../lib/repo-context.js";
|
||||
|
||||
function buildImplementPrompt({ threadId, nerveRoot }: { threadId: string; nerveRoot: string }): string {
|
||||
return `You are the **implement** agent. You apply code changes for the issue.
|
||||
|
||||
Read workflow context (plan, reviewer/test feedback): \`nerve thread show ${threadId}\`
|
||||
|
||||
Read Nerve workspace conventions: \`cat ${nerveRoot}/CONVENTIONS.md\`
|
||||
|
||||
Your cwd is the target repository.
|
||||
|
||||
## Requirements
|
||||
|
||||
1. Implement the planned changes; address reviewer/tester feedback from the thread if any.
|
||||
2. Run the project **build** (\`pnpm build\`, \`npm run build\`, etc.) and fix issues until build passes.
|
||||
3. Multi-step: if you cannot finish this round, explain why and set **done** to false.
|
||||
|
||||
Do **not** run \`git checkout -b\`, \`git add\`, \`git commit\`, or \`git push\`. **Never** create commits on any branch — branching and commits are handled by the **committer** step after you finish.
|
||||
|
||||
Then close with JSON:
|
||||
\`\`\`json
|
||||
{ "done": true }
|
||||
\`\`\`
|
||||
or \`{ "done": false }\` matching whether implementation is complete.
|
||||
|
||||
**done=true** only when changes are complete **and** build passes in this round.`;
|
||||
}
|
||||
|
||||
export const implementMetaSchema = z.object({
|
||||
done: z.boolean().describe("true when changes are complete and build passes this round"),
|
||||
});
|
||||
export type ImplementMeta = z.infer<typeof implementMetaSchema>;
|
||||
|
||||
export type CreateImplementRoleDeps = {
|
||||
extract: LlmExtractorConfig;
|
||||
nerveRoot: string;
|
||||
};
|
||||
|
||||
export function createImplementRole(
|
||||
adapter: AgentFn,
|
||||
{ extract, nerveRoot }: CreateImplementRoleDeps,
|
||||
): Role<ImplementMeta> {
|
||||
return async (ctx: ThreadContext): Promise<RoleResult<ImplementMeta>> => {
|
||||
const messages = ctx.steps as unknown as WorkflowMessage[];
|
||||
const cwd = resolveRepoCwd(messages);
|
||||
if (cwd === null) {
|
||||
return {
|
||||
content: "implement cannot run: missing repo path in thread markers",
|
||||
meta: { done: false },
|
||||
};
|
||||
}
|
||||
|
||||
const innerRole = createRole(
|
||||
adapter,
|
||||
async (innerCtx: ThreadContext) =>
|
||||
buildImplementPrompt({
|
||||
threadId: innerCtx.start.meta.threadId,
|
||||
nerveRoot,
|
||||
}),
|
||||
implementMetaSchema,
|
||||
extract,
|
||||
);
|
||||
|
||||
const innerCtx: ThreadContext = {
|
||||
...ctx,
|
||||
start: {
|
||||
...ctx.start,
|
||||
meta: { ...ctx.start.meta, workdir: cwd },
|
||||
},
|
||||
};
|
||||
try {
|
||||
return await innerRole(innerCtx);
|
||||
} catch (e) {
|
||||
const msg = e instanceof Error ? e.message : String(e);
|
||||
return {
|
||||
content: `implement failed: ${msg}`,
|
||||
meta: { done: false },
|
||||
};
|
||||
}
|
||||
};
|
||||
}
|
||||
@ -1,88 +0,0 @@
|
||||
import type { AgentFn, Role, RoleResult, ThreadContext, WorkflowMessage } from "@uncaged/nerve-core";
|
||||
import type { LlmExtractorConfig } from "@uncaged/nerve-workflow-utils";
|
||||
import { createRole } from "@uncaged/nerve-workflow-utils";
|
||||
import { z } from "zod";
|
||||
|
||||
import { resolveRepoCwd } from "../lib/repo-context.js";
|
||||
|
||||
function buildPlanPrompt({ threadId, nerveRoot }: { threadId: string; nerveRoot: string }): string {
|
||||
return `You are the **plan** agent (analysis only — ask mode). You produce an implementation plan for fixing the issue.
|
||||
|
||||
Read workflow context: \`nerve thread show ${threadId}\`
|
||||
|
||||
Read Nerve workspace conventions (coding rules for agents): \`cat ${nerveRoot}/CONVENTIONS.md\`
|
||||
|
||||
In the **target repository** (your cwd), skim relevant files and read \`CONVENTIONS.md\` **if it exists** there.
|
||||
|
||||
## Output
|
||||
|
||||
Write an implementation plan in **markdown** with:
|
||||
|
||||
1. Problem understanding
|
||||
2. Change strategy
|
||||
3. Target files (paths)
|
||||
4. **Test commands** to run (explicit shell commands, e.g. \`pnpm test\`, \`pnpm vitest run\`)
|
||||
5. Risks
|
||||
|
||||
End your reply with a JSON code block (meta signal):
|
||||
\`\`\`json
|
||||
{ "ready": true }
|
||||
\`\`\`
|
||||
Use \`{ "ready": false }\` if the plan cannot be made actionable.
|
||||
|
||||
**ready=true** only when the plan is clear and actionable.`;
|
||||
}
|
||||
|
||||
export const planMetaSchema = z.object({
|
||||
ready: z.boolean().describe("true if plan is clear and actionable"),
|
||||
});
|
||||
export type PlanMeta = z.infer<typeof planMetaSchema>;
|
||||
|
||||
export type CreatePlanRoleDeps = {
|
||||
extract: LlmExtractorConfig;
|
||||
nerveRoot: string;
|
||||
};
|
||||
|
||||
export function createPlanRole(
|
||||
adapter: AgentFn,
|
||||
{ extract, nerveRoot }: CreatePlanRoleDeps,
|
||||
): Role<PlanMeta> {
|
||||
return async (ctx: ThreadContext): Promise<RoleResult<PlanMeta>> => {
|
||||
const messages = ctx.steps as unknown as WorkflowMessage[];
|
||||
const cwd = resolveRepoCwd(messages);
|
||||
if (cwd === null) {
|
||||
return {
|
||||
content: "plan cannot run: missing ---SOLVE_ISSUE_REPO--- or ---SOLVE_ISSUE_PARSE--- in thread",
|
||||
meta: { ready: false },
|
||||
};
|
||||
}
|
||||
|
||||
const innerRole = createRole(
|
||||
adapter,
|
||||
async (innerCtx: ThreadContext) =>
|
||||
buildPlanPrompt({
|
||||
threadId: innerCtx.start.meta.threadId,
|
||||
nerveRoot,
|
||||
}),
|
||||
planMetaSchema,
|
||||
extract,
|
||||
);
|
||||
|
||||
const innerCtx: ThreadContext = {
|
||||
...ctx,
|
||||
start: {
|
||||
...ctx.start,
|
||||
meta: { ...ctx.start.meta, workdir: cwd },
|
||||
},
|
||||
};
|
||||
try {
|
||||
return await innerRole(innerCtx);
|
||||
} catch (e) {
|
||||
const msg = e instanceof Error ? e.message : String(e);
|
||||
return {
|
||||
content: `plan failed: ${msg}`,
|
||||
meta: { ready: false },
|
||||
};
|
||||
}
|
||||
};
|
||||
}
|
||||
@ -1,73 +0,0 @@
|
||||
import type { AgentFn, Role, ThreadContext } from "@uncaged/nerve-core";
|
||||
import type { LlmExtractorConfig } from "@uncaged/nerve-workflow-utils";
|
||||
import { createRole } from "@uncaged/nerve-workflow-utils";
|
||||
import { z } from "zod";
|
||||
|
||||
function preparePrompt({ threadId }: { threadId: string }): string {
|
||||
return `You are the **prepare** agent. You ensure the target repository is ready for work.
|
||||
|
||||
Read prior messages / thread for issue markers: \`nerve thread show ${threadId}\`
|
||||
|
||||
## Goal
|
||||
|
||||
Find **owner**, **repo**, and **host** from \`---SOLVE_ISSUE_PARSE---\` in the thread (from read-issue).
|
||||
|
||||
Check the **initial user prompt** (the trigger message) for a local repo path. The user may specify it like:
|
||||
- \`--repo /path/to/repo\`
|
||||
- \`repo: /path/to/repo\`
|
||||
- or just mention an absolute path to the local clone
|
||||
|
||||
## Steps
|
||||
|
||||
### If a local path is provided in the trigger prompt:
|
||||
1. Verify \`<path>/.git\` exists — if not, fail with \`ready: false\`
|
||||
2. \`cd "<path>" && git fetch --all\`
|
||||
3. Ensure working tree clean: if \`git status --porcelain\` is non-empty, \`git stash push -u -m "solve-issue stash"\`
|
||||
4. Detect default branch (\`main\` or \`master\`) and \`git checkout <default> && git pull --ff-only\`
|
||||
5. Use this path as REPOPATH
|
||||
|
||||
### If no local path is provided:
|
||||
1. Let \`REPOPATH=$HOME/Code/<owner>/<repo>\` (expand \`$HOME\`)
|
||||
2. \`mkdir -p "$HOME/Code/<owner>"\`
|
||||
3. If \`REPOPATH/.git\` is missing: \`git clone https://<host>/<owner>/<repo>.git "$REPOPATH"\`
|
||||
Else: \`cd "$REPOPATH" && git fetch --all && git pull --ff-only\`
|
||||
4. Ensure working tree clean: if \`git status --porcelain\` is non-empty, \`git stash push -u -m "solve-issue stash"\`
|
||||
5. Detect default branch and \`git checkout <default>\`
|
||||
|
||||
### Then (both paths):
|
||||
6. Detect package manager: \`pnpm-lock.yaml\` → pnpm, \`yarn.lock\` → yarn, \`package-lock.json\` → npm; run install (\`pnpm install --no-frozen-lockfile\` / \`npm ci\` or \`npm install\` / \`yarn\`).
|
||||
7. If \`package.json\` has a \`build\` script, run the build (\`pnpm build\`, etc.) and fix nothing — only verify baseline passes.
|
||||
|
||||
## Required marker block
|
||||
|
||||
Emit **exactly**:
|
||||
\`\`\`
|
||||
---SOLVE_ISSUE_REPO---
|
||||
path: <absolute path to REPOPATH>
|
||||
defaultBranch: <main or master>
|
||||
packageManager: <pnpm|npm|yarn>
|
||||
---
|
||||
\`\`\`
|
||||
|
||||
End with:
|
||||
\`\`\`json
|
||||
{ "ready": true }
|
||||
\`\`\`
|
||||
or \`{ "ready": false }\` if the repo is invalid, or install/build baseline failed.
|
||||
|
||||
**ready=true** only when the repo exists at \`path\`, is clean, dependencies installed, and baseline build succeeded (or no build script).`;
|
||||
}
|
||||
|
||||
export const prepareMetaSchema = z.object({
|
||||
ready: z.boolean().describe("true if repo is ready and baseline build ok"),
|
||||
});
|
||||
export type PrepareMeta = z.infer<typeof prepareMetaSchema>;
|
||||
|
||||
export function createPrepareRole(adapter: AgentFn, extract: LlmExtractorConfig): Role<PrepareMeta> {
|
||||
return createRole(
|
||||
adapter,
|
||||
async (ctx: ThreadContext) => preparePrompt({ threadId: ctx.start.meta.threadId }),
|
||||
prepareMetaSchema,
|
||||
extract,
|
||||
);
|
||||
}
|
||||
@ -1,110 +0,0 @@
|
||||
import { mkdirSync, writeFileSync } from "node:fs";
|
||||
import { join } from "node:path";
|
||||
import type { AgentFn, Role, RoleResult, ThreadContext } from "@uncaged/nerve-core";
|
||||
import type { LlmExtractorConfig } from "@uncaged/nerve-workflow-utils";
|
||||
import { createRole, isDryRun } from "@uncaged/nerve-workflow-utils";
|
||||
import { z } from "zod";
|
||||
|
||||
function buildPublishPrompt({ threadId, nerveRoot }: { threadId: string; nerveRoot: string }): string {
|
||||
return `You are the **publish** agent (Hermes). Test has passed. Open a pull request for the current branch using the **tea** CLI.
|
||||
|
||||
## Context
|
||||
|
||||
- Read the full workflow thread: \`nerve thread show ${threadId}\`
|
||||
- Nerve workspace conventions (for tone/consistency, optional): \`cat ${nerveRoot}/CONVENTIONS.md\`
|
||||
|
||||
## Repo and issue (from the thread)
|
||||
|
||||
Find \`---SOLVE_ISSUE_PARSE---\` and \`---SOLVE_ISSUE_REPO---\` in prior messages. You need:
|
||||
- \`path\` — clone checkout directory (this is your working copy)
|
||||
- \`host\`, \`owner\`, \`repo\`, \`number\` for the issue
|
||||
- \`defaultBranch\` (for PR base) from SOLVE_ISSUE_REPO
|
||||
|
||||
**Issue link** for the Ref section: \`https://<host>/<owner>/<repo>/issues/<number>\`
|
||||
|
||||
## Steps (in order)
|
||||
|
||||
1. \`cd\` to the **repo \`path\`**. Run \`git rev-parse --abbrev-ref HEAD\` to get the current branch name. The **committer** step should already have pushed this branch; run \`git push -u origin <that-branch>\` only if the branch is not yet on the remote.
|
||||
2. Choose a **PR title** that reflects the real change (not a generic \`fix: issue #N\`): derive it from the issue title, plan, and thread summary (keep it concise; Conventional Commits style is fine, e.g. \`fix(auth): handle session expiry\`).
|
||||
3. Write a **PR body** in Markdown with exactly these sections, in this order, each with a \`##\` heading (fill with concise content based on the thread: plan, implement, review, test):
|
||||
- **## What** — one short paragraph: what this PR does
|
||||
- **## Why** — one short paragraph: motivation / issue
|
||||
- **## Changes** — bullet list of notable changes
|
||||
- **## Ref** — include one line \`Fixes #<number>\` (same \`number\` from SOLVE_ISSUE_PARSE; closes/links the issue where supported) **and** the issue URL \`https://<host>/<owner>/<repo>/issues/<number>\`
|
||||
4. Create the PR with **tea** (not curl/fetch to Gitea):
|
||||
- \`tea pr create --repo <owner>/<repo> --base <defaultBranch> --head <branch> --title "<your meaningful title>" --body <your markdown body>\`
|
||||
- You may use a heredoc or a temp file for \`--body\` if the shell requires it; keep the four sections in the body.
|
||||
5. Confirm the PR was created (tea prints a URL or PR number in typical setups).
|
||||
|
||||
**success=true** only if both **push** and **tea** PR creation succeed. If any step fails, set **success=false** and say why.
|
||||
|
||||
End your reply with a JSON line:
|
||||
\`\`\`json
|
||||
{ "success": true }
|
||||
\`\`\`
|
||||
or
|
||||
\`\`\`json
|
||||
{ "success": false }
|
||||
\`\`\``;
|
||||
}
|
||||
|
||||
export const publishMetaSchema = z.object({
|
||||
success: z.boolean().describe("true if git push and tea pr create both succeeded"),
|
||||
});
|
||||
export type PublishMeta = z.infer<typeof publishMetaSchema>;
|
||||
|
||||
export type CreatePublishRoleDeps = {
|
||||
extract: LlmExtractorConfig;
|
||||
nerveRoot: string;
|
||||
};
|
||||
|
||||
function logPath(nerveRoot: string): string {
|
||||
return join(nerveRoot, "logs", `solve-issue-publish-${Date.now()}.log`);
|
||||
}
|
||||
|
||||
export function createPublishRole(
|
||||
adapter: AgentFn,
|
||||
{ extract, nerveRoot }: CreatePublishRoleDeps,
|
||||
): Role<PublishMeta> {
|
||||
const innerRole = createRole(
|
||||
adapter,
|
||||
async (ctx: ThreadContext) =>
|
||||
buildPublishPrompt({ threadId: ctx.start.meta.threadId, nerveRoot }),
|
||||
publishMetaSchema,
|
||||
extract,
|
||||
);
|
||||
|
||||
return async (ctx: ThreadContext): Promise<RoleResult<PublishMeta>> => {
|
||||
const file = logPath(nerveRoot);
|
||||
mkdirSync(join(file, ".."), { recursive: true });
|
||||
|
||||
if (isDryRun(ctx.start)) {
|
||||
const msg = "[dry-run] publish skipped (no git push / PR)";
|
||||
writeFileSync(file, `${msg}\n`, "utf-8");
|
||||
return {
|
||||
content: `[dry-run] publish skipped — log: ${file}`,
|
||||
meta: { success: true },
|
||||
};
|
||||
}
|
||||
|
||||
const innerCtx: ThreadContext = {
|
||||
...ctx,
|
||||
start: {
|
||||
...ctx.start,
|
||||
meta: { ...ctx.start.meta, workdir: nerveRoot },
|
||||
},
|
||||
};
|
||||
|
||||
try {
|
||||
return await innerRole(innerCtx);
|
||||
} catch (e) {
|
||||
const msg = e instanceof Error ? e.message : String(e);
|
||||
const body = `publish failed: ${msg}\n`;
|
||||
writeFileSync(file, body, "utf-8");
|
||||
return {
|
||||
content: `publish failed: ${msg}\nLog: ${file}`,
|
||||
meta: { success: false },
|
||||
};
|
||||
}
|
||||
};
|
||||
}
|
||||
@ -1,53 +0,0 @@
|
||||
import type { AgentFn, Role, ThreadContext } from "@uncaged/nerve-core";
|
||||
import type { LlmExtractorConfig } from "@uncaged/nerve-workflow-utils";
|
||||
import { createRole } from "@uncaged/nerve-workflow-utils";
|
||||
import { z } from "zod";
|
||||
|
||||
function readIssuePrompt({ threadId }: { threadId: string }): string {
|
||||
return `You are the **read-issue** agent. You fetch Gitea issue content via the \`tea\` CLI.
|
||||
|
||||
Read the workflow thread start prompt for the issue URL (same run): \`nerve thread show ${threadId}\`
|
||||
|
||||
## Steps
|
||||
|
||||
1. From the **initial user prompt** (issue URL), extract **host**, **owner**, **repo**, and **issue number**. Supported shape:
|
||||
\`https://<host>/<owner>/<repo>/issues/<number>\`
|
||||
|
||||
2. Run:
|
||||
\`tea issue show <number> --repo <owner>/<repo> --comments\`
|
||||
(Add \`--json\` if helpful for parsing.)
|
||||
|
||||
3. In your reply, include **structured issue text**: title, body, labels, and each comment (author + body + time).
|
||||
|
||||
4. You **must** emit this marker block **exactly** (fill in real values):
|
||||
\`\`\`
|
||||
---SOLVE_ISSUE_PARSE---
|
||||
host: <host>
|
||||
owner: <owner>
|
||||
repo: <repo>
|
||||
number: <number>
|
||||
---
|
||||
\`\`\`
|
||||
|
||||
5. End with JSON meta (verbatim block):
|
||||
\`\`\`json
|
||||
{ "ready": true }
|
||||
\`\`\`
|
||||
Use \`{ "ready": false }\` if you could not fetch or parse the issue.
|
||||
|
||||
**ready=true** only if the issue was fetched successfully and the marker block is correct.`;
|
||||
}
|
||||
|
||||
export const readIssueMetaSchema = z.object({
|
||||
ready: z.boolean().describe("true if issue content was fetched and markers are present"),
|
||||
});
|
||||
export type ReadIssueMeta = z.infer<typeof readIssueMetaSchema>;
|
||||
|
||||
export function createReadIssueRole(adapter: AgentFn, extract: LlmExtractorConfig): Role<ReadIssueMeta> {
|
||||
return createRole(
|
||||
adapter,
|
||||
async (ctx: ThreadContext) => readIssuePrompt({ threadId: ctx.start.meta.threadId }),
|
||||
readIssueMetaSchema,
|
||||
extract,
|
||||
);
|
||||
}
|
||||
@ -1,59 +0,0 @@
|
||||
import type { AgentFn, Role, ThreadContext } from "@uncaged/nerve-core";
|
||||
import type { LlmExtractorConfig } from "@uncaged/nerve-workflow-utils";
|
||||
import { createRole } from "@uncaged/nerve-workflow-utils";
|
||||
import { z } from "zod";
|
||||
|
||||
function reviewPrompt({ threadId, nerveRoot }: { threadId: string; nerveRoot: string }): string {
|
||||
return `You are a **code reviewer** (Hermes). You run after implement and before test.
|
||||
|
||||
Read Nerve workspace conventions: \`cat ${nerveRoot}/CONVENTIONS.md\`
|
||||
|
||||
Read workflow context: \`nerve thread show ${threadId}\`
|
||||
|
||||
Find **repo path** from \`---SOLVE_ISSUE_REPO--- path:\` in the thread (prepare step). \`cd\` there before any git commands.
|
||||
|
||||
## Static analysis
|
||||
|
||||
Run:
|
||||
|
||||
1. \`cd <repo-path> && git diff --stat\`
|
||||
2. \`cd <repo-path> && git diff\`
|
||||
3. \`cd <repo-path> && git status --short\`
|
||||
|
||||
## Checklist
|
||||
|
||||
Reject (**approved: false**) if you find:
|
||||
|
||||
- Garbage files, secrets/credentials, unrelated changes
|
||||
- Violations of CONVENTIONS.md (e.g. \`interface\` vs \`type\`, dynamic \`import()\`)
|
||||
|
||||
Approve (**approved: true**) if the diff is clean and focused.
|
||||
|
||||
End with:
|
||||
\`\`\`json
|
||||
{ "approved": true }
|
||||
\`\`\`
|
||||
or
|
||||
\`\`\`json
|
||||
{ "approved": false }
|
||||
\`\`\``;
|
||||
}
|
||||
|
||||
export const reviewMetaSchema = z.object({
|
||||
approved: z.boolean().describe("true if diff is clean and ready for tests"),
|
||||
});
|
||||
export type ReviewMeta = z.infer<typeof reviewMetaSchema>;
|
||||
|
||||
export function createReviewRole(
|
||||
adapter: AgentFn,
|
||||
extract: LlmExtractorConfig,
|
||||
nerveRoot: string,
|
||||
): Role<ReviewMeta> {
|
||||
return createRole(
|
||||
adapter,
|
||||
async (ctx: ThreadContext) =>
|
||||
reviewPrompt({ threadId: ctx.start.meta.threadId, nerveRoot }),
|
||||
reviewMetaSchema,
|
||||
extract,
|
||||
);
|
||||
}
|
||||
@ -1,40 +0,0 @@
|
||||
import type { AgentFn, Role, ThreadContext } from "@uncaged/nerve-core";
|
||||
import type { LlmExtractorConfig } from "@uncaged/nerve-workflow-utils";
|
||||
import { createRole } from "@uncaged/nerve-workflow-utils";
|
||||
import { z } from "zod";
|
||||
|
||||
function testPrompt({ threadId }: { threadId: string }): string {
|
||||
return `You are the **test** agent (Hermes). You execute automated tests for the change.
|
||||
|
||||
Read workflow context: \`nerve thread show ${threadId}\`
|
||||
|
||||
Find **repo path** from \`---SOLVE_ISSUE_REPO--- path:\` in the thread.
|
||||
|
||||
From the **plan** step output, locate **Test commands** (explicit shell commands). Run each command with cwd = repo path, in order.
|
||||
|
||||
If the plan lists **no** test commands, try **pnpm test**, then **npm test** if pnpm is unavailable; if neither applies, explain the skip.
|
||||
|
||||
Collect stdout/stderr snippets on failure.
|
||||
|
||||
End with JSON only:
|
||||
\`\`\`json
|
||||
{ "passed": true }
|
||||
\`\`\`
|
||||
or \`{ "passed": false }\`
|
||||
|
||||
**passed=true** only if every executed command exited 0 (or skip was justified with no failing command).`;
|
||||
}
|
||||
|
||||
export const testMetaSchema = z.object({
|
||||
passed: z.boolean().describe("true if all test commands passed"),
|
||||
});
|
||||
export type TestMeta = z.infer<typeof testMetaSchema>;
|
||||
|
||||
export function createTestRole(adapter: AgentFn, extract: LlmExtractorConfig): Role<TestMeta> {
|
||||
return createRole(
|
||||
adapter,
|
||||
async (ctx: ThreadContext) => testPrompt({ threadId: ctx.start.meta.threadId }),
|
||||
testMetaSchema,
|
||||
extract,
|
||||
);
|
||||
}
|
||||
807
workflows/workflow-generator/index.ts
Normal file
807
workflows/workflow-generator/index.ts
Normal file
@ -0,0 +1,807 @@
|
||||
import { existsSync, readFileSync } from "node:fs";
|
||||
import { join } from "node:path";
|
||||
import type { RoleResult, StartStep, WorkflowDefinition, WorkflowMessage } from "@uncaged/nerve-core";
|
||||
import { END } from "@uncaged/nerve-core";
|
||||
import type { SpawnError } from "@uncaged/nerve-workflow-utils";
|
||||
import {
|
||||
cursorAgent,
|
||||
isDryRun,
|
||||
llmExtract,
|
||||
nerveAgentContext,
|
||||
readNerveYaml,
|
||||
spawnSafe,
|
||||
} from "@uncaged/nerve-workflow-utils";
|
||||
import { z } from "zod";
|
||||
|
||||
const HOME = process.env.HOME ?? "/home/azureuser";
|
||||
const NERVE_ROOT = join(HOME, ".uncaged-nerve");
|
||||
const WORKFLOWS_DIR = join(NERVE_ROOT, "workflows");
|
||||
|
||||
type PlannerRole = {
|
||||
name: string;
|
||||
goal: string;
|
||||
io: string;
|
||||
};
|
||||
|
||||
type WorkflowMeta = {
|
||||
planner: {
|
||||
userPrompt: string;
|
||||
workflowName: string;
|
||||
roles: PlannerRole[];
|
||||
flowTransitions: string;
|
||||
validationLoopsDesign: string;
|
||||
externalDeps: string;
|
||||
dataFlow: string;
|
||||
planMarkdown: string;
|
||||
};
|
||||
coder: {
|
||||
workflowName: string;
|
||||
attempt: number;
|
||||
files: { indexTs: boolean; packageJson: boolean; tsconfigJson: boolean };
|
||||
lintPassed: boolean;
|
||||
buildPassed: boolean;
|
||||
lintLog: string;
|
||||
buildLog: string;
|
||||
cursorOutput: string;
|
||||
reason: string | null;
|
||||
};
|
||||
tester: {
|
||||
workflowName: string;
|
||||
attempt: number;
|
||||
passed: boolean;
|
||||
dryRunLog: string;
|
||||
reason: string;
|
||||
};
|
||||
committer: {
|
||||
invoked: boolean;
|
||||
success: boolean;
|
||||
branch: string | null;
|
||||
commitHash: string | null;
|
||||
pushed: boolean | null;
|
||||
log: string;
|
||||
error: string | null;
|
||||
};
|
||||
};
|
||||
|
||||
const roleSchema = z
|
||||
.object({
|
||||
name: z.string().default(""),
|
||||
goal: z.string().default(""),
|
||||
io: z.string().default(""),
|
||||
})
|
||||
.default({ name: "", goal: "", io: "" });
|
||||
|
||||
const plannerExtractSchema = z.object({
|
||||
workflowName: z
|
||||
.string()
|
||||
.default("")
|
||||
.describe("kebab-case workflow name under workflows/, e.g. issue-fixer"),
|
||||
roles: z.array(roleSchema).default([]),
|
||||
flowTransitions: z.preprocess((v) => (Array.isArray(v) ? v.join("\n") : v), z.string().default("")),
|
||||
validationLoopsDesign: z.preprocess((v) => (Array.isArray(v) ? v.join("\n") : v), z.string().default("")),
|
||||
externalDeps: z.preprocess((v) => (Array.isArray(v) ? v.join(", ") : v), z.string().default("")),
|
||||
dataFlow: z.preprocess((v) => (Array.isArray(v) ? v.join("\n") : v), z.string().default("")),
|
||||
planMarkdown: z.preprocess((v) => (Array.isArray(v) ? v.join("\n") : v), z.string().default("")),
|
||||
});
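As a quick illustration of why the `z.preprocess` wrappers are there: the extractor may return either a string or an array for the free-form fields, and arrays are joined before validation while missing fields fall back to their defaults. The input below is hypothetical, not output from a real run.

```typescript
// Hypothetical extractor output — arrays are coerced to newline-joined strings.
const parsed = plannerExtractSchema.parse({
  workflowName: "issue-fixer",
  roles: [{ name: "planner", goal: "design the workflow", io: "prompt -> plan" }],
  flowTransitions: ["planner -> coder", "coder -> tester", "tester -> END"],
});
// parsed.flowTransitions === "planner -> coder\ncoder -> tester\ntester -> END"
// parsed.externalDeps === ""   (defaulted)
// parsed.planMarkdown === ""   (defaulted)
```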
|
||||
|
||||
function getNerveYaml(): string {
|
||||
const result = readNerveYaml({ nerveRoot: NERVE_ROOT });
|
||||
return result.ok ? result.value : "# nerve.yaml unavailable";
|
||||
}
|
||||
|
||||
function buildSenseGeneratorReference(): string {
|
||||
const p = join(WORKFLOWS_DIR, "sense-generator", "index.ts");
|
||||
if (!existsSync(p)) {
|
||||
return "(missing workflows/sense-generator/index.ts)";
|
||||
}
|
||||
return readFileSync(p, "utf-8");
|
||||
}
|
||||
|
||||
function formatSpawnFailure(error: SpawnError): string {
|
||||
if (error.kind === "spawn_failed") {
|
||||
return error.message;
|
||||
}
|
||||
if (error.kind === "timeout") {
|
||||
return `timeout stdout=${error.stdout.slice(0, 300)} stderr=${error.stderr.slice(0, 300)}`;
|
||||
}
|
||||
return `exit ${error.exitCode} stderr=${error.stderr.slice(0, 500)}`;
|
||||
}
|
||||
|
||||
async function cfgGet(key: string): Promise<string | null> {
|
||||
const result = await spawnSafe("cfg", ["get", key], {
|
||||
cwd: NERVE_ROOT,
|
||||
env: null,
|
||||
timeoutMs: 10_000,
|
||||
});
|
||||
if (!result.ok) {
|
||||
return null;
|
||||
}
|
||||
const v = result.value.stdout.trim();
|
||||
return v.length > 0 ? v : null;
|
||||
}
|
||||
|
||||
async function resolveDashScopeProvider(): Promise<{
|
||||
baseUrl: string;
|
||||
apiKey: string;
|
||||
model: string;
|
||||
} | null> {
|
||||
const apiKey = process.env.DASHSCOPE_API_KEY ?? (await cfgGet("DASHSCOPE_API_KEY"));
|
||||
const baseUrl = process.env.DASHSCOPE_BASE_URL ?? (await cfgGet("DASHSCOPE_BASE_URL"));
|
||||
const model = process.env.DASHSCOPE_MODEL ?? (await cfgGet("DASHSCOPE_MODEL")) ?? "qwen-plus";
|
||||
if (!apiKey || !baseUrl) {
|
||||
return null;
|
||||
}
|
||||
return { apiKey, baseUrl, model };
|
||||
}
|
||||
|
||||
function lastMetaForRole<M>(messages: WorkflowMessage[], role: string): M | null {
|
||||
for (let i = messages.length - 1; i >= 0; i--) {
|
||||
if (messages[i].role === role) {
|
||||
return messages[i].meta as M;
|
||||
}
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
function scanGeneratedCodePitfalls(source: string): string[] {
|
||||
const issues: string[] = [];
|
||||
if (/\bawait\s+import\s*\(/.test(source)) {
|
||||
issues.push("Found await import() in generated workflow code");
|
||||
}
|
||||
if (/\bimport\s*\(\s*["'`]/.test(source) && !source.includes("Dynamic import required")) {
|
||||
issues.push("Found undocumented dynamic import() call");
|
||||
}
|
||||
if (!/\bexport\s+default\s+/.test(source)) {
|
||||
issues.push("Missing default export of WorkflowDefinition");
|
||||
}
|
||||
return issues;
|
||||
}
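For example, a generated module that lazily imports a helper and omits the default export would trip all three checks (hypothetical input, shown only to illustrate the scan):

```typescript
const issues = scanGeneratedCodePitfalls(`const helper = await import("./helper.js");`);
// issues === [
//   "Found await import() in generated workflow code",
//   "Found undocumented dynamic import() call",
//   "Missing default export of WorkflowDefinition",
// ]
```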
|
||||
|
||||
function inferWorkflowName(messages: WorkflowMessage[]): string {
|
||||
const tester = lastMetaForRole<WorkflowMeta["tester"]>(messages, "tester");
|
||||
if (tester !== null && tester.workflowName.trim().length > 0) {
|
||||
return tester.workflowName.trim();
|
||||
}
|
||||
const coder = lastMetaForRole<WorkflowMeta["coder"]>(messages, "coder");
|
||||
if (coder !== null && coder.workflowName.trim().length > 0) {
|
||||
return coder.workflowName.trim();
|
||||
}
|
||||
const planner = lastMetaForRole<WorkflowMeta["planner"]>(messages, "planner");
|
||||
if (planner !== null && planner.workflowName.trim().length > 0) {
|
||||
return planner.workflowName.trim();
|
||||
}
|
||||
return "";
|
||||
}
|
||||
|
||||
async function runLintAndBuild(
|
||||
workflowDir: string,
|
||||
dry: boolean,
|
||||
): Promise<{
|
||||
lintPassed: boolean;
|
||||
buildPassed: boolean;
|
||||
lintLog: string;
|
||||
buildLog: string;
|
||||
reason: string | null;
|
||||
}> {
|
||||
const lintRun = await spawnSafe("pnpm", ["run", "check"], {
|
||||
cwd: workflowDir,
|
||||
env: null,
|
||||
timeoutMs: 300_000,
|
||||
dryRun: dry,
|
||||
});
|
||||
if (!lintRun.ok) {
|
||||
return {
|
||||
lintPassed: false,
|
||||
buildPassed: false,
|
||||
lintLog: formatSpawnFailure(lintRun.error),
|
||||
buildLog: "",
|
||||
reason: `lint failed: ${formatSpawnFailure(lintRun.error)}`,
|
||||
};
|
||||
}
|
||||
|
||||
const lintLog = lintRun.value.stderr.trim() || lintRun.value.stdout.trim() || "(no output)";
|
||||
const tscRun = await spawnSafe("npx", ["tsc", "--noEmit"], {
|
||||
cwd: workflowDir,
|
||||
env: null,
|
||||
timeoutMs: 300_000,
|
||||
dryRun: dry,
|
||||
});
|
||||
if (!tscRun.ok) {
|
||||
return {
|
||||
lintPassed: true,
|
||||
buildPassed: false,
|
||||
lintLog,
|
||||
buildLog: formatSpawnFailure(tscRun.error),
|
||||
reason: `build failed: ${formatSpawnFailure(tscRun.error)}`,
|
||||
};
|
||||
}
|
||||
const buildLog = tscRun.value.stderr.trim() || tscRun.value.stdout.trim() || "(no output)";
|
||||
return { lintPassed: true, buildPassed: true, lintLog, buildLog, reason: null };
|
||||
}
|
||||
|
||||
async function runTesterDryRun(
|
||||
workflowName: string,
|
||||
planner: WorkflowMeta["planner"],
|
||||
coder: WorkflowMeta["coder"],
|
||||
dry: boolean,
|
||||
): Promise<{ passed: boolean; reason: string; log: string }> {
|
||||
if (dry) {
|
||||
return {
|
||||
passed: true,
|
||||
reason: "dry-run mode",
|
||||
log: "[dry-run] tester skipped external checks",
|
||||
};
|
||||
}
|
||||
const prompt = `You are testing a generated Nerve workflow by doing a dry-run review.
|
||||
|
||||
Workflow: ${workflowName}
|
||||
|
||||
Planner specification:
|
||||
${JSON.stringify(
|
||||
{
|
||||
roles: planner.roles,
|
||||
flowTransitions: planner.flowTransitions,
|
||||
validationLoopsDesign: planner.validationLoopsDesign,
|
||||
externalDeps: planner.externalDeps,
|
||||
dataFlow: planner.dataFlow,
|
||||
},
|
||||
null,
|
||||
2,
|
||||
)}
|
||||
|
||||
Coder output summary:
|
||||
${coder.cursorOutput.slice(0, 6000)}
|
||||
|
||||
Required checks:
|
||||
1) Verify role transitions are coherent and terminate at END.
|
||||
2) Verify the generated workflow adheres to the planner's intent.
|
||||
3) Verify retry loops are explicit for recoverable failures.
|
||||
4) Verify there are no obvious runtime-breaking issues in the generated index.ts.
|
||||
|
||||
Return exactly:
|
||||
PASS|<reason>|<compact markdown log>
|
||||
or
|
||||
FAIL|<reason>|<compact markdown log>`;
|
||||
|
||||
const run = await cursorAgent({
|
||||
prompt,
|
||||
mode: "ask",
|
||||
cwd: NERVE_ROOT,
|
||||
env: null,
|
||||
timeoutMs: null,
|
||||
dryRun: false,
|
||||
});
|
||||
if (!run.ok) {
|
||||
return {
|
||||
passed: false,
|
||||
reason: `tester agent failed: ${formatSpawnFailure(run.error)}`,
|
||||
log: "",
|
||||
};
|
||||
}
|
||||
const text = run.value.trim();
|
||||
const pass = text.startsWith("PASS|");
|
||||
const fail = text.startsWith("FAIL|");
|
||||
if (!pass && !fail) {
|
||||
return { passed: false, reason: "tester format invalid", log: text };
|
||||
}
|
||||
const parts = text.split("|");
|
||||
const reason = parts[1] ?? "no reason";
|
||||
const log = parts.slice(2).join("|").trim();
|
||||
return { passed: pass, reason, log };
|
||||
}
|
||||
|
||||
async function runHermesCommitter(
|
||||
workflowName: string,
|
||||
userPrompt: string,
|
||||
testerReason: string,
|
||||
dry: boolean,
|
||||
): Promise<{
|
||||
invoked: boolean;
|
||||
success: boolean;
|
||||
branch: string | null;
|
||||
commitHash: string | null;
|
||||
pushed: boolean | null;
|
||||
log: string;
|
||||
error: string | null;
|
||||
}> {
|
||||
const task = `You are a git committer subagent for Nerve workflow generation.
|
||||
Repository root: ${NERVE_ROOT}
|
||||
|
||||
Goal:
|
||||
- Commit and push generated workflow "${workflowName}".
|
||||
- Handle dirty worktree safely (do not discard unrelated user edits).
|
||||
- Detect default branch automatically.
|
||||
- Create a focused branch for this workflow update.
|
||||
- Stage only workflow files and required config updates.
|
||||
|
||||
Context:
|
||||
- User prompt summary: ${userPrompt.slice(0, 500)}
|
||||
- Tester result: ${testerReason}
|
||||
|
||||
Expected output format:
|
||||
BRANCH=<branch-or-empty>
|
||||
COMMIT=<hash-or-empty>
|
||||
PUSHED=<true|false|unknown>
|
||||
LOG_START
|
||||
<details>
|
||||
LOG_END`;
|
||||
|
||||
if (dry) {
|
||||
return {
|
||||
invoked: true,
|
||||
success: true,
|
||||
branch: "wf/dry-run",
|
||||
commitHash: null,
|
||||
pushed: null,
|
||||
log: "[dry-run] skipped hermes committer",
|
||||
error: null,
|
||||
};
|
||||
}
|
||||
|
||||
const commandAttempts: Array<{ cmd: string; args: string[] }> = [
|
||||
{ cmd: "hermes-agent", args: ["--cwd", NERVE_ROOT, "--task", task] },
|
||||
{ cmd: "hermes", args: ["agent", "--cwd", NERVE_ROOT, "--task", task] },
|
||||
];
|
||||
|
||||
for (const candidate of commandAttempts) {
|
||||
const run = await spawnSafe(candidate.cmd, candidate.args, {
|
||||
cwd: NERVE_ROOT,
|
||||
env: null,
|
||||
timeoutMs: 600_000,
|
||||
dryRun: false,
|
||||
});
|
||||
if (!run.ok) {
|
||||
continue;
|
||||
}
|
||||
const text = `${run.value.stdout}\n${run.value.stderr}`;
|
||||
const branch = text.match(/^BRANCH=(.*)$/m)?.[1]?.trim() ?? null;
|
||||
const commitHash = text.match(/^COMMIT=(.*)$/m)?.[1]?.trim() ?? null;
|
||||
const pushedText = text.match(/^PUSHED=(.*)$/m)?.[1]?.trim().toLowerCase() ?? "unknown";
|
||||
const pushed = pushedText === "true" ? true : pushedText === "false" ? false : null;
|
||||
return {
|
||||
invoked: true,
|
||||
success: true,
|
||||
branch: branch && branch.length > 0 ? branch : null,
|
||||
commitHash: commitHash && commitHash.length > 0 ? commitHash : null,
|
||||
pushed,
|
||||
log: text.slice(0, 20_000),
|
||||
error: null,
|
||||
};
|
||||
}
|
||||
|
||||
const fallback = await cursorAgent({
|
||||
prompt: `Run this git committer task in repository ${NERVE_ROOT}:\n\n${task}`,
|
||||
mode: "default",
|
||||
cwd: NERVE_ROOT,
|
||||
env: null,
|
||||
timeoutMs: null,
|
||||
dryRun: false,
|
||||
});
|
||||
if (!fallback.ok) {
|
||||
return {
|
||||
invoked: true,
|
||||
success: false,
|
||||
branch: null,
|
||||
commitHash: null,
|
||||
pushed: null,
|
||||
log: "",
|
||||
error: `hermes and fallback both failed: ${formatSpawnFailure(fallback.error)}`,
|
||||
};
|
||||
}
|
||||
|
||||
const out = fallback.value;
|
||||
const branch = out.match(/(?:branch|BRANCH)\s*[:=]\s*([^\s]+)/)?.[1] ?? null;
|
||||
const commitHash = out.match(/[a-f0-9]{7,40}/)?.[0] ?? null;
|
||||
return {
|
||||
invoked: true,
|
||||
success: true,
|
||||
branch,
|
||||
commitHash,
|
||||
pushed: out.toLowerCase().includes("push") ? true : null,
|
||||
log: out.slice(0, 20_000),
|
||||
error: null,
|
||||
};
|
||||
}
|
||||
|
||||
const workflow: WorkflowDefinition<WorkflowMeta> = {
|
||||
name: "workflow-generator",
|
||||
|
||||
roles: {
|
||||
async planner(
|
||||
start: StartStep,
|
||||
_messages: WorkflowMessage[],
|
||||
): Promise<RoleResult<WorkflowMeta["planner"]>> {
|
||||
const dry = isDryRun(start);
|
||||
const provider = await resolveDashScopeProvider();
|
||||
const userPrompt = start.content;
|
||||
|
||||
if (provider === null) {
|
||||
return {
|
||||
content: "Cannot run planner: missing DASHSCOPE_API_KEY or DASHSCOPE_BASE_URL.",
|
||||
meta: {
|
||||
userPrompt,
|
||||
workflowName: "",
|
||||
roles: [],
|
||||
flowTransitions: "",
|
||||
validationLoopsDesign: "",
|
||||
externalDeps: "",
|
||||
dataFlow: "",
|
||||
planMarkdown: "",
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
const planningText = `Design a Nerve workflow plan from this request.
|
||||
|
||||
${nerveAgentContext}
|
||||
|
||||
User request:
|
||||
${userPrompt}
|
||||
|
||||
Target root: ${NERVE_ROOT}
|
||||
Workflow dir root: ${WORKFLOWS_DIR}
|
||||
|
||||
Reference structure:
|
||||
\`\`\`ts
|
||||
${buildSenseGeneratorReference().slice(0, 18_000)}
|
||||
\`\`\`
|
||||
|
||||
Current nerve.yaml:
|
||||
\`\`\`yaml
|
||||
${getNerveYaml()}
|
||||
\`\`\`
|
||||
|
||||
Produce a complete markdown plan that includes:
|
||||
- workflow name
|
||||
- roles list
|
||||
- flow/transitions
|
||||
- validation loops design
|
||||
- external deps
|
||||
- data flow`;
|
||||
|
||||
const extracted = await llmExtract({
|
||||
text: planningText,
|
||||
schema: plannerExtractSchema,
|
||||
provider,
|
||||
dryRun: dry,
|
||||
});
|
||||
if (!extracted.ok) {
|
||||
return {
|
||||
content: `[planner] llmExtract failed: ${JSON.stringify(extracted.error)}`,
|
||||
meta: {
|
||||
userPrompt,
|
||||
workflowName: "",
|
||||
roles: [],
|
||||
flowTransitions: "",
|
||||
validationLoopsDesign: "",
|
||||
externalDeps: "",
|
||||
dataFlow: "",
|
||||
planMarkdown: "",
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
const value = extracted.value;
|
||||
const planMarkdown =
|
||||
value.planMarkdown.length > 0
|
||||
? value.planMarkdown
|
||||
: [
|
||||
`# Workflow Plan`,
|
||||
`- workflowName: ${value.workflowName}`,
|
||||
``,
|
||||
`## Roles`,
|
||||
...value.roles.map((r) => `- ${r.name}: ${r.goal} (${r.io})`),
|
||||
``,
|
||||
`## Flow Transitions`,
|
||||
value.flowTransitions,
|
||||
``,
|
||||
`## Validation Loops`,
|
||||
value.validationLoopsDesign,
|
||||
``,
|
||||
`## External Dependencies`,
|
||||
value.externalDeps,
|
||||
``,
|
||||
`## Data Flow`,
|
||||
value.dataFlow,
|
||||
].join("\n");
|
||||
|
||||
return {
|
||||
content: planMarkdown,
|
||||
meta: {
|
||||
userPrompt,
|
||||
workflowName: value.workflowName,
|
||||
roles: value.roles,
|
||||
flowTransitions: value.flowTransitions,
|
||||
validationLoopsDesign: value.validationLoopsDesign,
|
||||
externalDeps: value.externalDeps,
|
||||
dataFlow: value.dataFlow,
|
||||
planMarkdown,
|
||||
},
|
||||
};
|
||||
},
|
||||
|
||||
    async coder(start: StartStep, messages: WorkflowMessage[]): Promise<RoleResult<WorkflowMeta["coder"]>> {
      const dry = isDryRun(start);
      const plannerMeta = lastMetaForRole<WorkflowMeta["planner"]>(messages, "planner");
      const previousTester = lastMetaForRole<WorkflowMeta["tester"]>(messages, "tester");
      const attempt = messages.filter((m) => m.role === "coder").length + 1;

      if (plannerMeta === null || plannerMeta.workflowName.trim().length === 0) {
        return {
          content: "coder cannot continue: missing planner output",
          meta: {
            workflowName: "",
            attempt,
            files: { indexTs: false, packageJson: false, tsconfigJson: false },
            lintPassed: false,
            buildPassed: false,
            lintLog: "",
            buildLog: "",
            cursorOutput: "",
            reason: "missing planner output",
          },
        };
      }

      const wfName = plannerMeta.workflowName.trim();
      const feedback =
        previousTester !== null && previousTester.passed === false
          ? `\n\nPrevious tester failure to fix:\n${previousTester.reason}\n${previousTester.dryRunLog}\n`
          : "";

      const codingPrompt = `Implement a Nerve workflow package under ${WORKFLOWS_DIR}/${wfName}/.

Planner output:
${plannerMeta.planMarkdown}

Structured planner fields:
${JSON.stringify(
  {
    workflowName: plannerMeta.workflowName,
    roles: plannerMeta.roles,
    flowTransitions: plannerMeta.flowTransitions,
    validationLoopsDesign: plannerMeta.validationLoopsDesign,
    externalDeps: plannerMeta.externalDeps,
    dataFlow: plannerMeta.dataFlow,
  },
  null,
  2,
)}
${feedback}

Required files:
1) ${WORKFLOWS_DIR}/${wfName}/index.ts
2) ${WORKFLOWS_DIR}/${wfName}/package.json
3) ${WORKFLOWS_DIR}/${wfName}/tsconfig.json
4) update ${NERVE_ROOT}/nerve.yaml with workflows.${wfName}

Rules:
- keep WorkflowDefinition<WorkflowMeta> pattern
- no dynamic import()
- use types (not interfaces)
- include retry-aware moderator routing
- write compile-ready TypeScript`;

      const agentRun = await cursorAgent({
        prompt: codingPrompt,
        mode: "default",
        cwd: NERVE_ROOT,
        env: null,
        timeoutMs: null,
        dryRun: dry,
      });

      const workflowDir = join(WORKFLOWS_DIR, wfName);
      const files = {
        indexTs: existsSync(join(workflowDir, "index.ts")),
        packageJson: existsSync(join(workflowDir, "package.json")),
        tsconfigJson: existsSync(join(workflowDir, "tsconfig.json")),
      };
      const missing = [
        files.indexTs ? null : "index.ts",
        files.packageJson ? null : "package.json",
        files.tsconfigJson ? null : "tsconfig.json",
      ].filter((x) => x !== null) as string[];

      if (!agentRun.ok) {
        return {
          content: `coder failed: ${formatSpawnFailure(agentRun.error)}`,
          meta: {
            workflowName: wfName,
            attempt,
            files,
            lintPassed: false,
            buildPassed: false,
            lintLog: "",
            buildLog: "",
            cursorOutput: "",
            reason: formatSpawnFailure(agentRun.error),
          },
        };
      }

      if (missing.length > 0) {
        return {
          content: `coder failed: missing required files (${missing.join(", ")})`,
          meta: {
            workflowName: wfName,
            attempt,
            files,
            lintPassed: false,
            buildPassed: false,
            lintLog: "",
            buildLog: "",
            cursorOutput: agentRun.value,
            reason: `missing files: ${missing.join(", ")}`,
          },
        };
      }

      const source = readFileSync(join(workflowDir, "index.ts"), "utf-8");
      const pitfalls = scanGeneratedCodePitfalls(source);
      if (pitfalls.length > 0) {
        return {
          content: `coder static check failed:\n${pitfalls.join("\n")}`,
          meta: {
            workflowName: wfName,
            attempt,
            files,
            lintPassed: false,
            buildPassed: false,
            lintLog: pitfalls.join("\n"),
            buildLog: "",
            cursorOutput: agentRun.value,
            reason: pitfalls.join("; "),
          },
        };
      }

      const check = await runLintAndBuild(workflowDir, dry);
      const passed = check.lintPassed && check.buildPassed;
      return {
        content: passed
          ? `coder PASS: lint+build ok\n\n${check.lintLog}\n\n${check.buildLog}`
          : `coder FAIL: ${check.reason ?? "unknown error"}`,
        meta: {
          workflowName: wfName,
          attempt,
          files,
          lintPassed: check.lintPassed,
          buildPassed: check.buildPassed,
          lintLog: check.lintLog,
          buildLog: check.buildLog,
          cursorOutput: agentRun.value,
          reason: check.reason,
        },
      };
    },

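    // tester: runs only once coder has passed lint+build; performs a dry run of the generated workflow and reports pass/fail.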
    async tester(start: StartStep, messages: WorkflowMessage[]): Promise<RoleResult<WorkflowMeta["tester"]>> {
      const dry = isDryRun(start);
      const plannerMeta = lastMetaForRole<WorkflowMeta["planner"]>(messages, "planner");
      const coderMeta = lastMetaForRole<WorkflowMeta["coder"]>(messages, "coder");
      const attempt = messages.filter((m) => m.role === "tester").length + 1;

      if (plannerMeta === null || coderMeta === null) {
        return {
          content: "tester cannot continue: missing planner/coder output",
          meta: {
            workflowName: "",
            attempt,
            passed: false,
            dryRunLog: "",
            reason: "missing planner/coder output",
          },
        };
      }
      if (!coderMeta.lintPassed || !coderMeta.buildPassed) {
        return {
          content: "tester blocked: coder has not passed lint+build",
          meta: {
            workflowName: coderMeta.workflowName,
            attempt,
            passed: false,
            dryRunLog: `${coderMeta.lintLog}\n\n${coderMeta.buildLog}`,
            reason: "coder did not pass lint+build",
          },
        };
      }

      const dryRun = await runTesterDryRun(coderMeta.workflowName, plannerMeta, coderMeta, dry);
      return {
        content: `${dryRun.passed ? "PASS" : "FAIL"} — ${dryRun.reason}`,
        meta: {
          workflowName: coderMeta.workflowName,
          attempt,
          passed: dryRun.passed,
          dryRunLog: dryRun.log,
          reason: dryRun.reason,
        },
      };
    },

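    // committer: skipped unless the tester passed; hands off to the Hermes committer to branch, commit, and push the result.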
    async committer(
      start: StartStep,
      messages: WorkflowMessage[],
    ): Promise<RoleResult<WorkflowMeta["committer"]>> {
      const dry = isDryRun(start);
      const planner = lastMetaForRole<WorkflowMeta["planner"]>(messages, "planner");
      const tester = lastMetaForRole<WorkflowMeta["tester"]>(messages, "tester");
      const workflowName = inferWorkflowName(messages);

      if (planner === null || tester === null || workflowName.length === 0) {
        return {
          content: "committer skipped: missing planner/tester/workflowName context",
          meta: {
            invoked: false,
            success: false,
            branch: null,
            commitHash: null,
            pushed: null,
            log: "",
            error: "missing committer context",
          },
        };
      }
      if (!tester.passed) {
        return {
          content: "committer skipped: tester not passed",
          meta: {
            invoked: false,
            success: false,
            branch: null,
            commitHash: null,
            pushed: null,
            log: "",
            error: "tester not passed",
          },
        };
      }

      const committed = await runHermesCommitter(
        workflowName,
        planner.userPrompt,
        tester.reason,
        dry,
      );
      return {
        content: committed.success ? committed.log : `committer failed: ${committed.error ?? "unknown"}`,
        meta: committed,
      };
    },
  },

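  // moderator: routes planner -> coder -> tester -> committer, retrying failed planner/coder/tester steps up to 3 attempts each before ending.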
  moderator(context) {
    if (context.steps.length === 0) {
      return "planner";
    }
    const last = context.steps[context.steps.length - 1];

    if (last.role === "planner") {
      if (last.meta.workflowName.trim().length > 0) return "coder";
      const plannerAttempts = context.steps.filter((s) => s.role === "planner").length;
      return plannerAttempts < 3 ? "planner" : END;
    }
    if (last.role === "coder") {
      if (last.meta.lintPassed && last.meta.buildPassed) {
        return "tester";
      }
      if (last.meta.attempt < 3) {
        return "coder";
      }
      return END;
    }
    if (last.role === "tester") {
      if (last.meta.passed) {
        return "committer";
      }
      if (last.meta.attempt < 3) {
        return "coder";
      }
      return END;
    }
    return END;
  },
};

export default workflow;
22
workflows/workflow-generator/package.json
Normal file
@ -0,0 +1,22 @@
{
  "name": "workflow-generator-workflow",
  "version": "0.0.1",
  "private": true,
  "type": "module",
  "dependencies": {
    "@uncaged/nerve-core": "latest",
    "@uncaged/nerve-workflow-utils": "latest",
    "zod": "^4.3.6"
  },
  "devDependencies": {
    "@types/node": "^22.0.0",
    "typescript": "^5.7.0"
  },
  "pnpm": {
    "overrides": {
      "@uncaged/nerve-daemon": "link:../../../repos/nerve/packages/daemon",
      "@uncaged/nerve-core": "link:../../../repos/nerve/packages/core",
      "@uncaged/nerve-workflow-utils": "link:../../../repos/nerve/packages/workflow-utils"
    }
  }
}
59
workflows/workflow-generator/pnpm-lock.yaml
generated
Normal file
@ -0,0 +1,59 @@
lockfileVersion: '9.0'

settings:
  autoInstallPeers: true
  excludeLinksFromLockfile: false

overrides:
  '@uncaged/nerve-daemon': link:../../../repos/nerve/packages/daemon
  '@uncaged/nerve-core': link:../../../repos/nerve/packages/core
  '@uncaged/nerve-workflow-utils': link:../../../repos/nerve/packages/workflow-utils

importers:

  .:
    dependencies:
      '@uncaged/nerve-core':
        specifier: link:../../../repos/nerve/packages/core
        version: link:../../../repos/nerve/packages/core
      '@uncaged/nerve-workflow-utils':
        specifier: link:../../../repos/nerve/packages/workflow-utils
        version: link:../../../repos/nerve/packages/workflow-utils
      zod:
        specifier: ^4.3.6
        version: 4.3.6
    devDependencies:
      '@types/node':
        specifier: ^22.0.0
        version: 22.19.17
      typescript:
        specifier: ^5.7.0
        version: 5.9.3

packages:

  '@types/node@22.19.17':
    resolution: {integrity: sha512-wGdMcf+vPYM6jikpS/qhg6WiqSV/OhG+jeeHT/KlVqxYfD40iYJf9/AE1uQxVWFvU7MipKRkRv8NSHiCGgPr8Q==}

  typescript@5.9.3:
    resolution: {integrity: sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw==}
    engines: {node: '>=14.17'}
    hasBin: true

  undici-types@6.21.0:
    resolution: {integrity: sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ==}

  zod@4.3.6:
    resolution: {integrity: sha512-rftlrkhHZOcjDwkGlnUtZZkvaPHCsDATp4pGpuOOMDaTdDDXF91wuVDJoWoPsKX/3YPQ5fHuF3STjcYyKr+Qhg==}

snapshots:

  '@types/node@22.19.17':
    dependencies:
      undici-types: 6.21.0

  typescript@5.9.3: {}

  undici-types@6.21.0: {}

  zod@4.3.6: {}
13
workflows/workflow-generator/tsconfig.json
Normal file
@ -0,0 +1,13 @@
{
  "compilerOptions": {
    "target": "ES2022",
    "lib": ["ES2022"],
    "module": "NodeNext",
    "moduleResolution": "NodeNext",
    "strict": true,
    "skipLibCheck": true,
    "noEmit": true,
    "types": ["node"]
  },
  "include": ["./**/*.ts"]
}