Compare commits
70 Commits
feat/3-wor
...
master
| Author | SHA1 | Date | |
|---|---|---|---|
| a4625a4559 | |||
| c71212a0ce | |||
| 8186a23ceb | |||
| 29d47bd9c4 | |||
| 436ccf12b3 | |||
| 2f78c72e4e | |||
| dc1e96d8f3 | |||
| 7432f80d61 | |||
| 1da41c7f08 | |||
| 07be0d3dfa | |||
| 0fdd2d26cc | |||
| cf7e288874 | |||
| f7cf1a1cb2 | |||
| e4fd5d6ba4 | |||
| 1c512435de | |||
| 8774d71d57 | |||
| 1d9e574c94 | |||
| 252162ea8e | |||
| 60979aaa6a | |||
| b0cff7e0ed | |||
| b282dfdb7b | |||
| bed5ecb56b | |||
| 6a2dbb7335 | |||
| 174df68368 | |||
| 59b8f033ba | |||
| 0a9da468f7 | |||
| ac47daa42b | |||
| a609dc2486 | |||
| eaddd88109 | |||
| 1683e41b05 | |||
| a506e5b36b | |||
| 42f943c303 | |||
| 215a8f6566 | |||
| f6e29a5cae | |||
| f651389ad8 | |||
| 85fac3158d | |||
| 16bea3b8a7 | |||
| 03146b210a | |||
|
|
c585e0d8a8 | ||
| 3a2b8a49a3 | |||
| aef9943746 | |||
|
|
95df8bc3c2 | ||
|
|
70fd064bad | ||
| 56ce22fb1b | |||
| 66ce30cdfb | |||
| 28ac2e9dad | |||
| 86f02da306 | |||
| 7313111548 | |||
| 64a5fc5301 | |||
| d786827ac8 | |||
| d6e95f5c65 | |||
| 95587260f6 | |||
| 57c740cdde | |||
| 75f2768a8c | |||
| 3d9f239230 | |||
| bbcaf1eba5 | |||
| fbcc1ff30c | |||
| 76760c4d29 | |||
| 18e201b49c | |||
| daf07b5746 | |||
| bd89dcaff6 | |||
| 994de1e7ff | |||
| e8765abac6 | |||
| ef7d83ad0a | |||
| 495d8d1b60 | |||
| 0fab8a68c3 | |||
| 7fb161cf96 | |||
| 6778ba5246 | |||
| d638623456 | |||
| bf77e3452a |
2
.gitignore
vendored
2
.gitignore
vendored
@ -4,3 +4,5 @@ logs/
|
|||||||
nerve.pid
|
nerve.pid
|
||||||
nerve.sock
|
nerve.sock
|
||||||
false/
|
false/
|
||||||
|
*.db
|
||||||
|
dist/
|
||||||
|
|||||||
154
CONVENTIONS.md
Normal file
154
CONVENTIONS.md
Normal file
@ -0,0 +1,154 @@
|
|||||||
|
# Nerve Workspace Conventions
|
||||||
|
|
||||||
|
This document defines coding and workflow conventions for the nerve-workspace (`~/.uncaged-nerve`).
|
||||||
|
All roles (planner, coder, reviewer, tester) should reference this file.
|
||||||
|
|
||||||
|
## Language & Paradigm
|
||||||
|
|
||||||
|
### Functional-first
|
||||||
|
|
||||||
|
Use `function` + `type`, not `class` + `interface`.
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
// ✅ Good
|
||||||
|
type Signal = { senseId: string; value: unknown; ts: number };
|
||||||
|
function createSignal(senseId: string, value: unknown): Signal { ... }
|
||||||
|
|
||||||
|
// ❌ Bad
|
||||||
|
class Signal implements ISignal { ... }
|
||||||
|
```
|
||||||
|
|
||||||
|
### Rules
|
||||||
|
|
||||||
|
| Rule | Description |
|
||||||
|
|------|-------------|
|
||||||
|
| `type` over `interface` | All type definitions use `type` |
|
||||||
|
| `function` over `class` | Pure functions + closures, no class |
|
||||||
|
| No `this` | Functions must not depend on `this` context |
|
||||||
|
| No inheritance | No `extends`, `implements`, `abstract` |
|
||||||
|
| Composition over inheritance | Use function composition |
|
||||||
|
| No optional properties | Use `T \| null` instead of `?:` |
|
||||||
|
| No dynamic `import()` | Always static top-level `import` |
|
||||||
|
| `async/await` only | Never `.then()` chains |
|
||||||
|
|
||||||
|
### Exceptions
|
||||||
|
|
||||||
|
Classes allowed when required by a library (e.g. Drizzle `sqliteTable`) or Error subclasses.
|
||||||
|
|
||||||
|
## Naming
|
||||||
|
|
||||||
|
| Type | Style | Example |
|
||||||
|
|------|-------|---------|
|
||||||
|
| Files | kebab-case | `signal-bus.ts` |
|
||||||
|
| Types | PascalCase | `SignalBus` |
|
||||||
|
| Functions/variables | camelCase | `createSignalBus` |
|
||||||
|
| Constants | UPPER_SNAKE | `MAX_RETRY_COUNT` |
|
||||||
|
|
||||||
|
## Error Handling
|
||||||
|
|
||||||
|
- Use `Result<T, E>` for expected failures
|
||||||
|
- `throw` only for unrecoverable bugs
|
||||||
|
- No try-catch for flow control
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
type Result<T, E = Error> = { ok: true; value: T } | { ok: false; error: E };
|
||||||
|
```
|
||||||
|
|
||||||
|
## Workflow Structure
|
||||||
|
|
||||||
|
Each workflow follows the multi-file pattern:
|
||||||
|
|
||||||
|
```
|
||||||
|
workflows/<name>/
|
||||||
|
index.ts — WorkflowDefinition default export (thin entry point)
|
||||||
|
build.ts — factory function with dependency injection
|
||||||
|
moderator.ts — moderator function + WorkflowMeta type
|
||||||
|
roles/
|
||||||
|
<role>/
|
||||||
|
index.ts — build function + meta schema
|
||||||
|
prompt.ts — prompt pure function (string template)
|
||||||
|
package.json — with esbuild build script
|
||||||
|
tsconfig.json
|
||||||
|
```
|
||||||
|
|
||||||
|
### Role Implementation Patterns
|
||||||
|
|
||||||
|
| Pattern | When to use | Example |
|
||||||
|
|---------|-------------|---------|
|
||||||
|
| `createCursorRole` | Needs file system access (code generation, planning) | planner, coder |
|
||||||
|
| `createHermesRole` | Needs shell + tools (testing, reviewing) | tester, reviewer |
|
||||||
|
| `createLlmRole` | Pure LLM reasoning, no tools | analysis roles |
|
||||||
|
| `createRole(hermesAdapter, …)` | Agent role with LLM + shell (branch/commit/push from thread context) | solve-issue committer, publish |
|
||||||
|
| Direct `Role<Meta>` | No LLM needed, scripted logic | thin wrappers only |
|
||||||
|
|
||||||
|
### Meta Convention
|
||||||
|
|
||||||
|
Meta is a **routing signal only** — one boolean per role:
|
||||||
|
- `{ ready: boolean }` — planner
|
||||||
|
- `{ done: boolean }` — coder
|
||||||
|
- `{ approved: boolean }` — reviewer
|
||||||
|
- `{ passed: boolean }` — tester
|
||||||
|
- `{ committed: boolean }` — committer (solve-issue: branch created, pushed)
|
||||||
|
- `{ success: boolean }` — publish (PR opened)
|
||||||
|
|
||||||
|
### Standard Flow
|
||||||
|
|
||||||
|
```
|
||||||
|
planner → coder → reviewer → tester → committer → END
|
||||||
|
```
|
||||||
|
|
||||||
|
- Reviewer rejection → back to coder (within MAX_CODER_ITERATIONS)
|
||||||
|
- Tester failure → back to coder (within MAX_CODER_ITERATIONS)
|
||||||
|
- Committer failure → back to coder (within MAX_CODER_ITERATIONS)
|
||||||
|
|
||||||
|
## Sense Structure
|
||||||
|
|
||||||
|
```
|
||||||
|
senses/<name>/
|
||||||
|
src/
|
||||||
|
index.ts — compute() function + schema
|
||||||
|
schema.ts — Drizzle table definition
|
||||||
|
migrations/ — SQLite migrations
|
||||||
|
package.json — with esbuild build script
|
||||||
|
```
|
||||||
|
|
||||||
|
## Toolchain
|
||||||
|
|
||||||
|
| Tool | Purpose |
|
||||||
|
|------|---------|
|
||||||
|
| **pnpm** | Package manager (workspace mode) |
|
||||||
|
| **TypeScript** | Type checking |
|
||||||
|
| **esbuild** | Bundling (each workflow/sense bundles independently) |
|
||||||
|
|
||||||
|
### Commands
|
||||||
|
|
||||||
|
```bash
|
||||||
|
pnpm build # build all packages
|
||||||
|
pnpm -r build # same, explicit recursive
|
||||||
|
cd workflows/<name> && pnpm build # build one workflow
|
||||||
|
```
|
||||||
|
|
||||||
|
## Git & Commit Convention
|
||||||
|
|
||||||
|
```
|
||||||
|
<type>(<scope>): <description>
|
||||||
|
|
||||||
|
type: feat | fix | refactor | docs | chore | test
|
||||||
|
scope: workflow | sense | core | ...
|
||||||
|
```
|
||||||
|
|
||||||
|
### What NOT to commit
|
||||||
|
|
||||||
|
- `node_modules/`
|
||||||
|
- `dist/` (build outputs, generated by esbuild)
|
||||||
|
- `.DS_Store`
|
||||||
|
- pnpm cache artifacts (e.g. `false/` directories from `--no-cache` misuse)
|
||||||
|
- Secrets, API keys, tokens
|
||||||
|
- Unrelated file changes outside the task scope
|
||||||
|
|
||||||
|
## Dependencies
|
||||||
|
|
||||||
|
Shared packages from the nerve monorepo:
|
||||||
|
- `@uncaged/nerve-core` — types, END constant, WorkflowDefinition
|
||||||
|
- `@uncaged/nerve-workflow-utils` — role factories, spawnSafe, llmExtract, cursorAgent
|
||||||
|
- `zod` — schema definitions for meta extraction
|
||||||
35
nerve.yaml
35
nerve.yaml
@ -1,33 +1,26 @@
|
|||||||
# nerve.yaml — Nerve workspace configuration
|
# nerve.yaml — Nerve workspace configuration
|
||||||
|
|
||||||
|
extract:
|
||||||
|
provider: dashscope
|
||||||
|
model: qwen-plus
|
||||||
|
|
||||||
senses:
|
senses:
|
||||||
linux-system-health:
|
|
||||||
group: system
|
|
||||||
interval: 30s
|
|
||||||
throttle: 10s
|
|
||||||
timeout: 15s
|
|
||||||
hermes-gateway-health:
|
hermes-gateway-health:
|
||||||
group: system
|
group: system
|
||||||
interval: 2m
|
interval: 2m
|
||||||
throttle: 30s
|
throttle: 30s
|
||||||
timeout: 30s
|
timeout: 30s
|
||||||
hermes-session-message-stats:
|
|
||||||
group: hermes
|
|
||||||
interval: 15m
|
|
||||||
throttle: 30s
|
|
||||||
timeout: 60s
|
|
||||||
worker-process-metrics:
|
|
||||||
group: system
|
|
||||||
interval: 1m
|
|
||||||
throttle: 15s
|
|
||||||
timeout: 5s
|
|
||||||
|
|
||||||
workflows:
|
workflows:
|
||||||
sense-generator:
|
develop-sense:
|
||||||
concurrency: 1
|
concurrency: 1
|
||||||
overflow: drop
|
overflow: queue
|
||||||
workflow-generator:
|
develop-workflow:
|
||||||
concurrency: 1
|
concurrency: 1
|
||||||
overflow: drop
|
overflow: queue
|
||||||
gitea-issue-solver:
|
solve-issue:
|
||||||
concurrency: 1
|
concurrency: 1
|
||||||
overflow: drop
|
overflow: queue
|
||||||
|
extract-knowledge:
|
||||||
|
concurrency: 1
|
||||||
|
overflow: queue
|
||||||
|
|||||||
21
package.json
21
package.json
@ -3,24 +3,39 @@
|
|||||||
"version": "0.0.1",
|
"version": "0.0.1",
|
||||||
"private": true,
|
"private": true,
|
||||||
"type": "module",
|
"type": "module",
|
||||||
|
"scripts": {
|
||||||
|
"build": "node scripts/build.mjs"
|
||||||
|
},
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
|
"@uncaged/nerve-adapter-cursor": "link:../repos/nerve/packages/adapter-cursor",
|
||||||
|
"@uncaged/nerve-adapter-hermes": "link:../repos/nerve/packages/adapter-hermes",
|
||||||
"@uncaged/nerve-core": "latest",
|
"@uncaged/nerve-core": "latest",
|
||||||
"@uncaged/nerve-daemon": "latest",
|
"@uncaged/nerve-daemon": "link:../repos/nerve/packages/daemon",
|
||||||
|
"@uncaged/nerve-role-committer": "link:../repos/nerve/packages/role-committer",
|
||||||
|
"@uncaged/nerve-role-reviewer": "link:../repos/nerve/packages/role-reviewer",
|
||||||
|
"@uncaged/nerve-workflow-meta": "link:../repos/nerve/packages/workflow-meta",
|
||||||
"@uncaged/nerve-workflow-utils": "link:../repos/nerve/packages/workflow-utils",
|
"@uncaged/nerve-workflow-utils": "link:../repos/nerve/packages/workflow-utils",
|
||||||
"drizzle-orm": "latest",
|
"drizzle-orm": "latest",
|
||||||
"zod": "^4.3.6"
|
"zod": "^4.3.6"
|
||||||
},
|
},
|
||||||
"devDependencies": {
|
"devDependencies": {
|
||||||
"drizzle-kit": "latest"
|
"@types/node": "^22.0.0",
|
||||||
|
"drizzle-kit": "latest",
|
||||||
|
"esbuild": "^0.27.0",
|
||||||
|
"typescript": "^5.7.0"
|
||||||
},
|
},
|
||||||
"pnpm": {
|
"pnpm": {
|
||||||
"onlyBuiltDependencies": [
|
"onlyBuiltDependencies": [
|
||||||
"esbuild"
|
"esbuild"
|
||||||
],
|
],
|
||||||
"overrides": {
|
"overrides": {
|
||||||
|
"@uncaged/nerve-adapter-cursor": "link:../repos/nerve/packages/adapter-cursor",
|
||||||
|
"@uncaged/nerve-adapter-hermes": "link:../repos/nerve/packages/adapter-hermes",
|
||||||
"@uncaged/nerve-daemon": "link:../repos/nerve/packages/daemon",
|
"@uncaged/nerve-daemon": "link:../repos/nerve/packages/daemon",
|
||||||
"@uncaged/nerve-core": "link:../repos/nerve/packages/core",
|
"@uncaged/nerve-core": "link:../repos/nerve/packages/core",
|
||||||
"@uncaged/nerve-workflow-utils": "link:../repos/nerve/packages/workflow-utils"
|
"@uncaged/nerve-workflow-utils": "link:../repos/nerve/packages/workflow-utils",
|
||||||
|
"@uncaged/nerve-role-committer": "link:../repos/nerve/packages/role-committer",
|
||||||
|
"@uncaged/nerve-workflow-meta": "link:../repos/nerve/packages/workflow-meta"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
225
pnpm-lock.yaml
generated
225
pnpm-lock.yaml
generated
@ -5,20 +5,39 @@ settings:
|
|||||||
excludeLinksFromLockfile: false
|
excludeLinksFromLockfile: false
|
||||||
|
|
||||||
overrides:
|
overrides:
|
||||||
|
'@uncaged/nerve-adapter-cursor': link:../repos/nerve/packages/adapter-cursor
|
||||||
|
'@uncaged/nerve-adapter-hermes': link:../repos/nerve/packages/adapter-hermes
|
||||||
'@uncaged/nerve-daemon': link:../repos/nerve/packages/daemon
|
'@uncaged/nerve-daemon': link:../repos/nerve/packages/daemon
|
||||||
'@uncaged/nerve-core': link:../repos/nerve/packages/core
|
'@uncaged/nerve-core': link:../repos/nerve/packages/core
|
||||||
'@uncaged/nerve-workflow-utils': link:../repos/nerve/packages/workflow-utils
|
'@uncaged/nerve-workflow-utils': link:../repos/nerve/packages/workflow-utils
|
||||||
|
'@uncaged/nerve-role-committer': link:../repos/nerve/packages/role-committer
|
||||||
|
'@uncaged/nerve-workflow-meta': link:../repos/nerve/packages/workflow-meta
|
||||||
|
|
||||||
importers:
|
importers:
|
||||||
|
|
||||||
.:
|
.:
|
||||||
dependencies:
|
dependencies:
|
||||||
|
'@uncaged/nerve-adapter-cursor':
|
||||||
|
specifier: link:../repos/nerve/packages/adapter-cursor
|
||||||
|
version: link:../repos/nerve/packages/adapter-cursor
|
||||||
|
'@uncaged/nerve-adapter-hermes':
|
||||||
|
specifier: link:../repos/nerve/packages/adapter-hermes
|
||||||
|
version: link:../repos/nerve/packages/adapter-hermes
|
||||||
'@uncaged/nerve-core':
|
'@uncaged/nerve-core':
|
||||||
specifier: link:../repos/nerve/packages/core
|
specifier: link:../repos/nerve/packages/core
|
||||||
version: link:../repos/nerve/packages/core
|
version: link:../repos/nerve/packages/core
|
||||||
'@uncaged/nerve-daemon':
|
'@uncaged/nerve-daemon':
|
||||||
specifier: link:../repos/nerve/packages/daemon
|
specifier: link:../repos/nerve/packages/daemon
|
||||||
version: link:../repos/nerve/packages/daemon
|
version: link:../repos/nerve/packages/daemon
|
||||||
|
'@uncaged/nerve-role-committer':
|
||||||
|
specifier: link:../repos/nerve/packages/role-committer
|
||||||
|
version: link:../repos/nerve/packages/role-committer
|
||||||
|
'@uncaged/nerve-role-reviewer':
|
||||||
|
specifier: link:../repos/nerve/packages/role-reviewer
|
||||||
|
version: link:../repos/nerve/packages/role-reviewer
|
||||||
|
'@uncaged/nerve-workflow-meta':
|
||||||
|
specifier: link:../repos/nerve/packages/workflow-meta
|
||||||
|
version: link:../repos/nerve/packages/workflow-meta
|
||||||
'@uncaged/nerve-workflow-utils':
|
'@uncaged/nerve-workflow-utils':
|
||||||
specifier: link:../repos/nerve/packages/workflow-utils
|
specifier: link:../repos/nerve/packages/workflow-utils
|
||||||
version: link:../repos/nerve/packages/workflow-utils
|
version: link:../repos/nerve/packages/workflow-utils
|
||||||
@ -29,9 +48,196 @@ importers:
|
|||||||
specifier: ^4.3.6
|
specifier: ^4.3.6
|
||||||
version: 4.3.6
|
version: 4.3.6
|
||||||
devDependencies:
|
devDependencies:
|
||||||
|
'@types/node':
|
||||||
|
specifier: ^22.0.0
|
||||||
|
version: 22.19.17
|
||||||
drizzle-kit:
|
drizzle-kit:
|
||||||
specifier: latest
|
specifier: latest
|
||||||
version: 0.31.10
|
version: 0.31.10
|
||||||
|
esbuild:
|
||||||
|
specifier: ^0.27.0
|
||||||
|
version: 0.27.7
|
||||||
|
typescript:
|
||||||
|
specifier: ^5.7.0
|
||||||
|
version: 5.9.3
|
||||||
|
|
||||||
|
senses/git-workspace-status:
|
||||||
|
devDependencies:
|
||||||
|
'@types/node':
|
||||||
|
specifier: ^22.0.0
|
||||||
|
version: 22.19.17
|
||||||
|
esbuild:
|
||||||
|
specifier: ^0.27.0
|
||||||
|
version: 0.27.7
|
||||||
|
typescript:
|
||||||
|
specifier: ^5.7.0
|
||||||
|
version: 5.9.3
|
||||||
|
|
||||||
|
senses/hermes-gateway-health:
|
||||||
|
devDependencies:
|
||||||
|
'@types/node':
|
||||||
|
specifier: ^22.0.0
|
||||||
|
version: 22.19.17
|
||||||
|
esbuild:
|
||||||
|
specifier: ^0.27.0
|
||||||
|
version: 0.27.7
|
||||||
|
typescript:
|
||||||
|
specifier: ^5.7.0
|
||||||
|
version: 5.9.3
|
||||||
|
|
||||||
|
senses/hermes-session-message-stats:
|
||||||
|
devDependencies:
|
||||||
|
'@types/node':
|
||||||
|
specifier: ^22.0.0
|
||||||
|
version: 22.19.17
|
||||||
|
esbuild:
|
||||||
|
specifier: ^0.27.0
|
||||||
|
version: 0.27.7
|
||||||
|
typescript:
|
||||||
|
specifier: ^5.7.0
|
||||||
|
version: 5.9.3
|
||||||
|
|
||||||
|
senses/linux-system-health:
|
||||||
|
devDependencies:
|
||||||
|
'@types/node':
|
||||||
|
specifier: ^22.0.0
|
||||||
|
version: 22.19.17
|
||||||
|
esbuild:
|
||||||
|
specifier: ^0.27.0
|
||||||
|
version: 0.27.7
|
||||||
|
typescript:
|
||||||
|
specifier: ^5.7.0
|
||||||
|
version: 5.9.3
|
||||||
|
|
||||||
|
senses/worker-process-metrics:
|
||||||
|
devDependencies:
|
||||||
|
'@types/node':
|
||||||
|
specifier: ^22.0.0
|
||||||
|
version: 22.19.17
|
||||||
|
esbuild:
|
||||||
|
specifier: ^0.27.0
|
||||||
|
version: 0.27.7
|
||||||
|
typescript:
|
||||||
|
specifier: ^5.7.0
|
||||||
|
version: 5.9.3
|
||||||
|
|
||||||
|
workflows/develop-sense:
|
||||||
|
dependencies:
|
||||||
|
'@uncaged/nerve-adapter-cursor':
|
||||||
|
specifier: link:../../../repos/nerve/packages/adapter-cursor
|
||||||
|
version: link:../../../repos/nerve/packages/adapter-cursor
|
||||||
|
'@uncaged/nerve-adapter-hermes':
|
||||||
|
specifier: link:../../../repos/nerve/packages/adapter-hermes
|
||||||
|
version: link:../../../repos/nerve/packages/adapter-hermes
|
||||||
|
'@uncaged/nerve-core':
|
||||||
|
specifier: link:../../../repos/nerve/packages/core
|
||||||
|
version: link:../../../repos/nerve/packages/core
|
||||||
|
'@uncaged/nerve-workflow-meta':
|
||||||
|
specifier: link:../../../repos/nerve/packages/workflow-meta
|
||||||
|
version: link:../../../repos/nerve/packages/workflow-meta
|
||||||
|
'@uncaged/nerve-workflow-utils':
|
||||||
|
specifier: link:../../../repos/nerve/packages/workflow-utils
|
||||||
|
version: link:../../../repos/nerve/packages/workflow-utils
|
||||||
|
zod:
|
||||||
|
specifier: ^4.3.6
|
||||||
|
version: 4.3.6
|
||||||
|
devDependencies:
|
||||||
|
'@types/node':
|
||||||
|
specifier: ^22.0.0
|
||||||
|
version: 22.19.17
|
||||||
|
esbuild:
|
||||||
|
specifier: ^0.27.0
|
||||||
|
version: 0.27.7
|
||||||
|
typescript:
|
||||||
|
specifier: ^5.7.0
|
||||||
|
version: 5.9.3
|
||||||
|
|
||||||
|
workflows/develop-workflow:
|
||||||
|
dependencies:
|
||||||
|
'@uncaged/nerve-adapter-cursor':
|
||||||
|
specifier: link:../../../repos/nerve/packages/adapter-cursor
|
||||||
|
version: link:../../../repos/nerve/packages/adapter-cursor
|
||||||
|
'@uncaged/nerve-adapter-hermes':
|
||||||
|
specifier: link:../../../repos/nerve/packages/adapter-hermes
|
||||||
|
version: link:../../../repos/nerve/packages/adapter-hermes
|
||||||
|
'@uncaged/nerve-core':
|
||||||
|
specifier: link:../../../repos/nerve/packages/core
|
||||||
|
version: link:../../../repos/nerve/packages/core
|
||||||
|
'@uncaged/nerve-workflow-meta':
|
||||||
|
specifier: link:../../../repos/nerve/packages/workflow-meta
|
||||||
|
version: link:../../../repos/nerve/packages/workflow-meta
|
||||||
|
'@uncaged/nerve-workflow-utils':
|
||||||
|
specifier: link:../../../repos/nerve/packages/workflow-utils
|
||||||
|
version: link:../../../repos/nerve/packages/workflow-utils
|
||||||
|
zod:
|
||||||
|
specifier: ^4.3.6
|
||||||
|
version: 4.3.6
|
||||||
|
devDependencies:
|
||||||
|
'@types/node':
|
||||||
|
specifier: ^22.0.0
|
||||||
|
version: 22.19.17
|
||||||
|
esbuild:
|
||||||
|
specifier: ^0.27.0
|
||||||
|
version: 0.27.7
|
||||||
|
typescript:
|
||||||
|
specifier: ^5.7.0
|
||||||
|
version: 5.9.3
|
||||||
|
|
||||||
|
workflows/extract-knowledge:
|
||||||
|
dependencies:
|
||||||
|
'@uncaged/nerve-adapter-cursor':
|
||||||
|
specifier: link:../../../repos/nerve/packages/adapter-cursor
|
||||||
|
version: link:../../../repos/nerve/packages/adapter-cursor
|
||||||
|
'@uncaged/nerve-adapter-hermes':
|
||||||
|
specifier: link:../../../repos/nerve/packages/adapter-hermes
|
||||||
|
version: link:../../../repos/nerve/packages/adapter-hermes
|
||||||
|
'@uncaged/nerve-core':
|
||||||
|
specifier: link:../../../repos/nerve/packages/core
|
||||||
|
version: link:../../../repos/nerve/packages/core
|
||||||
|
'@uncaged/nerve-workflow-utils':
|
||||||
|
specifier: link:../../../repos/nerve/packages/workflow-utils
|
||||||
|
version: link:../../../repos/nerve/packages/workflow-utils
|
||||||
|
zod:
|
||||||
|
specifier: ^4.3.6
|
||||||
|
version: 4.3.6
|
||||||
|
devDependencies:
|
||||||
|
'@types/node':
|
||||||
|
specifier: ^22.0.0
|
||||||
|
version: 22.19.17
|
||||||
|
esbuild:
|
||||||
|
specifier: ^0.27.0
|
||||||
|
version: 0.27.7
|
||||||
|
typescript:
|
||||||
|
specifier: ^5.7.0
|
||||||
|
version: 5.9.3
|
||||||
|
|
||||||
|
workflows/solve-issue:
|
||||||
|
dependencies:
|
||||||
|
'@uncaged/nerve-adapter-cursor':
|
||||||
|
specifier: link:../../../repos/nerve/packages/adapter-cursor
|
||||||
|
version: link:../../../repos/nerve/packages/adapter-cursor
|
||||||
|
'@uncaged/nerve-adapter-hermes':
|
||||||
|
specifier: link:../../../repos/nerve/packages/adapter-hermes
|
||||||
|
version: link:../../../repos/nerve/packages/adapter-hermes
|
||||||
|
'@uncaged/nerve-core':
|
||||||
|
specifier: link:../../../repos/nerve/packages/core
|
||||||
|
version: link:../../../repos/nerve/packages/core
|
||||||
|
'@uncaged/nerve-workflow-utils':
|
||||||
|
specifier: link:../../../repos/nerve/packages/workflow-utils
|
||||||
|
version: link:../../../repos/nerve/packages/workflow-utils
|
||||||
|
zod:
|
||||||
|
specifier: ^4.3.6
|
||||||
|
version: 4.3.6
|
||||||
|
devDependencies:
|
||||||
|
'@types/node':
|
||||||
|
specifier: ^22.0.0
|
||||||
|
version: 22.19.17
|
||||||
|
esbuild:
|
||||||
|
specifier: ^0.27.0
|
||||||
|
version: 0.27.7
|
||||||
|
typescript:
|
||||||
|
specifier: ^5.7.0
|
||||||
|
version: 5.9.3
|
||||||
|
|
||||||
packages:
|
packages:
|
||||||
|
|
||||||
@ -490,6 +696,9 @@ packages:
|
|||||||
cpu: [x64]
|
cpu: [x64]
|
||||||
os: [win32]
|
os: [win32]
|
||||||
|
|
||||||
|
'@types/node@22.19.17':
|
||||||
|
resolution: {integrity: sha512-wGdMcf+vPYM6jikpS/qhg6WiqSV/OhG+jeeHT/KlVqxYfD40iYJf9/AE1uQxVWFvU7MipKRkRv8NSHiCGgPr8Q==}
|
||||||
|
|
||||||
base64-js@1.5.1:
|
base64-js@1.5.1:
|
||||||
resolution: {integrity: sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==}
|
resolution: {integrity: sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==}
|
||||||
|
|
||||||
@ -750,6 +959,14 @@ packages:
|
|||||||
tunnel-agent@0.6.0:
|
tunnel-agent@0.6.0:
|
||||||
resolution: {integrity: sha512-McnNiV1l8RYeY8tBgEpuodCC1mLUdbSN+CYBL7kJsJNInOP8UjDDEwdk6Mw60vdLLrr5NHKZhMAOSrR2NZuQ+w==}
|
resolution: {integrity: sha512-McnNiV1l8RYeY8tBgEpuodCC1mLUdbSN+CYBL7kJsJNInOP8UjDDEwdk6Mw60vdLLrr5NHKZhMAOSrR2NZuQ+w==}
|
||||||
|
|
||||||
|
typescript@5.9.3:
|
||||||
|
resolution: {integrity: sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw==}
|
||||||
|
engines: {node: '>=14.17'}
|
||||||
|
hasBin: true
|
||||||
|
|
||||||
|
undici-types@6.21.0:
|
||||||
|
resolution: {integrity: sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ==}
|
||||||
|
|
||||||
util-deprecate@1.0.2:
|
util-deprecate@1.0.2:
|
||||||
resolution: {integrity: sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==}
|
resolution: {integrity: sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==}
|
||||||
|
|
||||||
@ -995,6 +1212,10 @@ snapshots:
|
|||||||
'@esbuild/win32-x64@0.27.7':
|
'@esbuild/win32-x64@0.27.7':
|
||||||
optional: true
|
optional: true
|
||||||
|
|
||||||
|
'@types/node@22.19.17':
|
||||||
|
dependencies:
|
||||||
|
undici-types: 6.21.0
|
||||||
|
|
||||||
base64-js@1.5.1:
|
base64-js@1.5.1:
|
||||||
optional: true
|
optional: true
|
||||||
|
|
||||||
@ -1286,6 +1507,10 @@ snapshots:
|
|||||||
safe-buffer: 5.2.1
|
safe-buffer: 5.2.1
|
||||||
optional: true
|
optional: true
|
||||||
|
|
||||||
|
typescript@5.9.3: {}
|
||||||
|
|
||||||
|
undici-types@6.21.0: {}
|
||||||
|
|
||||||
util-deprecate@1.0.2:
|
util-deprecate@1.0.2:
|
||||||
optional: true
|
optional: true
|
||||||
|
|
||||||
|
|||||||
46
scripts/build.mjs
Normal file
46
scripts/build.mjs
Normal file
@ -0,0 +1,46 @@
|
|||||||
|
import * as esbuild from "esbuild";
|
||||||
|
import fs from "node:fs";
|
||||||
|
import path from "node:path";
|
||||||
|
import { fileURLToPath } from "node:url";
|
||||||
|
|
||||||
|
const root = path.join(path.dirname(fileURLToPath(import.meta.url)), "..");
|
||||||
|
const dist = path.join(root, "dist");
|
||||||
|
|
||||||
|
const opts = {
|
||||||
|
bundle: true,
|
||||||
|
platform: "node",
|
||||||
|
format: "esm",
|
||||||
|
packages: "external",
|
||||||
|
};
|
||||||
|
|
||||||
|
function listDirs(dir) {
|
||||||
|
if (!fs.existsSync(dir)) return [];
|
||||||
|
return fs
|
||||||
|
.readdirSync(dir)
|
||||||
|
.filter((name) => !name.startsWith(".") && !name.startsWith("_"))
|
||||||
|
.map((name) => ({ name, full: path.join(dir, name) }))
|
||||||
|
.filter(({ full }) => fs.statSync(full).isDirectory());
|
||||||
|
}
|
||||||
|
|
||||||
|
async function main() {
|
||||||
|
// Clean dist/
|
||||||
|
fs.rmSync(dist, { recursive: true, force: true });
|
||||||
|
|
||||||
|
for (const { name, full } of listDirs(path.join(root, "senses"))) {
|
||||||
|
const entry = path.join(full, "src", "index.ts");
|
||||||
|
if (!fs.existsSync(entry)) continue;
|
||||||
|
const outfile = path.join(dist, "senses", name, "index.js");
|
||||||
|
fs.mkdirSync(path.dirname(outfile), { recursive: true });
|
||||||
|
await esbuild.build({ ...opts, entryPoints: [entry], outfile });
|
||||||
|
}
|
||||||
|
|
||||||
|
for (const { name, full } of listDirs(path.join(root, "workflows"))) {
|
||||||
|
const entry = path.join(full, "index.ts");
|
||||||
|
if (!fs.existsSync(entry)) continue;
|
||||||
|
const outfile = path.join(dist, "workflows", name, "index.js");
|
||||||
|
fs.mkdirSync(path.dirname(outfile), { recursive: true });
|
||||||
|
await esbuild.build({ ...opts, entryPoints: [entry], outfile });
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
await main();
|
||||||
@ -1,374 +0,0 @@
|
|||||||
// src/index.ts
|
|
||||||
import { execFile } from "node:child_process";
|
|
||||||
|
|
||||||
// src/schema.ts
|
|
||||||
import { integer, real, sqliteTable, text } from "drizzle-orm/sqlite-core";
|
|
||||||
var hermesGatewayHealth = sqliteTable("hermes_gateway_health", {
|
|
||||||
id: integer("id").primaryKey({ autoIncrement: true }),
|
|
||||||
ts: integer("ts").notNull(),
|
|
||||||
alive: integer("alive").notNull(),
|
|
||||||
mainPid: integer("main_pid").notNull(),
|
|
||||||
rssBytes: integer("rss_bytes").notNull(),
|
|
||||||
cpuPercent: real("cpu_percent").notNull(),
|
|
||||||
uptimeSec: integer("uptime_sec").notNull(),
|
|
||||||
activeSessions: integer("active_sessions").notNull(),
|
|
||||||
childProcessCount: integer("child_process_count").notNull(),
|
|
||||||
httpOk: integer("http_ok").notNull(),
|
|
||||||
httpStatusCode: integer("http_status_code").notNull(),
|
|
||||||
httpLatencyMs: integer("http_latency_ms").notNull(),
|
|
||||||
httpError: text("http_error").notNull()
|
|
||||||
});
|
|
||||||
|
|
||||||
// src/index.ts
|
|
||||||
var EXEC_TIMEOUT_MS = 25e3;
|
|
||||||
var HTTP_TIMEOUT_MS = Math.min(23e3, EXEC_TIMEOUT_MS - 2e3);
|
|
||||||
var HTTP_ERROR_MAX_LEN = 256;
|
|
||||||
function gatewayProbeUrl() {
|
|
||||||
const u = process.env.HERMES_GATEWAY_HEALTH_URL ?? process.env.NERVE_HERMES_GATEWAY_URL ?? "";
|
|
||||||
return String(u).trim();
|
|
||||||
}
|
|
||||||
function truncateHttpError(err) {
|
|
||||||
const raw = err && typeof err === "object" && "code" in err && err.code ? String(err.code) : String(err?.message ?? err ?? "error");
|
|
||||||
const s = raw.trim() || "error";
|
|
||||||
return s.length > HTTP_ERROR_MAX_LEN ? s.slice(0, HTTP_ERROR_MAX_LEN) : s;
|
|
||||||
}
|
|
||||||
async function probeGatewayHttp(url) {
|
|
||||||
if (!url) {
|
|
||||||
return {
|
|
||||||
httpOk: 0,
|
|
||||||
httpStatusCode: 0,
|
|
||||||
httpLatencyMs: 0,
|
|
||||||
httpError: "missing_url"
|
|
||||||
};
|
|
||||||
}
|
|
||||||
const t0 = Date.now();
|
|
||||||
try {
|
|
||||||
const signal = AbortSignal.timeout(HTTP_TIMEOUT_MS);
|
|
||||||
const res = await fetch(url, {
|
|
||||||
method: "GET",
|
|
||||||
signal,
|
|
||||||
redirect: "follow"
|
|
||||||
});
|
|
||||||
const httpLatencyMs = Date.now() - t0;
|
|
||||||
const code = res.status;
|
|
||||||
const ok = code >= 200 && code < 400;
|
|
||||||
return {
|
|
||||||
httpOk: ok ? 1 : 0,
|
|
||||||
httpStatusCode: code,
|
|
||||||
httpLatencyMs,
|
|
||||||
httpError: ok ? "" : truncateHttpError({ message: `HTTP ${code}` })
|
|
||||||
};
|
|
||||||
} catch (err) {
|
|
||||||
const httpLatencyMs = Date.now() - t0;
|
|
||||||
return {
|
|
||||||
httpOk: 0,
|
|
||||||
httpStatusCode: 0,
|
|
||||||
httpLatencyMs,
|
|
||||||
httpError: truncateHttpError(err)
|
|
||||||
};
|
|
||||||
}
|
|
||||||
}
|
|
||||||
function etimeToSeconds(etime) {
|
|
||||||
let s = String(etime).trim();
|
|
||||||
if (!s) return 0;
|
|
||||||
let days = 0;
|
|
||||||
if (s.includes("-")) {
|
|
||||||
const idx = s.indexOf("-");
|
|
||||||
const d = Number.parseInt(s.slice(0, idx), 10);
|
|
||||||
days = Number.isFinite(d) ? d : 0;
|
|
||||||
s = s.slice(idx + 1);
|
|
||||||
}
|
|
||||||
const parts = s.split(":").map((x) => Number.parseInt(String(x).trim(), 10));
|
|
||||||
if (parts.some((n) => !Number.isFinite(n))) return 0;
|
|
||||||
if (parts.length === 3) {
|
|
||||||
return Math.trunc(days * 86400 + parts[0] * 3600 + parts[1] * 60 + parts[2]);
|
|
||||||
}
|
|
||||||
if (parts.length === 2) {
|
|
||||||
return Math.trunc(days * 86400 + parts[0] * 60 + parts[1]);
|
|
||||||
}
|
|
||||||
if (parts.length === 1) {
|
|
||||||
return Math.trunc(days * 86400 + parts[0]);
|
|
||||||
}
|
|
||||||
return 0;
|
|
||||||
}
|
|
||||||
function execFileUtf8(file, args, opts = {}) {
|
|
||||||
return new Promise((resolve) => {
|
|
||||||
execFile(
|
|
||||||
file,
|
|
||||||
args,
|
|
||||||
{
|
|
||||||
encoding: "utf8",
|
|
||||||
maxBuffer: 8 * 1024 * 1024,
|
|
||||||
timeout: EXEC_TIMEOUT_MS,
|
|
||||||
...opts
|
|
||||||
},
|
|
||||||
(err, stdout, stderr) => {
|
|
||||||
const exitCode = err && typeof err.status === "number" ? err.status : err ? -1 : 0;
|
|
||||||
resolve({
|
|
||||||
exitCode,
|
|
||||||
errCode: err?.code,
|
|
||||||
stdout: String(stdout ?? ""),
|
|
||||||
stderr: String(stderr ?? "")
|
|
||||||
});
|
|
||||||
}
|
|
||||||
);
|
|
||||||
});
|
|
||||||
}
|
|
||||||
function parseMainPidFromStatus(text2) {
|
|
||||||
const m = text2.match(/Main PID:\s*(\d+)/i);
|
|
||||||
return m ? Math.trunc(Number.parseInt(m[1], 10)) || 0 : 0;
|
|
||||||
}
|
|
||||||
function parseActiveLineFromStatus(text2) {
|
|
||||||
for (const line of text2.split("\n")) {
|
|
||||||
if (/^\s*Active:/i.test(line)) {
|
|
||||||
const m = line.match(/Active:\s*(\S+)\s*\(([^)]*)\)/i);
|
|
||||||
if (m) {
|
|
||||||
return {
|
|
||||||
active: m[1].toLowerCase() === "active",
|
|
||||||
subRunning: m[2].toLowerCase().includes("running")
|
|
||||||
};
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return { active: false, subRunning: false };
|
|
||||||
}
|
|
||||||
function parseSystemctlShow(text2) {
|
|
||||||
let mainPid = 0;
|
|
||||||
let active = false;
|
|
||||||
let subRunning = false;
|
|
||||||
for (const line of text2.split("\n")) {
|
|
||||||
const t = line.trim();
|
|
||||||
if (t.startsWith("MainPID=")) {
|
|
||||||
mainPid = Math.trunc(Number.parseInt(t.slice("MainPID=".length), 10)) || 0;
|
|
||||||
} else if (t.startsWith("ActiveState=")) {
|
|
||||||
active = t.slice("ActiveState=".length).trim().toLowerCase() === "active";
|
|
||||||
} else if (t.startsWith("SubState=")) {
|
|
||||||
subRunning = t.slice("SubState=".length).trim().toLowerCase() === "running";
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return { mainPid, active, subRunning };
|
|
||||||
}
|
|
||||||
/**
 * Resolve the gateway's main PID and active/running state from systemd.
 * Primary source is `systemctl status`; when any field is unresolved,
 * falls back to the machine-readable `systemctl show` properties.
 */
async function readSystemdState() {
  const status = await execFileUtf8("systemctl", [
    "--user",
    "--no-pager",
    "status",
    "hermes-gateway"
  ]);
  const combined = `${status.stdout}\n${status.stderr}`.trim();
  let mainPid = parseMainPidFromStatus(combined);
  let { active, subRunning } = parseActiveLineFromStatus(combined);
  // Only shell out a second time when status parsing left something unknown.
  if (mainPid <= 0 || !active || !subRunning) {
    const show = await execFileUtf8("systemctl", [
      "--user",
      "--no-pager",
      "show",
      "hermes-gateway",
      "-p",
      "MainPID",
      "-p",
      "ActiveState",
      "-p",
      "SubState"
    ]);
    const parsed = parseSystemctlShow(`${show.stdout}\n${show.stderr}`);
    if (mainPid <= 0 && parsed.mainPid > 0) mainPid = parsed.mainPid;
    active = active || parsed.active;
    subRunning = subRunning || parsed.subRunning;
  }
  return { mainPid, systemdActiveRunning: active && subRunning };
}
|
|
||||||
/**
 * True when `ps -p <pid>` prints a pid line for the given process.
 * A missing `ps` binary (ENOENT) or a non-positive pid reads as "not running".
 */
async function processExists(mainPid) {
  if (mainPid <= 0) return false;
  const result = await execFileUtf8("ps", ["-p", String(mainPid), "-o", "pid="]);
  if (result.errCode === "ENOENT") return false;
  return result.stdout.trim().length > 0;
}
|
|
||||||
/**
 * Read RSS, %CPU, and uptime for a PID via ps(1).
 * Prefers the `etimes` field (plain seconds); falls back to `etime`
 * ([[dd-]hh:]mm:ss, decoded by etimeToSeconds) for ps builds without it.
 * All-zero metrics are returned on any parse/exec failure.
 */
async function readPsMetrics(mainPid) {
  if (mainPid <= 0) {
    return { rssBytes: 0, cpuPercent: 0, uptimeSec: 0 };
  }
  let result = await execFileUtf8("ps", [
    "-p",
    String(mainPid),
    "-o",
    "rss=,%cpu=,etimes="
  ]);
  let line = result.stdout.trim().replace(/\s+/g, " ");
  if (result.errCode === "ENOENT" || !line) {
    return { rssBytes: 0, cpuPercent: 0, uptimeSec: 0 };
  }
  let parts = line.split(" ").filter(Boolean);
  if (parts.length >= 3) {
    const rssKiB = Number(parts[0]);
    const cpu = Number(parts[1]);
    const etimes = Number(parts[2]);
    return {
      // ps reports rss in KiB.
      rssBytes: Number.isFinite(rssKiB) ? Math.trunc(rssKiB * 1024) : 0,
      cpuPercent: Number.isFinite(cpu) ? Math.round(cpu * 100) / 100 : 0,
      uptimeSec: Number.isFinite(etimes) ? Math.trunc(etimes) : 0
    };
  }
  // Fallback path: this ps build rejected `etimes`; retry with `etime`.
  result = await execFileUtf8("ps", [
    "-p",
    String(mainPid),
    "-o",
    "rss=,%cpu=,etime="
  ]);
  line = result.stdout.trim().replace(/\s+/g, " ");
  parts = line.split(" ").filter(Boolean);
  if (parts.length < 3) {
    return { rssBytes: 0, cpuPercent: 0, uptimeSec: 0 };
  }
  const rssKiB = Number(parts[0]);
  const cpu = Number(parts[1]);
  return {
    rssBytes: Number.isFinite(rssKiB) ? Math.trunc(rssKiB * 1024) : 0,
    cpuPercent: Number.isFinite(cpu) ? Math.round(cpu * 100) / 100 : 0,
    uptimeSec: etimeToSeconds(parts.slice(2).join(" "))
  };
}
|
|
||||||
/**
 * Extract an active-session count from `hermes sessions stats` output.
 * Tries several label variants in priority order; the first match wins.
 * Returns 0 when no pattern matches or the number is invalid.
 */
function parseActiveSessionsFromHermesStats(text2) {
  const src = String(text2);
  const patterns = [
    /^\s*Active\s+sessions?:\s*(\d+)/gim,
    /^\s*active\s+sessions?:\s*(\d+)/gim,
    /^\s*Total\s+sessions?:\s*(\d+)/gim
  ];
  for (const re of patterns) {
    // /g regexes are stateful; reset before each use.
    re.lastIndex = 0;
    const hit = re.exec(src);
    if (!hit) continue;
    const count = Math.trunc(Number.parseInt(hit[1], 10));
    return Number.isFinite(count) ? count : 0;
  }
  return 0;
}
|
|
||||||
/**
 * Best-effort active-session count from the `hermes` CLI.
 * Any failure — missing binary, exec error, unparseable output — reads as 0.
 */
async function readActiveSessions() {
  try {
    const result = await execFileUtf8("hermes", ["sessions", "stats"]);
    if (result.errCode === "ENOENT") return 0;
    return parseActiveSessionsFromHermesStats(`${result.stdout}\n${result.stderr}`);
  } catch {
    return 0;
  }
}
|
|
||||||
/**
 * Count the direct child processes of a PID via `ps --ppid`.
 * NOTE(review): `--ppid`/`--no-headers` are GNU ps options — presumably this
 * sense only runs on Linux; confirm before relying on it elsewhere.
 * Best-effort: any failure reads as 0 children.
 */
async function countDirectChildren(mainPid) {
  if (mainPid <= 0) return 0;
  try {
    const result = await execFileUtf8("ps", [
      "--no-headers",
      "-o",
      "pid",
      "--ppid",
      String(mainPid)
    ]);
    if (result.errCode === "ENOENT") return 0;
    let count = 0;
    for (const raw of result.stdout.split("\n")) {
      if (raw.trim()) count += 1;
    }
    return count;
  } catch {
    return 0;
  }
}
|
|
||||||
/**
 * Sense entry point: sample hermes-gateway health, persist one row into
 * `hermes_gateway_health`, and return the sampled values.
 * Every probe is individually fault-isolated so one failing subsystem
 * degrades its own fields to zero instead of aborting the whole sample.
 */
async function compute(db, _peers) {
  const ts = Date.now();

  // systemd view: main PID + active(running) flag.
  let mainPid = 0;
  let systemdActiveRunning = false;
  try {
    const st = await readSystemdState();
    mainPid = st.mainPid;
    systemdActiveRunning = st.systemdActiveRunning;
  } catch {
    mainPid = 0;
    systemdActiveRunning = false;
  }

  // Confirm the PID actually maps to a live process.
  let psOk = false;
  try {
    psOk = await processExists(mainPid);
  } catch {
    psOk = false;
  }

  // Resource metrics are only meaningful for a live process.
  let rssBytes = 0;
  let cpuPercent = 0;
  let uptimeSec = 0;
  if (psOk) {
    try {
      const metrics = await readPsMetrics(mainPid);
      rssBytes = metrics.rssBytes;
      cpuPercent = metrics.cpuPercent;
      uptimeSec = metrics.uptimeSec;
    } catch {
      rssBytes = 0;
      cpuPercent = 0;
      uptimeSec = 0;
    }
  }

  // Alive requires agreement: systemd says running AND the PID exists.
  const alive = systemdActiveRunning && mainPid > 0 && psOk ? 1 : 0;

  let activeSessions = 0;
  try {
    activeSessions = await readActiveSessions();
  } catch {
    activeSessions = 0;
  }

  let childProcessCount = 0;
  if (alive && mainPid > 0) {
    try {
      childProcessCount = await countDirectChildren(mainPid);
    } catch {
      childProcessCount = 0;
    }
  }

  // HTTP reachability probe against the gateway's health URL.
  let httpOk = 0;
  let httpStatusCode = 0;
  let httpLatencyMs = 0;
  let httpError = "";
  try {
    const probe = await probeGatewayHttp(gatewayProbeUrl());
    httpOk = probe.httpOk;
    httpStatusCode = probe.httpStatusCode;
    httpLatencyMs = probe.httpLatencyMs;
    httpError = probe.httpError;
  } catch {
    httpOk = 0;
    httpStatusCode = 0;
    httpLatencyMs = 0;
    httpError = "probe_failed";
  }

  // Persist one sample; per-process fields are zeroed when not alive.
  const row = {
    ts,
    alive,
    mainPid: mainPid > 0 ? mainPid : 0,
    rssBytes: alive ? rssBytes : 0,
    cpuPercent: alive ? cpuPercent : 0,
    uptimeSec: alive ? uptimeSec : 0,
    activeSessions,
    childProcessCount: alive ? childProcessCount : 0,
    httpOk,
    httpStatusCode,
    httpLatencyMs,
    httpError
  };
  await db.insert(hermesGatewayHealth).values(row);
  return { ...row };
}
|
|
||||||
// Public module surface: the sense runner imports `compute` from this bundle.
export {
  compute
};
|
|
||||||
@ -1,14 +0,0 @@
|
|||||||
-- Migration: 0001_init
|
|
||||||
-- Creates the hermes_gateway_health table for hermes-gateway-health sense.
|
|
||||||
|
|
||||||
CREATE TABLE IF NOT EXISTS hermes_gateway_health (
|
|
||||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
|
||||||
ts INTEGER NOT NULL,
|
|
||||||
alive INTEGER NOT NULL,
|
|
||||||
main_pid INTEGER NOT NULL,
|
|
||||||
rss_bytes INTEGER NOT NULL,
|
|
||||||
cpu_percent REAL NOT NULL,
|
|
||||||
uptime_sec INTEGER NOT NULL,
|
|
||||||
active_sessions INTEGER NOT NULL,
|
|
||||||
child_process_count INTEGER NOT NULL
|
|
||||||
);
|
|
||||||
@ -1,7 +0,0 @@
|
|||||||
-- Migration: 0002_add_http_probe
|
|
||||||
-- HTTP reachability columns for hermes-gateway-health sense.
|
|
||||||
|
|
||||||
ALTER TABLE hermes_gateway_health ADD COLUMN http_ok INTEGER NOT NULL DEFAULT 0;
|
|
||||||
ALTER TABLE hermes_gateway_health ADD COLUMN http_status_code INTEGER NOT NULL DEFAULT 0;
|
|
||||||
ALTER TABLE hermes_gateway_health ADD COLUMN http_latency_ms INTEGER NOT NULL DEFAULT 0;
|
|
||||||
ALTER TABLE hermes_gateway_health ADD COLUMN http_error TEXT NOT NULL DEFAULT '';
|
|
||||||
@ -1,17 +0,0 @@
|
|||||||
{
|
|
||||||
"name": "sense-hermes-gateway-health",
|
|
||||||
"version": "0.0.1",
|
|
||||||
"private": true,
|
|
||||||
"type": "module",
|
|
||||||
"scripts": {
|
|
||||||
"build": "esbuild src/index.ts --bundle --platform=node --format=esm --outfile=index.js --packages=external"
|
|
||||||
},
|
|
||||||
"devDependencies": {
|
|
||||||
"@types/node": "^22.0.0",
|
|
||||||
"esbuild": "^0.27.0",
|
|
||||||
"typescript": "^5.7.0"
|
|
||||||
},
|
|
||||||
"pnpm": {
|
|
||||||
"onlyBuiltDependencies": ["esbuild"]
|
|
||||||
}
|
|
||||||
}
|
|
||||||
310
senses/hermes-gateway-health/pnpm-lock.yaml
generated
310
senses/hermes-gateway-health/pnpm-lock.yaml
generated
@ -1,310 +0,0 @@
|
|||||||
lockfileVersion: '9.0'
|
|
||||||
|
|
||||||
settings:
|
|
||||||
autoInstallPeers: true
|
|
||||||
excludeLinksFromLockfile: false
|
|
||||||
|
|
||||||
importers:
|
|
||||||
|
|
||||||
.:
|
|
||||||
devDependencies:
|
|
||||||
'@types/node':
|
|
||||||
specifier: ^22.0.0
|
|
||||||
version: 22.19.17
|
|
||||||
esbuild:
|
|
||||||
specifier: ^0.27.0
|
|
||||||
version: 0.27.7
|
|
||||||
typescript:
|
|
||||||
specifier: ^5.7.0
|
|
||||||
version: 5.9.3
|
|
||||||
|
|
||||||
packages:
|
|
||||||
|
|
||||||
'@esbuild/aix-ppc64@0.27.7':
|
|
||||||
resolution: {integrity: sha512-EKX3Qwmhz1eMdEJokhALr0YiD0lhQNwDqkPYyPhiSwKrh7/4KRjQc04sZ8db+5DVVnZ1LmbNDI1uAMPEUBnQPg==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [ppc64]
|
|
||||||
os: [aix]
|
|
||||||
|
|
||||||
'@esbuild/android-arm64@0.27.7':
|
|
||||||
resolution: {integrity: sha512-62dPZHpIXzvChfvfLJow3q5dDtiNMkwiRzPylSCfriLvZeq0a1bWChrGx/BbUbPwOrsWKMn8idSllklzBy+dgQ==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [arm64]
|
|
||||||
os: [android]
|
|
||||||
|
|
||||||
'@esbuild/android-arm@0.27.7':
|
|
||||||
resolution: {integrity: sha512-jbPXvB4Yj2yBV7HUfE2KHe4GJX51QplCN1pGbYjvsyCZbQmies29EoJbkEc+vYuU5o45AfQn37vZlyXy4YJ8RQ==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [arm]
|
|
||||||
os: [android]
|
|
||||||
|
|
||||||
'@esbuild/android-x64@0.27.7':
|
|
||||||
resolution: {integrity: sha512-x5VpMODneVDb70PYV2VQOmIUUiBtY3D3mPBG8NxVk5CogneYhkR7MmM3yR/uMdITLrC1ml/NV1rj4bMJuy9MCg==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [x64]
|
|
||||||
os: [android]
|
|
||||||
|
|
||||||
'@esbuild/darwin-arm64@0.27.7':
|
|
||||||
resolution: {integrity: sha512-5lckdqeuBPlKUwvoCXIgI2D9/ABmPq3Rdp7IfL70393YgaASt7tbju3Ac+ePVi3KDH6N2RqePfHnXkaDtY9fkw==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [arm64]
|
|
||||||
os: [darwin]
|
|
||||||
|
|
||||||
'@esbuild/darwin-x64@0.27.7':
|
|
||||||
resolution: {integrity: sha512-rYnXrKcXuT7Z+WL5K980jVFdvVKhCHhUwid+dDYQpH+qu+TefcomiMAJpIiC2EM3Rjtq0sO3StMV/+3w3MyyqQ==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [x64]
|
|
||||||
os: [darwin]
|
|
||||||
|
|
||||||
'@esbuild/freebsd-arm64@0.27.7':
|
|
||||||
resolution: {integrity: sha512-B48PqeCsEgOtzME2GbNM2roU29AMTuOIN91dsMO30t+Ydis3z/3Ngoj5hhnsOSSwNzS+6JppqWsuhTp6E82l2w==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [arm64]
|
|
||||||
os: [freebsd]
|
|
||||||
|
|
||||||
'@esbuild/freebsd-x64@0.27.7':
|
|
||||||
resolution: {integrity: sha512-jOBDK5XEjA4m5IJK3bpAQF9/Lelu/Z9ZcdhTRLf4cajlB+8VEhFFRjWgfy3M1O4rO2GQ/b2dLwCUGpiF/eATNQ==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [x64]
|
|
||||||
os: [freebsd]
|
|
||||||
|
|
||||||
'@esbuild/linux-arm64@0.27.7':
|
|
||||||
resolution: {integrity: sha512-RZPHBoxXuNnPQO9rvjh5jdkRmVizktkT7TCDkDmQ0W2SwHInKCAV95GRuvdSvA7w4VMwfCjUiPwDi0ZO6Nfe9A==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [arm64]
|
|
||||||
os: [linux]
|
|
||||||
|
|
||||||
'@esbuild/linux-arm@0.27.7':
|
|
||||||
resolution: {integrity: sha512-RkT/YXYBTSULo3+af8Ib0ykH8u2MBh57o7q/DAs3lTJlyVQkgQvlrPTnjIzzRPQyavxtPtfg0EopvDyIt0j1rA==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [arm]
|
|
||||||
os: [linux]
|
|
||||||
|
|
||||||
'@esbuild/linux-ia32@0.27.7':
|
|
||||||
resolution: {integrity: sha512-GA48aKNkyQDbd3KtkplYWT102C5sn/EZTY4XROkxONgruHPU72l+gW+FfF8tf2cFjeHaRbWpOYa/uRBz/Xq1Pg==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [ia32]
|
|
||||||
os: [linux]
|
|
||||||
|
|
||||||
'@esbuild/linux-loong64@0.27.7':
|
|
||||||
resolution: {integrity: sha512-a4POruNM2oWsD4WKvBSEKGIiWQF8fZOAsycHOt6JBpZ+JN2n2JH9WAv56SOyu9X5IqAjqSIPTaJkqN8F7XOQ5Q==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [loong64]
|
|
||||||
os: [linux]
|
|
||||||
|
|
||||||
'@esbuild/linux-mips64el@0.27.7':
|
|
||||||
resolution: {integrity: sha512-KabT5I6StirGfIz0FMgl1I+R1H73Gp0ofL9A3nG3i/cYFJzKHhouBV5VWK1CSgKvVaG4q1RNpCTR2LuTVB3fIw==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [mips64el]
|
|
||||||
os: [linux]
|
|
||||||
|
|
||||||
'@esbuild/linux-ppc64@0.27.7':
|
|
||||||
resolution: {integrity: sha512-gRsL4x6wsGHGRqhtI+ifpN/vpOFTQtnbsupUF5R5YTAg+y/lKelYR1hXbnBdzDjGbMYjVJLJTd2OFmMewAgwlQ==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [ppc64]
|
|
||||||
os: [linux]
|
|
||||||
|
|
||||||
'@esbuild/linux-riscv64@0.27.7':
|
|
||||||
resolution: {integrity: sha512-hL25LbxO1QOngGzu2U5xeXtxXcW+/GvMN3ejANqXkxZ/opySAZMrc+9LY/WyjAan41unrR3YrmtTsUpwT66InQ==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [riscv64]
|
|
||||||
os: [linux]
|
|
||||||
|
|
||||||
'@esbuild/linux-s390x@0.27.7':
|
|
||||||
resolution: {integrity: sha512-2k8go8Ycu1Kb46vEelhu1vqEP+UeRVj2zY1pSuPdgvbd5ykAw82Lrro28vXUrRmzEsUV0NzCf54yARIK8r0fdw==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [s390x]
|
|
||||||
os: [linux]
|
|
||||||
|
|
||||||
'@esbuild/linux-x64@0.27.7':
|
|
||||||
resolution: {integrity: sha512-hzznmADPt+OmsYzw1EE33ccA+HPdIqiCRq7cQeL1Jlq2gb1+OyWBkMCrYGBJ+sxVzve2ZJEVeePbLM2iEIZSxA==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [x64]
|
|
||||||
os: [linux]
|
|
||||||
|
|
||||||
'@esbuild/netbsd-arm64@0.27.7':
|
|
||||||
resolution: {integrity: sha512-b6pqtrQdigZBwZxAn1UpazEisvwaIDvdbMbmrly7cDTMFnw/+3lVxxCTGOrkPVnsYIosJJXAsILG9XcQS+Yu6w==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [arm64]
|
|
||||||
os: [netbsd]
|
|
||||||
|
|
||||||
'@esbuild/netbsd-x64@0.27.7':
|
|
||||||
resolution: {integrity: sha512-OfatkLojr6U+WN5EDYuoQhtM+1xco+/6FSzJJnuWiUw5eVcicbyK3dq5EeV/QHT1uy6GoDhGbFpprUiHUYggrw==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [x64]
|
|
||||||
os: [netbsd]
|
|
||||||
|
|
||||||
'@esbuild/openbsd-arm64@0.27.7':
|
|
||||||
resolution: {integrity: sha512-AFuojMQTxAz75Fo8idVcqoQWEHIXFRbOc1TrVcFSgCZtQfSdc1RXgB3tjOn/krRHENUB4j00bfGjyl2mJrU37A==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [arm64]
|
|
||||||
os: [openbsd]
|
|
||||||
|
|
||||||
'@esbuild/openbsd-x64@0.27.7':
|
|
||||||
resolution: {integrity: sha512-+A1NJmfM8WNDv5CLVQYJ5PshuRm/4cI6WMZRg1by1GwPIQPCTs1GLEUHwiiQGT5zDdyLiRM/l1G0Pv54gvtKIg==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [x64]
|
|
||||||
os: [openbsd]
|
|
||||||
|
|
||||||
'@esbuild/openharmony-arm64@0.27.7':
|
|
||||||
resolution: {integrity: sha512-+KrvYb/C8zA9CU/g0sR6w2RBw7IGc5J2BPnc3dYc5VJxHCSF1yNMxTV5LQ7GuKteQXZtspjFbiuW5/dOj7H4Yw==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [arm64]
|
|
||||||
os: [openharmony]
|
|
||||||
|
|
||||||
'@esbuild/sunos-x64@0.27.7':
|
|
||||||
resolution: {integrity: sha512-ikktIhFBzQNt/QDyOL580ti9+5mL/YZeUPKU2ivGtGjdTYoqz6jObj6nOMfhASpS4GU4Q/Clh1QtxWAvcYKamA==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [x64]
|
|
||||||
os: [sunos]
|
|
||||||
|
|
||||||
'@esbuild/win32-arm64@0.27.7':
|
|
||||||
resolution: {integrity: sha512-7yRhbHvPqSpRUV7Q20VuDwbjW5kIMwTHpptuUzV+AA46kiPze5Z7qgt6CLCK3pWFrHeNfDd1VKgyP4O+ng17CA==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [arm64]
|
|
||||||
os: [win32]
|
|
||||||
|
|
||||||
'@esbuild/win32-ia32@0.27.7':
|
|
||||||
resolution: {integrity: sha512-SmwKXe6VHIyZYbBLJrhOoCJRB/Z1tckzmgTLfFYOfpMAx63BJEaL9ExI8x7v0oAO3Zh6D/Oi1gVxEYr5oUCFhw==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [ia32]
|
|
||||||
os: [win32]
|
|
||||||
|
|
||||||
'@esbuild/win32-x64@0.27.7':
|
|
||||||
resolution: {integrity: sha512-56hiAJPhwQ1R4i+21FVF7V8kSD5zZTdHcVuRFMW0hn753vVfQN8xlx4uOPT4xoGH0Z/oVATuR82AiqSTDIpaHg==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [x64]
|
|
||||||
os: [win32]
|
|
||||||
|
|
||||||
'@types/node@22.19.17':
|
|
||||||
resolution: {integrity: sha512-wGdMcf+vPYM6jikpS/qhg6WiqSV/OhG+jeeHT/KlVqxYfD40iYJf9/AE1uQxVWFvU7MipKRkRv8NSHiCGgPr8Q==}
|
|
||||||
|
|
||||||
esbuild@0.27.7:
|
|
||||||
resolution: {integrity: sha512-IxpibTjyVnmrIQo5aqNpCgoACA/dTKLTlhMHihVHhdkxKyPO1uBBthumT0rdHmcsk9uMonIWS0m4FljWzILh3w==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
hasBin: true
|
|
||||||
|
|
||||||
typescript@5.9.3:
|
|
||||||
resolution: {integrity: sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw==}
|
|
||||||
engines: {node: '>=14.17'}
|
|
||||||
hasBin: true
|
|
||||||
|
|
||||||
undici-types@6.21.0:
|
|
||||||
resolution: {integrity: sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ==}
|
|
||||||
|
|
||||||
snapshots:
|
|
||||||
|
|
||||||
'@esbuild/aix-ppc64@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/android-arm64@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/android-arm@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/android-x64@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/darwin-arm64@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/darwin-x64@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/freebsd-arm64@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/freebsd-x64@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/linux-arm64@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/linux-arm@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/linux-ia32@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/linux-loong64@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/linux-mips64el@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/linux-ppc64@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/linux-riscv64@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/linux-s390x@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/linux-x64@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/netbsd-arm64@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/netbsd-x64@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/openbsd-arm64@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/openbsd-x64@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/openharmony-arm64@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/sunos-x64@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/win32-arm64@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/win32-ia32@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/win32-x64@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@types/node@22.19.17':
|
|
||||||
dependencies:
|
|
||||||
undici-types: 6.21.0
|
|
||||||
|
|
||||||
esbuild@0.27.7:
|
|
||||||
optionalDependencies:
|
|
||||||
'@esbuild/aix-ppc64': 0.27.7
|
|
||||||
'@esbuild/android-arm': 0.27.7
|
|
||||||
'@esbuild/android-arm64': 0.27.7
|
|
||||||
'@esbuild/android-x64': 0.27.7
|
|
||||||
'@esbuild/darwin-arm64': 0.27.7
|
|
||||||
'@esbuild/darwin-x64': 0.27.7
|
|
||||||
'@esbuild/freebsd-arm64': 0.27.7
|
|
||||||
'@esbuild/freebsd-x64': 0.27.7
|
|
||||||
'@esbuild/linux-arm': 0.27.7
|
|
||||||
'@esbuild/linux-arm64': 0.27.7
|
|
||||||
'@esbuild/linux-ia32': 0.27.7
|
|
||||||
'@esbuild/linux-loong64': 0.27.7
|
|
||||||
'@esbuild/linux-mips64el': 0.27.7
|
|
||||||
'@esbuild/linux-ppc64': 0.27.7
|
|
||||||
'@esbuild/linux-riscv64': 0.27.7
|
|
||||||
'@esbuild/linux-s390x': 0.27.7
|
|
||||||
'@esbuild/linux-x64': 0.27.7
|
|
||||||
'@esbuild/netbsd-arm64': 0.27.7
|
|
||||||
'@esbuild/netbsd-x64': 0.27.7
|
|
||||||
'@esbuild/openbsd-arm64': 0.27.7
|
|
||||||
'@esbuild/openbsd-x64': 0.27.7
|
|
||||||
'@esbuild/openharmony-arm64': 0.27.7
|
|
||||||
'@esbuild/sunos-x64': 0.27.7
|
|
||||||
'@esbuild/win32-arm64': 0.27.7
|
|
||||||
'@esbuild/win32-ia32': 0.27.7
|
|
||||||
'@esbuild/win32-x64': 0.27.7
|
|
||||||
|
|
||||||
typescript@5.9.3: {}
|
|
||||||
|
|
||||||
undici-types@6.21.0: {}
|
|
||||||
@ -1,7 +1,4 @@
|
|||||||
import { execFile } from "node:child_process";
|
import { execFile } from "node:child_process";
|
||||||
import type { LibSQLDatabase } from "drizzle-orm/libsql";
|
|
||||||
import { hermesGatewayHealth } from "./schema.ts";
|
|
||||||
|
|
||||||
/** Keep subprocess deadlines slightly under typical sense timeout (30s). */
|
/** Keep subprocess deadlines slightly under typical sense timeout (30s). */
|
||||||
const EXEC_TIMEOUT_MS = 25_000;
|
const EXEC_TIMEOUT_MS = 25_000;
|
||||||
|
|
||||||
@ -10,6 +7,22 @@ const HTTP_TIMEOUT_MS = Math.min(23_000, EXEC_TIMEOUT_MS - 2000);
|
|||||||
|
|
||||||
const HTTP_ERROR_MAX_LEN = 256;
|
const HTTP_ERROR_MAX_LEN = 256;
|
||||||
|
|
||||||
|
/** How many consecutive failures before triggering a restart. */
|
||||||
|
const FAILURE_THRESHOLD = 3;
|
||||||
|
|
||||||
|
type SenseState = {
|
||||||
|
consecutiveFailures: number;
|
||||||
|
lastRestartTs: number;
|
||||||
|
/** Minimum ms between restart attempts to avoid restart loops. */
|
||||||
|
restartCooldownMs: number;
|
||||||
|
};
|
||||||
|
|
||||||
|
export const initialState: SenseState = {
|
||||||
|
consecutiveFailures: 0,
|
||||||
|
lastRestartTs: 0,
|
||||||
|
restartCooldownMs: 300_000, // 5 minutes
|
||||||
|
};
|
||||||
|
|
||||||
function gatewayProbeUrl(): string {
|
function gatewayProbeUrl(): string {
|
||||||
const u =
|
const u =
|
||||||
process.env.HERMES_GATEWAY_HEALTH_URL ??
|
process.env.HERMES_GATEWAY_HEALTH_URL ??
|
||||||
@ -27,17 +40,13 @@ function truncateHttpError(err: unknown): string {
|
|||||||
return s.length > HTTP_ERROR_MAX_LEN ? s.slice(0, HTTP_ERROR_MAX_LEN) : s;
|
return s.length > HTTP_ERROR_MAX_LEN ? s.slice(0, HTTP_ERROR_MAX_LEN) : s;
|
||||||
}
|
}
|
||||||
|
|
||||||
interface HttpProbeResult {
|
type HttpProbeResult = {
|
||||||
httpOk: number;
|
httpOk: number;
|
||||||
httpStatusCode: number;
|
httpStatusCode: number;
|
||||||
httpLatencyMs: number;
|
httpLatencyMs: number;
|
||||||
httpError: string;
|
httpError: string;
|
||||||
}
|
};
|
||||||
|
|
||||||
/**
|
|
||||||
* GET the gateway URL; success = HTTP 200–399.
|
|
||||||
* URL must be set via HERMES_GATEWAY_HEALTH_URL or NERVE_HERMES_GATEWAY_URL.
|
|
||||||
*/
|
|
||||||
async function probeGatewayHttp(url: string): Promise<HttpProbeResult> {
|
async function probeGatewayHttp(url: string): Promise<HttpProbeResult> {
|
||||||
if (!url) {
|
if (!url) {
|
||||||
return {
|
return {
|
||||||
@ -75,10 +84,6 @@ async function probeGatewayHttp(url: string): Promise<HttpProbeResult> {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
|
||||||
* When `ps` lacks `etimes` (wall-clock seconds since start), parse `etime`
|
|
||||||
* ([[dd-]hh:]mm:ss) into seconds. See ps(1) `etime` field description.
|
|
||||||
*/
|
|
||||||
function etimeToSeconds(etime: string): number {
|
function etimeToSeconds(etime: string): number {
|
||||||
let s = String(etime).trim();
|
let s = String(etime).trim();
|
||||||
if (!s) return 0;
|
if (!s) return 0;
|
||||||
@ -103,12 +108,12 @@ function etimeToSeconds(etime: string): number {
|
|||||||
return 0;
|
return 0;
|
||||||
}
|
}
|
||||||
|
|
||||||
interface ExecResult {
|
type ExecResult = {
|
||||||
exitCode: number;
|
exitCode: number;
|
||||||
errCode: string | undefined;
|
errCode: string | undefined;
|
||||||
stdout: string;
|
stdout: string;
|
||||||
stderr: string;
|
stderr: string;
|
||||||
}
|
};
|
||||||
|
|
||||||
function execFileUtf8(file: string, args: string[], opts: Record<string, unknown> = {}): Promise<ExecResult> {
|
function execFileUtf8(file: string, args: string[], opts: Record<string, unknown> = {}): Promise<ExecResult> {
|
||||||
return new Promise((resolve) => {
|
return new Promise((resolve) => {
|
||||||
@ -217,11 +222,11 @@ async function processExists(mainPid: number): Promise<boolean> {
|
|||||||
return r.stdout.trim().length > 0;
|
return r.stdout.trim().length > 0;
|
||||||
}
|
}
|
||||||
|
|
||||||
interface PsMetrics {
|
type PsMetrics = {
|
||||||
rssBytes: number;
|
rssBytes: number;
|
||||||
cpuPercent: number;
|
cpuPercent: number;
|
||||||
uptimeSec: number;
|
uptimeSec: number;
|
||||||
}
|
};
|
||||||
|
|
||||||
async function readPsMetrics(mainPid: number): Promise<PsMetrics> {
|
async function readPsMetrics(mainPid: number): Promise<PsMetrics> {
|
||||||
if (mainPid <= 0) {
|
if (mainPid <= 0) {
|
||||||
@ -266,61 +271,12 @@ async function readPsMetrics(mainPid: number): Promise<PsMetrics> {
|
|||||||
return { rssBytes, cpuPercent, uptimeSec };
|
return { rssBytes, cpuPercent, uptimeSec };
|
||||||
}
|
}
|
||||||
|
|
||||||
function parseActiveSessionsFromHermesStats(text: string): number {
|
export async function compute(prevState: SenseState) {
|
||||||
const src = String(text);
|
const now = Date.now();
|
||||||
const patterns = [
|
|
||||||
/^\s*Active\s+sessions?:\s*(\d+)/gim,
|
|
||||||
/^\s*active\s+sessions?:\s*(\d+)/gim,
|
|
||||||
/^\s*Total\s+sessions?:\s*(\d+)/gim,
|
|
||||||
];
|
|
||||||
for (const re of patterns) {
|
|
||||||
re.lastIndex = 0;
|
|
||||||
const m = re.exec(src);
|
|
||||||
if (m) {
|
|
||||||
const n = Math.trunc(Number.parseInt(m[1], 10));
|
|
||||||
return Number.isFinite(n) ? n : 0;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return 0;
|
|
||||||
}
|
|
||||||
|
|
||||||
async function readActiveSessions(): Promise<number> {
|
|
||||||
try {
|
|
||||||
const r = await execFileUtf8("hermes", ["sessions", "stats"]);
|
|
||||||
if (r.errCode === "ENOENT") return 0;
|
|
||||||
return parseActiveSessionsFromHermesStats(`${r.stdout}\n${r.stderr}`);
|
|
||||||
} catch {
|
|
||||||
return 0;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
async function countDirectChildren(mainPid: number): Promise<number> {
|
|
||||||
if (mainPid <= 0) return 0;
|
|
||||||
try {
|
|
||||||
const r = await execFileUtf8("ps", [
|
|
||||||
"--no-headers",
|
|
||||||
"-o",
|
|
||||||
"pid",
|
|
||||||
"--ppid",
|
|
||||||
String(mainPid),
|
|
||||||
]);
|
|
||||||
if (r.errCode === "ENOENT") return 0;
|
|
||||||
const lines = r.stdout
|
|
||||||
.split("\n")
|
|
||||||
.map((l) => l.trim())
|
|
||||||
.filter(Boolean);
|
|
||||||
return lines.length;
|
|
||||||
} catch {
|
|
||||||
return 0;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
export async function compute(db: LibSQLDatabase, _peers: unknown) {
|
|
||||||
const ts = Date.now();
|
|
||||||
|
|
||||||
|
// --- probe gateway ---
|
||||||
let mainPid = 0;
|
let mainPid = 0;
|
||||||
let systemdActiveRunning = false;
|
let systemdActiveRunning = false;
|
||||||
|
|
||||||
try {
|
try {
|
||||||
const st = await readSystemdState();
|
const st = await readSystemdState();
|
||||||
mainPid = st.mainPid;
|
mainPid = st.mainPid;
|
||||||
@ -355,22 +311,6 @@ export async function compute(db: LibSQLDatabase, _peers: unknown) {
|
|||||||
|
|
||||||
const alive = systemdActiveRunning && mainPid > 0 && psOk ? 1 : 0;
|
const alive = systemdActiveRunning && mainPid > 0 && psOk ? 1 : 0;
|
||||||
|
|
||||||
let activeSessions = 0;
|
|
||||||
try {
|
|
||||||
activeSessions = await readActiveSessions();
|
|
||||||
} catch {
|
|
||||||
activeSessions = 0;
|
|
||||||
}
|
|
||||||
|
|
||||||
let childProcessCount = 0;
|
|
||||||
if (alive && mainPid > 0) {
|
|
||||||
try {
|
|
||||||
childProcessCount = await countDirectChildren(mainPid);
|
|
||||||
} catch {
|
|
||||||
childProcessCount = 0;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
let httpOk = 0;
|
let httpOk = 0;
|
||||||
let httpStatusCode = 0;
|
let httpStatusCode = 0;
|
||||||
let httpLatencyMs = 0;
|
let httpLatencyMs = 0;
|
||||||
@ -388,37 +328,42 @@ export async function compute(db: LibSQLDatabase, _peers: unknown) {
|
|||||||
httpError = "probe_failed";
|
httpError = "probe_failed";
|
||||||
}
|
}
|
||||||
|
|
||||||
const storedMainPid = mainPid > 0 ? mainPid : 0;
|
// --- decide health ---
|
||||||
|
const healthy = alive === 1 && httpOk === 1;
|
||||||
|
|
||||||
const row = {
|
// --- state machine: track consecutive failures ---
|
||||||
ts,
|
const consecutiveFailures = healthy ? 0 : prevState.consecutiveFailures + 1;
|
||||||
|
const lastRestartTs = prevState.lastRestartTs;
|
||||||
|
const cooldown = prevState.restartCooldownMs;
|
||||||
|
const cooldownElapsed = now - lastRestartTs >= cooldown;
|
||||||
|
|
||||||
|
// --- trigger restart? ---
|
||||||
|
const shouldRestart =
|
||||||
|
consecutiveFailures >= FAILURE_THRESHOLD && cooldownElapsed;
|
||||||
|
|
||||||
|
const nextState: SenseState = {
|
||||||
|
consecutiveFailures,
|
||||||
|
lastRestartTs: shouldRestart ? now : lastRestartTs,
|
||||||
|
restartCooldownMs: cooldown,
|
||||||
|
};
|
||||||
|
|
||||||
|
const signal = {
|
||||||
|
ts: now,
|
||||||
alive,
|
alive,
|
||||||
mainPid: storedMainPid,
|
mainPid: mainPid > 0 ? mainPid : 0,
|
||||||
rssBytes: alive ? rssBytes : 0,
|
rssBytes: alive ? rssBytes : 0,
|
||||||
cpuPercent: alive ? cpuPercent : 0,
|
cpuPercent: alive ? cpuPercent : 0,
|
||||||
uptimeSec: alive ? uptimeSec : 0,
|
uptimeSec: alive ? uptimeSec : 0,
|
||||||
activeSessions,
|
|
||||||
childProcessCount: alive ? childProcessCount : 0,
|
|
||||||
httpOk,
|
httpOk,
|
||||||
httpStatusCode,
|
httpStatusCode,
|
||||||
httpLatencyMs,
|
httpLatencyMs,
|
||||||
httpError,
|
httpError,
|
||||||
|
consecutiveFailures,
|
||||||
};
|
};
|
||||||
|
|
||||||
await db.insert(hermesGatewayHealth).values(row);
|
const trigger = shouldRestart
|
||||||
|
? { command: "systemctl --user restart hermes-gateway" }
|
||||||
|
: null;
|
||||||
|
|
||||||
return {
|
return { state: nextState, signal, trigger };
|
||||||
ts: row.ts,
|
|
||||||
alive: row.alive,
|
|
||||||
mainPid: row.mainPid,
|
|
||||||
rssBytes: row.rssBytes,
|
|
||||||
cpuPercent: row.cpuPercent,
|
|
||||||
uptimeSec: row.uptimeSec,
|
|
||||||
activeSessions: row.activeSessions,
|
|
||||||
childProcessCount: row.childProcessCount,
|
|
||||||
httpOk: row.httpOk,
|
|
||||||
httpStatusCode: row.httpStatusCode,
|
|
||||||
httpLatencyMs: row.httpLatencyMs,
|
|
||||||
httpError: row.httpError,
|
|
||||||
};
|
|
||||||
}
|
}
|
||||||
|
|||||||
@ -1,17 +0,0 @@
|
|||||||
import { integer, real, sqliteTable, text } from "drizzle-orm/sqlite-core";
|
|
||||||
|
|
||||||
export const hermesGatewayHealth = sqliteTable("hermes_gateway_health", {
|
|
||||||
id: integer("id").primaryKey({ autoIncrement: true }),
|
|
||||||
ts: integer("ts").notNull(),
|
|
||||||
alive: integer("alive").notNull(),
|
|
||||||
mainPid: integer("main_pid").notNull(),
|
|
||||||
rssBytes: integer("rss_bytes").notNull(),
|
|
||||||
cpuPercent: real("cpu_percent").notNull(),
|
|
||||||
uptimeSec: integer("uptime_sec").notNull(),
|
|
||||||
activeSessions: integer("active_sessions").notNull(),
|
|
||||||
childProcessCount: integer("child_process_count").notNull(),
|
|
||||||
httpOk: integer("http_ok").notNull(),
|
|
||||||
httpStatusCode: integer("http_status_code").notNull(),
|
|
||||||
httpLatencyMs: integer("http_latency_ms").notNull(),
|
|
||||||
httpError: text("http_error").notNull(),
|
|
||||||
});
|
|
||||||
@ -1,118 +0,0 @@
|
|||||||
// src/index.ts
|
|
||||||
import { createReadStream } from "node:fs";
|
|
||||||
import { readdir } from "node:fs/promises";
|
|
||||||
import { homedir } from "node:os";
|
|
||||||
import { join } from "node:path";
|
|
||||||
import { createInterface } from "node:readline";
|
|
||||||
|
|
||||||
// src/schema.ts
|
|
||||||
import { integer, sqliteTable } from "drizzle-orm/sqlite-core";
|
|
||||||
var hermesSessionMessageStats = sqliteTable("hermes_session_message_stats", {
|
|
||||||
id: integer("id").primaryKey({ autoIncrement: true }),
|
|
||||||
ts: integer("ts").notNull(),
|
|
||||||
totalUserMessages: integer("total_user_messages").notNull(),
|
|
||||||
totalAssistantMessages: integer("total_assistant_messages").notNull(),
|
|
||||||
totalToolMessages: integer("total_tool_messages").notNull(),
|
|
||||||
totalMessages: integer("total_messages").notNull(),
|
|
||||||
activeSessions: integer("active_sessions").notNull(),
|
|
||||||
measurementWindowSeconds: integer("measurement_window_seconds").notNull()
|
|
||||||
});
|
|
||||||
|
|
||||||
// src/index.ts
|
|
||||||
var MEASUREMENT_WINDOW_MS = 9e5;
|
|
||||||
var MEASUREMENT_WINDOW_SECONDS = 900;
|
|
||||||
async function aggregateJsonlFile(filePath, cutoffMs, nowMs) {
|
|
||||||
let user = 0;
|
|
||||||
let assistant = 0;
|
|
||||||
let tool = 0;
|
|
||||||
let fileHadActivity = false;
|
|
||||||
const input = createReadStream(filePath, { encoding: "utf8" });
|
|
||||||
const rl = createInterface({ input, crlfDelay: Infinity });
|
|
||||||
try {
|
|
||||||
for await (const line of rl) {
|
|
||||||
const trimmed = line.trim();
|
|
||||||
if (!trimmed) continue;
|
|
||||||
let obj;
|
|
||||||
try {
|
|
||||||
obj = JSON.parse(trimmed);
|
|
||||||
} catch {
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
if (typeof obj !== "object" || obj === null || typeof obj.role !== "string" || typeof obj.timestamp !== "string") {
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
const record = obj;
|
|
||||||
const t = Date.parse(record.timestamp);
|
|
||||||
if (!Number.isFinite(t) || t < cutoffMs || t > nowMs) continue;
|
|
||||||
const roleNorm = record.role.trim().toLowerCase();
|
|
||||||
if (roleNorm === "user") {
|
|
||||||
user++;
|
|
||||||
fileHadActivity = true;
|
|
||||||
} else if (roleNorm === "assistant") {
|
|
||||||
assistant++;
|
|
||||||
fileHadActivity = true;
|
|
||||||
} else if (roleNorm === "tool") {
|
|
||||||
tool++;
|
|
||||||
fileHadActivity = true;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
} finally {
|
|
||||||
rl.close();
|
|
||||||
}
|
|
||||||
return { user, assistant, tool, fileHadActivity };
|
|
||||||
}
|
|
||||||
async function compute(db, _peers) {
|
|
||||||
const nowMs = Date.now();
|
|
||||||
const cutoffMs = nowMs - MEASUREMENT_WINDOW_MS;
|
|
||||||
const ts = nowMs;
|
|
||||||
let totalUserMessages = 0;
|
|
||||||
let totalAssistantMessages = 0;
|
|
||||||
let totalToolMessages = 0;
|
|
||||||
let activeSessions = 0;
|
|
||||||
const sessionsDir = join(homedir(), ".hermes", "sessions");
|
|
||||||
let files = [];
|
|
||||||
try {
|
|
||||||
const entries = await readdir(sessionsDir, { withFileTypes: true });
|
|
||||||
files = entries.filter((e) => e.isFile() && e.name.endsWith(".jsonl")).map((e) => join(sessionsDir, e.name));
|
|
||||||
} catch (err) {
|
|
||||||
if (err && typeof err === "object" && "code" in err && err.code === "ENOENT") {
|
|
||||||
files = [];
|
|
||||||
} else {
|
|
||||||
throw err;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
for (const filePath of files) {
|
|
||||||
const { user, assistant, tool, fileHadActivity } = await aggregateJsonlFile(
|
|
||||||
filePath,
|
|
||||||
cutoffMs,
|
|
||||||
nowMs
|
|
||||||
);
|
|
||||||
totalUserMessages += user;
|
|
||||||
totalAssistantMessages += assistant;
|
|
||||||
totalToolMessages += tool;
|
|
||||||
if (fileHadActivity) activeSessions++;
|
|
||||||
}
|
|
||||||
const totalMessages = totalUserMessages + totalAssistantMessages + totalToolMessages;
|
|
||||||
const row = {
|
|
||||||
ts,
|
|
||||||
totalUserMessages,
|
|
||||||
totalAssistantMessages,
|
|
||||||
totalToolMessages,
|
|
||||||
totalMessages,
|
|
||||||
activeSessions,
|
|
||||||
measurementWindowSeconds: MEASUREMENT_WINDOW_SECONDS
|
|
||||||
};
|
|
||||||
await db.insert(hermesSessionMessageStats).values(row);
|
|
||||||
return {
|
|
||||||
ts: row.ts,
|
|
||||||
totalUserMessages: row.totalUserMessages,
|
|
||||||
totalAssistantMessages: row.totalAssistantMessages,
|
|
||||||
totalToolMessages: row.totalToolMessages,
|
|
||||||
totalMessages: row.totalMessages,
|
|
||||||
activeSessions: row.activeSessions,
|
|
||||||
measurementWindowSeconds: row.measurementWindowSeconds
|
|
||||||
};
|
|
||||||
}
|
|
||||||
export {
|
|
||||||
compute
|
|
||||||
};
|
|
||||||
@ -1,13 +0,0 @@
|
|||||||
-- Migration: 0001_init
|
|
||||||
-- Creates the hermes_session_message_stats table for hermes-session-message-stats sense.
|
|
||||||
|
|
||||||
CREATE TABLE IF NOT EXISTS hermes_session_message_stats (
|
|
||||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
|
||||||
ts INTEGER NOT NULL,
|
|
||||||
total_user_messages INTEGER NOT NULL,
|
|
||||||
total_assistant_messages INTEGER NOT NULL,
|
|
||||||
total_tool_messages INTEGER NOT NULL,
|
|
||||||
total_messages INTEGER NOT NULL,
|
|
||||||
active_sessions INTEGER NOT NULL,
|
|
||||||
measurement_window_seconds INTEGER NOT NULL
|
|
||||||
);
|
|
||||||
@ -1,17 +0,0 @@
|
|||||||
{
|
|
||||||
"name": "sense-hermes-session-message-stats",
|
|
||||||
"version": "0.0.1",
|
|
||||||
"private": true,
|
|
||||||
"type": "module",
|
|
||||||
"scripts": {
|
|
||||||
"build": "esbuild src/index.ts --bundle --platform=node --format=esm --outfile=index.js --packages=external"
|
|
||||||
},
|
|
||||||
"devDependencies": {
|
|
||||||
"@types/node": "^22.0.0",
|
|
||||||
"esbuild": "^0.27.0",
|
|
||||||
"typescript": "^5.7.0"
|
|
||||||
},
|
|
||||||
"pnpm": {
|
|
||||||
"onlyBuiltDependencies": ["esbuild"]
|
|
||||||
}
|
|
||||||
}
|
|
||||||
310
senses/hermes-session-message-stats/pnpm-lock.yaml
generated
310
senses/hermes-session-message-stats/pnpm-lock.yaml
generated
@ -1,310 +0,0 @@
|
|||||||
lockfileVersion: '9.0'
|
|
||||||
|
|
||||||
settings:
|
|
||||||
autoInstallPeers: true
|
|
||||||
excludeLinksFromLockfile: false
|
|
||||||
|
|
||||||
importers:
|
|
||||||
|
|
||||||
.:
|
|
||||||
devDependencies:
|
|
||||||
'@types/node':
|
|
||||||
specifier: ^22.0.0
|
|
||||||
version: 22.19.17
|
|
||||||
esbuild:
|
|
||||||
specifier: ^0.27.0
|
|
||||||
version: 0.27.7
|
|
||||||
typescript:
|
|
||||||
specifier: ^5.7.0
|
|
||||||
version: 5.9.3
|
|
||||||
|
|
||||||
packages:
|
|
||||||
|
|
||||||
'@esbuild/aix-ppc64@0.27.7':
|
|
||||||
resolution: {integrity: sha512-EKX3Qwmhz1eMdEJokhALr0YiD0lhQNwDqkPYyPhiSwKrh7/4KRjQc04sZ8db+5DVVnZ1LmbNDI1uAMPEUBnQPg==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [ppc64]
|
|
||||||
os: [aix]
|
|
||||||
|
|
||||||
'@esbuild/android-arm64@0.27.7':
|
|
||||||
resolution: {integrity: sha512-62dPZHpIXzvChfvfLJow3q5dDtiNMkwiRzPylSCfriLvZeq0a1bWChrGx/BbUbPwOrsWKMn8idSllklzBy+dgQ==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [arm64]
|
|
||||||
os: [android]
|
|
||||||
|
|
||||||
'@esbuild/android-arm@0.27.7':
|
|
||||||
resolution: {integrity: sha512-jbPXvB4Yj2yBV7HUfE2KHe4GJX51QplCN1pGbYjvsyCZbQmies29EoJbkEc+vYuU5o45AfQn37vZlyXy4YJ8RQ==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [arm]
|
|
||||||
os: [android]
|
|
||||||
|
|
||||||
'@esbuild/android-x64@0.27.7':
|
|
||||||
resolution: {integrity: sha512-x5VpMODneVDb70PYV2VQOmIUUiBtY3D3mPBG8NxVk5CogneYhkR7MmM3yR/uMdITLrC1ml/NV1rj4bMJuy9MCg==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [x64]
|
|
||||||
os: [android]
|
|
||||||
|
|
||||||
'@esbuild/darwin-arm64@0.27.7':
|
|
||||||
resolution: {integrity: sha512-5lckdqeuBPlKUwvoCXIgI2D9/ABmPq3Rdp7IfL70393YgaASt7tbju3Ac+ePVi3KDH6N2RqePfHnXkaDtY9fkw==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [arm64]
|
|
||||||
os: [darwin]
|
|
||||||
|
|
||||||
'@esbuild/darwin-x64@0.27.7':
|
|
||||||
resolution: {integrity: sha512-rYnXrKcXuT7Z+WL5K980jVFdvVKhCHhUwid+dDYQpH+qu+TefcomiMAJpIiC2EM3Rjtq0sO3StMV/+3w3MyyqQ==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [x64]
|
|
||||||
os: [darwin]
|
|
||||||
|
|
||||||
'@esbuild/freebsd-arm64@0.27.7':
|
|
||||||
resolution: {integrity: sha512-B48PqeCsEgOtzME2GbNM2roU29AMTuOIN91dsMO30t+Ydis3z/3Ngoj5hhnsOSSwNzS+6JppqWsuhTp6E82l2w==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [arm64]
|
|
||||||
os: [freebsd]
|
|
||||||
|
|
||||||
'@esbuild/freebsd-x64@0.27.7':
|
|
||||||
resolution: {integrity: sha512-jOBDK5XEjA4m5IJK3bpAQF9/Lelu/Z9ZcdhTRLf4cajlB+8VEhFFRjWgfy3M1O4rO2GQ/b2dLwCUGpiF/eATNQ==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [x64]
|
|
||||||
os: [freebsd]
|
|
||||||
|
|
||||||
'@esbuild/linux-arm64@0.27.7':
|
|
||||||
resolution: {integrity: sha512-RZPHBoxXuNnPQO9rvjh5jdkRmVizktkT7TCDkDmQ0W2SwHInKCAV95GRuvdSvA7w4VMwfCjUiPwDi0ZO6Nfe9A==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [arm64]
|
|
||||||
os: [linux]
|
|
||||||
|
|
||||||
'@esbuild/linux-arm@0.27.7':
|
|
||||||
resolution: {integrity: sha512-RkT/YXYBTSULo3+af8Ib0ykH8u2MBh57o7q/DAs3lTJlyVQkgQvlrPTnjIzzRPQyavxtPtfg0EopvDyIt0j1rA==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [arm]
|
|
||||||
os: [linux]
|
|
||||||
|
|
||||||
'@esbuild/linux-ia32@0.27.7':
|
|
||||||
resolution: {integrity: sha512-GA48aKNkyQDbd3KtkplYWT102C5sn/EZTY4XROkxONgruHPU72l+gW+FfF8tf2cFjeHaRbWpOYa/uRBz/Xq1Pg==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [ia32]
|
|
||||||
os: [linux]
|
|
||||||
|
|
||||||
'@esbuild/linux-loong64@0.27.7':
|
|
||||||
resolution: {integrity: sha512-a4POruNM2oWsD4WKvBSEKGIiWQF8fZOAsycHOt6JBpZ+JN2n2JH9WAv56SOyu9X5IqAjqSIPTaJkqN8F7XOQ5Q==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [loong64]
|
|
||||||
os: [linux]
|
|
||||||
|
|
||||||
'@esbuild/linux-mips64el@0.27.7':
|
|
||||||
resolution: {integrity: sha512-KabT5I6StirGfIz0FMgl1I+R1H73Gp0ofL9A3nG3i/cYFJzKHhouBV5VWK1CSgKvVaG4q1RNpCTR2LuTVB3fIw==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [mips64el]
|
|
||||||
os: [linux]
|
|
||||||
|
|
||||||
'@esbuild/linux-ppc64@0.27.7':
|
|
||||||
resolution: {integrity: sha512-gRsL4x6wsGHGRqhtI+ifpN/vpOFTQtnbsupUF5R5YTAg+y/lKelYR1hXbnBdzDjGbMYjVJLJTd2OFmMewAgwlQ==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [ppc64]
|
|
||||||
os: [linux]
|
|
||||||
|
|
||||||
'@esbuild/linux-riscv64@0.27.7':
|
|
||||||
resolution: {integrity: sha512-hL25LbxO1QOngGzu2U5xeXtxXcW+/GvMN3ejANqXkxZ/opySAZMrc+9LY/WyjAan41unrR3YrmtTsUpwT66InQ==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [riscv64]
|
|
||||||
os: [linux]
|
|
||||||
|
|
||||||
'@esbuild/linux-s390x@0.27.7':
|
|
||||||
resolution: {integrity: sha512-2k8go8Ycu1Kb46vEelhu1vqEP+UeRVj2zY1pSuPdgvbd5ykAw82Lrro28vXUrRmzEsUV0NzCf54yARIK8r0fdw==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [s390x]
|
|
||||||
os: [linux]
|
|
||||||
|
|
||||||
'@esbuild/linux-x64@0.27.7':
|
|
||||||
resolution: {integrity: sha512-hzznmADPt+OmsYzw1EE33ccA+HPdIqiCRq7cQeL1Jlq2gb1+OyWBkMCrYGBJ+sxVzve2ZJEVeePbLM2iEIZSxA==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [x64]
|
|
||||||
os: [linux]
|
|
||||||
|
|
||||||
'@esbuild/netbsd-arm64@0.27.7':
|
|
||||||
resolution: {integrity: sha512-b6pqtrQdigZBwZxAn1UpazEisvwaIDvdbMbmrly7cDTMFnw/+3lVxxCTGOrkPVnsYIosJJXAsILG9XcQS+Yu6w==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [arm64]
|
|
||||||
os: [netbsd]
|
|
||||||
|
|
||||||
'@esbuild/netbsd-x64@0.27.7':
|
|
||||||
resolution: {integrity: sha512-OfatkLojr6U+WN5EDYuoQhtM+1xco+/6FSzJJnuWiUw5eVcicbyK3dq5EeV/QHT1uy6GoDhGbFpprUiHUYggrw==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [x64]
|
|
||||||
os: [netbsd]
|
|
||||||
|
|
||||||
'@esbuild/openbsd-arm64@0.27.7':
|
|
||||||
resolution: {integrity: sha512-AFuojMQTxAz75Fo8idVcqoQWEHIXFRbOc1TrVcFSgCZtQfSdc1RXgB3tjOn/krRHENUB4j00bfGjyl2mJrU37A==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [arm64]
|
|
||||||
os: [openbsd]
|
|
||||||
|
|
||||||
'@esbuild/openbsd-x64@0.27.7':
|
|
||||||
resolution: {integrity: sha512-+A1NJmfM8WNDv5CLVQYJ5PshuRm/4cI6WMZRg1by1GwPIQPCTs1GLEUHwiiQGT5zDdyLiRM/l1G0Pv54gvtKIg==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [x64]
|
|
||||||
os: [openbsd]
|
|
||||||
|
|
||||||
'@esbuild/openharmony-arm64@0.27.7':
|
|
||||||
resolution: {integrity: sha512-+KrvYb/C8zA9CU/g0sR6w2RBw7IGc5J2BPnc3dYc5VJxHCSF1yNMxTV5LQ7GuKteQXZtspjFbiuW5/dOj7H4Yw==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [arm64]
|
|
||||||
os: [openharmony]
|
|
||||||
|
|
||||||
'@esbuild/sunos-x64@0.27.7':
|
|
||||||
resolution: {integrity: sha512-ikktIhFBzQNt/QDyOL580ti9+5mL/YZeUPKU2ivGtGjdTYoqz6jObj6nOMfhASpS4GU4Q/Clh1QtxWAvcYKamA==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [x64]
|
|
||||||
os: [sunos]
|
|
||||||
|
|
||||||
'@esbuild/win32-arm64@0.27.7':
|
|
||||||
resolution: {integrity: sha512-7yRhbHvPqSpRUV7Q20VuDwbjW5kIMwTHpptuUzV+AA46kiPze5Z7qgt6CLCK3pWFrHeNfDd1VKgyP4O+ng17CA==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [arm64]
|
|
||||||
os: [win32]
|
|
||||||
|
|
||||||
'@esbuild/win32-ia32@0.27.7':
|
|
||||||
resolution: {integrity: sha512-SmwKXe6VHIyZYbBLJrhOoCJRB/Z1tckzmgTLfFYOfpMAx63BJEaL9ExI8x7v0oAO3Zh6D/Oi1gVxEYr5oUCFhw==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [ia32]
|
|
||||||
os: [win32]
|
|
||||||
|
|
||||||
'@esbuild/win32-x64@0.27.7':
|
|
||||||
resolution: {integrity: sha512-56hiAJPhwQ1R4i+21FVF7V8kSD5zZTdHcVuRFMW0hn753vVfQN8xlx4uOPT4xoGH0Z/oVATuR82AiqSTDIpaHg==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [x64]
|
|
||||||
os: [win32]
|
|
||||||
|
|
||||||
'@types/node@22.19.17':
|
|
||||||
resolution: {integrity: sha512-wGdMcf+vPYM6jikpS/qhg6WiqSV/OhG+jeeHT/KlVqxYfD40iYJf9/AE1uQxVWFvU7MipKRkRv8NSHiCGgPr8Q==}
|
|
||||||
|
|
||||||
esbuild@0.27.7:
|
|
||||||
resolution: {integrity: sha512-IxpibTjyVnmrIQo5aqNpCgoACA/dTKLTlhMHihVHhdkxKyPO1uBBthumT0rdHmcsk9uMonIWS0m4FljWzILh3w==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
hasBin: true
|
|
||||||
|
|
||||||
typescript@5.9.3:
|
|
||||||
resolution: {integrity: sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw==}
|
|
||||||
engines: {node: '>=14.17'}
|
|
||||||
hasBin: true
|
|
||||||
|
|
||||||
undici-types@6.21.0:
|
|
||||||
resolution: {integrity: sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ==}
|
|
||||||
|
|
||||||
snapshots:
|
|
||||||
|
|
||||||
'@esbuild/aix-ppc64@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/android-arm64@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/android-arm@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/android-x64@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/darwin-arm64@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/darwin-x64@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/freebsd-arm64@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/freebsd-x64@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/linux-arm64@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/linux-arm@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/linux-ia32@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/linux-loong64@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/linux-mips64el@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/linux-ppc64@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/linux-riscv64@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/linux-s390x@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/linux-x64@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/netbsd-arm64@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/netbsd-x64@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/openbsd-arm64@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/openbsd-x64@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/openharmony-arm64@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/sunos-x64@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/win32-arm64@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/win32-ia32@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/win32-x64@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@types/node@22.19.17':
|
|
||||||
dependencies:
|
|
||||||
undici-types: 6.21.0
|
|
||||||
|
|
||||||
esbuild@0.27.7:
|
|
||||||
optionalDependencies:
|
|
||||||
'@esbuild/aix-ppc64': 0.27.7
|
|
||||||
'@esbuild/android-arm': 0.27.7
|
|
||||||
'@esbuild/android-arm64': 0.27.7
|
|
||||||
'@esbuild/android-x64': 0.27.7
|
|
||||||
'@esbuild/darwin-arm64': 0.27.7
|
|
||||||
'@esbuild/darwin-x64': 0.27.7
|
|
||||||
'@esbuild/freebsd-arm64': 0.27.7
|
|
||||||
'@esbuild/freebsd-x64': 0.27.7
|
|
||||||
'@esbuild/linux-arm': 0.27.7
|
|
||||||
'@esbuild/linux-arm64': 0.27.7
|
|
||||||
'@esbuild/linux-ia32': 0.27.7
|
|
||||||
'@esbuild/linux-loong64': 0.27.7
|
|
||||||
'@esbuild/linux-mips64el': 0.27.7
|
|
||||||
'@esbuild/linux-ppc64': 0.27.7
|
|
||||||
'@esbuild/linux-riscv64': 0.27.7
|
|
||||||
'@esbuild/linux-s390x': 0.27.7
|
|
||||||
'@esbuild/linux-x64': 0.27.7
|
|
||||||
'@esbuild/netbsd-arm64': 0.27.7
|
|
||||||
'@esbuild/netbsd-x64': 0.27.7
|
|
||||||
'@esbuild/openbsd-arm64': 0.27.7
|
|
||||||
'@esbuild/openbsd-x64': 0.27.7
|
|
||||||
'@esbuild/openharmony-arm64': 0.27.7
|
|
||||||
'@esbuild/sunos-x64': 0.27.7
|
|
||||||
'@esbuild/win32-arm64': 0.27.7
|
|
||||||
'@esbuild/win32-ia32': 0.27.7
|
|
||||||
'@esbuild/win32-x64': 0.27.7
|
|
||||||
|
|
||||||
typescript@5.9.3: {}
|
|
||||||
|
|
||||||
undici-types@6.21.0: {}
|
|
||||||
@ -1,128 +0,0 @@
|
|||||||
import { createReadStream } from "node:fs";
|
|
||||||
import { readdir } from "node:fs/promises";
|
|
||||||
import { homedir } from "node:os";
|
|
||||||
import { join } from "node:path";
|
|
||||||
import { createInterface } from "node:readline";
|
|
||||||
import type { LibSQLDatabase } from "drizzle-orm/libsql";
|
|
||||||
import { hermesSessionMessageStats } from "./schema.ts";
|
|
||||||
|
|
||||||
const MEASUREMENT_WINDOW_MS = 900_000;
|
|
||||||
const MEASUREMENT_WINDOW_SECONDS = 900;
|
|
||||||
|
|
||||||
interface MessageCounts {
|
|
||||||
user: number;
|
|
||||||
assistant: number;
|
|
||||||
tool: number;
|
|
||||||
fileHadActivity: boolean;
|
|
||||||
}
|
|
||||||
|
|
||||||
async function aggregateJsonlFile(filePath: string, cutoffMs: number, nowMs: number): Promise<MessageCounts> {
|
|
||||||
let user = 0;
|
|
||||||
let assistant = 0;
|
|
||||||
let tool = 0;
|
|
||||||
let fileHadActivity = false;
|
|
||||||
|
|
||||||
const input = createReadStream(filePath, { encoding: "utf8" });
|
|
||||||
const rl = createInterface({ input, crlfDelay: Infinity });
|
|
||||||
try {
|
|
||||||
for await (const line of rl) {
|
|
||||||
const trimmed = line.trim();
|
|
||||||
if (!trimmed) continue;
|
|
||||||
let obj: unknown;
|
|
||||||
try {
|
|
||||||
obj = JSON.parse(trimmed);
|
|
||||||
} catch {
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
if (
|
|
||||||
typeof obj !== "object" || obj === null ||
|
|
||||||
typeof (obj as Record<string, unknown>).role !== "string" ||
|
|
||||||
typeof (obj as Record<string, unknown>).timestamp !== "string"
|
|
||||||
) {
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
const record = obj as { role: string; timestamp: string };
|
|
||||||
const t = Date.parse(record.timestamp);
|
|
||||||
if (!Number.isFinite(t) || t < cutoffMs || t > nowMs) continue;
|
|
||||||
|
|
||||||
const roleNorm = record.role.trim().toLowerCase();
|
|
||||||
if (roleNorm === "user") {
|
|
||||||
user++;
|
|
||||||
fileHadActivity = true;
|
|
||||||
} else if (roleNorm === "assistant") {
|
|
||||||
assistant++;
|
|
||||||
fileHadActivity = true;
|
|
||||||
} else if (roleNorm === "tool") {
|
|
||||||
tool++;
|
|
||||||
fileHadActivity = true;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
} finally {
|
|
||||||
rl.close();
|
|
||||||
}
|
|
||||||
|
|
||||||
return { user, assistant, tool, fileHadActivity };
|
|
||||||
}
|
|
||||||
|
|
||||||
export async function compute(db: LibSQLDatabase, _peers: unknown) {
|
|
||||||
const nowMs = Date.now();
|
|
||||||
const cutoffMs = nowMs - MEASUREMENT_WINDOW_MS;
|
|
||||||
const ts = nowMs;
|
|
||||||
|
|
||||||
let totalUserMessages = 0;
|
|
||||||
let totalAssistantMessages = 0;
|
|
||||||
let totalToolMessages = 0;
|
|
||||||
let activeSessions = 0;
|
|
||||||
|
|
||||||
const sessionsDir = join(homedir(), ".hermes", "sessions");
|
|
||||||
let files: string[] = [];
|
|
||||||
try {
|
|
||||||
const entries = await readdir(sessionsDir, { withFileTypes: true });
|
|
||||||
files = entries
|
|
||||||
.filter((e) => e.isFile() && e.name.endsWith(".jsonl"))
|
|
||||||
.map((e) => join(sessionsDir, e.name));
|
|
||||||
} catch (err) {
|
|
||||||
if (err && typeof err === "object" && "code" in err && (err as NodeJS.ErrnoException).code === "ENOENT") {
|
|
||||||
files = [];
|
|
||||||
} else {
|
|
||||||
throw err;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
for (const filePath of files) {
|
|
||||||
const { user, assistant, tool, fileHadActivity } = await aggregateJsonlFile(
|
|
||||||
filePath,
|
|
||||||
cutoffMs,
|
|
||||||
nowMs,
|
|
||||||
);
|
|
||||||
totalUserMessages += user;
|
|
||||||
totalAssistantMessages += assistant;
|
|
||||||
totalToolMessages += tool;
|
|
||||||
if (fileHadActivity) activeSessions++;
|
|
||||||
}
|
|
||||||
|
|
||||||
const totalMessages =
|
|
||||||
totalUserMessages + totalAssistantMessages + totalToolMessages;
|
|
||||||
|
|
||||||
const row = {
|
|
||||||
ts,
|
|
||||||
totalUserMessages,
|
|
||||||
totalAssistantMessages,
|
|
||||||
totalToolMessages,
|
|
||||||
totalMessages,
|
|
||||||
activeSessions,
|
|
||||||
measurementWindowSeconds: MEASUREMENT_WINDOW_SECONDS,
|
|
||||||
};
|
|
||||||
|
|
||||||
await db.insert(hermesSessionMessageStats).values(row);
|
|
||||||
|
|
||||||
return {
|
|
||||||
ts: row.ts,
|
|
||||||
totalUserMessages: row.totalUserMessages,
|
|
||||||
totalAssistantMessages: row.totalAssistantMessages,
|
|
||||||
totalToolMessages: row.totalToolMessages,
|
|
||||||
totalMessages: row.totalMessages,
|
|
||||||
activeSessions: row.activeSessions,
|
|
||||||
measurementWindowSeconds: row.measurementWindowSeconds,
|
|
||||||
};
|
|
||||||
}
|
|
||||||
@ -1,12 +0,0 @@
|
|||||||
import { integer, sqliteTable } from "drizzle-orm/sqlite-core";
|
|
||||||
|
|
||||||
export const hermesSessionMessageStats = sqliteTable("hermes_session_message_stats", {
|
|
||||||
id: integer("id").primaryKey({ autoIncrement: true }),
|
|
||||||
ts: integer("ts").notNull(),
|
|
||||||
totalUserMessages: integer("total_user_messages").notNull(),
|
|
||||||
totalAssistantMessages: integer("total_assistant_messages").notNull(),
|
|
||||||
totalToolMessages: integer("total_tool_messages").notNull(),
|
|
||||||
totalMessages: integer("total_messages").notNull(),
|
|
||||||
activeSessions: integer("active_sessions").notNull(),
|
|
||||||
measurementWindowSeconds: integer("measurement_window_seconds").notNull(),
|
|
||||||
});
|
|
||||||
@ -1,112 +0,0 @@
|
|||||||
// src/index.ts
|
|
||||||
import { loadavg, totalmem, freemem, uptime } from "node:os";
|
|
||||||
import { execSync } from "node:child_process";
|
|
||||||
import { readFile } from "node:fs/promises";
|
|
||||||
|
|
||||||
// src/schema.ts
|
|
||||||
import { integer, real, sqliteTable } from "drizzle-orm/sqlite-core";
|
|
||||||
var snapshots = sqliteTable("snapshots", {
|
|
||||||
ts: integer("ts").primaryKey(),
|
|
||||||
cpuLoad1m: real("cpu_load_1m").notNull(),
|
|
||||||
cpuLoad5m: real("cpu_load_5m").notNull(),
|
|
||||||
cpuLoad15m: real("cpu_load_15m").notNull(),
|
|
||||||
memTotalMB: integer("mem_total_mb").notNull(),
|
|
||||||
memUsedMB: integer("mem_used_mb").notNull(),
|
|
||||||
memUsedPct: real("mem_used_pct").notNull(),
|
|
||||||
diskTotalGB: real("disk_total_gb").notNull(),
|
|
||||||
diskUsedGB: real("disk_used_gb").notNull(),
|
|
||||||
diskUsedPct: real("disk_used_pct").notNull(),
|
|
||||||
uptimeSec: integer("uptime_sec").notNull(),
|
|
||||||
// TCP socket stats (merged from linux-tcp-socket-stats)
|
|
||||||
socketsUsed: integer("sockets_used"),
|
|
||||||
tcpInuse: integer("tcp_inuse"),
|
|
||||||
tcpOrphan: integer("tcp_orphan"),
|
|
||||||
tcpTw: integer("tcp_tw"),
|
|
||||||
tcpAlloc: integer("tcp_alloc"),
|
|
||||||
tcpMemPages: integer("tcp_mem_pages")
|
|
||||||
});
|
|
||||||
|
|
||||||
// src/index.ts
|
|
||||||
var SOCKSTAT_PATH = "/proc/net/sockstat";
|
|
||||||
function parseSockstat(content) {
|
|
||||||
let socketsUsed = 0, tcpInuse = 0, tcpOrphan = 0, tcpTw = 0, tcpAlloc = 0, tcpMemPages = 0;
|
|
||||||
for (const line of content.split("\n")) {
|
|
||||||
const trimmed = line.trim();
|
|
||||||
if (trimmed.startsWith("sockets:")) {
|
|
||||||
const parts = trimmed.split(/\s+/);
|
|
||||||
const idx = parts.indexOf("used");
|
|
||||||
if (idx !== -1 && idx + 1 < parts.length) {
|
|
||||||
socketsUsed = Number.parseInt(parts[idx + 1], 10) || 0;
|
|
||||||
}
|
|
||||||
} else if (trimmed.startsWith("TCP:")) {
|
|
||||||
const parts = trimmed.split(/\s+/);
|
|
||||||
const map = {};
|
|
||||||
for (let i = 1; i + 1 < parts.length; i += 2) {
|
|
||||||
map[parts[i]] = Number.parseInt(parts[i + 1], 10) || 0;
|
|
||||||
}
|
|
||||||
tcpInuse = map.inuse ?? 0;
|
|
||||||
tcpOrphan = map.orphan ?? 0;
|
|
||||||
tcpTw = map.tw ?? 0;
|
|
||||||
tcpAlloc = map.alloc ?? 0;
|
|
||||||
tcpMemPages = map.mem ?? 0;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return { socketsUsed, tcpInuse, tcpOrphan, tcpTw, tcpAlloc, tcpMemPages };
|
|
||||||
}
|
|
||||||
async function compute(db, _peers) {
|
|
||||||
const [load1, load5, load15] = loadavg();
|
|
||||||
const memTotal = totalmem();
|
|
||||||
const memFree = freemem();
|
|
||||||
const memUsed = memTotal - memFree;
|
|
||||||
const memTotalMB = Math.round(memTotal / 1024 / 1024);
|
|
||||||
const memUsedMB = Math.round(memUsed / 1024 / 1024);
|
|
||||||
const memUsedPct = Math.round(memUsed / memTotal * 1e4) / 100;
|
|
||||||
let diskTotalGB = 0, diskUsedGB = 0, diskUsedPct = 0;
|
|
||||||
try {
|
|
||||||
const df = execSync("df -B1 / | tail -1", { encoding: "utf-8" }).trim();
|
|
||||||
const parts = df.split(/\s+/);
|
|
||||||
const total = Number(parts[1]);
|
|
||||||
const used = Number(parts[2]);
|
|
||||||
diskTotalGB = Math.round(total / 1024 / 1024 / 1024 * 100) / 100;
|
|
||||||
diskUsedGB = Math.round(used / 1024 / 1024 / 1024 * 100) / 100;
|
|
||||||
diskUsedPct = total > 0 ? Math.round(used / total * 1e4) / 100 : 0;
|
|
||||||
} catch {
|
|
||||||
}
|
|
||||||
let tcp = { socketsUsed: 0, tcpInuse: 0, tcpOrphan: 0, tcpTw: 0, tcpAlloc: 0, tcpMemPages: 0 };
|
|
||||||
try {
|
|
||||||
const content = await readFile(SOCKSTAT_PATH, "utf8");
|
|
||||||
tcp = parseSockstat(content);
|
|
||||||
} catch {
|
|
||||||
}
|
|
||||||
const ts = Date.now();
|
|
||||||
const uptimeSec = Math.round(uptime());
|
|
||||||
await db.insert(snapshots).values({
|
|
||||||
ts,
|
|
||||||
cpuLoad1m: load1,
|
|
||||||
cpuLoad5m: load5,
|
|
||||||
cpuLoad15m: load15,
|
|
||||||
memTotalMB,
|
|
||||||
memUsedMB,
|
|
||||||
memUsedPct,
|
|
||||||
diskTotalGB,
|
|
||||||
diskUsedGB,
|
|
||||||
diskUsedPct,
|
|
||||||
uptimeSec,
|
|
||||||
socketsUsed: tcp.socketsUsed,
|
|
||||||
tcpInuse: tcp.tcpInuse,
|
|
||||||
tcpOrphan: tcp.tcpOrphan,
|
|
||||||
tcpTw: tcp.tcpTw,
|
|
||||||
tcpAlloc: tcp.tcpAlloc,
|
|
||||||
tcpMemPages: tcp.tcpMemPages
|
|
||||||
});
|
|
||||||
return {
|
|
||||||
cpu: { load1m: load1, load5m: load5, load15m: load15 },
|
|
||||||
memory: { totalMB: memTotalMB, usedMB: memUsedMB, usedPct: memUsedPct },
|
|
||||||
disk: { totalGB: diskTotalGB, usedGB: diskUsedGB, usedPct: diskUsedPct },
|
|
||||||
tcp: { socketsUsed: tcp.socketsUsed, inuse: tcp.tcpInuse, orphan: tcp.tcpOrphan, tw: tcp.tcpTw, alloc: tcp.tcpAlloc, memPages: tcp.tcpMemPages },
|
|
||||||
uptimeSec
|
|
||||||
};
|
|
||||||
}
|
|
||||||
export {
|
|
||||||
compute
|
|
||||||
};
|
|
||||||
@ -1,16 +0,0 @@
|
|||||||
-- Migration: 0001_init
|
|
||||||
-- Creates the snapshots table for linux-system-health sense.
|
|
||||||
|
|
||||||
CREATE TABLE IF NOT EXISTS snapshots (
|
|
||||||
ts INTEGER PRIMARY KEY,
|
|
||||||
cpu_load_1m REAL NOT NULL,
|
|
||||||
cpu_load_5m REAL NOT NULL,
|
|
||||||
cpu_load_15m REAL NOT NULL,
|
|
||||||
mem_total_mb INTEGER NOT NULL,
|
|
||||||
mem_used_mb INTEGER NOT NULL,
|
|
||||||
mem_used_pct REAL NOT NULL,
|
|
||||||
disk_total_gb REAL NOT NULL,
|
|
||||||
disk_used_gb REAL NOT NULL,
|
|
||||||
disk_used_pct REAL NOT NULL,
|
|
||||||
uptime_sec INTEGER NOT NULL
|
|
||||||
);
|
|
||||||
@ -1,6 +0,0 @@
|
|||||||
ALTER TABLE snapshots ADD COLUMN sockets_used INTEGER;
|
|
||||||
ALTER TABLE snapshots ADD COLUMN tcp_inuse INTEGER;
|
|
||||||
ALTER TABLE snapshots ADD COLUMN tcp_orphan INTEGER;
|
|
||||||
ALTER TABLE snapshots ADD COLUMN tcp_tw INTEGER;
|
|
||||||
ALTER TABLE snapshots ADD COLUMN tcp_alloc INTEGER;
|
|
||||||
ALTER TABLE snapshots ADD COLUMN tcp_mem_pages INTEGER;
|
|
||||||
@ -1,17 +0,0 @@
|
|||||||
{
|
|
||||||
"name": "sense-linux-system-health",
|
|
||||||
"version": "0.0.1",
|
|
||||||
"private": true,
|
|
||||||
"type": "module",
|
|
||||||
"scripts": {
|
|
||||||
"build": "esbuild src/index.ts --bundle --platform=node --format=esm --outfile=index.js --packages=external"
|
|
||||||
},
|
|
||||||
"devDependencies": {
|
|
||||||
"@types/node": "^22.0.0",
|
|
||||||
"esbuild": "^0.27.0",
|
|
||||||
"typescript": "^5.7.0"
|
|
||||||
},
|
|
||||||
"pnpm": {
|
|
||||||
"onlyBuiltDependencies": ["esbuild"]
|
|
||||||
}
|
|
||||||
}
|
|
||||||
310
senses/linux-system-health/pnpm-lock.yaml
generated
310
senses/linux-system-health/pnpm-lock.yaml
generated
@ -1,310 +0,0 @@
|
|||||||
lockfileVersion: '9.0'
|
|
||||||
|
|
||||||
settings:
|
|
||||||
autoInstallPeers: true
|
|
||||||
excludeLinksFromLockfile: false
|
|
||||||
|
|
||||||
importers:
|
|
||||||
|
|
||||||
.:
|
|
||||||
devDependencies:
|
|
||||||
'@types/node':
|
|
||||||
specifier: ^22.0.0
|
|
||||||
version: 22.19.17
|
|
||||||
esbuild:
|
|
||||||
specifier: ^0.27.0
|
|
||||||
version: 0.27.7
|
|
||||||
typescript:
|
|
||||||
specifier: ^5.7.0
|
|
||||||
version: 5.9.3
|
|
||||||
|
|
||||||
packages:
|
|
||||||
|
|
||||||
'@esbuild/aix-ppc64@0.27.7':
|
|
||||||
resolution: {integrity: sha512-EKX3Qwmhz1eMdEJokhALr0YiD0lhQNwDqkPYyPhiSwKrh7/4KRjQc04sZ8db+5DVVnZ1LmbNDI1uAMPEUBnQPg==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [ppc64]
|
|
||||||
os: [aix]
|
|
||||||
|
|
||||||
'@esbuild/android-arm64@0.27.7':
|
|
||||||
resolution: {integrity: sha512-62dPZHpIXzvChfvfLJow3q5dDtiNMkwiRzPylSCfriLvZeq0a1bWChrGx/BbUbPwOrsWKMn8idSllklzBy+dgQ==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [arm64]
|
|
||||||
os: [android]
|
|
||||||
|
|
||||||
'@esbuild/android-arm@0.27.7':
|
|
||||||
resolution: {integrity: sha512-jbPXvB4Yj2yBV7HUfE2KHe4GJX51QplCN1pGbYjvsyCZbQmies29EoJbkEc+vYuU5o45AfQn37vZlyXy4YJ8RQ==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [arm]
|
|
||||||
os: [android]
|
|
||||||
|
|
||||||
'@esbuild/android-x64@0.27.7':
|
|
||||||
resolution: {integrity: sha512-x5VpMODneVDb70PYV2VQOmIUUiBtY3D3mPBG8NxVk5CogneYhkR7MmM3yR/uMdITLrC1ml/NV1rj4bMJuy9MCg==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [x64]
|
|
||||||
os: [android]
|
|
||||||
|
|
||||||
'@esbuild/darwin-arm64@0.27.7':
|
|
||||||
resolution: {integrity: sha512-5lckdqeuBPlKUwvoCXIgI2D9/ABmPq3Rdp7IfL70393YgaASt7tbju3Ac+ePVi3KDH6N2RqePfHnXkaDtY9fkw==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [arm64]
|
|
||||||
os: [darwin]
|
|
||||||
|
|
||||||
'@esbuild/darwin-x64@0.27.7':
|
|
||||||
resolution: {integrity: sha512-rYnXrKcXuT7Z+WL5K980jVFdvVKhCHhUwid+dDYQpH+qu+TefcomiMAJpIiC2EM3Rjtq0sO3StMV/+3w3MyyqQ==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [x64]
|
|
||||||
os: [darwin]
|
|
||||||
|
|
||||||
'@esbuild/freebsd-arm64@0.27.7':
|
|
||||||
resolution: {integrity: sha512-B48PqeCsEgOtzME2GbNM2roU29AMTuOIN91dsMO30t+Ydis3z/3Ngoj5hhnsOSSwNzS+6JppqWsuhTp6E82l2w==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [arm64]
|
|
||||||
os: [freebsd]
|
|
||||||
|
|
||||||
'@esbuild/freebsd-x64@0.27.7':
|
|
||||||
resolution: {integrity: sha512-jOBDK5XEjA4m5IJK3bpAQF9/Lelu/Z9ZcdhTRLf4cajlB+8VEhFFRjWgfy3M1O4rO2GQ/b2dLwCUGpiF/eATNQ==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [x64]
|
|
||||||
os: [freebsd]
|
|
||||||
|
|
||||||
'@esbuild/linux-arm64@0.27.7':
|
|
||||||
resolution: {integrity: sha512-RZPHBoxXuNnPQO9rvjh5jdkRmVizktkT7TCDkDmQ0W2SwHInKCAV95GRuvdSvA7w4VMwfCjUiPwDi0ZO6Nfe9A==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [arm64]
|
|
||||||
os: [linux]
|
|
||||||
|
|
||||||
'@esbuild/linux-arm@0.27.7':
|
|
||||||
resolution: {integrity: sha512-RkT/YXYBTSULo3+af8Ib0ykH8u2MBh57o7q/DAs3lTJlyVQkgQvlrPTnjIzzRPQyavxtPtfg0EopvDyIt0j1rA==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [arm]
|
|
||||||
os: [linux]
|
|
||||||
|
|
||||||
'@esbuild/linux-ia32@0.27.7':
|
|
||||||
resolution: {integrity: sha512-GA48aKNkyQDbd3KtkplYWT102C5sn/EZTY4XROkxONgruHPU72l+gW+FfF8tf2cFjeHaRbWpOYa/uRBz/Xq1Pg==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [ia32]
|
|
||||||
os: [linux]
|
|
||||||
|
|
||||||
'@esbuild/linux-loong64@0.27.7':
|
|
||||||
resolution: {integrity: sha512-a4POruNM2oWsD4WKvBSEKGIiWQF8fZOAsycHOt6JBpZ+JN2n2JH9WAv56SOyu9X5IqAjqSIPTaJkqN8F7XOQ5Q==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [loong64]
|
|
||||||
os: [linux]
|
|
||||||
|
|
||||||
'@esbuild/linux-mips64el@0.27.7':
|
|
||||||
resolution: {integrity: sha512-KabT5I6StirGfIz0FMgl1I+R1H73Gp0ofL9A3nG3i/cYFJzKHhouBV5VWK1CSgKvVaG4q1RNpCTR2LuTVB3fIw==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [mips64el]
|
|
||||||
os: [linux]
|
|
||||||
|
|
||||||
'@esbuild/linux-ppc64@0.27.7':
|
|
||||||
resolution: {integrity: sha512-gRsL4x6wsGHGRqhtI+ifpN/vpOFTQtnbsupUF5R5YTAg+y/lKelYR1hXbnBdzDjGbMYjVJLJTd2OFmMewAgwlQ==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [ppc64]
|
|
||||||
os: [linux]
|
|
||||||
|
|
||||||
'@esbuild/linux-riscv64@0.27.7':
|
|
||||||
resolution: {integrity: sha512-hL25LbxO1QOngGzu2U5xeXtxXcW+/GvMN3ejANqXkxZ/opySAZMrc+9LY/WyjAan41unrR3YrmtTsUpwT66InQ==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [riscv64]
|
|
||||||
os: [linux]
|
|
||||||
|
|
||||||
'@esbuild/linux-s390x@0.27.7':
|
|
||||||
resolution: {integrity: sha512-2k8go8Ycu1Kb46vEelhu1vqEP+UeRVj2zY1pSuPdgvbd5ykAw82Lrro28vXUrRmzEsUV0NzCf54yARIK8r0fdw==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [s390x]
|
|
||||||
os: [linux]
|
|
||||||
|
|
||||||
'@esbuild/linux-x64@0.27.7':
|
|
||||||
resolution: {integrity: sha512-hzznmADPt+OmsYzw1EE33ccA+HPdIqiCRq7cQeL1Jlq2gb1+OyWBkMCrYGBJ+sxVzve2ZJEVeePbLM2iEIZSxA==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [x64]
|
|
||||||
os: [linux]
|
|
||||||
|
|
||||||
'@esbuild/netbsd-arm64@0.27.7':
|
|
||||||
resolution: {integrity: sha512-b6pqtrQdigZBwZxAn1UpazEisvwaIDvdbMbmrly7cDTMFnw/+3lVxxCTGOrkPVnsYIosJJXAsILG9XcQS+Yu6w==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [arm64]
|
|
||||||
os: [netbsd]
|
|
||||||
|
|
||||||
'@esbuild/netbsd-x64@0.27.7':
|
|
||||||
resolution: {integrity: sha512-OfatkLojr6U+WN5EDYuoQhtM+1xco+/6FSzJJnuWiUw5eVcicbyK3dq5EeV/QHT1uy6GoDhGbFpprUiHUYggrw==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [x64]
|
|
||||||
os: [netbsd]
|
|
||||||
|
|
||||||
'@esbuild/openbsd-arm64@0.27.7':
|
|
||||||
resolution: {integrity: sha512-AFuojMQTxAz75Fo8idVcqoQWEHIXFRbOc1TrVcFSgCZtQfSdc1RXgB3tjOn/krRHENUB4j00bfGjyl2mJrU37A==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [arm64]
|
|
||||||
os: [openbsd]
|
|
||||||
|
|
||||||
'@esbuild/openbsd-x64@0.27.7':
|
|
||||||
resolution: {integrity: sha512-+A1NJmfM8WNDv5CLVQYJ5PshuRm/4cI6WMZRg1by1GwPIQPCTs1GLEUHwiiQGT5zDdyLiRM/l1G0Pv54gvtKIg==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [x64]
|
|
||||||
os: [openbsd]
|
|
||||||
|
|
||||||
'@esbuild/openharmony-arm64@0.27.7':
|
|
||||||
resolution: {integrity: sha512-+KrvYb/C8zA9CU/g0sR6w2RBw7IGc5J2BPnc3dYc5VJxHCSF1yNMxTV5LQ7GuKteQXZtspjFbiuW5/dOj7H4Yw==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [arm64]
|
|
||||||
os: [openharmony]
|
|
||||||
|
|
||||||
'@esbuild/sunos-x64@0.27.7':
|
|
||||||
resolution: {integrity: sha512-ikktIhFBzQNt/QDyOL580ti9+5mL/YZeUPKU2ivGtGjdTYoqz6jObj6nOMfhASpS4GU4Q/Clh1QtxWAvcYKamA==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [x64]
|
|
||||||
os: [sunos]
|
|
||||||
|
|
||||||
'@esbuild/win32-arm64@0.27.7':
|
|
||||||
resolution: {integrity: sha512-7yRhbHvPqSpRUV7Q20VuDwbjW5kIMwTHpptuUzV+AA46kiPze5Z7qgt6CLCK3pWFrHeNfDd1VKgyP4O+ng17CA==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [arm64]
|
|
||||||
os: [win32]
|
|
||||||
|
|
||||||
'@esbuild/win32-ia32@0.27.7':
|
|
||||||
resolution: {integrity: sha512-SmwKXe6VHIyZYbBLJrhOoCJRB/Z1tckzmgTLfFYOfpMAx63BJEaL9ExI8x7v0oAO3Zh6D/Oi1gVxEYr5oUCFhw==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [ia32]
|
|
||||||
os: [win32]
|
|
||||||
|
|
||||||
'@esbuild/win32-x64@0.27.7':
|
|
||||||
resolution: {integrity: sha512-56hiAJPhwQ1R4i+21FVF7V8kSD5zZTdHcVuRFMW0hn753vVfQN8xlx4uOPT4xoGH0Z/oVATuR82AiqSTDIpaHg==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [x64]
|
|
||||||
os: [win32]
|
|
||||||
|
|
||||||
'@types/node@22.19.17':
|
|
||||||
resolution: {integrity: sha512-wGdMcf+vPYM6jikpS/qhg6WiqSV/OhG+jeeHT/KlVqxYfD40iYJf9/AE1uQxVWFvU7MipKRkRv8NSHiCGgPr8Q==}
|
|
||||||
|
|
||||||
esbuild@0.27.7:
|
|
||||||
resolution: {integrity: sha512-IxpibTjyVnmrIQo5aqNpCgoACA/dTKLTlhMHihVHhdkxKyPO1uBBthumT0rdHmcsk9uMonIWS0m4FljWzILh3w==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
hasBin: true
|
|
||||||
|
|
||||||
typescript@5.9.3:
|
|
||||||
resolution: {integrity: sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw==}
|
|
||||||
engines: {node: '>=14.17'}
|
|
||||||
hasBin: true
|
|
||||||
|
|
||||||
undici-types@6.21.0:
|
|
||||||
resolution: {integrity: sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ==}
|
|
||||||
|
|
||||||
snapshots:
|
|
||||||
|
|
||||||
'@esbuild/aix-ppc64@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/android-arm64@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/android-arm@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/android-x64@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/darwin-arm64@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/darwin-x64@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/freebsd-arm64@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/freebsd-x64@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/linux-arm64@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/linux-arm@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/linux-ia32@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/linux-loong64@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/linux-mips64el@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/linux-ppc64@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/linux-riscv64@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/linux-s390x@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/linux-x64@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/netbsd-arm64@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/netbsd-x64@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/openbsd-arm64@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/openbsd-x64@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/openharmony-arm64@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/sunos-x64@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/win32-arm64@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/win32-ia32@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/win32-x64@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@types/node@22.19.17':
|
|
||||||
dependencies:
|
|
||||||
undici-types: 6.21.0
|
|
||||||
|
|
||||||
esbuild@0.27.7:
|
|
||||||
optionalDependencies:
|
|
||||||
'@esbuild/aix-ppc64': 0.27.7
|
|
||||||
'@esbuild/android-arm': 0.27.7
|
|
||||||
'@esbuild/android-arm64': 0.27.7
|
|
||||||
'@esbuild/android-x64': 0.27.7
|
|
||||||
'@esbuild/darwin-arm64': 0.27.7
|
|
||||||
'@esbuild/darwin-x64': 0.27.7
|
|
||||||
'@esbuild/freebsd-arm64': 0.27.7
|
|
||||||
'@esbuild/freebsd-x64': 0.27.7
|
|
||||||
'@esbuild/linux-arm': 0.27.7
|
|
||||||
'@esbuild/linux-arm64': 0.27.7
|
|
||||||
'@esbuild/linux-ia32': 0.27.7
|
|
||||||
'@esbuild/linux-loong64': 0.27.7
|
|
||||||
'@esbuild/linux-mips64el': 0.27.7
|
|
||||||
'@esbuild/linux-ppc64': 0.27.7
|
|
||||||
'@esbuild/linux-riscv64': 0.27.7
|
|
||||||
'@esbuild/linux-s390x': 0.27.7
|
|
||||||
'@esbuild/linux-x64': 0.27.7
|
|
||||||
'@esbuild/netbsd-arm64': 0.27.7
|
|
||||||
'@esbuild/netbsd-x64': 0.27.7
|
|
||||||
'@esbuild/openbsd-arm64': 0.27.7
|
|
||||||
'@esbuild/openbsd-x64': 0.27.7
|
|
||||||
'@esbuild/openharmony-arm64': 0.27.7
|
|
||||||
'@esbuild/sunos-x64': 0.27.7
|
|
||||||
'@esbuild/win32-arm64': 0.27.7
|
|
||||||
'@esbuild/win32-ia32': 0.27.7
|
|
||||||
'@esbuild/win32-x64': 0.27.7
|
|
||||||
|
|
||||||
typescript@5.9.3: {}
|
|
||||||
|
|
||||||
undici-types@6.21.0: {}
|
|
||||||
@ -1,96 +0,0 @@
|
|||||||
import { loadavg, totalmem, freemem, uptime } from "node:os";
|
|
||||||
import { execSync } from "node:child_process";
|
|
||||||
import { readFile } from "node:fs/promises";
|
|
||||||
import type { LibSQLDatabase } from "drizzle-orm/libsql";
|
|
||||||
import { snapshots } from "./schema.ts";
|
|
||||||
|
|
||||||
const SOCKSTAT_PATH = "/proc/net/sockstat";
|
|
||||||
|
|
||||||
interface SockstatResult {
|
|
||||||
socketsUsed: number;
|
|
||||||
tcpInuse: number;
|
|
||||||
tcpOrphan: number;
|
|
||||||
tcpTw: number;
|
|
||||||
tcpAlloc: number;
|
|
||||||
tcpMemPages: number;
|
|
||||||
}
|
|
||||||
|
|
||||||
function parseSockstat(content: string): SockstatResult {
|
|
||||||
let socketsUsed = 0, tcpInuse = 0, tcpOrphan = 0, tcpTw = 0, tcpAlloc = 0, tcpMemPages = 0;
|
|
||||||
|
|
||||||
for (const line of content.split("\n")) {
|
|
||||||
const trimmed = line.trim();
|
|
||||||
if (trimmed.startsWith("sockets:")) {
|
|
||||||
const parts = trimmed.split(/\s+/);
|
|
||||||
const idx = parts.indexOf("used");
|
|
||||||
if (idx !== -1 && idx + 1 < parts.length) {
|
|
||||||
socketsUsed = Number.parseInt(parts[idx + 1], 10) || 0;
|
|
||||||
}
|
|
||||||
} else if (trimmed.startsWith("TCP:")) {
|
|
||||||
const parts = trimmed.split(/\s+/);
|
|
||||||
const map: Record<string, number> = {};
|
|
||||||
for (let i = 1; i + 1 < parts.length; i += 2) {
|
|
||||||
map[parts[i]] = Number.parseInt(parts[i + 1], 10) || 0;
|
|
||||||
}
|
|
||||||
tcpInuse = map.inuse ?? 0;
|
|
||||||
tcpOrphan = map.orphan ?? 0;
|
|
||||||
tcpTw = map.tw ?? 0;
|
|
||||||
tcpAlloc = map.alloc ?? 0;
|
|
||||||
tcpMemPages = map.mem ?? 0;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return { socketsUsed, tcpInuse, tcpOrphan, tcpTw, tcpAlloc, tcpMemPages };
|
|
||||||
}
|
|
||||||
|
|
||||||
export async function compute(db: LibSQLDatabase, _peers: unknown) {
|
|
||||||
const [load1, load5, load15] = loadavg();
|
|
||||||
|
|
||||||
const memTotal = totalmem();
|
|
||||||
const memFree = freemem();
|
|
||||||
const memUsed = memTotal - memFree;
|
|
||||||
const memTotalMB = Math.round(memTotal / 1024 / 1024);
|
|
||||||
const memUsedMB = Math.round(memUsed / 1024 / 1024);
|
|
||||||
const memUsedPct = Math.round((memUsed / memTotal) * 10000) / 100;
|
|
||||||
|
|
||||||
let diskTotalGB = 0, diskUsedGB = 0, diskUsedPct = 0;
|
|
||||||
try {
|
|
||||||
const df = execSync("df -B1 / | tail -1", { encoding: "utf-8" }).trim();
|
|
||||||
const parts = df.split(/\s+/);
|
|
||||||
const total = Number(parts[1]);
|
|
||||||
const used = Number(parts[2]);
|
|
||||||
diskTotalGB = Math.round(total / 1024 / 1024 / 1024 * 100) / 100;
|
|
||||||
diskUsedGB = Math.round(used / 1024 / 1024 / 1024 * 100) / 100;
|
|
||||||
diskUsedPct = total > 0 ? Math.round((used / total) * 10000) / 100 : 0;
|
|
||||||
} catch {}
|
|
||||||
|
|
||||||
let tcp: SockstatResult = { socketsUsed: 0, tcpInuse: 0, tcpOrphan: 0, tcpTw: 0, tcpAlloc: 0, tcpMemPages: 0 };
|
|
||||||
try {
|
|
||||||
const content = await readFile(SOCKSTAT_PATH, "utf8");
|
|
||||||
tcp = parseSockstat(content);
|
|
||||||
} catch {}
|
|
||||||
|
|
||||||
const ts = Date.now();
|
|
||||||
const uptimeSec = Math.round(uptime());
|
|
||||||
|
|
||||||
await db.insert(snapshots).values({
|
|
||||||
ts, cpuLoad1m: load1, cpuLoad5m: load5, cpuLoad15m: load15,
|
|
||||||
memTotalMB, memUsedMB, memUsedPct,
|
|
||||||
diskTotalGB, diskUsedGB, diskUsedPct,
|
|
||||||
uptimeSec,
|
|
||||||
socketsUsed: tcp.socketsUsed,
|
|
||||||
tcpInuse: tcp.tcpInuse,
|
|
||||||
tcpOrphan: tcp.tcpOrphan,
|
|
||||||
tcpTw: tcp.tcpTw,
|
|
||||||
tcpAlloc: tcp.tcpAlloc,
|
|
||||||
tcpMemPages: tcp.tcpMemPages,
|
|
||||||
});
|
|
||||||
|
|
||||||
return {
|
|
||||||
cpu: { load1m: load1, load5m: load5, load15m: load15 },
|
|
||||||
memory: { totalMB: memTotalMB, usedMB: memUsedMB, usedPct: memUsedPct },
|
|
||||||
disk: { totalGB: diskTotalGB, usedGB: diskUsedGB, usedPct: diskUsedPct },
|
|
||||||
tcp: { socketsUsed: tcp.socketsUsed, inuse: tcp.tcpInuse, orphan: tcp.tcpOrphan, tw: tcp.tcpTw, alloc: tcp.tcpAlloc, memPages: tcp.tcpMemPages },
|
|
||||||
uptimeSec,
|
|
||||||
};
|
|
||||||
}
|
|
||||||
@ -1,22 +0,0 @@
|
|||||||
import { integer, real, sqliteTable, text } from "drizzle-orm/sqlite-core";
|
|
||||||
|
|
||||||
export const snapshots = sqliteTable("snapshots", {
|
|
||||||
ts: integer("ts").primaryKey(),
|
|
||||||
cpuLoad1m: real("cpu_load_1m").notNull(),
|
|
||||||
cpuLoad5m: real("cpu_load_5m").notNull(),
|
|
||||||
cpuLoad15m: real("cpu_load_15m").notNull(),
|
|
||||||
memTotalMB: integer("mem_total_mb").notNull(),
|
|
||||||
memUsedMB: integer("mem_used_mb").notNull(),
|
|
||||||
memUsedPct: real("mem_used_pct").notNull(),
|
|
||||||
diskTotalGB: real("disk_total_gb").notNull(),
|
|
||||||
diskUsedGB: real("disk_used_gb").notNull(),
|
|
||||||
diskUsedPct: real("disk_used_pct").notNull(),
|
|
||||||
uptimeSec: integer("uptime_sec").notNull(),
|
|
||||||
// TCP socket stats (merged from linux-tcp-socket-stats)
|
|
||||||
socketsUsed: integer("sockets_used"),
|
|
||||||
tcpInuse: integer("tcp_inuse"),
|
|
||||||
tcpOrphan: integer("tcp_orphan"),
|
|
||||||
tcpTw: integer("tcp_tw"),
|
|
||||||
tcpAlloc: integer("tcp_alloc"),
|
|
||||||
tcpMemPages: integer("tcp_mem_pages"),
|
|
||||||
});
|
|
||||||
@ -1,44 +0,0 @@
|
|||||||
// src/schema.ts
|
|
||||||
import { integer, real, sqliteTable } from "drizzle-orm/sqlite-core";
|
|
||||||
var workerProcessMetrics = sqliteTable("worker_process_metrics", {
|
|
||||||
ts: integer("ts").primaryKey(),
|
|
||||||
pid: integer("pid").notNull(),
|
|
||||||
uptimeSec: real("uptime_sec").notNull(),
|
|
||||||
heapUsedMB: real("heap_used_mb").notNull(),
|
|
||||||
rssMB: real("rss_mb").notNull(),
|
|
||||||
externalMB: real("external_mb").notNull()
|
|
||||||
});
|
|
||||||
|
|
||||||
// src/index.ts
|
|
||||||
function round2(n) {
|
|
||||||
return Math.round(n * 100) / 100;
|
|
||||||
}
|
|
||||||
async function compute(db, _peers) {
|
|
||||||
const ts = Date.now();
|
|
||||||
const pid = process.pid;
|
|
||||||
const uptimeSec = process.uptime();
|
|
||||||
const m = process.memoryUsage();
|
|
||||||
const heapUsedMB = round2(m.heapUsed / 1024 / 1024);
|
|
||||||
const rssMB = round2(m.rss / 1024 / 1024);
|
|
||||||
const externalMB = round2(m.external / 1024 / 1024);
|
|
||||||
const row = {
|
|
||||||
ts,
|
|
||||||
pid,
|
|
||||||
uptimeSec,
|
|
||||||
heapUsedMB,
|
|
||||||
rssMB,
|
|
||||||
externalMB
|
|
||||||
};
|
|
||||||
await db.insert(workerProcessMetrics).values(row);
|
|
||||||
return {
|
|
||||||
ts: row.ts,
|
|
||||||
pid: row.pid,
|
|
||||||
uptimeSec: row.uptimeSec,
|
|
||||||
heapUsedMB: row.heapUsedMB,
|
|
||||||
rssMB: row.rssMB,
|
|
||||||
externalMB: row.externalMB
|
|
||||||
};
|
|
||||||
}
|
|
||||||
export {
|
|
||||||
compute
|
|
||||||
};
|
|
||||||
@ -1,11 +0,0 @@
|
|||||||
-- Migration: 0001_init
|
|
||||||
-- Creates the worker_process_metrics table for worker-process-metrics sense.
|
|
||||||
|
|
||||||
CREATE TABLE IF NOT EXISTS worker_process_metrics (
|
|
||||||
ts INTEGER PRIMARY KEY,
|
|
||||||
pid INTEGER NOT NULL,
|
|
||||||
uptime_sec REAL NOT NULL,
|
|
||||||
heap_used_mb REAL NOT NULL,
|
|
||||||
rss_mb REAL NOT NULL,
|
|
||||||
external_mb REAL NOT NULL
|
|
||||||
);
|
|
||||||
@ -1,17 +0,0 @@
|
|||||||
{
|
|
||||||
"name": "sense-worker-process-metrics",
|
|
||||||
"version": "0.0.1",
|
|
||||||
"private": true,
|
|
||||||
"type": "module",
|
|
||||||
"scripts": {
|
|
||||||
"build": "esbuild src/index.ts --bundle --platform=node --format=esm --outfile=index.js --packages=external"
|
|
||||||
},
|
|
||||||
"devDependencies": {
|
|
||||||
"@types/node": "^22.0.0",
|
|
||||||
"esbuild": "^0.27.0",
|
|
||||||
"typescript": "^5.7.0"
|
|
||||||
},
|
|
||||||
"pnpm": {
|
|
||||||
"onlyBuiltDependencies": ["esbuild"]
|
|
||||||
}
|
|
||||||
}
|
|
||||||
310
senses/worker-process-metrics/pnpm-lock.yaml
generated
310
senses/worker-process-metrics/pnpm-lock.yaml
generated
@ -1,310 +0,0 @@
|
|||||||
lockfileVersion: '9.0'
|
|
||||||
|
|
||||||
settings:
|
|
||||||
autoInstallPeers: true
|
|
||||||
excludeLinksFromLockfile: false
|
|
||||||
|
|
||||||
importers:
|
|
||||||
|
|
||||||
.:
|
|
||||||
devDependencies:
|
|
||||||
'@types/node':
|
|
||||||
specifier: ^22.0.0
|
|
||||||
version: 22.19.17
|
|
||||||
esbuild:
|
|
||||||
specifier: ^0.27.0
|
|
||||||
version: 0.27.7
|
|
||||||
typescript:
|
|
||||||
specifier: ^5.7.0
|
|
||||||
version: 5.9.3
|
|
||||||
|
|
||||||
packages:
|
|
||||||
|
|
||||||
'@esbuild/aix-ppc64@0.27.7':
|
|
||||||
resolution: {integrity: sha512-EKX3Qwmhz1eMdEJokhALr0YiD0lhQNwDqkPYyPhiSwKrh7/4KRjQc04sZ8db+5DVVnZ1LmbNDI1uAMPEUBnQPg==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [ppc64]
|
|
||||||
os: [aix]
|
|
||||||
|
|
||||||
'@esbuild/android-arm64@0.27.7':
|
|
||||||
resolution: {integrity: sha512-62dPZHpIXzvChfvfLJow3q5dDtiNMkwiRzPylSCfriLvZeq0a1bWChrGx/BbUbPwOrsWKMn8idSllklzBy+dgQ==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [arm64]
|
|
||||||
os: [android]
|
|
||||||
|
|
||||||
'@esbuild/android-arm@0.27.7':
|
|
||||||
resolution: {integrity: sha512-jbPXvB4Yj2yBV7HUfE2KHe4GJX51QplCN1pGbYjvsyCZbQmies29EoJbkEc+vYuU5o45AfQn37vZlyXy4YJ8RQ==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [arm]
|
|
||||||
os: [android]
|
|
||||||
|
|
||||||
'@esbuild/android-x64@0.27.7':
|
|
||||||
resolution: {integrity: sha512-x5VpMODneVDb70PYV2VQOmIUUiBtY3D3mPBG8NxVk5CogneYhkR7MmM3yR/uMdITLrC1ml/NV1rj4bMJuy9MCg==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [x64]
|
|
||||||
os: [android]
|
|
||||||
|
|
||||||
'@esbuild/darwin-arm64@0.27.7':
|
|
||||||
resolution: {integrity: sha512-5lckdqeuBPlKUwvoCXIgI2D9/ABmPq3Rdp7IfL70393YgaASt7tbju3Ac+ePVi3KDH6N2RqePfHnXkaDtY9fkw==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [arm64]
|
|
||||||
os: [darwin]
|
|
||||||
|
|
||||||
'@esbuild/darwin-x64@0.27.7':
|
|
||||||
resolution: {integrity: sha512-rYnXrKcXuT7Z+WL5K980jVFdvVKhCHhUwid+dDYQpH+qu+TefcomiMAJpIiC2EM3Rjtq0sO3StMV/+3w3MyyqQ==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [x64]
|
|
||||||
os: [darwin]
|
|
||||||
|
|
||||||
'@esbuild/freebsd-arm64@0.27.7':
|
|
||||||
resolution: {integrity: sha512-B48PqeCsEgOtzME2GbNM2roU29AMTuOIN91dsMO30t+Ydis3z/3Ngoj5hhnsOSSwNzS+6JppqWsuhTp6E82l2w==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [arm64]
|
|
||||||
os: [freebsd]
|
|
||||||
|
|
||||||
'@esbuild/freebsd-x64@0.27.7':
|
|
||||||
resolution: {integrity: sha512-jOBDK5XEjA4m5IJK3bpAQF9/Lelu/Z9ZcdhTRLf4cajlB+8VEhFFRjWgfy3M1O4rO2GQ/b2dLwCUGpiF/eATNQ==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [x64]
|
|
||||||
os: [freebsd]
|
|
||||||
|
|
||||||
'@esbuild/linux-arm64@0.27.7':
|
|
||||||
resolution: {integrity: sha512-RZPHBoxXuNnPQO9rvjh5jdkRmVizktkT7TCDkDmQ0W2SwHInKCAV95GRuvdSvA7w4VMwfCjUiPwDi0ZO6Nfe9A==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [arm64]
|
|
||||||
os: [linux]
|
|
||||||
|
|
||||||
'@esbuild/linux-arm@0.27.7':
|
|
||||||
resolution: {integrity: sha512-RkT/YXYBTSULo3+af8Ib0ykH8u2MBh57o7q/DAs3lTJlyVQkgQvlrPTnjIzzRPQyavxtPtfg0EopvDyIt0j1rA==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [arm]
|
|
||||||
os: [linux]
|
|
||||||
|
|
||||||
'@esbuild/linux-ia32@0.27.7':
|
|
||||||
resolution: {integrity: sha512-GA48aKNkyQDbd3KtkplYWT102C5sn/EZTY4XROkxONgruHPU72l+gW+FfF8tf2cFjeHaRbWpOYa/uRBz/Xq1Pg==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [ia32]
|
|
||||||
os: [linux]
|
|
||||||
|
|
||||||
'@esbuild/linux-loong64@0.27.7':
|
|
||||||
resolution: {integrity: sha512-a4POruNM2oWsD4WKvBSEKGIiWQF8fZOAsycHOt6JBpZ+JN2n2JH9WAv56SOyu9X5IqAjqSIPTaJkqN8F7XOQ5Q==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [loong64]
|
|
||||||
os: [linux]
|
|
||||||
|
|
||||||
'@esbuild/linux-mips64el@0.27.7':
|
|
||||||
resolution: {integrity: sha512-KabT5I6StirGfIz0FMgl1I+R1H73Gp0ofL9A3nG3i/cYFJzKHhouBV5VWK1CSgKvVaG4q1RNpCTR2LuTVB3fIw==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [mips64el]
|
|
||||||
os: [linux]
|
|
||||||
|
|
||||||
'@esbuild/linux-ppc64@0.27.7':
|
|
||||||
resolution: {integrity: sha512-gRsL4x6wsGHGRqhtI+ifpN/vpOFTQtnbsupUF5R5YTAg+y/lKelYR1hXbnBdzDjGbMYjVJLJTd2OFmMewAgwlQ==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [ppc64]
|
|
||||||
os: [linux]
|
|
||||||
|
|
||||||
'@esbuild/linux-riscv64@0.27.7':
|
|
||||||
resolution: {integrity: sha512-hL25LbxO1QOngGzu2U5xeXtxXcW+/GvMN3ejANqXkxZ/opySAZMrc+9LY/WyjAan41unrR3YrmtTsUpwT66InQ==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [riscv64]
|
|
||||||
os: [linux]
|
|
||||||
|
|
||||||
'@esbuild/linux-s390x@0.27.7':
|
|
||||||
resolution: {integrity: sha512-2k8go8Ycu1Kb46vEelhu1vqEP+UeRVj2zY1pSuPdgvbd5ykAw82Lrro28vXUrRmzEsUV0NzCf54yARIK8r0fdw==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [s390x]
|
|
||||||
os: [linux]
|
|
||||||
|
|
||||||
'@esbuild/linux-x64@0.27.7':
|
|
||||||
resolution: {integrity: sha512-hzznmADPt+OmsYzw1EE33ccA+HPdIqiCRq7cQeL1Jlq2gb1+OyWBkMCrYGBJ+sxVzve2ZJEVeePbLM2iEIZSxA==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [x64]
|
|
||||||
os: [linux]
|
|
||||||
|
|
||||||
'@esbuild/netbsd-arm64@0.27.7':
|
|
||||||
resolution: {integrity: sha512-b6pqtrQdigZBwZxAn1UpazEisvwaIDvdbMbmrly7cDTMFnw/+3lVxxCTGOrkPVnsYIosJJXAsILG9XcQS+Yu6w==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [arm64]
|
|
||||||
os: [netbsd]
|
|
||||||
|
|
||||||
'@esbuild/netbsd-x64@0.27.7':
|
|
||||||
resolution: {integrity: sha512-OfatkLojr6U+WN5EDYuoQhtM+1xco+/6FSzJJnuWiUw5eVcicbyK3dq5EeV/QHT1uy6GoDhGbFpprUiHUYggrw==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [x64]
|
|
||||||
os: [netbsd]
|
|
||||||
|
|
||||||
'@esbuild/openbsd-arm64@0.27.7':
|
|
||||||
resolution: {integrity: sha512-AFuojMQTxAz75Fo8idVcqoQWEHIXFRbOc1TrVcFSgCZtQfSdc1RXgB3tjOn/krRHENUB4j00bfGjyl2mJrU37A==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [arm64]
|
|
||||||
os: [openbsd]
|
|
||||||
|
|
||||||
'@esbuild/openbsd-x64@0.27.7':
|
|
||||||
resolution: {integrity: sha512-+A1NJmfM8WNDv5CLVQYJ5PshuRm/4cI6WMZRg1by1GwPIQPCTs1GLEUHwiiQGT5zDdyLiRM/l1G0Pv54gvtKIg==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [x64]
|
|
||||||
os: [openbsd]
|
|
||||||
|
|
||||||
'@esbuild/openharmony-arm64@0.27.7':
|
|
||||||
resolution: {integrity: sha512-+KrvYb/C8zA9CU/g0sR6w2RBw7IGc5J2BPnc3dYc5VJxHCSF1yNMxTV5LQ7GuKteQXZtspjFbiuW5/dOj7H4Yw==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [arm64]
|
|
||||||
os: [openharmony]
|
|
||||||
|
|
||||||
'@esbuild/sunos-x64@0.27.7':
|
|
||||||
resolution: {integrity: sha512-ikktIhFBzQNt/QDyOL580ti9+5mL/YZeUPKU2ivGtGjdTYoqz6jObj6nOMfhASpS4GU4Q/Clh1QtxWAvcYKamA==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [x64]
|
|
||||||
os: [sunos]
|
|
||||||
|
|
||||||
'@esbuild/win32-arm64@0.27.7':
|
|
||||||
resolution: {integrity: sha512-7yRhbHvPqSpRUV7Q20VuDwbjW5kIMwTHpptuUzV+AA46kiPze5Z7qgt6CLCK3pWFrHeNfDd1VKgyP4O+ng17CA==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [arm64]
|
|
||||||
os: [win32]
|
|
||||||
|
|
||||||
'@esbuild/win32-ia32@0.27.7':
|
|
||||||
resolution: {integrity: sha512-SmwKXe6VHIyZYbBLJrhOoCJRB/Z1tckzmgTLfFYOfpMAx63BJEaL9ExI8x7v0oAO3Zh6D/Oi1gVxEYr5oUCFhw==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [ia32]
|
|
||||||
os: [win32]
|
|
||||||
|
|
||||||
'@esbuild/win32-x64@0.27.7':
|
|
||||||
resolution: {integrity: sha512-56hiAJPhwQ1R4i+21FVF7V8kSD5zZTdHcVuRFMW0hn753vVfQN8xlx4uOPT4xoGH0Z/oVATuR82AiqSTDIpaHg==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [x64]
|
|
||||||
os: [win32]
|
|
||||||
|
|
||||||
'@types/node@22.19.17':
|
|
||||||
resolution: {integrity: sha512-wGdMcf+vPYM6jikpS/qhg6WiqSV/OhG+jeeHT/KlVqxYfD40iYJf9/AE1uQxVWFvU7MipKRkRv8NSHiCGgPr8Q==}
|
|
||||||
|
|
||||||
esbuild@0.27.7:
|
|
||||||
resolution: {integrity: sha512-IxpibTjyVnmrIQo5aqNpCgoACA/dTKLTlhMHihVHhdkxKyPO1uBBthumT0rdHmcsk9uMonIWS0m4FljWzILh3w==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
hasBin: true
|
|
||||||
|
|
||||||
typescript@5.9.3:
|
|
||||||
resolution: {integrity: sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw==}
|
|
||||||
engines: {node: '>=14.17'}
|
|
||||||
hasBin: true
|
|
||||||
|
|
||||||
undici-types@6.21.0:
|
|
||||||
resolution: {integrity: sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ==}
|
|
||||||
|
|
||||||
snapshots:
|
|
||||||
|
|
||||||
'@esbuild/aix-ppc64@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/android-arm64@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/android-arm@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/android-x64@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/darwin-arm64@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/darwin-x64@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/freebsd-arm64@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/freebsd-x64@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/linux-arm64@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/linux-arm@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/linux-ia32@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/linux-loong64@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/linux-mips64el@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/linux-ppc64@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/linux-riscv64@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/linux-s390x@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/linux-x64@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/netbsd-arm64@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/netbsd-x64@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/openbsd-arm64@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/openbsd-x64@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/openharmony-arm64@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/sunos-x64@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/win32-arm64@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/win32-ia32@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/win32-x64@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@types/node@22.19.17':
|
|
||||||
dependencies:
|
|
||||||
undici-types: 6.21.0
|
|
||||||
|
|
||||||
esbuild@0.27.7:
|
|
||||||
optionalDependencies:
|
|
||||||
'@esbuild/aix-ppc64': 0.27.7
|
|
||||||
'@esbuild/android-arm': 0.27.7
|
|
||||||
'@esbuild/android-arm64': 0.27.7
|
|
||||||
'@esbuild/android-x64': 0.27.7
|
|
||||||
'@esbuild/darwin-arm64': 0.27.7
|
|
||||||
'@esbuild/darwin-x64': 0.27.7
|
|
||||||
'@esbuild/freebsd-arm64': 0.27.7
|
|
||||||
'@esbuild/freebsd-x64': 0.27.7
|
|
||||||
'@esbuild/linux-arm': 0.27.7
|
|
||||||
'@esbuild/linux-arm64': 0.27.7
|
|
||||||
'@esbuild/linux-ia32': 0.27.7
|
|
||||||
'@esbuild/linux-loong64': 0.27.7
|
|
||||||
'@esbuild/linux-mips64el': 0.27.7
|
|
||||||
'@esbuild/linux-ppc64': 0.27.7
|
|
||||||
'@esbuild/linux-riscv64': 0.27.7
|
|
||||||
'@esbuild/linux-s390x': 0.27.7
|
|
||||||
'@esbuild/linux-x64': 0.27.7
|
|
||||||
'@esbuild/netbsd-arm64': 0.27.7
|
|
||||||
'@esbuild/netbsd-x64': 0.27.7
|
|
||||||
'@esbuild/openbsd-arm64': 0.27.7
|
|
||||||
'@esbuild/openbsd-x64': 0.27.7
|
|
||||||
'@esbuild/openharmony-arm64': 0.27.7
|
|
||||||
'@esbuild/sunos-x64': 0.27.7
|
|
||||||
'@esbuild/win32-arm64': 0.27.7
|
|
||||||
'@esbuild/win32-ia32': 0.27.7
|
|
||||||
'@esbuild/win32-x64': 0.27.7
|
|
||||||
|
|
||||||
typescript@5.9.3: {}
|
|
||||||
|
|
||||||
undici-types@6.21.0: {}
|
|
||||||
@ -1,36 +0,0 @@
|
|||||||
import type { LibSQLDatabase } from "drizzle-orm/libsql";
|
|
||||||
import { workerProcessMetrics } from "./schema.ts";
|
|
||||||
|
|
||||||
function round2(n: number): number {
|
|
||||||
return Math.round(n * 100) / 100;
|
|
||||||
}
|
|
||||||
|
|
||||||
export async function compute(db: LibSQLDatabase, _peers: unknown) {
|
|
||||||
const ts = Date.now();
|
|
||||||
const pid = process.pid;
|
|
||||||
const uptimeSec = process.uptime();
|
|
||||||
const m = process.memoryUsage();
|
|
||||||
const heapUsedMB = round2(m.heapUsed / 1024 / 1024);
|
|
||||||
const rssMB = round2(m.rss / 1024 / 1024);
|
|
||||||
const externalMB = round2(m.external / 1024 / 1024);
|
|
||||||
|
|
||||||
const row = {
|
|
||||||
ts,
|
|
||||||
pid,
|
|
||||||
uptimeSec,
|
|
||||||
heapUsedMB,
|
|
||||||
rssMB,
|
|
||||||
externalMB,
|
|
||||||
};
|
|
||||||
|
|
||||||
await db.insert(workerProcessMetrics).values(row);
|
|
||||||
|
|
||||||
return {
|
|
||||||
ts: row.ts,
|
|
||||||
pid: row.pid,
|
|
||||||
uptimeSec: row.uptimeSec,
|
|
||||||
heapUsedMB: row.heapUsedMB,
|
|
||||||
rssMB: row.rssMB,
|
|
||||||
externalMB: row.externalMB,
|
|
||||||
};
|
|
||||||
}
|
|
||||||
@ -1,10 +0,0 @@
|
|||||||
import { integer, real, sqliteTable } from "drizzle-orm/sqlite-core";
|
|
||||||
|
|
||||||
export const workerProcessMetrics = sqliteTable("worker_process_metrics", {
|
|
||||||
ts: integer("ts").primaryKey(),
|
|
||||||
pid: integer("pid").notNull(),
|
|
||||||
uptimeSec: real("uptime_sec").notNull(),
|
|
||||||
heapUsedMB: real("heap_used_mb").notNull(),
|
|
||||||
rssMB: real("rss_mb").notNull(),
|
|
||||||
externalMB: real("external_mb").notNull(),
|
|
||||||
});
|
|
||||||
@ -7,7 +7,13 @@
|
|||||||
"strict": true,
|
"strict": true,
|
||||||
"skipLibCheck": true,
|
"skipLibCheck": true,
|
||||||
"noEmit": true,
|
"noEmit": true,
|
||||||
|
"allowImportingTsExtensions": true,
|
||||||
"types": ["node"]
|
"types": ["node"]
|
||||||
},
|
},
|
||||||
"include": ["./**/*.ts"]
|
"include": [
|
||||||
|
"senses/**/*.ts",
|
||||||
|
"workflows/**/*.ts",
|
||||||
|
"scripts/**/*.ts",
|
||||||
|
"workflows/_shared/**/*.ts"
|
||||||
|
]
|
||||||
}
|
}
|
||||||
33
workflows/develop-sense/index.ts
Normal file
33
workflows/develop-sense/index.ts
Normal file
@ -0,0 +1,33 @@
|
|||||||
|
import { join } from "node:path";
|
||||||
|
import { createCursorAdapter, cursorAdapter } from "@uncaged/nerve-adapter-cursor";
|
||||||
|
import { hermesAdapter } from "@uncaged/nerve-adapter-hermes";
|
||||||
|
import { createDevelopSenseWorkflow } from "@uncaged/nerve-workflow-meta";
|
||||||
|
|
||||||
|
const HOME = process.env.HOME ?? "/home/azureuser";
|
||||||
|
const NERVE_ROOT = join(HOME, ".uncaged-nerve");
|
||||||
|
|
||||||
|
const apiKey = process.env.DASHSCOPE_API_KEY;
|
||||||
|
const baseUrl = process.env.DASHSCOPE_BASE_URL;
|
||||||
|
const model = process.env.DASHSCOPE_MODEL ?? "qwen-plus";
|
||||||
|
if (!apiKey || !baseUrl) {
|
||||||
|
throw new Error("Set DASHSCOPE_API_KEY and DASHSCOPE_BASE_URL");
|
||||||
|
}
|
||||||
|
|
||||||
|
const CURSOR_TIMEOUT_MS = 300_000;
|
||||||
|
|
||||||
|
const workflow = createDevelopSenseWorkflow({
|
||||||
|
defaultAdapter: hermesAdapter,
|
||||||
|
adapters: {
|
||||||
|
planner: createCursorAdapter({
|
||||||
|
type: "cursor",
|
||||||
|
mode: "ask",
|
||||||
|
model: "auto",
|
||||||
|
timeout: CURSOR_TIMEOUT_MS,
|
||||||
|
}),
|
||||||
|
coder: cursorAdapter,
|
||||||
|
},
|
||||||
|
extract: { provider: { apiKey, baseUrl, model } },
|
||||||
|
cwd: NERVE_ROOT,
|
||||||
|
});
|
||||||
|
|
||||||
|
export default workflow;
|
||||||
34
workflows/develop-workflow/index.ts
Normal file
34
workflows/develop-workflow/index.ts
Normal file
@ -0,0 +1,34 @@
|
|||||||
|
import { join } from "node:path";
|
||||||
|
import { createCursorAdapter, cursorAdapter } from "@uncaged/nerve-adapter-cursor";
|
||||||
|
import { hermesAdapter } from "@uncaged/nerve-adapter-hermes";
|
||||||
|
import { createDevelopWorkflowWorkflow } from "@uncaged/nerve-workflow-meta";
|
||||||
|
|
||||||
|
const HOME = process.env.HOME ?? "/home/azureuser";
|
||||||
|
const NERVE_ROOT = join(HOME, ".uncaged-nerve");
|
||||||
|
|
||||||
|
const apiKey = process.env.DASHSCOPE_API_KEY;
|
||||||
|
const baseUrl = process.env.DASHSCOPE_BASE_URL;
|
||||||
|
const model = process.env.DASHSCOPE_MODEL ?? "qwen-plus";
|
||||||
|
|
||||||
|
if (!apiKey || !baseUrl) {
|
||||||
|
throw new Error("Set DASHSCOPE_API_KEY and DASHSCOPE_BASE_URL");
|
||||||
|
}
|
||||||
|
|
||||||
|
const CURSOR_TIMEOUT_MS = 300_000;
|
||||||
|
|
||||||
|
const workflow = createDevelopWorkflowWorkflow({
|
||||||
|
defaultAdapter: hermesAdapter,
|
||||||
|
adapters: {
|
||||||
|
planner: createCursorAdapter({
|
||||||
|
type: "cursor",
|
||||||
|
mode: "ask",
|
||||||
|
model: "auto",
|
||||||
|
timeout: CURSOR_TIMEOUT_MS,
|
||||||
|
}),
|
||||||
|
coder: cursorAdapter,
|
||||||
|
},
|
||||||
|
extract: { provider: { apiKey, baseUrl, model } },
|
||||||
|
nerveRoot: NERVE_ROOT,
|
||||||
|
});
|
||||||
|
|
||||||
|
export default workflow;
|
||||||
33
workflows/extract-knowledge/build.ts
Normal file
33
workflows/extract-knowledge/build.ts
Normal file
@ -0,0 +1,33 @@
|
|||||||
|
import type { AgentFn, WorkflowDefinition } from "@uncaged/nerve-core";
|
||||||
|
import type { LlmExtractorConfig } from "@uncaged/nerve-workflow-utils";
|
||||||
|
import { createLlmAdapter } from "@uncaged/nerve-workflow-utils";
|
||||||
|
|
||||||
|
import { moderator } from "./moderator.js";
|
||||||
|
import type { WorkflowMeta } from "./moderator.js";
|
||||||
|
import { createAnswererRole } from "./roles/answerer.js";
|
||||||
|
import { createExplorerRole } from "./roles/explorer.js";
|
||||||
|
import { createQuestionerRole } from "./roles/questioner.js";
|
||||||
|
|
||||||
|
export type CreateKnowledgeExtractionDeps = {
|
||||||
|
defaultAdapter: AgentFn;
|
||||||
|
adapters?: Partial<Record<keyof WorkflowMeta, AgentFn>>;
|
||||||
|
extract: LlmExtractorConfig;
|
||||||
|
};
|
||||||
|
|
||||||
|
export function createKnowledgeExtractionWorkflow({
|
||||||
|
defaultAdapter,
|
||||||
|
adapters,
|
||||||
|
extract,
|
||||||
|
}: CreateKnowledgeExtractionDeps): WorkflowDefinition<WorkflowMeta> {
|
||||||
|
const a = (role: keyof WorkflowMeta) => adapters?.[role] ?? defaultAdapter;
|
||||||
|
const llmAdapter = createLlmAdapter(extract.provider);
|
||||||
|
return {
|
||||||
|
name: "extract-knowledge",
|
||||||
|
roles: {
|
||||||
|
questioner: createQuestionerRole(adapters?.questioner ?? llmAdapter, { extract }),
|
||||||
|
answerer: createAnswererRole(adapters?.answerer ?? llmAdapter, { extract }),
|
||||||
|
explorer: createExplorerRole(a("explorer"), { extract }),
|
||||||
|
},
|
||||||
|
moderator,
|
||||||
|
};
|
||||||
|
}
|
||||||
30
workflows/extract-knowledge/index.ts
Normal file
30
workflows/extract-knowledge/index.ts
Normal file
@ -0,0 +1,30 @@
|
|||||||
|
import { join } from "node:path";
|
||||||
|
import { createCursorAdapter } from "@uncaged/nerve-adapter-cursor";
|
||||||
|
import { hermesAdapter } from "@uncaged/nerve-adapter-hermes";
|
||||||
|
import { createKnowledgeExtractionWorkflow } from "./build.js";
|
||||||
|
import { resolveDashScopeProvider } from "../solve-issue/lib/provider.js";
|
||||||
|
|
||||||
|
const HOME = process.env.HOME ?? "/home/azureuser";
|
||||||
|
const NERVE_ROOT = join(HOME, ".uncaged-nerve");
|
||||||
|
|
||||||
|
const provider = await resolveDashScopeProvider(NERVE_ROOT);
|
||||||
|
|
||||||
|
if (provider === null) {
|
||||||
|
throw new Error("Set DASHSCOPE_API_KEY and DASHSCOPE_BASE_URL (or cfg get equivalents)");
|
||||||
|
}
|
||||||
|
|
||||||
|
const CURSOR_TIMEOUT_MS = 300_000;
|
||||||
|
|
||||||
|
const workflow = createKnowledgeExtractionWorkflow({
|
||||||
|
defaultAdapter: hermesAdapter,
|
||||||
|
adapters: {
|
||||||
|
explorer: createCursorAdapter({
|
||||||
|
type: "cursor",
|
||||||
|
model: "claude-sonnet-4",
|
||||||
|
timeout: CURSOR_TIMEOUT_MS,
|
||||||
|
}),
|
||||||
|
},
|
||||||
|
extract: { provider },
|
||||||
|
});
|
||||||
|
|
||||||
|
export default workflow;
|
||||||
74
workflows/extract-knowledge/lib/knowledge-queue.ts
Normal file
74
workflows/extract-knowledge/lib/knowledge-queue.ts
Normal file
@ -0,0 +1,74 @@
|
|||||||
|
import type { Dirent } from "node:fs";
|
||||||
|
import { readdir } from "node:fs/promises";
|
||||||
|
import { join } from "node:path";
|
||||||
|
|
||||||
|
import type { StartStep, WorkflowMessage } from "@uncaged/nerve-core";
|
||||||
|
|
||||||
|
import type { ExplorerMeta } from "../roles/explorer.js";
|
||||||
|
import type { QuestionerMeta } from "../roles/questioner.js";
|
||||||
|
|
||||||
|
async function walkMarkdownFiles(rootDir: string, base: string): Promise<string[]> {
|
||||||
|
const out: string[] = [];
|
||||||
|
let entries: Dirent[];
|
||||||
|
try {
|
||||||
|
entries = (await readdir(rootDir, { withFileTypes: true })) as Dirent[];
|
||||||
|
} catch {
|
||||||
|
return out;
|
||||||
|
}
|
||||||
|
for (const e of entries) {
|
||||||
|
const name = e.name;
|
||||||
|
const rel = base ? `${base}/${name}` : name;
|
||||||
|
const full = join(rootDir, name);
|
||||||
|
if (e.isDirectory()) {
|
||||||
|
out.push(...(await walkMarkdownFiles(full, rel)));
|
||||||
|
} else if (e.isFile() && name.endsWith(".md")) {
|
||||||
|
out.push(rel.replace(/\\/g, "/"));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return out;
|
||||||
|
}
|
||||||
|
|
||||||
|
/** Enumerate all markdown files under `.knowledge/` as repo-relative paths; seed line first if present. */
|
||||||
|
export async function bootstrapKnowledgeQueue(cwd: string, startContent: string): Promise<string[]> {
|
||||||
|
const knowledgeDir = join(cwd, ".knowledge");
|
||||||
|
const relFiles = await walkMarkdownFiles(knowledgeDir, "");
|
||||||
|
const paths = relFiles.map((f) => `.knowledge/${f}`);
|
||||||
|
const seed = startContent.trim().split(/\r?\n/u)[0]?.trim() ?? "";
|
||||||
|
if (paths.length === 0 && seed.length > 0) {
|
||||||
|
return [seed];
|
||||||
|
}
|
||||||
|
if (seed.length > 0 && paths.includes(seed)) {
|
||||||
|
return [seed, ...paths.filter((p) => p !== seed)];
|
||||||
|
}
|
||||||
|
if (seed.length > 0 && !paths.includes(seed)) {
|
||||||
|
return [seed, ...paths];
|
||||||
|
}
|
||||||
|
return [...paths].sort();
|
||||||
|
}
|
||||||
|
|
||||||
|
function lastIndexOfRole(messages: WorkflowMessage[], role: string): number {
|
||||||
|
for (let i = messages.length - 1; i >= 0; i--) {
|
||||||
|
if (messages[i].role === role) return i;
|
||||||
|
}
|
||||||
|
return -1;
|
||||||
|
}
|
||||||
|
|
||||||
|
/** Next queue for questioner: bootstrap, or continue after answerer / explorer. */
|
||||||
|
export async function resolveQueueForQuestioner(
|
||||||
|
start: StartStep,
|
||||||
|
messages: WorkflowMessage[],
|
||||||
|
cwd: string,
|
||||||
|
): Promise<string[]> {
|
||||||
|
const lastQi = lastIndexOfRole(messages, "questioner");
|
||||||
|
if (lastQi === -1) {
|
||||||
|
return bootstrapKnowledgeQueue(cwd, start.content);
|
||||||
|
}
|
||||||
|
const qMeta = messages[lastQi].meta as QuestionerMeta;
|
||||||
|
const tail = messages.slice(lastQi + 1);
|
||||||
|
const explorerMsg = tail.find((m) => m.role === "explorer");
|
||||||
|
if (explorerMsg) {
|
||||||
|
const eMeta = explorerMsg.meta as ExplorerMeta;
|
||||||
|
return [...qMeta.remaining_queue, ...eMeta.new_cards];
|
||||||
|
}
|
||||||
|
return qMeta.remaining_queue;
|
||||||
|
}
|
||||||
21
workflows/extract-knowledge/lib/workdir.ts
Normal file
21
workflows/extract-knowledge/lib/workdir.ts
Normal file
@ -0,0 +1,21 @@
|
|||||||
|
import type { StartStep } from "@uncaged/nerve-core";
|
||||||
|
|
||||||
|
type StartMetaWithWorkdir = StartStep["meta"] & { workdir?: string | null };
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Resolve the target repo working directory.
|
||||||
|
* Priority: start.meta.workdir → prompt second line (if absolute path) → cwd.
|
||||||
|
*/
|
||||||
|
export function resolveWorkdir(start: StartStep): string {
|
||||||
|
const m = start.meta as StartMetaWithWorkdir;
|
||||||
|
if (m.workdir) return m.workdir;
|
||||||
|
|
||||||
|
// Allow prompt to carry workdir on the second line: "seed\n/abs/path"
|
||||||
|
const lines = start.content.split(/\r?\n/);
|
||||||
|
if (lines.length >= 2) {
|
||||||
|
const candidate = lines[1]!.trim();
|
||||||
|
if (candidate.startsWith("/")) return candidate;
|
||||||
|
}
|
||||||
|
|
||||||
|
return process.cwd();
|
||||||
|
}
|
||||||
84
workflows/extract-knowledge/moderator.ts
Normal file
84
workflows/extract-knowledge/moderator.ts
Normal file
@ -0,0 +1,84 @@
|
|||||||
|
import { END } from "@uncaged/nerve-core";
|
||||||
|
import type { Moderator, ThreadContext } from "@uncaged/nerve-core";
|
||||||
|
|
||||||
|
import type { AnswererMeta } from "./roles/answerer.js";
|
||||||
|
import type { ExplorerMeta } from "./roles/explorer.js";
|
||||||
|
import type { QuestionerMeta } from "./roles/questioner.js";
|
||||||
|
|
||||||
|
export type WorkflowMeta = {
|
||||||
|
questioner: QuestionerMeta;
|
||||||
|
answerer: AnswererMeta;
|
||||||
|
explorer: ExplorerMeta;
|
||||||
|
};
|
||||||
|
|
||||||
|
type Steps = ThreadContext<WorkflowMeta>["steps"];
|
||||||
|
|
||||||
|
function lastQuestionerRemaining(steps: Steps): QuestionerMeta | undefined {
|
||||||
|
for (let i = steps.length - 1; i >= 0; i--) {
|
||||||
|
const s = steps[i];
|
||||||
|
if (s.role === "questioner") return s.meta;
|
||||||
|
}
|
||||||
|
return undefined;
|
||||||
|
}
|
||||||
|
|
||||||
|
/** End when the last two explorer invocations both added no new cards (issue #266 stagnation rule). */
|
||||||
|
function lastTwoExplorerRunsBothEmpty(steps: Steps): boolean {
|
||||||
|
const explorerSteps = steps.filter((s) => s.role === "explorer");
|
||||||
|
if (explorerSteps.length < 2) return false;
|
||||||
|
const e1 = explorerSteps[explorerSteps.length - 1].meta as ExplorerMeta;
|
||||||
|
const e2 = explorerSteps[explorerSteps.length - 2].meta as ExplorerMeta;
|
||||||
|
return e1.new_cards.length === 0 && e2.new_cards.length === 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
function queueAfterSkippedExplorer(steps: Steps): string[] {
|
||||||
|
const q = lastQuestionerRemaining(steps);
|
||||||
|
return q?.remaining_queue ?? [];
|
||||||
|
}
|
||||||
|
|
||||||
|
function queueAfterExplorerStep(steps: Steps): string[] {
|
||||||
|
const last = steps[steps.length - 1];
|
||||||
|
if (!last || last.role !== "explorer") return [];
|
||||||
|
const q = lastQuestionerRemaining(steps);
|
||||||
|
if (!q) return [];
|
||||||
|
const e = last.meta as ExplorerMeta;
|
||||||
|
return [...q.remaining_queue, ...e.new_cards];
|
||||||
|
}
|
||||||
|
|
||||||
|
export const moderator: Moderator<WorkflowMeta> = (context) => {
|
||||||
|
const { steps } = context;
|
||||||
|
|
||||||
|
if (steps.length === 0) {
|
||||||
|
return "questioner";
|
||||||
|
}
|
||||||
|
|
||||||
|
const last = steps[steps.length - 1];
|
||||||
|
|
||||||
|
if (last.role === "questioner") {
|
||||||
|
return "answerer";
|
||||||
|
}
|
||||||
|
|
||||||
|
if (last.role === "answerer") {
|
||||||
|
const am = last.meta as AnswererMeta;
|
||||||
|
if (am.has_unanswered) {
|
||||||
|
return "explorer";
|
||||||
|
}
|
||||||
|
const q = queueAfterSkippedExplorer(steps);
|
||||||
|
if (q.length === 0) {
|
||||||
|
return END;
|
||||||
|
}
|
||||||
|
return "questioner";
|
||||||
|
}
|
||||||
|
|
||||||
|
if (last.role === "explorer") {
|
||||||
|
if (lastTwoExplorerRunsBothEmpty(steps)) {
|
||||||
|
return END;
|
||||||
|
}
|
||||||
|
const q = queueAfterExplorerStep(steps);
|
||||||
|
if (q.length === 0) {
|
||||||
|
return END;
|
||||||
|
}
|
||||||
|
return "questioner";
|
||||||
|
}
|
||||||
|
|
||||||
|
return END;
|
||||||
|
};
|
||||||
102
workflows/extract-knowledge/roles/answerer.ts
Normal file
102
workflows/extract-knowledge/roles/answerer.ts
Normal file
@ -0,0 +1,102 @@
|
|||||||
|
import type { AgentFn, Role, ThreadContext, WorkflowMessage } from "@uncaged/nerve-core";
|
||||||
|
import type { LlmExtractorConfig } from "@uncaged/nerve-workflow-utils";
|
||||||
|
import { createRole, nerveCommandEnv, spawnSafe } from "@uncaged/nerve-workflow-utils";
|
||||||
|
import { z } from "zod";
|
||||||
|
|
||||||
|
import { resolveWorkdir } from "../lib/workdir.js";
|
||||||
|
|
||||||
|
import type { QuestionerMeta } from "./questioner.js";
|
||||||
|
|
||||||
|
export const answererMetaSchema = z.object({
|
||||||
|
results: z.array(
|
||||||
|
z.object({
|
||||||
|
id: z.string(),
|
||||||
|
found: z.boolean(),
|
||||||
|
source: z.string(),
|
||||||
|
note: z.string(),
|
||||||
|
}),
|
||||||
|
),
|
||||||
|
has_unanswered: z.boolean(),
|
||||||
|
});
|
||||||
|
|
||||||
|
export type AnswererMeta = z.infer<typeof answererMetaSchema>;
|
||||||
|
|
||||||
|
export type CreateAnswererRoleDeps = {
|
||||||
|
extract: LlmExtractorConfig;
|
||||||
|
};
|
||||||
|
|
||||||
|
function lastQuestionerMeta(messages: WorkflowMessage[]): QuestionerMeta | undefined {
|
||||||
|
for (let i = messages.length - 1; i >= 0; i--) {
|
||||||
|
if (messages[i].role === "questioner") {
|
||||||
|
return messages[i].meta as QuestionerMeta;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return undefined;
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function answererPrompt(ctx: ThreadContext): Promise<string> {
|
||||||
|
const messages = ctx.steps as unknown as WorkflowMessage[];
|
||||||
|
const cwd = resolveWorkdir(ctx.start);
|
||||||
|
const qm = lastQuestionerMeta(messages);
|
||||||
|
if (!qm || qm.questions.length === 0) {
|
||||||
|
throw new Error("answerer: prompt invoked without questioner questions — wrapped role should short-circuit");
|
||||||
|
}
|
||||||
|
|
||||||
|
const blocks: string[] = [];
|
||||||
|
for (const q of qm.questions) {
|
||||||
|
if ((ctx.start.meta as Record<string, unknown>).dryRun) {
|
||||||
|
blocks.push(`### ${q.id}\n[dryRun] skipped nerve knowledge query\n`);
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
const res = await spawnSafe(
|
||||||
|
"nerve",
|
||||||
|
["knowledge", "query", q.question],
|
||||||
|
{
|
||||||
|
cwd,
|
||||||
|
env: nerveCommandEnv(),
|
||||||
|
timeoutMs: 120_000,
|
||||||
|
dryRun: false,
|
||||||
|
abortSignal: null,
|
||||||
|
},
|
||||||
|
);
|
||||||
|
if (res.ok) {
|
||||||
|
blocks.push(`### ${q.id} (${q.domain})\nQuestion: ${q.question}\n---\n${res.value.stdout}\n`);
|
||||||
|
} else {
|
||||||
|
const err = res.error;
|
||||||
|
const detail =
|
||||||
|
err.kind === "non_zero_exit"
|
||||||
|
? `exit ${err.exitCode}\n${err.stderr}`
|
||||||
|
: err.kind === "timeout"
|
||||||
|
? `timeout\n${err.stderr}`
|
||||||
|
: err.kind === "spawn_failed"
|
||||||
|
? err.message
|
||||||
|
: "aborted";
|
||||||
|
blocks.push(`### ${q.id}\nnerve knowledge query failed: ${detail}\n`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return [
|
||||||
|
"You are the **answerer**. You MUST NOT read repository source code — only the CLI retrieval excerpts below.",
|
||||||
|
"For each question id, decide whether the knowledge base already answers it.",
|
||||||
|
"Set found=true only when the excerpt supports a confident answer; otherwise found=false.",
|
||||||
|
"Set has_unanswered=true if any question remains unanswered by the knowledge base.",
|
||||||
|
"",
|
||||||
|
...blocks,
|
||||||
|
].join("\n");
|
||||||
|
}
|
||||||
|
|
||||||
|
export function createAnswererRole(adapter: AgentFn, { extract }: CreateAnswererRoleDeps): Role<AnswererMeta> {
|
||||||
|
const inner = createRole(adapter, answererPrompt, answererMetaSchema, extract);
|
||||||
|
|
||||||
|
return async (ctx: ThreadContext) => {
|
||||||
|
const messages = ctx.steps as unknown as WorkflowMessage[];
|
||||||
|
const qm = lastQuestionerMeta(messages);
|
||||||
|
if (!qm || qm.questions.length === 0) {
|
||||||
|
return {
|
||||||
|
content: "answerer: no questions from questioner; skipping CLI lookup.",
|
||||||
|
meta: { results: [], has_unanswered: false },
|
||||||
|
};
|
||||||
|
}
|
||||||
|
return inner(ctx);
|
||||||
|
};
|
||||||
|
}
|
||||||
93
workflows/extract-knowledge/roles/explorer.ts
Normal file
93
workflows/extract-knowledge/roles/explorer.ts
Normal file
@ -0,0 +1,93 @@
|
|||||||
|
import type { AgentFn, Role, ThreadContext, WorkflowMessage } from "@uncaged/nerve-core";
|
||||||
|
import type { LlmExtractorConfig } from "@uncaged/nerve-workflow-utils";
|
||||||
|
import { createRole } from "@uncaged/nerve-workflow-utils";
|
||||||
|
import { z } from "zod";
|
||||||
|
|
||||||
|
import { resolveWorkdir } from "../lib/workdir.js";
|
||||||
|
|
||||||
|
import type { AnswererMeta } from "./answerer.js";
|
||||||
|
import type { QuestionerMeta } from "./questioner.js";
|
||||||
|
|
||||||
|
export const explorerMetaSchema = z.object({
|
||||||
|
patches: z.array(
|
||||||
|
z.object({
|
||||||
|
card: z.string(),
|
||||||
|
section: z.string(),
|
||||||
|
}),
|
||||||
|
),
|
||||||
|
new_cards: z.array(z.string()),
|
||||||
|
});
|
||||||
|
|
||||||
|
export type ExplorerMeta = z.infer<typeof explorerMetaSchema>;
|
||||||
|
|
||||||
|
export type CreateExplorerRoleDeps = {
|
||||||
|
extract: LlmExtractorConfig;
|
||||||
|
};
|
||||||
|
|
||||||
|
function lastMeta<M>(messages: WorkflowMessage[], role: string): M | undefined {
|
||||||
|
for (let i = messages.length - 1; i >= 0; i--) {
|
||||||
|
if (messages[i].role === role) {
|
||||||
|
return messages[i].meta as M;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return undefined;
|
||||||
|
}
|
||||||
|
|
||||||
|
export function explorerPrompt(ctx: ThreadContext): string {
|
||||||
|
const messages = ctx.steps as unknown as WorkflowMessage[];
|
||||||
|
const threadId = ctx.start.meta.threadId;
|
||||||
|
const qm = lastMeta<QuestionerMeta>(messages, "questioner");
|
||||||
|
const am = lastMeta<AnswererMeta>(messages, "answerer");
|
||||||
|
const cwd = resolveWorkdir(ctx.start);
|
||||||
|
|
||||||
|
const unanswered =
|
||||||
|
am?.results.filter((r) => !r.found).map((r) => r.id) ?? [];
|
||||||
|
|
||||||
|
return `You are the **explorer** in an extract-knowledge workflow.
|
||||||
|
|
||||||
|
## Context
|
||||||
|
|
||||||
|
- Thread: \`nerve thread ${threadId}\`
|
||||||
|
- Working directory (repo root for paths): ${cwd}
|
||||||
|
- Current knowledge card (questioner): ${qm?.card ?? "(unknown)"}
|
||||||
|
|
||||||
|
## Unanswered question ids
|
||||||
|
|
||||||
|
${JSON.stringify(unanswered)}
|
||||||
|
|
||||||
|
Use the prior answerer results in the thread to map ids to full question text when you read messages above.
|
||||||
|
|
||||||
|
## Task
|
||||||
|
|
||||||
|
For each unanswered question, **read the codebase** as needed, then either:
|
||||||
|
|
||||||
|
- Add a new markdown file under \`.knowledge/\`, or
|
||||||
|
- Patch an existing card (prefer updating the card listed above when appropriate).
|
||||||
|
|
||||||
|
After any write or patch to \`.knowledge\`, run:
|
||||||
|
|
||||||
|
\`\`\`bash
|
||||||
|
nerve knowledge sync
|
||||||
|
\`\`\`
|
||||||
|
|
||||||
|
from this repo root (${cwd}), and fix failures until sync succeeds.
|
||||||
|
|
||||||
|
## Output meta
|
||||||
|
|
||||||
|
Report \`patches\` as { card, section } entries for cards you edited (section is a short heading or path hint).
|
||||||
|
Report \`new_cards\` as repo-relative paths for brand-new files you created (e.g. \`.knowledge/new-topic.md\`).
|
||||||
|
|
||||||
|
Do not claim work you did not perform.`;
|
||||||
|
}
|
||||||
|
|
||||||
|
export function createExplorerRole(
|
||||||
|
adapter: AgentFn,
|
||||||
|
{ extract }: CreateExplorerRoleDeps,
|
||||||
|
): Role<ExplorerMeta> {
|
||||||
|
return createRole(
|
||||||
|
adapter,
|
||||||
|
async (ctx: ThreadContext) => explorerPrompt(ctx),
|
||||||
|
explorerMetaSchema,
|
||||||
|
extract,
|
||||||
|
);
|
||||||
|
}
|
||||||
108
workflows/extract-knowledge/roles/questioner.ts
Normal file
108
workflows/extract-knowledge/roles/questioner.ts
Normal file
@ -0,0 +1,108 @@
|
|||||||
|
import { readFile } from "node:fs/promises";
|
||||||
|
import { join } from "node:path";
|
||||||
|
|
||||||
|
import type { AgentFn, Role, ThreadContext, WorkflowMessage } from "@uncaged/nerve-core";
|
||||||
|
import type { LlmExtractorConfig } from "@uncaged/nerve-workflow-utils";
|
||||||
|
import { createRole } from "@uncaged/nerve-workflow-utils";
|
||||||
|
import { z } from "zod";
|
||||||
|
|
||||||
|
import { resolveQueueForQuestioner } from "../lib/knowledge-queue.js";
|
||||||
|
import { resolveWorkdir } from "../lib/workdir.js";
|
||||||
|
|
||||||
|
const questionerExtractSchema = z.object({
|
||||||
|
questions: z
|
||||||
|
.array(
|
||||||
|
z.object({
|
||||||
|
id: z.string(),
|
||||||
|
question: z.string(),
|
||||||
|
domain: z.string(),
|
||||||
|
}),
|
||||||
|
)
|
||||||
|
.length(5),
|
||||||
|
});
|
||||||
|
|
||||||
|
export type QuestionerMeta = {
|
||||||
|
/** Empty when no .knowledge cards and no work to do. */
|
||||||
|
card: string;
|
||||||
|
questions: { id: string; question: string; domain: string }[];
|
||||||
|
remaining_queue: string[];
|
||||||
|
};
|
||||||
|
|
||||||
|
export type CreateQuestionerRoleDeps = {
|
||||||
|
extract: LlmExtractorConfig;
|
||||||
|
};
|
||||||
|
|
||||||
|
function questionerSystem(): string {
|
||||||
|
return `You are the **questioner** in an extract-knowledge workflow.
|
||||||
|
|
||||||
|
Read the given markdown knowledge card. Propose exactly **five** technical questions that are **not** already answered or covered by that card.
|
||||||
|
|
||||||
|
Rules:
|
||||||
|
- Questions must be concrete and technical.
|
||||||
|
- Each question needs a stable string id (e.g. q1, q2, q3, q4, q5), a short domain label (e.g. routing, storage), and the question text.
|
||||||
|
- Do not assume access to other files or tools — reason only from the card content shown.`;
|
||||||
|
}
|
||||||
|
|
||||||
|
function questionerUser(card: string, cardBody: string, remainingHint: string[]): string {
|
||||||
|
return `Current card path: ${card}
|
||||||
|
|
||||||
|
Remaining queue after this card (paths, may be empty): ${JSON.stringify(remainingHint)}
|
||||||
|
|
||||||
|
--- Card content ---
|
||||||
|
|
||||||
|
${cardBody}`;
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function questionerPrompt(ctx: ThreadContext): Promise<string> {
|
||||||
|
const messages = ctx.steps as unknown as WorkflowMessage[];
|
||||||
|
const cwd = resolveWorkdir(ctx.start);
|
||||||
|
const queue = await resolveQueueForQuestioner(ctx.start, messages, cwd);
|
||||||
|
if (queue.length === 0) {
|
||||||
|
throw new Error(
|
||||||
|
"questioner: prompt invoked with empty queue — wrapped role should short-circuit before LLM",
|
||||||
|
);
|
||||||
|
}
|
||||||
|
const card = queue[0]!;
|
||||||
|
const remaining_queue = queue.slice(1);
|
||||||
|
let cardBody: string;
|
||||||
|
try {
|
||||||
|
cardBody = await readFile(join(cwd, card), "utf8");
|
||||||
|
} catch (e) {
|
||||||
|
const msg = e instanceof Error ? e.message : String(e);
|
||||||
|
throw new Error(`questioner: failed to read ${card}: ${msg}`);
|
||||||
|
}
|
||||||
|
return `${questionerSystem()}\n\n${questionerUser(card, cardBody, remaining_queue)}`;
|
||||||
|
}
|
||||||
|
|
||||||
|
export function createQuestionerRole(adapter: AgentFn, { extract }: CreateQuestionerRoleDeps): Role<QuestionerMeta> {
|
||||||
|
const inner = createRole(adapter, questionerPrompt, questionerExtractSchema, extract);
|
||||||
|
|
||||||
|
return async (ctx: ThreadContext) => {
|
||||||
|
const messages = ctx.steps as unknown as WorkflowMessage[];
|
||||||
|
const cwd = resolveWorkdir(ctx.start);
|
||||||
|
const queue = await resolveQueueForQuestioner(ctx.start, messages, cwd);
|
||||||
|
if (queue.length === 0) {
|
||||||
|
return {
|
||||||
|
content:
|
||||||
|
"questioner: no `.knowledge` markdown files found and no seed path in the trigger prompt; queue is empty.",
|
||||||
|
meta: {
|
||||||
|
card: "",
|
||||||
|
questions: [],
|
||||||
|
remaining_queue: [],
|
||||||
|
},
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
const card = queue[0]!;
|
||||||
|
const remaining_queue = queue.slice(1);
|
||||||
|
const r = await inner(ctx);
|
||||||
|
return {
|
||||||
|
content: r.content,
|
||||||
|
meta: {
|
||||||
|
card,
|
||||||
|
questions: r.meta.questions,
|
||||||
|
remaining_queue,
|
||||||
|
},
|
||||||
|
};
|
||||||
|
};
|
||||||
|
}
|
||||||
File diff suppressed because it is too large
Load Diff
@ -1,22 +0,0 @@
|
|||||||
{
|
|
||||||
"name": "gitea-issue-solver-workflow",
|
|
||||||
"version": "0.0.1",
|
|
||||||
"private": true,
|
|
||||||
"type": "module",
|
|
||||||
"dependencies": {
|
|
||||||
"@uncaged/nerve-core": "latest",
|
|
||||||
"@uncaged/nerve-workflow-utils": "latest",
|
|
||||||
"zod": "^4.3.6"
|
|
||||||
},
|
|
||||||
"devDependencies": {
|
|
||||||
"@types/node": "^22.0.0",
|
|
||||||
"typescript": "^5.7.0"
|
|
||||||
},
|
|
||||||
"pnpm": {
|
|
||||||
"overrides": {
|
|
||||||
"@uncaged/nerve-daemon": "link:../../../repos/nerve/packages/daemon",
|
|
||||||
"@uncaged/nerve-core": "link:../../../repos/nerve/packages/core",
|
|
||||||
"@uncaged/nerve-workflow-utils": "link:../../../repos/nerve/packages/workflow-utils"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
59
workflows/gitea-issue-solver/pnpm-lock.yaml
generated
59
workflows/gitea-issue-solver/pnpm-lock.yaml
generated
@ -1,59 +0,0 @@
|
|||||||
lockfileVersion: '9.0'
|
|
||||||
|
|
||||||
settings:
|
|
||||||
autoInstallPeers: true
|
|
||||||
excludeLinksFromLockfile: false
|
|
||||||
|
|
||||||
overrides:
|
|
||||||
'@uncaged/nerve-daemon': link:../../../repos/nerve/packages/daemon
|
|
||||||
'@uncaged/nerve-core': link:../../../repos/nerve/packages/core
|
|
||||||
'@uncaged/nerve-workflow-utils': link:../../../repos/nerve/packages/workflow-utils
|
|
||||||
|
|
||||||
importers:
|
|
||||||
|
|
||||||
.:
|
|
||||||
dependencies:
|
|
||||||
'@uncaged/nerve-core':
|
|
||||||
specifier: link:../../../repos/nerve/packages/core
|
|
||||||
version: link:../../../repos/nerve/packages/core
|
|
||||||
'@uncaged/nerve-workflow-utils':
|
|
||||||
specifier: link:../../../repos/nerve/packages/workflow-utils
|
|
||||||
version: link:../../../repos/nerve/packages/workflow-utils
|
|
||||||
zod:
|
|
||||||
specifier: ^4.3.6
|
|
||||||
version: 4.3.6
|
|
||||||
devDependencies:
|
|
||||||
'@types/node':
|
|
||||||
specifier: ^22.0.0
|
|
||||||
version: 22.19.17
|
|
||||||
typescript:
|
|
||||||
specifier: ^5.7.0
|
|
||||||
version: 5.9.3
|
|
||||||
|
|
||||||
packages:
|
|
||||||
|
|
||||||
'@types/node@22.19.17':
|
|
||||||
resolution: {integrity: sha512-wGdMcf+vPYM6jikpS/qhg6WiqSV/OhG+jeeHT/KlVqxYfD40iYJf9/AE1uQxVWFvU7MipKRkRv8NSHiCGgPr8Q==}
|
|
||||||
|
|
||||||
typescript@5.9.3:
|
|
||||||
resolution: {integrity: sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw==}
|
|
||||||
engines: {node: '>=14.17'}
|
|
||||||
hasBin: true
|
|
||||||
|
|
||||||
undici-types@6.21.0:
|
|
||||||
resolution: {integrity: sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ==}
|
|
||||||
|
|
||||||
zod@4.3.6:
|
|
||||||
resolution: {integrity: sha512-rftlrkhHZOcjDwkGlnUtZZkvaPHCsDATp4pGpuOOMDaTdDDXF91wuVDJoWoPsKX/3YPQ5fHuF3STjcYyKr+Qhg==}
|
|
||||||
|
|
||||||
snapshots:
|
|
||||||
|
|
||||||
'@types/node@22.19.17':
|
|
||||||
dependencies:
|
|
||||||
undici-types: 6.21.0
|
|
||||||
|
|
||||||
typescript@5.9.3: {}
|
|
||||||
|
|
||||||
undici-types@6.21.0: {}
|
|
||||||
|
|
||||||
zod@4.3.6: {}
|
|
||||||
@ -1,13 +0,0 @@
|
|||||||
{
|
|
||||||
"compilerOptions": {
|
|
||||||
"target": "ES2022",
|
|
||||||
"lib": ["ES2022"],
|
|
||||||
"module": "NodeNext",
|
|
||||||
"moduleResolution": "NodeNext",
|
|
||||||
"strict": true,
|
|
||||||
"skipLibCheck": true,
|
|
||||||
"noEmit": true,
|
|
||||||
"types": ["node"]
|
|
||||||
},
|
|
||||||
"include": ["./**/*.ts"]
|
|
||||||
}
|
|
||||||
1
workflows/sense-generator/.gitignore
vendored
1
workflows/sense-generator/.gitignore
vendored
@ -1 +0,0 @@
|
|||||||
dist/
|
|
||||||
@ -1,27 +0,0 @@
|
|||||||
import type { WorkflowDefinition } from "@uncaged/nerve-core";
|
|
||||||
import type { LlmProvider } from "@uncaged/nerve-workflow-utils";
|
|
||||||
import { buildPlannerRole } from "./roles/planner/index.js";
|
|
||||||
import { buildCoderRole } from "./roles/coder/index.js";
|
|
||||||
import { buildTesterRole } from "./roles/tester/index.js";
|
|
||||||
import { moderator } from "./moderator.js";
|
|
||||||
import type { SenseMeta } from "./moderator.js";
|
|
||||||
|
|
||||||
export type BuildSenseGeneratorDeps = {
|
|
||||||
provider: LlmProvider;
|
|
||||||
cwd: string;
|
|
||||||
};
|
|
||||||
|
|
||||||
export function buildSenseGenerator({
|
|
||||||
provider,
|
|
||||||
cwd,
|
|
||||||
}: BuildSenseGeneratorDeps): WorkflowDefinition<SenseMeta> {
|
|
||||||
return {
|
|
||||||
name: "sense-generator",
|
|
||||||
roles: {
|
|
||||||
planner: buildPlannerRole({ provider, cwd }),
|
|
||||||
coder: buildCoderRole({ provider, cwd }),
|
|
||||||
tester: buildTesterRole({ provider }),
|
|
||||||
},
|
|
||||||
moderator,
|
|
||||||
};
|
|
||||||
}
|
|
||||||
@ -1,19 +0,0 @@
|
|||||||
import { join } from "node:path";
|
|
||||||
import { buildSenseGenerator } from "./build.js";
|
|
||||||
|
|
||||||
const HOME = process.env.HOME ?? "/home/azureuser";
|
|
||||||
const NERVE_ROOT = join(HOME, ".uncaged-nerve");
|
|
||||||
|
|
||||||
const apiKey = process.env.DASHSCOPE_API_KEY;
|
|
||||||
const baseUrl = process.env.DASHSCOPE_BASE_URL;
|
|
||||||
const model = process.env.DASHSCOPE_MODEL ?? "qwen-plus";
|
|
||||||
if (!apiKey || !baseUrl) {
|
|
||||||
throw new Error("Set DASHSCOPE_API_KEY and DASHSCOPE_BASE_URL");
|
|
||||||
}
|
|
||||||
|
|
||||||
const workflow = buildSenseGenerator({
|
|
||||||
provider: { apiKey, baseUrl, model },
|
|
||||||
cwd: NERVE_ROOT,
|
|
||||||
});
|
|
||||||
|
|
||||||
export default workflow;
|
|
||||||
@ -1,27 +0,0 @@
|
|||||||
import { END } from "@uncaged/nerve-core";
|
|
||||||
import type { Moderator } from "@uncaged/nerve-core";
|
|
||||||
import type { PlannerMeta } from "./roles/planner/index.js";
|
|
||||||
import type { CoderMeta } from "./roles/coder/index.js";
|
|
||||||
import type { TesterMeta } from "./roles/tester/index.js";
|
|
||||||
|
|
||||||
export type SenseMeta = {
|
|
||||||
planner: PlannerMeta;
|
|
||||||
coder: CoderMeta;
|
|
||||||
tester: TesterMeta;
|
|
||||||
};
|
|
||||||
|
|
||||||
function countRole(steps: { role: string }[], name: string): number {
|
|
||||||
return steps.filter((s) => s.role === name).length;
|
|
||||||
}
|
|
||||||
|
|
||||||
export const moderator: Moderator<SenseMeta> = (context) => {
|
|
||||||
if (context.steps.length === 0) return "planner";
|
|
||||||
const last = context.steps[context.steps.length - 1];
|
|
||||||
if (last.role === "planner") return "coder";
|
|
||||||
if (last.role === "coder") return "tester";
|
|
||||||
if (last.role === "tester") {
|
|
||||||
if (last.meta.passed) return END;
|
|
||||||
return countRole(context.steps, "tester") < 3 ? "coder" : END;
|
|
||||||
}
|
|
||||||
return END;
|
|
||||||
};
|
|
||||||
@ -1,26 +0,0 @@
|
|||||||
{
|
|
||||||
"name": "sense-generator-workflow",
|
|
||||||
"version": "0.0.1",
|
|
||||||
"private": true,
|
|
||||||
"type": "module",
|
|
||||||
"scripts": {
|
|
||||||
"build": "esbuild index.ts --bundle --platform=node --format=esm --outdir=dist --packages=external"
|
|
||||||
},
|
|
||||||
"dependencies": {
|
|
||||||
"@uncaged/nerve-core": "latest",
|
|
||||||
"@uncaged/nerve-workflow-utils": "latest",
|
|
||||||
"zod": "^4.3.6"
|
|
||||||
},
|
|
||||||
"devDependencies": {
|
|
||||||
"@types/node": "^22.0.0",
|
|
||||||
"esbuild": "^0.27.0",
|
|
||||||
"typescript": "^5.7.0"
|
|
||||||
},
|
|
||||||
"pnpm": {
|
|
||||||
"overrides": {
|
|
||||||
"@uncaged/nerve-daemon": "link:../../../repos/nerve/packages/daemon",
|
|
||||||
"@uncaged/nerve-core": "link:../../../repos/nerve/packages/core",
|
|
||||||
"@uncaged/nerve-workflow-utils": "link:../../../repos/nerve/packages/workflow-utils"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
330
workflows/sense-generator/pnpm-lock.yaml
generated
330
workflows/sense-generator/pnpm-lock.yaml
generated
@ -1,330 +0,0 @@
|
|||||||
lockfileVersion: '9.0'
|
|
||||||
|
|
||||||
settings:
|
|
||||||
autoInstallPeers: true
|
|
||||||
excludeLinksFromLockfile: false
|
|
||||||
|
|
||||||
overrides:
|
|
||||||
'@uncaged/nerve-daemon': link:../../../repos/nerve/packages/daemon
|
|
||||||
'@uncaged/nerve-core': link:../../../repos/nerve/packages/core
|
|
||||||
'@uncaged/nerve-workflow-utils': link:../../../repos/nerve/packages/workflow-utils
|
|
||||||
|
|
||||||
importers:
|
|
||||||
|
|
||||||
.:
|
|
||||||
dependencies:
|
|
||||||
'@uncaged/nerve-core':
|
|
||||||
specifier: link:../../../repos/nerve/packages/core
|
|
||||||
version: link:../../../repos/nerve/packages/core
|
|
||||||
'@uncaged/nerve-workflow-utils':
|
|
||||||
specifier: link:../../../repos/nerve/packages/workflow-utils
|
|
||||||
version: link:../../../repos/nerve/packages/workflow-utils
|
|
||||||
zod:
|
|
||||||
specifier: ^4.3.6
|
|
||||||
version: 4.3.6
|
|
||||||
devDependencies:
|
|
||||||
'@types/node':
|
|
||||||
specifier: ^22.0.0
|
|
||||||
version: 22.19.17
|
|
||||||
esbuild:
|
|
||||||
specifier: ^0.27.0
|
|
||||||
version: 0.27.7
|
|
||||||
typescript:
|
|
||||||
specifier: ^5.7.0
|
|
||||||
version: 5.9.3
|
|
||||||
|
|
||||||
packages:
|
|
||||||
|
|
||||||
'@esbuild/aix-ppc64@0.27.7':
|
|
||||||
resolution: {integrity: sha512-EKX3Qwmhz1eMdEJokhALr0YiD0lhQNwDqkPYyPhiSwKrh7/4KRjQc04sZ8db+5DVVnZ1LmbNDI1uAMPEUBnQPg==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [ppc64]
|
|
||||||
os: [aix]
|
|
||||||
|
|
||||||
'@esbuild/android-arm64@0.27.7':
|
|
||||||
resolution: {integrity: sha512-62dPZHpIXzvChfvfLJow3q5dDtiNMkwiRzPylSCfriLvZeq0a1bWChrGx/BbUbPwOrsWKMn8idSllklzBy+dgQ==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [arm64]
|
|
||||||
os: [android]
|
|
||||||
|
|
||||||
'@esbuild/android-arm@0.27.7':
|
|
||||||
resolution: {integrity: sha512-jbPXvB4Yj2yBV7HUfE2KHe4GJX51QplCN1pGbYjvsyCZbQmies29EoJbkEc+vYuU5o45AfQn37vZlyXy4YJ8RQ==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [arm]
|
|
||||||
os: [android]
|
|
||||||
|
|
||||||
'@esbuild/android-x64@0.27.7':
|
|
||||||
resolution: {integrity: sha512-x5VpMODneVDb70PYV2VQOmIUUiBtY3D3mPBG8NxVk5CogneYhkR7MmM3yR/uMdITLrC1ml/NV1rj4bMJuy9MCg==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [x64]
|
|
||||||
os: [android]
|
|
||||||
|
|
||||||
'@esbuild/darwin-arm64@0.27.7':
|
|
||||||
resolution: {integrity: sha512-5lckdqeuBPlKUwvoCXIgI2D9/ABmPq3Rdp7IfL70393YgaASt7tbju3Ac+ePVi3KDH6N2RqePfHnXkaDtY9fkw==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [arm64]
|
|
||||||
os: [darwin]
|
|
||||||
|
|
||||||
'@esbuild/darwin-x64@0.27.7':
|
|
||||||
resolution: {integrity: sha512-rYnXrKcXuT7Z+WL5K980jVFdvVKhCHhUwid+dDYQpH+qu+TefcomiMAJpIiC2EM3Rjtq0sO3StMV/+3w3MyyqQ==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [x64]
|
|
||||||
os: [darwin]
|
|
||||||
|
|
||||||
'@esbuild/freebsd-arm64@0.27.7':
|
|
||||||
resolution: {integrity: sha512-B48PqeCsEgOtzME2GbNM2roU29AMTuOIN91dsMO30t+Ydis3z/3Ngoj5hhnsOSSwNzS+6JppqWsuhTp6E82l2w==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [arm64]
|
|
||||||
os: [freebsd]
|
|
||||||
|
|
||||||
'@esbuild/freebsd-x64@0.27.7':
|
|
||||||
resolution: {integrity: sha512-jOBDK5XEjA4m5IJK3bpAQF9/Lelu/Z9ZcdhTRLf4cajlB+8VEhFFRjWgfy3M1O4rO2GQ/b2dLwCUGpiF/eATNQ==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [x64]
|
|
||||||
os: [freebsd]
|
|
||||||
|
|
||||||
'@esbuild/linux-arm64@0.27.7':
|
|
||||||
resolution: {integrity: sha512-RZPHBoxXuNnPQO9rvjh5jdkRmVizktkT7TCDkDmQ0W2SwHInKCAV95GRuvdSvA7w4VMwfCjUiPwDi0ZO6Nfe9A==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [arm64]
|
|
||||||
os: [linux]
|
|
||||||
|
|
||||||
'@esbuild/linux-arm@0.27.7':
|
|
||||||
resolution: {integrity: sha512-RkT/YXYBTSULo3+af8Ib0ykH8u2MBh57o7q/DAs3lTJlyVQkgQvlrPTnjIzzRPQyavxtPtfg0EopvDyIt0j1rA==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [arm]
|
|
||||||
os: [linux]
|
|
||||||
|
|
||||||
'@esbuild/linux-ia32@0.27.7':
|
|
||||||
resolution: {integrity: sha512-GA48aKNkyQDbd3KtkplYWT102C5sn/EZTY4XROkxONgruHPU72l+gW+FfF8tf2cFjeHaRbWpOYa/uRBz/Xq1Pg==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [ia32]
|
|
||||||
os: [linux]
|
|
||||||
|
|
||||||
'@esbuild/linux-loong64@0.27.7':
|
|
||||||
resolution: {integrity: sha512-a4POruNM2oWsD4WKvBSEKGIiWQF8fZOAsycHOt6JBpZ+JN2n2JH9WAv56SOyu9X5IqAjqSIPTaJkqN8F7XOQ5Q==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [loong64]
|
|
||||||
os: [linux]
|
|
||||||
|
|
||||||
'@esbuild/linux-mips64el@0.27.7':
|
|
||||||
resolution: {integrity: sha512-KabT5I6StirGfIz0FMgl1I+R1H73Gp0ofL9A3nG3i/cYFJzKHhouBV5VWK1CSgKvVaG4q1RNpCTR2LuTVB3fIw==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [mips64el]
|
|
||||||
os: [linux]
|
|
||||||
|
|
||||||
'@esbuild/linux-ppc64@0.27.7':
|
|
||||||
resolution: {integrity: sha512-gRsL4x6wsGHGRqhtI+ifpN/vpOFTQtnbsupUF5R5YTAg+y/lKelYR1hXbnBdzDjGbMYjVJLJTd2OFmMewAgwlQ==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [ppc64]
|
|
||||||
os: [linux]
|
|
||||||
|
|
||||||
'@esbuild/linux-riscv64@0.27.7':
|
|
||||||
resolution: {integrity: sha512-hL25LbxO1QOngGzu2U5xeXtxXcW+/GvMN3ejANqXkxZ/opySAZMrc+9LY/WyjAan41unrR3YrmtTsUpwT66InQ==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [riscv64]
|
|
||||||
os: [linux]
|
|
||||||
|
|
||||||
'@esbuild/linux-s390x@0.27.7':
|
|
||||||
resolution: {integrity: sha512-2k8go8Ycu1Kb46vEelhu1vqEP+UeRVj2zY1pSuPdgvbd5ykAw82Lrro28vXUrRmzEsUV0NzCf54yARIK8r0fdw==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [s390x]
|
|
||||||
os: [linux]
|
|
||||||
|
|
||||||
'@esbuild/linux-x64@0.27.7':
|
|
||||||
resolution: {integrity: sha512-hzznmADPt+OmsYzw1EE33ccA+HPdIqiCRq7cQeL1Jlq2gb1+OyWBkMCrYGBJ+sxVzve2ZJEVeePbLM2iEIZSxA==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [x64]
|
|
||||||
os: [linux]
|
|
||||||
|
|
||||||
'@esbuild/netbsd-arm64@0.27.7':
|
|
||||||
resolution: {integrity: sha512-b6pqtrQdigZBwZxAn1UpazEisvwaIDvdbMbmrly7cDTMFnw/+3lVxxCTGOrkPVnsYIosJJXAsILG9XcQS+Yu6w==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [arm64]
|
|
||||||
os: [netbsd]
|
|
||||||
|
|
||||||
'@esbuild/netbsd-x64@0.27.7':
|
|
||||||
resolution: {integrity: sha512-OfatkLojr6U+WN5EDYuoQhtM+1xco+/6FSzJJnuWiUw5eVcicbyK3dq5EeV/QHT1uy6GoDhGbFpprUiHUYggrw==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [x64]
|
|
||||||
os: [netbsd]
|
|
||||||
|
|
||||||
'@esbuild/openbsd-arm64@0.27.7':
|
|
||||||
resolution: {integrity: sha512-AFuojMQTxAz75Fo8idVcqoQWEHIXFRbOc1TrVcFSgCZtQfSdc1RXgB3tjOn/krRHENUB4j00bfGjyl2mJrU37A==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [arm64]
|
|
||||||
os: [openbsd]
|
|
||||||
|
|
||||||
'@esbuild/openbsd-x64@0.27.7':
|
|
||||||
resolution: {integrity: sha512-+A1NJmfM8WNDv5CLVQYJ5PshuRm/4cI6WMZRg1by1GwPIQPCTs1GLEUHwiiQGT5zDdyLiRM/l1G0Pv54gvtKIg==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [x64]
|
|
||||||
os: [openbsd]
|
|
||||||
|
|
||||||
'@esbuild/openharmony-arm64@0.27.7':
|
|
||||||
resolution: {integrity: sha512-+KrvYb/C8zA9CU/g0sR6w2RBw7IGc5J2BPnc3dYc5VJxHCSF1yNMxTV5LQ7GuKteQXZtspjFbiuW5/dOj7H4Yw==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [arm64]
|
|
||||||
os: [openharmony]
|
|
||||||
|
|
||||||
'@esbuild/sunos-x64@0.27.7':
|
|
||||||
resolution: {integrity: sha512-ikktIhFBzQNt/QDyOL580ti9+5mL/YZeUPKU2ivGtGjdTYoqz6jObj6nOMfhASpS4GU4Q/Clh1QtxWAvcYKamA==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [x64]
|
|
||||||
os: [sunos]
|
|
||||||
|
|
||||||
'@esbuild/win32-arm64@0.27.7':
|
|
||||||
resolution: {integrity: sha512-7yRhbHvPqSpRUV7Q20VuDwbjW5kIMwTHpptuUzV+AA46kiPze5Z7qgt6CLCK3pWFrHeNfDd1VKgyP4O+ng17CA==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [arm64]
|
|
||||||
os: [win32]
|
|
||||||
|
|
||||||
'@esbuild/win32-ia32@0.27.7':
|
|
||||||
resolution: {integrity: sha512-SmwKXe6VHIyZYbBLJrhOoCJRB/Z1tckzmgTLfFYOfpMAx63BJEaL9ExI8x7v0oAO3Zh6D/Oi1gVxEYr5oUCFhw==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [ia32]
|
|
||||||
os: [win32]
|
|
||||||
|
|
||||||
'@esbuild/win32-x64@0.27.7':
|
|
||||||
resolution: {integrity: sha512-56hiAJPhwQ1R4i+21FVF7V8kSD5zZTdHcVuRFMW0hn753vVfQN8xlx4uOPT4xoGH0Z/oVATuR82AiqSTDIpaHg==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [x64]
|
|
||||||
os: [win32]
|
|
||||||
|
|
||||||
'@types/node@22.19.17':
|
|
||||||
resolution: {integrity: sha512-wGdMcf+vPYM6jikpS/qhg6WiqSV/OhG+jeeHT/KlVqxYfD40iYJf9/AE1uQxVWFvU7MipKRkRv8NSHiCGgPr8Q==}
|
|
||||||
|
|
||||||
esbuild@0.27.7:
|
|
||||||
resolution: {integrity: sha512-IxpibTjyVnmrIQo5aqNpCgoACA/dTKLTlhMHihVHhdkxKyPO1uBBthumT0rdHmcsk9uMonIWS0m4FljWzILh3w==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
hasBin: true
|
|
||||||
|
|
||||||
typescript@5.9.3:
|
|
||||||
resolution: {integrity: sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw==}
|
|
||||||
engines: {node: '>=14.17'}
|
|
||||||
hasBin: true
|
|
||||||
|
|
||||||
undici-types@6.21.0:
|
|
||||||
resolution: {integrity: sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ==}
|
|
||||||
|
|
||||||
zod@4.3.6:
|
|
||||||
resolution: {integrity: sha512-rftlrkhHZOcjDwkGlnUtZZkvaPHCsDATp4pGpuOOMDaTdDDXF91wuVDJoWoPsKX/3YPQ5fHuF3STjcYyKr+Qhg==}
|
|
||||||
|
|
||||||
snapshots:
|
|
||||||
|
|
||||||
'@esbuild/aix-ppc64@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/android-arm64@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/android-arm@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/android-x64@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/darwin-arm64@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/darwin-x64@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/freebsd-arm64@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/freebsd-x64@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/linux-arm64@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/linux-arm@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/linux-ia32@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/linux-loong64@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/linux-mips64el@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/linux-ppc64@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/linux-riscv64@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/linux-s390x@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/linux-x64@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/netbsd-arm64@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/netbsd-x64@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/openbsd-arm64@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/openbsd-x64@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/openharmony-arm64@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/sunos-x64@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/win32-arm64@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/win32-ia32@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/win32-x64@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@types/node@22.19.17':
|
|
||||||
dependencies:
|
|
||||||
undici-types: 6.21.0
|
|
||||||
|
|
||||||
esbuild@0.27.7:
|
|
||||||
optionalDependencies:
|
|
||||||
'@esbuild/aix-ppc64': 0.27.7
|
|
||||||
'@esbuild/android-arm': 0.27.7
|
|
||||||
'@esbuild/android-arm64': 0.27.7
|
|
||||||
'@esbuild/android-x64': 0.27.7
|
|
||||||
'@esbuild/darwin-arm64': 0.27.7
|
|
||||||
'@esbuild/darwin-x64': 0.27.7
|
|
||||||
'@esbuild/freebsd-arm64': 0.27.7
|
|
||||||
'@esbuild/freebsd-x64': 0.27.7
|
|
||||||
'@esbuild/linux-arm': 0.27.7
|
|
||||||
'@esbuild/linux-arm64': 0.27.7
|
|
||||||
'@esbuild/linux-ia32': 0.27.7
|
|
||||||
'@esbuild/linux-loong64': 0.27.7
|
|
||||||
'@esbuild/linux-mips64el': 0.27.7
|
|
||||||
'@esbuild/linux-ppc64': 0.27.7
|
|
||||||
'@esbuild/linux-riscv64': 0.27.7
|
|
||||||
'@esbuild/linux-s390x': 0.27.7
|
|
||||||
'@esbuild/linux-x64': 0.27.7
|
|
||||||
'@esbuild/netbsd-arm64': 0.27.7
|
|
||||||
'@esbuild/netbsd-x64': 0.27.7
|
|
||||||
'@esbuild/openbsd-arm64': 0.27.7
|
|
||||||
'@esbuild/openbsd-x64': 0.27.7
|
|
||||||
'@esbuild/openharmony-arm64': 0.27.7
|
|
||||||
'@esbuild/sunos-x64': 0.27.7
|
|
||||||
'@esbuild/win32-arm64': 0.27.7
|
|
||||||
'@esbuild/win32-ia32': 0.27.7
|
|
||||||
'@esbuild/win32-x64': 0.27.7
|
|
||||||
|
|
||||||
typescript@5.9.3: {}
|
|
||||||
|
|
||||||
undici-types@6.21.0: {}
|
|
||||||
|
|
||||||
zod@4.3.6: {}
|
|
||||||
@ -1,23 +0,0 @@
|
|||||||
import type { LlmProvider } from "@uncaged/nerve-workflow-utils";
|
|
||||||
import { createCursorRole } from "@uncaged/nerve-workflow-utils";
|
|
||||||
import { coderPrompt } from "./prompt.js";
|
|
||||||
import { z } from "zod";
|
|
||||||
|
|
||||||
export const coderMetaSchema = z.object({
|
|
||||||
filesCreated: z.boolean().describe("true if the sense files were created"),
|
|
||||||
});
|
|
||||||
export type CoderMeta = z.infer<typeof coderMetaSchema>;
|
|
||||||
|
|
||||||
export type BuildCoderDeps = {
|
|
||||||
provider: LlmProvider;
|
|
||||||
cwd: string;
|
|
||||||
};
|
|
||||||
|
|
||||||
export function buildCoderRole({ provider, cwd }: BuildCoderDeps) {
|
|
||||||
return createCursorRole<CoderMeta>({
|
|
||||||
cwd,
|
|
||||||
mode: "default",
|
|
||||||
prompt: async (threadId) => coderPrompt({ threadId }),
|
|
||||||
extract: { provider, schema: coderMetaSchema },
|
|
||||||
});
|
|
||||||
}
|
|
||||||
@ -1,43 +0,0 @@
|
|||||||
export function coderPrompt({ threadId }: { threadId: string }): string {
|
|
||||||
return `Read the workflow thread for the planner's sense design: \`nerve thread ${threadId}\`
|
|
||||||
Read the nerve-dev skill for sense file structure and conventions: \`cat node_modules/@uncaged/nerve-skills/nerve-dev/SKILL.md\`
|
|
||||||
|
|
||||||
Implement the sense following the patterns from existing senses and the skill guide.
|
|
||||||
|
|
||||||
File structure for each sense:
|
|
||||||
- \`senses/<name>/src/index.ts\` — TypeScript source with proper types; import schema as \`./schema.ts\`
|
|
||||||
- \`senses/<name>/src/schema.ts\` — Drizzle schema (TypeScript)
|
|
||||||
- \`senses/<name>/migrations/\` — Drizzle migration files (at sense root, not inside src/)
|
|
||||||
- \`senses/<name>/package.json\` — with esbuild build script (see below)
|
|
||||||
- \`senses/<name>/index.js\` — bundled output generated by \`pnpm build\` (do NOT edit by hand)
|
|
||||||
|
|
||||||
package.json template for each sense:
|
|
||||||
\`\`\`json
|
|
||||||
{
|
|
||||||
"name": "sense-<name>",
|
|
||||||
"version": "0.0.1",
|
|
||||||
"private": true,
|
|
||||||
"type": "module",
|
|
||||||
"scripts": {
|
|
||||||
"build": "esbuild src/index.ts --bundle --platform=node --format=esm --outfile=index.js --packages=external"
|
|
||||||
},
|
|
||||||
"devDependencies": {
|
|
||||||
"@types/node": "^22.0.0",
|
|
||||||
"esbuild": "^0.27.0",
|
|
||||||
"typescript": "^5.7.0"
|
|
||||||
},
|
|
||||||
"pnpm": {
|
|
||||||
"onlyBuiltDependencies": ["esbuild"]
|
|
||||||
}
|
|
||||||
}
|
|
||||||
\`\`\`
|
|
||||||
|
|
||||||
After creating all files, run inside the sense directory:
|
|
||||||
\`\`\`
|
|
||||||
pnpm install --no-cache && pnpm build
|
|
||||||
\`\`\`
|
|
||||||
|
|
||||||
This generates the bundled \`index.js\` at the sense root that the daemon loads.
|
|
||||||
|
|
||||||
Then update nerve.yaml and run any required migrations.`;
|
|
||||||
}
|
|
||||||
@ -1,23 +0,0 @@
|
|||||||
import type { LlmProvider } from "@uncaged/nerve-workflow-utils";
|
|
||||||
import { createCursorRole } from "@uncaged/nerve-workflow-utils";
|
|
||||||
import { plannerPrompt } from "./prompt.js";
|
|
||||||
import { z } from "zod";
|
|
||||||
|
|
||||||
export const plannerMetaSchema = z.object({
|
|
||||||
senseName: z.string().describe("kebab-case sense name from the plan"),
|
|
||||||
});
|
|
||||||
export type PlannerMeta = z.infer<typeof plannerMetaSchema>;
|
|
||||||
|
|
||||||
export type BuildPlannerDeps = {
|
|
||||||
provider: LlmProvider;
|
|
||||||
cwd: string;
|
|
||||||
};
|
|
||||||
|
|
||||||
export function buildPlannerRole({ provider, cwd }: BuildPlannerDeps) {
|
|
||||||
return createCursorRole<PlannerMeta>({
|
|
||||||
cwd,
|
|
||||||
mode: "ask",
|
|
||||||
prompt: async (threadId) => plannerPrompt({ threadId }),
|
|
||||||
extract: { provider, schema: plannerMetaSchema },
|
|
||||||
});
|
|
||||||
}
|
|
||||||
@ -1,17 +0,0 @@
|
|||||||
export function plannerPrompt({ threadId }: { threadId: string }): string {
|
|
||||||
return `You are planning a new Nerve sense.
|
|
||||||
|
|
||||||
Read the workflow thread for the user's request: \`nerve thread ${threadId}\`
|
|
||||||
Read the nerve-dev skill for sense conventions: \`cat node_modules/@uncaged/nerve-skills/nerve-dev/SKILL.md\`
|
|
||||||
Also look at existing senses in the \`senses/\` directory for patterns.
|
|
||||||
|
|
||||||
Pick a good kebab-case name for this sense. Produce a PLAN (not code) in markdown:
|
|
||||||
|
|
||||||
## Sense Design
|
|
||||||
### Name — kebab-case
|
|
||||||
### Fields — name, type (integer/real/text), description
|
|
||||||
### Compute Logic — step-by-step, specific Node.js APIs or shell commands
|
|
||||||
### Trigger Config — group, interval, throttle, timeout
|
|
||||||
|
|
||||||
Output ONLY the plan. Be precise and implementation-ready.`;
|
|
||||||
}
|
|
||||||
@ -1,20 +0,0 @@
|
|||||||
import type { LlmProvider } from "@uncaged/nerve-workflow-utils";
|
|
||||||
import { createHermesRole } from "@uncaged/nerve-workflow-utils";
|
|
||||||
import { testerPrompt } from "./prompt.js";
|
|
||||||
import { z } from "zod";
|
|
||||||
|
|
||||||
export const testerMetaSchema = z.object({
|
|
||||||
passed: z.boolean().describe("true if all e2e checks passed"),
|
|
||||||
});
|
|
||||||
export type TesterMeta = z.infer<typeof testerMetaSchema>;
|
|
||||||
|
|
||||||
export type BuildTesterDeps = {
|
|
||||||
provider: LlmProvider;
|
|
||||||
};
|
|
||||||
|
|
||||||
export function buildTesterRole({ provider }: BuildTesterDeps) {
|
|
||||||
return createHermesRole<TesterMeta>({
|
|
||||||
prompt: async (threadId) => testerPrompt({ threadId }),
|
|
||||||
extract: { provider, schema: testerMetaSchema },
|
|
||||||
});
|
|
||||||
}
|
|
||||||
@ -1,16 +0,0 @@
|
|||||||
export function testerPrompt({ threadId }: { threadId: string }): string {
|
|
||||||
return `You are testing a newly created Nerve sense end-to-end.
|
|
||||||
|
|
||||||
Read the workflow thread for context: \`nerve thread ${threadId}\`
|
|
||||||
Read the nerve-dev skill for expected file structure: \`cat node_modules/@uncaged/nerve-skills/nerve-dev/SKILL.md\`
|
|
||||||
|
|
||||||
Verify the full lifecycle:
|
|
||||||
1. Check all required sense files exist
|
|
||||||
2. Check nerve.yaml has the sense config
|
|
||||||
3. Run \`nerve sense list\` — confirm the sense appears
|
|
||||||
4. Run \`nerve sense trigger <sense-name>\` — should complete without error
|
|
||||||
5. Run \`nerve sense query <sense-name>\` — retry up to 20s until rows appear
|
|
||||||
6. If any step fails, run \`nerve logs\` and include relevant errors
|
|
||||||
|
|
||||||
Output a clear summary: what you checked, what passed, what failed, and why.`;
|
|
||||||
}
|
|
||||||
@ -1,14 +0,0 @@
|
|||||||
{
|
|
||||||
"compilerOptions": {
|
|
||||||
"target": "ES2022",
|
|
||||||
"lib": ["ES2022"],
|
|
||||||
"module": "NodeNext",
|
|
||||||
"moduleResolution": "NodeNext",
|
|
||||||
"strict": true,
|
|
||||||
"skipLibCheck": true,
|
|
||||||
"noEmit": false,
|
|
||||||
"declaration": false,
|
|
||||||
"types": ["node"]
|
|
||||||
},
|
|
||||||
"include": ["./**/*.ts"]
|
|
||||||
}
|
|
||||||
43
workflows/solve-issue/build.ts
Normal file
43
workflows/solve-issue/build.ts
Normal file
@ -0,0 +1,43 @@
|
|||||||
|
import type { AgentFn, WorkflowDefinition } from "@uncaged/nerve-core";
|
||||||
|
import type { LlmExtractorConfig } from "@uncaged/nerve-workflow-utils";
|
||||||
|
|
||||||
|
import { moderator } from "./moderator.js";
|
||||||
|
import type { WorkflowMeta } from "./moderator.js";
|
||||||
|
import { createCommitterRole } from "./roles/committer.js";
|
||||||
|
import { createImplementRole } from "./roles/implement.js";
|
||||||
|
import { createPlanRole } from "./roles/plan.js";
|
||||||
|
import { createPrepareRole } from "./roles/prepare.js";
|
||||||
|
import { createPublishRole } from "./roles/publish.js";
|
||||||
|
import { createReadIssueRole } from "./roles/read-issue.js";
|
||||||
|
import { createReviewRole } from "./roles/review.js";
|
||||||
|
import { createTestRole } from "./roles/test.js";
|
||||||
|
|
||||||
|
export type CreateSolveIssueDeps = {
|
||||||
|
defaultAdapter: AgentFn;
|
||||||
|
adapters?: Partial<Record<keyof WorkflowMeta, AgentFn>>;
|
||||||
|
nerveRoot: string;
|
||||||
|
extract: LlmExtractorConfig;
|
||||||
|
};
|
||||||
|
|
||||||
|
export function createSolveIssueWorkflow({
|
||||||
|
defaultAdapter,
|
||||||
|
adapters,
|
||||||
|
nerveRoot,
|
||||||
|
extract,
|
||||||
|
}: CreateSolveIssueDeps): WorkflowDefinition<WorkflowMeta> {
|
||||||
|
const a = (role: keyof WorkflowMeta) => adapters?.[role] ?? defaultAdapter;
|
||||||
|
return {
|
||||||
|
name: "solve-issue",
|
||||||
|
roles: {
|
||||||
|
"read-issue": createReadIssueRole(a("read-issue"), extract),
|
||||||
|
prepare: createPrepareRole(a("prepare"), extract),
|
||||||
|
plan: createPlanRole(a("plan"), { extract, nerveRoot }),
|
||||||
|
implement: createImplementRole(a("implement"), { extract, nerveRoot }),
|
||||||
|
committer: createCommitterRole(a("committer"), extract),
|
||||||
|
review: createReviewRole(a("review"), extract, nerveRoot),
|
||||||
|
test: createTestRole(a("test"), extract),
|
||||||
|
publish: createPublishRole(a("publish"), { extract, nerveRoot }),
|
||||||
|
},
|
||||||
|
moderator,
|
||||||
|
};
|
||||||
|
}
|
||||||
37
workflows/solve-issue/index.ts
Normal file
37
workflows/solve-issue/index.ts
Normal file
@ -0,0 +1,37 @@
|
|||||||
|
import { join } from "node:path";
|
||||||
|
import { createCursorAdapter } from "@uncaged/nerve-adapter-cursor";
|
||||||
|
import { hermesAdapter } from "@uncaged/nerve-adapter-hermes";
|
||||||
|
import { createSolveIssueWorkflow } from "./build.js";
|
||||||
|
import { resolveDashScopeProvider } from "./lib/provider.js";
|
||||||
|
|
||||||
|
const HOME = process.env.HOME ?? "/home/azureuser";
|
||||||
|
const NERVE_ROOT = join(HOME, ".uncaged-nerve");
|
||||||
|
|
||||||
|
const provider = await resolveDashScopeProvider(NERVE_ROOT);
|
||||||
|
|
||||||
|
if (provider === null) {
|
||||||
|
throw new Error("Set DASHSCOPE_API_KEY and DASHSCOPE_BASE_URL (or cfg get equivalents)");
|
||||||
|
}
|
||||||
|
|
||||||
|
const CURSOR_TIMEOUT_MS = 300_000;
|
||||||
|
|
||||||
|
const workflow = createSolveIssueWorkflow({
|
||||||
|
defaultAdapter: hermesAdapter,
|
||||||
|
adapters: {
|
||||||
|
plan: createCursorAdapter({
|
||||||
|
type: "cursor",
|
||||||
|
mode: "ask",
|
||||||
|
model: "auto",
|
||||||
|
timeout: CURSOR_TIMEOUT_MS,
|
||||||
|
}),
|
||||||
|
implement: createCursorAdapter({
|
||||||
|
type: "cursor",
|
||||||
|
model: "auto",
|
||||||
|
timeout: CURSOR_TIMEOUT_MS,
|
||||||
|
}),
|
||||||
|
},
|
||||||
|
nerveRoot: NERVE_ROOT,
|
||||||
|
extract: { provider },
|
||||||
|
});
|
||||||
|
|
||||||
|
export default workflow;
|
||||||
26
workflows/solve-issue/lib/provider.ts
Normal file
26
workflows/solve-issue/lib/provider.ts
Normal file
@ -0,0 +1,26 @@
|
|||||||
|
import type { LlmProvider } from "@uncaged/nerve-workflow-utils";
|
||||||
|
import { spawnSafe } from "@uncaged/nerve-workflow-utils";
|
||||||
|
|
||||||
|
export async function cfgGet(nerveRoot: string, key: string): Promise<string | null> {
|
||||||
|
const result = await spawnSafe("cfg", ["get", key], {
|
||||||
|
cwd: nerveRoot,
|
||||||
|
env: null,
|
||||||
|
timeoutMs: 10_000,
|
||||||
|
abortSignal: null,
|
||||||
|
});
|
||||||
|
if (!result.ok) {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
const value = result.value.stdout.trim();
|
||||||
|
return value.length > 0 ? value : null;
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function resolveDashScopeProvider(nerveRoot: string): Promise<LlmProvider | null> {
|
||||||
|
const apiKey = process.env.DASHSCOPE_API_KEY ?? (await cfgGet(nerveRoot, "DASHSCOPE_API_KEY"));
|
||||||
|
const baseUrl = process.env.DASHSCOPE_BASE_URL ?? (await cfgGet(nerveRoot, "DASHSCOPE_BASE_URL"));
|
||||||
|
const model = process.env.DASHSCOPE_MODEL ?? (await cfgGet(nerveRoot, "DASHSCOPE_MODEL")) ?? "qwen-plus";
|
||||||
|
if (!apiKey || !baseUrl) {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
return { apiKey, baseUrl, model };
|
||||||
|
}
|
||||||
86
workflows/solve-issue/lib/repo-context.ts
Normal file
86
workflows/solve-issue/lib/repo-context.ts
Normal file
@ -0,0 +1,86 @@
|
|||||||
|
import { join } from "node:path";
|
||||||
|
import type { RoleStep, WorkflowMessage } from "@uncaged/nerve-core";
|
||||||
|
|
||||||
|
type SolveIssueParse = {
|
||||||
|
host: string;
|
||||||
|
owner: string;
|
||||||
|
repo: string;
|
||||||
|
number: number;
|
||||||
|
};
|
||||||
|
|
||||||
|
type SolveIssueRepo = {
|
||||||
|
path: string;
|
||||||
|
defaultBranch: string;
|
||||||
|
packageManager: string;
|
||||||
|
};
|
||||||
|
|
||||||
|
const HOME = process.env.HOME ?? "/home/azureuser";
|
||||||
|
|
||||||
|
function extractMarkedSection(text: string, marker: string): Record<string, string> | null {
|
||||||
|
const escaped = marker.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
|
||||||
|
const re = new RegExp(`---${escaped}---\\s*([\\s\\S]*?)(?:\\n---|$)`);
|
||||||
|
const m = text.match(re);
|
||||||
|
if (m === null) {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
const rec: Record<string, string> = {};
|
||||||
|
for (const line of m[1].split("\n")) {
|
||||||
|
const kv = line.match(/^([a-zA-Z]+):\s*(.+)$/);
|
||||||
|
if (kv !== null) {
|
||||||
|
rec[kv[1]] = kv[2].trim();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return Object.keys(rec).length > 0 ? rec : null;
|
||||||
|
}
|
||||||
|
|
||||||
|
function parseSolveIssueParse(text: string): SolveIssueParse | null {
|
||||||
|
const rec = extractMarkedSection(text, "SOLVE_ISSUE_PARSE");
|
||||||
|
if (rec === null) {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
const host = rec.host ?? "";
|
||||||
|
const owner = rec.owner ?? "";
|
||||||
|
const repo = rec.repo ?? "";
|
||||||
|
const num = Number(rec.number ?? "");
|
||||||
|
if (host.length === 0 || owner.length === 0 || repo.length === 0 || !Number.isFinite(num) || num <= 0) {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
return { host, owner, repo, number: num };
|
||||||
|
}
|
||||||
|
|
||||||
|
function parseSolveIssueRepo(text: string): SolveIssueRepo | null {
|
||||||
|
const rec = extractMarkedSection(text, "SOLVE_ISSUE_REPO");
|
||||||
|
if (rec === null) {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
const path = rec.path ?? "";
|
||||||
|
if (path.length === 0) {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
return {
|
||||||
|
path,
|
||||||
|
defaultBranch: rec.defaultBranch ?? "main",
|
||||||
|
packageManager: rec.packageManager ?? "pnpm",
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
/** Prefer explicit prepare marker; else ~/Code/<owner>/<repo> from read-issue parse block. */
|
||||||
|
export function resolveRepoCwd(messages: WorkflowMessage[]): string | null {
|
||||||
|
for (let i = messages.length - 1; i >= 0; i--) {
|
||||||
|
if (messages[i].role === "prepare") {
|
||||||
|
const repo = parseSolveIssueRepo(messages[i].content);
|
||||||
|
if (repo !== null) {
|
||||||
|
return repo.path;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
for (let i = messages.length - 1; i >= 0; i--) {
|
||||||
|
if (messages[i].role === "read-issue") {
|
||||||
|
const parsed = parseSolveIssueParse(messages[i].content);
|
||||||
|
if (parsed !== null) {
|
||||||
|
return join(HOME, "Code", parsed.owner, parsed.repo);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return null;
|
||||||
|
}
|
||||||
99
workflows/solve-issue/moderator.ts
Normal file
99
workflows/solve-issue/moderator.ts
Normal file
@ -0,0 +1,99 @@
|
|||||||
|
import { END } from "@uncaged/nerve-core";
|
||||||
|
import type { Moderator } from "@uncaged/nerve-core";
|
||||||
|
import type { ReadIssueMeta } from "./roles/read-issue.js";
|
||||||
|
import type { PrepareMeta } from "./roles/prepare.js";
|
||||||
|
import type { PlanMeta } from "./roles/plan.js";
|
||||||
|
import type { ImplementMeta } from "./roles/implement.js";
|
||||||
|
import type { CommitterMeta } from "./roles/committer.js";
|
||||||
|
import type { ReviewMeta } from "./roles/review.js";
|
||||||
|
import type { TestMeta } from "./roles/test.js";
|
||||||
|
import type { PublishMeta } from "./roles/publish.js";
|
||||||
|
|
||||||
|
export type WorkflowMeta = {
|
||||||
|
"read-issue": ReadIssueMeta;
|
||||||
|
prepare: PrepareMeta;
|
||||||
|
plan: PlanMeta;
|
||||||
|
implement: ImplementMeta;
|
||||||
|
committer: CommitterMeta;
|
||||||
|
review: ReviewMeta;
|
||||||
|
test: TestMeta;
|
||||||
|
publish: PublishMeta;
|
||||||
|
};
|
||||||
|
|
||||||
|
const MAX_IMPLEMENT_ROUNDS = 20;
|
||||||
|
const MAX_TOTAL_REJECTIONS = 10;
|
||||||
|
|
||||||
|
function implementRounds(steps: { role: string }[]): number {
|
||||||
|
return steps.filter((s) => s.role === "implement").length;
|
||||||
|
}
|
||||||
|
|
||||||
|
function totalRejections(steps: { role: string; meta: unknown }[]): number {
|
||||||
|
return steps.filter((s) => {
|
||||||
|
if (s.role === "review") return !(s.meta as Record<string, boolean>).approved;
|
||||||
|
if (s.role === "test") return !(s.meta as Record<string, boolean>).passed;
|
||||||
|
if (s.role === "committer") return !(s.meta as Record<string, boolean>).committed;
|
||||||
|
if (s.role === "publish") return !(s.meta as Record<string, boolean>).success;
|
||||||
|
return false;
|
||||||
|
}).length;
|
||||||
|
}
|
||||||
|
|
||||||
|
function canRetryImplement(steps: { role: string; meta: unknown }[]): boolean {
|
||||||
|
return implementRounds(steps) < MAX_IMPLEMENT_ROUNDS && totalRejections(steps) < MAX_TOTAL_REJECTIONS;
|
||||||
|
}
|
||||||
|
|
||||||
|
export const moderator: Moderator<WorkflowMeta> = (context) => {
|
||||||
|
if (context.steps.length === 0) {
|
||||||
|
return "read-issue";
|
||||||
|
}
|
||||||
|
|
||||||
|
const last = context.steps[context.steps.length - 1];
|
||||||
|
|
||||||
|
if (last.role === "read-issue") {
|
||||||
|
return last.meta.ready ? "prepare" : END;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (last.role === "prepare") {
|
||||||
|
return last.meta.ready ? "plan" : END;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (last.role === "plan") {
|
||||||
|
return last.meta.ready ? "implement" : END;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (last.role === "implement") {
|
||||||
|
if (last.meta.done) {
|
||||||
|
return "committer";
|
||||||
|
}
|
||||||
|
return canRetryImplement(context.steps) ? "implement" : END;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (last.role === "committer") {
|
||||||
|
if (last.meta.committed) {
|
||||||
|
return "review";
|
||||||
|
}
|
||||||
|
return canRetryImplement(context.steps) ? "implement" : END;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (last.role === "review") {
|
||||||
|
if (last.meta.approved) {
|
||||||
|
return "test";
|
||||||
|
}
|
||||||
|
return canRetryImplement(context.steps) ? "implement" : END;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (last.role === "test") {
|
||||||
|
if (last.meta.passed) {
|
||||||
|
return "publish";
|
||||||
|
}
|
||||||
|
return canRetryImplement(context.steps) ? "implement" : END;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (last.role === "publish") {
|
||||||
|
if (last.meta.success) {
|
||||||
|
return END;
|
||||||
|
}
|
||||||
|
return canRetryImplement(context.steps) ? "implement" : END;
|
||||||
|
}
|
||||||
|
|
||||||
|
return END;
|
||||||
|
};
|
||||||
57
workflows/solve-issue/roles/committer.ts
Normal file
57
workflows/solve-issue/roles/committer.ts
Normal file
@ -0,0 +1,57 @@
|
|||||||
|
import type { AgentFn, Role, ThreadContext } from "@uncaged/nerve-core";
|
||||||
|
import type { LlmExtractorConfig } from "@uncaged/nerve-workflow-utils";
|
||||||
|
import { createRole, decorateRole, withDryRun, onFail } from "@uncaged/nerve-workflow-utils";
|
||||||
|
import { z } from "zod";
|
||||||
|
|
||||||
|
function committerPrompt({ threadId }: { threadId: string }): string {
|
||||||
|
return `You are the committer agent. The **implement** step finished with a passing build; your job is to branch, commit, and push.
|
||||||
|
|
||||||
|
1. Read the workflow thread: \`nerve thread show ${threadId}\` — understand what was planned, implemented, and reviewed.
|
||||||
|
2. In the thread, locate \`---SOLVE_ISSUE_PARSE---\` and \`---SOLVE_ISSUE_REPO---\`. From them you need issue **number**, **title** (for the branch slug), repo **path**, and **defaultBranch**.
|
||||||
|
3. \`cd\` to the repo **path** from the markers. Optionally read \`CONVENTIONS.md\` in that repo root if present.
|
||||||
|
4. Run \`git rev-parse --abbrev-ref HEAD\` and compare with **defaultBranch** from the markers. Implement leaves changes uncommitted on the default branch — you should be on that branch with a dirty working tree. If you are not on the default branch, or the tree is clean when you expected changes, set **committed** to false and explain.
|
||||||
|
5. Run \`git status\`. If there is nothing to commit, set **committed** to false and explain.
|
||||||
|
6. Create a feature branch (do not commit directly on the default branch if it would mix unrelated work):
|
||||||
|
- Name: \`fix/<number>-<short-slug>\` for fixes, or \`feat/<number>-<short-slug>\` if the issue is clearly a feature.
|
||||||
|
- **slug**: lowercase, hyphens only, short (from issue title words).
|
||||||
|
- Example: \`git checkout -b fix/42-auth-timeout\`
|
||||||
|
7. \`git add -A\`
|
||||||
|
8. Write a **conventional commit** message describing what changed and why, using the thread context.
|
||||||
|
9. \`git commit -m "<message>"\` — do NOT pass \`--author\`, use repo git config.
|
||||||
|
10. \`git push -u origin <branch-name>\`
|
||||||
|
|
||||||
|
**committed=true** only if branch was created, commit succeeded, and **push** succeeded.
|
||||||
|
|
||||||
|
End your reply with a JSON line:
|
||||||
|
\`\`\`json
|
||||||
|
{ "committed": true }
|
||||||
|
\`\`\`
|
||||||
|
or
|
||||||
|
\`\`\`json
|
||||||
|
{ "committed": false }
|
||||||
|
\`\`\``;
|
||||||
|
}
|
||||||
|
|
||||||
|
export const committerMetaSchema = z.object({
|
||||||
|
committed: z
|
||||||
|
.boolean()
|
||||||
|
.describe("true if branch created, changes committed, and pushed successfully"),
|
||||||
|
});
|
||||||
|
export type CommitterMeta = z.infer<typeof committerMetaSchema>;
|
||||||
|
|
||||||
|
export function createCommitterRole(
|
||||||
|
adapter: AgentFn,
|
||||||
|
extract: LlmExtractorConfig,
|
||||||
|
): Role<CommitterMeta> {
|
||||||
|
const inner = createRole(
|
||||||
|
adapter,
|
||||||
|
async (ctx: ThreadContext) => committerPrompt({ threadId: ctx.start.meta.threadId }),
|
||||||
|
committerMetaSchema,
|
||||||
|
extract,
|
||||||
|
);
|
||||||
|
|
||||||
|
return decorateRole(inner, [
|
||||||
|
withDryRun({ label: "committer", meta: { committed: true } as CommitterMeta }),
|
||||||
|
onFail({ label: "committer", meta: { committed: false } as CommitterMeta }),
|
||||||
|
]) as Role<CommitterMeta>;
|
||||||
|
}
|
||||||
86
workflows/solve-issue/roles/implement.ts
Normal file
86
workflows/solve-issue/roles/implement.ts
Normal file
@ -0,0 +1,86 @@
|
|||||||
|
import type { AgentFn, Role, RoleResult, ThreadContext, WorkflowMessage } from "@uncaged/nerve-core";
|
||||||
|
import type { LlmExtractorConfig } from "@uncaged/nerve-workflow-utils";
|
||||||
|
import { createRole } from "@uncaged/nerve-workflow-utils";
|
||||||
|
import { z } from "zod";
|
||||||
|
|
||||||
|
import { resolveRepoCwd } from "../lib/repo-context.js";
|
||||||
|
|
||||||
|
function buildImplementPrompt({ threadId, nerveRoot }: { threadId: string; nerveRoot: string }): string {
|
||||||
|
return `You are the **implement** agent. You apply code changes for the issue.
|
||||||
|
|
||||||
|
Read workflow context (plan, reviewer/test feedback): \`nerve thread show ${threadId}\`
|
||||||
|
|
||||||
|
Read Nerve workspace conventions: \`cat ${nerveRoot}/CONVENTIONS.md\`
|
||||||
|
|
||||||
|
Your cwd is the target repository.
|
||||||
|
|
||||||
|
## Requirements
|
||||||
|
|
||||||
|
1. Implement the planned changes; address reviewer/tester feedback from the thread if any.
|
||||||
|
2. Run the project **build** (\`pnpm build\`, \`npm run build\`, etc.) and fix issues until build passes.
|
||||||
|
3. Multi-step: if you cannot finish this round, explain why and set **done** to false.
|
||||||
|
|
||||||
|
Do **not** run \`git checkout -b\`, \`git add\`, \`git commit\`, or \`git push\`. **Never** create commits on any branch — branching and commits are handled by the **committer** step after you finish.
|
||||||
|
|
||||||
|
Then close with JSON:
|
||||||
|
\`\`\`json
|
||||||
|
{ "done": true }
|
||||||
|
\`\`\`
|
||||||
|
or \`{ "done": false }\` matching whether implementation is complete.
|
||||||
|
|
||||||
|
**done=true** only when changes are complete **and** build passes in this round.`;
|
||||||
|
}
|
||||||
|
|
||||||
|
export const implementMetaSchema = z.object({
|
||||||
|
done: z.boolean().describe("true when changes are complete and build passes this round"),
|
||||||
|
});
|
||||||
|
export type ImplementMeta = z.infer<typeof implementMetaSchema>;
|
||||||
|
|
||||||
|
export type CreateImplementRoleDeps = {
|
||||||
|
extract: LlmExtractorConfig;
|
||||||
|
nerveRoot: string;
|
||||||
|
};
|
||||||
|
|
||||||
|
export function createImplementRole(
|
||||||
|
adapter: AgentFn,
|
||||||
|
{ extract, nerveRoot }: CreateImplementRoleDeps,
|
||||||
|
): Role<ImplementMeta> {
|
||||||
|
return async (ctx: ThreadContext): Promise<RoleResult<ImplementMeta>> => {
|
||||||
|
const messages = ctx.steps as unknown as WorkflowMessage[];
|
||||||
|
const cwd = resolveRepoCwd(messages);
|
||||||
|
if (cwd === null) {
|
||||||
|
return {
|
||||||
|
content: "implement cannot run: missing repo path in thread markers",
|
||||||
|
meta: { done: false },
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
const innerRole = createRole(
|
||||||
|
adapter,
|
||||||
|
async (innerCtx: ThreadContext) =>
|
||||||
|
buildImplementPrompt({
|
||||||
|
threadId: innerCtx.start.meta.threadId,
|
||||||
|
nerveRoot,
|
||||||
|
}),
|
||||||
|
implementMetaSchema,
|
||||||
|
extract,
|
||||||
|
);
|
||||||
|
|
||||||
|
const innerCtx: ThreadContext = {
|
||||||
|
...ctx,
|
||||||
|
start: {
|
||||||
|
...ctx.start,
|
||||||
|
meta: { ...ctx.start.meta, workdir: cwd },
|
||||||
|
},
|
||||||
|
};
|
||||||
|
try {
|
||||||
|
return await innerRole(innerCtx);
|
||||||
|
} catch (e) {
|
||||||
|
const msg = e instanceof Error ? e.message : String(e);
|
||||||
|
return {
|
||||||
|
content: `implement failed: ${msg}`,
|
||||||
|
meta: { done: false },
|
||||||
|
};
|
||||||
|
}
|
||||||
|
};
|
||||||
|
}
|
||||||
88
workflows/solve-issue/roles/plan.ts
Normal file
88
workflows/solve-issue/roles/plan.ts
Normal file
@ -0,0 +1,88 @@
|
|||||||
|
import type { AgentFn, Role, RoleResult, ThreadContext, WorkflowMessage } from "@uncaged/nerve-core";
|
||||||
|
import type { LlmExtractorConfig } from "@uncaged/nerve-workflow-utils";
|
||||||
|
import { createRole } from "@uncaged/nerve-workflow-utils";
|
||||||
|
import { z } from "zod";
|
||||||
|
|
||||||
|
import { resolveRepoCwd } from "../lib/repo-context.js";
|
||||||
|
|
||||||
|
function buildPlanPrompt({ threadId, nerveRoot }: { threadId: string; nerveRoot: string }): string {
|
||||||
|
return `You are the **plan** agent (analysis only — ask mode). You produce an implementation plan for fixing the issue.
|
||||||
|
|
||||||
|
Read workflow context: \`nerve thread show ${threadId}\`
|
||||||
|
|
||||||
|
Read Nerve workspace conventions (coding rules for agents): \`cat ${nerveRoot}/CONVENTIONS.md\`
|
||||||
|
|
||||||
|
In the **target repository** (your cwd), skim relevant files and read \`CONVENTIONS.md\` **if it exists** there.
|
||||||
|
|
||||||
|
## Output
|
||||||
|
|
||||||
|
Write an implementation plan in **markdown** with:
|
||||||
|
|
||||||
|
1. Problem understanding
|
||||||
|
2. Change strategy
|
||||||
|
3. Target files (paths)
|
||||||
|
4. **Test commands** to run (explicit shell commands, e.g. \`pnpm test\`, \`pnpm vitest run\`)
|
||||||
|
5. Risks
|
||||||
|
|
||||||
|
End your reply with a JSON code block (meta signal):
|
||||||
|
\`\`\`json
|
||||||
|
{ "ready": true }
|
||||||
|
\`\`\`
|
||||||
|
Use \`{ "ready": false }\` if the plan cannot be made actionable.
|
||||||
|
|
||||||
|
**ready=true** only when the plan is clear and actionable.`;
|
||||||
|
}
|
||||||
|
|
||||||
|
export const planMetaSchema = z.object({
|
||||||
|
ready: z.boolean().describe("true if plan is clear and actionable"),
|
||||||
|
});
|
||||||
|
export type PlanMeta = z.infer<typeof planMetaSchema>;
|
||||||
|
|
||||||
|
export type CreatePlanRoleDeps = {
|
||||||
|
extract: LlmExtractorConfig;
|
||||||
|
nerveRoot: string;
|
||||||
|
};
|
||||||
|
|
||||||
|
export function createPlanRole(
|
||||||
|
adapter: AgentFn,
|
||||||
|
{ extract, nerveRoot }: CreatePlanRoleDeps,
|
||||||
|
): Role<PlanMeta> {
|
||||||
|
return async (ctx: ThreadContext): Promise<RoleResult<PlanMeta>> => {
|
||||||
|
const messages = ctx.steps as unknown as WorkflowMessage[];
|
||||||
|
const cwd = resolveRepoCwd(messages);
|
||||||
|
if (cwd === null) {
|
||||||
|
return {
|
||||||
|
content: "plan cannot run: missing ---SOLVE_ISSUE_REPO--- or ---SOLVE_ISSUE_PARSE--- in thread",
|
||||||
|
meta: { ready: false },
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
const innerRole = createRole(
|
||||||
|
adapter,
|
||||||
|
async (innerCtx: ThreadContext) =>
|
||||||
|
buildPlanPrompt({
|
||||||
|
threadId: innerCtx.start.meta.threadId,
|
||||||
|
nerveRoot,
|
||||||
|
}),
|
||||||
|
planMetaSchema,
|
||||||
|
extract,
|
||||||
|
);
|
||||||
|
|
||||||
|
const innerCtx: ThreadContext = {
|
||||||
|
...ctx,
|
||||||
|
start: {
|
||||||
|
...ctx.start,
|
||||||
|
meta: { ...ctx.start.meta, workdir: cwd },
|
||||||
|
},
|
||||||
|
};
|
||||||
|
try {
|
||||||
|
return await innerRole(innerCtx);
|
||||||
|
} catch (e) {
|
||||||
|
const msg = e instanceof Error ? e.message : String(e);
|
||||||
|
return {
|
||||||
|
content: `plan failed: ${msg}`,
|
||||||
|
meta: { ready: false },
|
||||||
|
};
|
||||||
|
}
|
||||||
|
};
|
||||||
|
}
|
||||||
73
workflows/solve-issue/roles/prepare.ts
Normal file
73
workflows/solve-issue/roles/prepare.ts
Normal file
@ -0,0 +1,73 @@
|
|||||||
|
import type { AgentFn, Role, ThreadContext } from "@uncaged/nerve-core";
|
||||||
|
import type { LlmExtractorConfig } from "@uncaged/nerve-workflow-utils";
|
||||||
|
import { createRole } from "@uncaged/nerve-workflow-utils";
|
||||||
|
import { z } from "zod";
|
||||||
|
|
||||||
|
function preparePrompt({ threadId }: { threadId: string }): string {
|
||||||
|
return `You are the **prepare** agent. You ensure the target repository is ready for work.
|
||||||
|
|
||||||
|
Read prior messages / thread for issue markers: \`nerve thread show ${threadId}\`
|
||||||
|
|
||||||
|
## Goal
|
||||||
|
|
||||||
|
Find **owner**, **repo**, and **host** from \`---SOLVE_ISSUE_PARSE---\` in the thread (from read-issue).
|
||||||
|
|
||||||
|
Check the **initial user prompt** (the trigger message) for a local repo path. The user may specify it like:
|
||||||
|
- \`--repo /path/to/repo\`
|
||||||
|
- \`repo: /path/to/repo\`
|
||||||
|
- or just mention an absolute path to the local clone
|
||||||
|
|
||||||
|
## Steps
|
||||||
|
|
||||||
|
### If a local path is provided in the trigger prompt:
|
||||||
|
1. Verify \`<path>/.git\` exists — if not, fail with \`ready: false\`
|
||||||
|
2. \`cd "<path>" && git fetch --all\`
|
||||||
|
3. Ensure working tree clean: if \`git status --porcelain\` is non-empty, \`git stash push -u -m "solve-issue stash"\`
|
||||||
|
4. Detect default branch (\`main\` or \`master\`) and \`git checkout <default> && git pull --ff-only\`
|
||||||
|
5. Use this path as REPOPATH
|
||||||
|
|
||||||
|
### If no local path is provided:
|
||||||
|
1. Let \`REPOPATH=$HOME/Code/<owner>/<repo>\` (expand \`$HOME\`)
|
||||||
|
2. \`mkdir -p "$HOME/Code/<owner>"\`
|
||||||
|
3. If \`REPOPATH/.git\` is missing: \`git clone https://<host>/<owner>/<repo>.git "$REPOPATH"\`
|
||||||
|
Else: \`cd "$REPOPATH" && git fetch --all && git pull --ff-only\`
|
||||||
|
4. Ensure working tree clean: if \`git status --porcelain\` is non-empty, \`git stash push -u -m "solve-issue stash"\`
|
||||||
|
5. Detect default branch and \`git checkout <default>\`
|
||||||
|
|
||||||
|
### Then (both paths):
|
||||||
|
6. Detect package manager: \`pnpm-lock.yaml\` → pnpm, \`yarn.lock\` → yarn, \`package-lock.json\` → npm; run install (\`pnpm install --no-frozen-lockfile\` / \`npm ci\` or \`npm install\` / \`yarn\`).
|
||||||
|
7. If \`package.json\` has a \`build\` script, run the build (\`pnpm build\`, etc.) and fix nothing — only verify baseline passes.
|
||||||
|
|
||||||
|
## Required marker block
|
||||||
|
|
||||||
|
Emit **exactly**:
|
||||||
|
\`\`\`
|
||||||
|
---SOLVE_ISSUE_REPO---
|
||||||
|
path: <absolute path to REPOPATH>
|
||||||
|
defaultBranch: <main or master>
|
||||||
|
packageManager: <pnpm|npm|yarn>
|
||||||
|
---
|
||||||
|
\`\`\`
|
||||||
|
|
||||||
|
End with:
|
||||||
|
\`\`\`json
|
||||||
|
{ "ready": true }
|
||||||
|
\`\`\`
|
||||||
|
or \`{ "ready": false }\` if the repo is invalid, or install/build baseline failed.
|
||||||
|
|
||||||
|
**ready=true** only when the repo exists at \`path\`, is clean, dependencies installed, and baseline build succeeded (or no build script).`;
|
||||||
|
}
|
||||||
|
|
||||||
|
export const prepareMetaSchema = z.object({
|
||||||
|
ready: z.boolean().describe("true if repo is ready and baseline build ok"),
|
||||||
|
});
|
||||||
|
export type PrepareMeta = z.infer<typeof prepareMetaSchema>;
|
||||||
|
|
||||||
|
export function createPrepareRole(adapter: AgentFn, extract: LlmExtractorConfig): Role<PrepareMeta> {
|
||||||
|
return createRole(
|
||||||
|
adapter,
|
||||||
|
async (ctx: ThreadContext) => preparePrompt({ threadId: ctx.start.meta.threadId }),
|
||||||
|
prepareMetaSchema,
|
||||||
|
extract,
|
||||||
|
);
|
||||||
|
}
|
||||||
110
workflows/solve-issue/roles/publish.ts
Normal file
110
workflows/solve-issue/roles/publish.ts
Normal file
@ -0,0 +1,110 @@
|
|||||||
|
import { mkdirSync, writeFileSync } from "node:fs";
|
||||||
|
import { join } from "node:path";
|
||||||
|
import type { AgentFn, Role, RoleResult, ThreadContext } from "@uncaged/nerve-core";
|
||||||
|
import type { LlmExtractorConfig } from "@uncaged/nerve-workflow-utils";
|
||||||
|
import { createRole, isDryRun } from "@uncaged/nerve-workflow-utils";
|
||||||
|
import { z } from "zod";
|
||||||
|
|
||||||
|
function buildPublishPrompt({ threadId, nerveRoot }: { threadId: string; nerveRoot: string }): string {
|
||||||
|
return `You are the **publish** agent (Hermes). Test has passed. Open a pull request for the current branch using the **tea** CLI.
|
||||||
|
|
||||||
|
## Context
|
||||||
|
|
||||||
|
- Read the full workflow thread: \`nerve thread show ${threadId}\`
|
||||||
|
- Nerve workspace conventions (for tone/consistency, optional): \`cat ${nerveRoot}/CONVENTIONS.md\`
|
||||||
|
|
||||||
|
## Repo and issue (from the thread)
|
||||||
|
|
||||||
|
Find \`---SOLVE_ISSUE_PARSE---\` and \`---SOLVE_ISSUE_REPO---\` in prior messages. You need:
|
||||||
|
- \`path\` — clone checkout directory (this is your working copy)
|
||||||
|
- \`host\`, \`owner\`, \`repo\`, \`number\` for the issue
|
||||||
|
- \`defaultBranch\` (for PR base) from SOLVE_ISSUE_REPO
|
||||||
|
|
||||||
|
**Issue link** for the Ref section: \`https://<host>/<owner>/<repo>/issues/<number>\`
|
||||||
|
|
||||||
|
## Steps (in order)
|
||||||
|
|
||||||
|
1. \`cd\` to the **repo \`path\`**. Run \`git rev-parse --abbrev-ref HEAD\` to get the current branch name. The **committer** step should already have pushed this branch; run \`git push -u origin <that-branch>\` only if the branch is not yet on the remote.
|
||||||
|
2. Choose a **PR title** that reflects the real change (not a generic \`fix: issue #N\`): derive it from the issue title, plan, and thread summary (keep it concise; Conventional Commits style is fine, e.g. \`fix(auth): handle session expiry\`).
|
||||||
|
3. Write a **PR body** in Markdown with exactly these sections, in this order, each with a \`##\` heading (fill with concise content based on the thread: plan, implement, review, test):
|
||||||
|
- **## What** — one short paragraph: what this PR does
|
||||||
|
- **## Why** — one short paragraph: motivation / issue
|
||||||
|
- **## Changes** — bullet list of notable changes
|
||||||
|
- **## Ref** — include one line \`Fixes #<number>\` (same \`number\` from SOLVE_ISSUE_PARSE; closes/links the issue where supported) **and** the issue URL \`https://<host>/<owner>/<repo>/issues/<number>\`
|
||||||
|
4. Create the PR with **tea** (not curl/fetch to Gitea):
|
||||||
|
- \`tea pr create --repo <owner>/<repo> --base <defaultBranch> --head <branch> --title "<your meaningful title>" --body <your markdown body>\`
|
||||||
|
- You may use a heredoc or a temp file for \`--body\` if the shell requires it; keep the four sections in the body.
|
||||||
|
5. Confirm the PR was created (tea prints a URL or PR number in typical setups).
|
||||||
|
|
||||||
|
**success=true** only if both **push** and **tea** PR creation succeed. If any step fails, set **success=false** and say why.
|
||||||
|
|
||||||
|
End your reply with a JSON line:
|
||||||
|
\`\`\`json
|
||||||
|
{ "success": true }
|
||||||
|
\`\`\`
|
||||||
|
or
|
||||||
|
\`\`\`json
|
||||||
|
{ "success": false }
|
||||||
|
\`\`\``;
|
||||||
|
}
|
||||||
|
|
||||||
|
export const publishMetaSchema = z.object({
|
||||||
|
success: z.boolean().describe("true if git push and tea pr create both succeeded"),
|
||||||
|
});
|
||||||
|
export type PublishMeta = z.infer<typeof publishMetaSchema>;
|
||||||
|
|
||||||
|
export type CreatePublishRoleDeps = {
|
||||||
|
extract: LlmExtractorConfig;
|
||||||
|
nerveRoot: string;
|
||||||
|
};
|
||||||
|
|
||||||
|
function logPath(nerveRoot: string): string {
|
||||||
|
return join(nerveRoot, "logs", `solve-issue-publish-${Date.now()}.log`);
|
||||||
|
}
|
||||||
|
|
||||||
|
export function createPublishRole(
|
||||||
|
adapter: AgentFn,
|
||||||
|
{ extract, nerveRoot }: CreatePublishRoleDeps,
|
||||||
|
): Role<PublishMeta> {
|
||||||
|
const innerRole = createRole(
|
||||||
|
adapter,
|
||||||
|
async (ctx: ThreadContext) =>
|
||||||
|
buildPublishPrompt({ threadId: ctx.start.meta.threadId, nerveRoot }),
|
||||||
|
publishMetaSchema,
|
||||||
|
extract,
|
||||||
|
);
|
||||||
|
|
||||||
|
return async (ctx: ThreadContext): Promise<RoleResult<PublishMeta>> => {
|
||||||
|
const file = logPath(nerveRoot);
|
||||||
|
mkdirSync(join(file, ".."), { recursive: true });
|
||||||
|
|
||||||
|
if (isDryRun(ctx.start)) {
|
||||||
|
const msg = "[dry-run] publish skipped (no git push / PR)";
|
||||||
|
writeFileSync(file, `${msg}\n`, "utf-8");
|
||||||
|
return {
|
||||||
|
content: `[dry-run] publish skipped — log: ${file}`,
|
||||||
|
meta: { success: true },
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
const innerCtx: ThreadContext = {
|
||||||
|
...ctx,
|
||||||
|
start: {
|
||||||
|
...ctx.start,
|
||||||
|
meta: { ...ctx.start.meta, workdir: nerveRoot },
|
||||||
|
},
|
||||||
|
};
|
||||||
|
|
||||||
|
try {
|
||||||
|
return await innerRole(innerCtx);
|
||||||
|
} catch (e) {
|
||||||
|
const msg = e instanceof Error ? e.message : String(e);
|
||||||
|
const body = `publish failed: ${msg}\n`;
|
||||||
|
writeFileSync(file, body, "utf-8");
|
||||||
|
return {
|
||||||
|
content: `publish failed: ${msg}\nLog: ${file}`,
|
||||||
|
meta: { success: false },
|
||||||
|
};
|
||||||
|
}
|
||||||
|
};
|
||||||
|
}
|
||||||
53
workflows/solve-issue/roles/read-issue.ts
Normal file
53
workflows/solve-issue/roles/read-issue.ts
Normal file
@ -0,0 +1,53 @@
|
|||||||
|
import type { AgentFn, Role, ThreadContext } from "@uncaged/nerve-core";
|
||||||
|
import type { LlmExtractorConfig } from "@uncaged/nerve-workflow-utils";
|
||||||
|
import { createRole } from "@uncaged/nerve-workflow-utils";
|
||||||
|
import { z } from "zod";
|
||||||
|
|
||||||
|
function readIssuePrompt({ threadId }: { threadId: string }): string {
|
||||||
|
return `You are the **read-issue** agent. You fetch Gitea issue content via the \`tea\` CLI.
|
||||||
|
|
||||||
|
Read the workflow thread start prompt for the issue URL (same run): \`nerve thread show ${threadId}\`
|
||||||
|
|
||||||
|
## Steps
|
||||||
|
|
||||||
|
1. From the **initial user prompt** (issue URL), extract **host**, **owner**, **repo**, and **issue number**. Supported shape:
|
||||||
|
\`https://<host>/<owner>/<repo>/issues/<number>\`
|
||||||
|
|
||||||
|
2. Run:
|
||||||
|
\`tea issue show <number> --repo <owner>/<repo> --comments\`
|
||||||
|
(Add \`--json\` if helpful for parsing.)
|
||||||
|
|
||||||
|
3. In your reply, include **structured issue text**: title, body, labels, and each comment (author + body + time).
|
||||||
|
|
||||||
|
4. You **must** emit this marker block **exactly** (fill in real values):
|
||||||
|
\`\`\`
|
||||||
|
---SOLVE_ISSUE_PARSE---
|
||||||
|
host: <host>
|
||||||
|
owner: <owner>
|
||||||
|
repo: <repo>
|
||||||
|
number: <number>
|
||||||
|
---
|
||||||
|
\`\`\`
|
||||||
|
|
||||||
|
5. End with JSON meta (verbatim block):
|
||||||
|
\`\`\`json
|
||||||
|
{ "ready": true }
|
||||||
|
\`\`\`
|
||||||
|
Use \`{ "ready": false }\` if you could not fetch or parse the issue.
|
||||||
|
|
||||||
|
**ready=true** only if the issue was fetched successfully and the marker block is correct.`;
|
||||||
|
}
|
||||||
|
|
||||||
|
export const readIssueMetaSchema = z.object({
|
||||||
|
ready: z.boolean().describe("true if issue content was fetched and markers are present"),
|
||||||
|
});
|
||||||
|
export type ReadIssueMeta = z.infer<typeof readIssueMetaSchema>;
|
||||||
|
|
||||||
|
export function createReadIssueRole(adapter: AgentFn, extract: LlmExtractorConfig): Role<ReadIssueMeta> {
|
||||||
|
return createRole(
|
||||||
|
adapter,
|
||||||
|
async (ctx: ThreadContext) => readIssuePrompt({ threadId: ctx.start.meta.threadId }),
|
||||||
|
readIssueMetaSchema,
|
||||||
|
extract,
|
||||||
|
);
|
||||||
|
}
|
||||||
59
workflows/solve-issue/roles/review.ts
Normal file
59
workflows/solve-issue/roles/review.ts
Normal file
@ -0,0 +1,59 @@
|
|||||||
|
import type { AgentFn, Role, ThreadContext } from "@uncaged/nerve-core";
|
||||||
|
import type { LlmExtractorConfig } from "@uncaged/nerve-workflow-utils";
|
||||||
|
import { createRole } from "@uncaged/nerve-workflow-utils";
|
||||||
|
import { z } from "zod";
|
||||||
|
|
||||||
|
function reviewPrompt({ threadId, nerveRoot }: { threadId: string; nerveRoot: string }): string {
|
||||||
|
return `You are a **code reviewer** (Hermes). You run after implement and before test.
|
||||||
|
|
||||||
|
Read Nerve workspace conventions: \`cat ${nerveRoot}/CONVENTIONS.md\`
|
||||||
|
|
||||||
|
Read workflow context: \`nerve thread show ${threadId}\`
|
||||||
|
|
||||||
|
Find **repo path** from \`---SOLVE_ISSUE_REPO--- path:\` in the thread (prepare step). \`cd\` there before any git commands.
|
||||||
|
|
||||||
|
## Static analysis
|
||||||
|
|
||||||
|
Run:
|
||||||
|
|
||||||
|
1. \`cd <repo-path> && git diff --stat\`
|
||||||
|
2. \`cd <repo-path> && git diff\`
|
||||||
|
3. \`cd <repo-path> && git status --short\`
|
||||||
|
|
||||||
|
## Checklist
|
||||||
|
|
||||||
|
Reject (**approved: false**) if you find:
|
||||||
|
|
||||||
|
- Garbage files, secrets/credentials, unrelated changes
|
||||||
|
- Violations of CONVENTIONS.md (e.g. \`interface\` vs \`type\`, dynamic \`import()\`)
|
||||||
|
|
||||||
|
Approve (**approved: true**) if the diff is clean and focused.
|
||||||
|
|
||||||
|
End with:
|
||||||
|
\`\`\`json
|
||||||
|
{ "approved": true }
|
||||||
|
\`\`\`
|
||||||
|
or
|
||||||
|
\`\`\`json
|
||||||
|
{ "approved": false }
|
||||||
|
\`\`\``;
|
||||||
|
}
|
||||||
|
|
||||||
|
export const reviewMetaSchema = z.object({
|
||||||
|
approved: z.boolean().describe("true if diff is clean and ready for tests"),
|
||||||
|
});
|
||||||
|
export type ReviewMeta = z.infer<typeof reviewMetaSchema>;
|
||||||
|
|
||||||
|
export function createReviewRole(
|
||||||
|
adapter: AgentFn,
|
||||||
|
extract: LlmExtractorConfig,
|
||||||
|
nerveRoot: string,
|
||||||
|
): Role<ReviewMeta> {
|
||||||
|
return createRole(
|
||||||
|
adapter,
|
||||||
|
async (ctx: ThreadContext) =>
|
||||||
|
reviewPrompt({ threadId: ctx.start.meta.threadId, nerveRoot }),
|
||||||
|
reviewMetaSchema,
|
||||||
|
extract,
|
||||||
|
);
|
||||||
|
}
|
||||||
40
workflows/solve-issue/roles/test.ts
Normal file
40
workflows/solve-issue/roles/test.ts
Normal file
@ -0,0 +1,40 @@
|
|||||||
|
import type { AgentFn, Role, ThreadContext } from "@uncaged/nerve-core";
|
||||||
|
import type { LlmExtractorConfig } from "@uncaged/nerve-workflow-utils";
|
||||||
|
import { createRole } from "@uncaged/nerve-workflow-utils";
|
||||||
|
import { z } from "zod";
|
||||||
|
|
||||||
|
function testPrompt({ threadId }: { threadId: string }): string {
|
||||||
|
return `You are the **test** agent (Hermes). You execute automated tests for the change.
|
||||||
|
|
||||||
|
Read workflow context: \`nerve thread show ${threadId}\`
|
||||||
|
|
||||||
|
Find **repo path** from \`---SOLVE_ISSUE_REPO--- path:\` in the thread.
|
||||||
|
|
||||||
|
From the **plan** step output, locate **Test commands** (explicit shell commands). Run each command with cwd = repo path, in order.
|
||||||
|
|
||||||
|
If the plan lists **no** test commands, try **pnpm test**, then **npm test** if pnpm is unavailable; if neither applies, explain skip.
|
||||||
|
|
||||||
|
Collect stdout/stderr snippets on failure.
|
||||||
|
|
||||||
|
End with JSON only:
|
||||||
|
\`\`\`json
|
||||||
|
{ "passed": true }
|
||||||
|
\`\`\`
|
||||||
|
or \`{ "passed": false }\`
|
||||||
|
|
||||||
|
**passed=true** only if every executed command exited 0 (or skip was justified with no failing command).`;
|
||||||
|
}
|
||||||
|
|
||||||
|
export const testMetaSchema = z.object({
|
||||||
|
passed: z.boolean().describe("true if all test commands passed"),
|
||||||
|
});
|
||||||
|
export type TestMeta = z.infer<typeof testMetaSchema>;
|
||||||
|
|
||||||
|
export function createTestRole(adapter: AgentFn, extract: LlmExtractorConfig): Role<TestMeta> {
|
||||||
|
return createRole(
|
||||||
|
adapter,
|
||||||
|
async (ctx: ThreadContext) => testPrompt({ threadId: ctx.start.meta.threadId }),
|
||||||
|
testMetaSchema,
|
||||||
|
extract,
|
||||||
|
);
|
||||||
|
}
|
||||||
@ -1,31 +0,0 @@
|
|||||||
import { join } from "node:path";
|
|
||||||
import type { WorkflowDefinition } from "@uncaged/nerve-core";
|
|
||||||
import type { LlmProvider } from "@uncaged/nerve-workflow-utils";
|
|
||||||
import { buildPlannerRole } from "./roles/planner/index.js";
|
|
||||||
import { buildCoderRole } from "./roles/coder/index.js";
|
|
||||||
import { buildTesterRole } from "./roles/tester/index.js";
|
|
||||||
import { buildCommitterRole } from "./roles/committer/index.js";
|
|
||||||
import { moderator } from "./moderator.js";
|
|
||||||
import type { WorkflowMeta } from "./moderator.js";
|
|
||||||
|
|
||||||
export type BuildWorkflowGeneratorDeps = {
|
|
||||||
provider: LlmProvider;
|
|
||||||
nerveRoot: string;
|
|
||||||
};
|
|
||||||
|
|
||||||
export function buildWorkflowGenerator({
|
|
||||||
provider,
|
|
||||||
nerveRoot,
|
|
||||||
}: BuildWorkflowGeneratorDeps): WorkflowDefinition<WorkflowMeta> {
|
|
||||||
const workflowsDir = join(nerveRoot, "workflows");
|
|
||||||
return {
|
|
||||||
name: "workflow-generator",
|
|
||||||
roles: {
|
|
||||||
planner: buildPlannerRole({ provider, nerveRoot, workflowsDir }),
|
|
||||||
coder: buildCoderRole({ nerveRoot, workflowsDir }),
|
|
||||||
tester: buildTesterRole({ nerveRoot }),
|
|
||||||
committer: buildCommitterRole({ nerveRoot }),
|
|
||||||
},
|
|
||||||
moderator,
|
|
||||||
};
|
|
||||||
}
|
|
||||||
846
workflows/workflow-generator/dist/index.js
vendored
846
workflows/workflow-generator/dist/index.js
vendored
@ -1,846 +0,0 @@
|
|||||||
// index.ts
|
|
||||||
import { join as join4 } from "node:path";
|
|
||||||
|
|
||||||
// build.ts
|
|
||||||
import { join as join3 } from "node:path";
|
|
||||||
|
|
||||||
// roles/planner/index.ts
|
|
||||||
import { existsSync, readFileSync } from "node:fs";
|
|
||||||
import { join } from "node:path";
|
|
||||||
import { isDryRun, llmExtract, nerveAgentContext, readNerveYaml } from "@uncaged/nerve-workflow-utils";
|
|
||||||
import { z } from "zod";
|
|
||||||
|
|
||||||
// roles/planner/prompt.ts
|
|
||||||
function plannerPrompt({
|
|
||||||
nerveAgentContext: nerveAgentContext2,
|
|
||||||
userPrompt,
|
|
||||||
nerveRoot,
|
|
||||||
workflowsDir,
|
|
||||||
senseGeneratorReference,
|
|
||||||
nerveYaml
|
|
||||||
}) {
|
|
||||||
const content = `Design a Nerve workflow plan from this request.
|
|
||||||
|
|
||||||
${nerveAgentContext2}
|
|
||||||
|
|
||||||
User request:
|
|
||||||
${userPrompt}
|
|
||||||
|
|
||||||
Target root: ${nerveRoot}
|
|
||||||
Workflow dir root: ${workflowsDir}
|
|
||||||
|
|
||||||
Reference structure:
|
|
||||||
\`\`\`ts
|
|
||||||
${senseGeneratorReference.slice(0, 18e3)}
|
|
||||||
\`\`\`
|
|
||||||
|
|
||||||
Current nerve.yaml:
|
|
||||||
\`\`\`yaml
|
|
||||||
${nerveYaml}
|
|
||||||
\`\`\`
|
|
||||||
|
|
||||||
Produce a complete markdown plan that includes:
|
|
||||||
- workflow name
|
|
||||||
- roles list
|
|
||||||
- flow/transitions
|
|
||||||
- validation loops design
|
|
||||||
- external deps
|
|
||||||
- data flow`;
|
|
||||||
return [{ role: "user", content }];
|
|
||||||
}
|
|
||||||
|
|
||||||
// roles/planner/index.ts
|
|
||||||
var roleSchema = z.object({
|
|
||||||
name: z.string().default(""),
|
|
||||||
goal: z.string().default(""),
|
|
||||||
io: z.string().default("")
|
|
||||||
}).default({ name: "", goal: "", io: "" });
|
|
||||||
var plannerMetaSchema = z.object({
|
|
||||||
userPrompt: z.string().default(""),
|
|
||||||
workflowName: z.string().default("").describe("kebab-case workflow name under workflows/, e.g. issue-fixer"),
|
|
||||||
roles: z.array(roleSchema).default([]),
|
|
||||||
flowTransitions: z.preprocess((v) => Array.isArray(v) ? v.join("\n") : v, z.string().default("")),
|
|
||||||
validationLoopsDesign: z.preprocess(
|
|
||||||
(v) => Array.isArray(v) ? v.join("\n") : v,
|
|
||||||
z.string().default("")
|
|
||||||
),
|
|
||||||
externalDeps: z.preprocess(
|
|
||||||
(v) => Array.isArray(v) ? v.join(", ") : v,
|
|
||||||
z.string().default("")
|
|
||||||
),
|
|
||||||
dataFlow: z.preprocess((v) => Array.isArray(v) ? v.join("\n") : v, z.string().default("")),
|
|
||||||
planMarkdown: z.preprocess(
|
|
||||||
(v) => Array.isArray(v) ? v.join("\n") : v,
|
|
||||||
z.string().default("")
|
|
||||||
)
|
|
||||||
});
|
|
||||||
function getNerveYaml(nerveRoot) {
|
|
||||||
const result = readNerveYaml({ nerveRoot });
|
|
||||||
return result.ok ? result.value : "# nerve.yaml unavailable";
|
|
||||||
}
|
|
||||||
function getSenseGeneratorReference(workflowsDir) {
|
|
||||||
const p = join(workflowsDir, "sense-generator", "index.ts");
|
|
||||||
if (!existsSync(p)) {
|
|
||||||
return "(missing workflows/sense-generator/index.ts)";
|
|
||||||
}
|
|
||||||
return readFileSync(p, "utf-8");
|
|
||||||
}
|
|
||||||
function buildPlannerRole({
|
|
||||||
provider,
|
|
||||||
nerveRoot,
|
|
||||||
workflowsDir
|
|
||||||
}) {
|
|
||||||
return async (start, _messages) => {
|
|
||||||
const dry = isDryRun(start);
|
|
||||||
const userPrompt = start.content;
|
|
||||||
const messages = plannerPrompt({
|
|
||||||
nerveAgentContext,
|
|
||||||
userPrompt,
|
|
||||||
nerveRoot,
|
|
||||||
workflowsDir,
|
|
||||||
senseGeneratorReference: getSenseGeneratorReference(workflowsDir),
|
|
||||||
nerveYaml: getNerveYaml(nerveRoot)
|
|
||||||
});
|
|
||||||
const extracted = await llmExtract({
|
|
||||||
text: messages.map((m) => m.content).join("\n"),
|
|
||||||
schema: plannerMetaSchema,
|
|
||||||
provider,
|
|
||||||
dryRun: dry
|
|
||||||
});
|
|
||||||
const emptyMeta = {
|
|
||||||
userPrompt,
|
|
||||||
workflowName: "",
|
|
||||||
roles: [],
|
|
||||||
flowTransitions: "",
|
|
||||||
validationLoopsDesign: "",
|
|
||||||
externalDeps: "",
|
|
||||||
dataFlow: "",
|
|
||||||
planMarkdown: ""
|
|
||||||
};
|
|
||||||
if (!extracted.ok) {
|
|
||||||
return {
|
|
||||||
content: `[planner] llmExtract failed: ${JSON.stringify(extracted.error)}`,
|
|
||||||
meta: emptyMeta
|
|
||||||
};
|
|
||||||
}
|
|
||||||
const value = extracted.value;
|
|
||||||
const planMarkdown = value.planMarkdown.length > 0 ? value.planMarkdown : [
|
|
||||||
`# Workflow Plan`,
|
|
||||||
`- workflowName: ${value.workflowName}`,
|
|
||||||
``,
|
|
||||||
`## Roles`,
|
|
||||||
...value.roles.map((r) => `- ${r.name}: ${r.goal} (${r.io})`),
|
|
||||||
``,
|
|
||||||
`## Flow Transitions`,
|
|
||||||
value.flowTransitions,
|
|
||||||
``,
|
|
||||||
`## Validation Loops`,
|
|
||||||
value.validationLoopsDesign,
|
|
||||||
``,
|
|
||||||
`## External Dependencies`,
|
|
||||||
value.externalDeps,
|
|
||||||
``,
|
|
||||||
`## Data Flow`,
|
|
||||||
value.dataFlow
|
|
||||||
].join("\n");
|
|
||||||
return {
|
|
||||||
content: planMarkdown,
|
|
||||||
meta: {
|
|
||||||
userPrompt,
|
|
||||||
workflowName: value.workflowName,
|
|
||||||
roles: value.roles,
|
|
||||||
flowTransitions: value.flowTransitions,
|
|
||||||
validationLoopsDesign: value.validationLoopsDesign,
|
|
||||||
externalDeps: value.externalDeps,
|
|
||||||
dataFlow: value.dataFlow,
|
|
||||||
planMarkdown
|
|
||||||
}
|
|
||||||
};
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
// roles/coder/index.ts
|
|
||||||
import { existsSync as existsSync2, readFileSync as readFileSync2 } from "node:fs";
|
|
||||||
import { join as join2 } from "node:path";
|
|
||||||
import { cursorAgent, isDryRun as isDryRun2, spawnSafe } from "@uncaged/nerve-workflow-utils";
|
|
||||||
import { z as z2 } from "zod";
|
|
||||||
|
|
||||||
// roles/coder/prompt.ts
|
|
||||||
function coderPrompt({
|
|
||||||
workflowsDir,
|
|
||||||
wfName,
|
|
||||||
planMarkdown,
|
|
||||||
plannerStructured,
|
|
||||||
feedback,
|
|
||||||
nerveRoot
|
|
||||||
}) {
|
|
||||||
return `Implement a Nerve workflow package under ${workflowsDir}/${wfName}/.
|
|
||||||
|
|
||||||
Planner output:
|
|
||||||
${planMarkdown}
|
|
||||||
|
|
||||||
Structured planner fields:
|
|
||||||
${JSON.stringify(plannerStructured, null, 2)}
|
|
||||||
${feedback}
|
|
||||||
|
|
||||||
Required files:
|
|
||||||
1) ${workflowsDir}/${wfName}/index.ts
|
|
||||||
2) ${workflowsDir}/${wfName}/package.json
|
|
||||||
3) ${workflowsDir}/${wfName}/tsconfig.json
|
|
||||||
4) update ${nerveRoot}/nerve.yaml with workflows.${wfName}
|
|
||||||
|
|
||||||
Rules:
|
|
||||||
- keep WorkflowDefinition<WorkflowMeta> pattern
|
|
||||||
- no dynamic import()
|
|
||||||
- use types (not interfaces)
|
|
||||||
- include retry-aware moderator routing
|
|
||||||
- write compile-ready TypeScript`;
|
|
||||||
}
|
|
||||||
|
|
||||||
// roles/coder/index.ts
|
|
||||||
var coderMetaSchema = z2.object({
|
|
||||||
workflowName: z2.string().default(""),
|
|
||||||
attempt: z2.number().default(1),
|
|
||||||
files: z2.object({
|
|
||||||
indexTs: z2.boolean().default(false),
|
|
||||||
packageJson: z2.boolean().default(false),
|
|
||||||
tsconfigJson: z2.boolean().default(false)
|
|
||||||
}).default({ indexTs: false, packageJson: false, tsconfigJson: false }),
|
|
||||||
lintPassed: z2.boolean().default(false),
|
|
||||||
buildPassed: z2.boolean().default(false),
|
|
||||||
lintLog: z2.string().default(""),
|
|
||||||
buildLog: z2.string().default(""),
|
|
||||||
cursorOutput: z2.string().default(""),
|
|
||||||
reason: z2.string().nullable().default(null)
|
|
||||||
});
|
|
||||||
function formatSpawnFailure(error) {
|
|
||||||
if (error.kind === "spawn_failed") {
|
|
||||||
return error.message;
|
|
||||||
}
|
|
||||||
if (error.kind === "timeout") {
|
|
||||||
return `timeout stdout=${error.stdout.slice(0, 300)} stderr=${error.stderr.slice(0, 300)}`;
|
|
||||||
}
|
|
||||||
return `exit ${error.exitCode} stderr=${error.stderr.slice(0, 500)}`;
|
|
||||||
}
|
|
||||||
function scanGeneratedCodePitfalls(source) {
|
|
||||||
const issues = [];
|
|
||||||
if (/\bawait\s+import\s*\(/.test(source)) {
|
|
||||||
issues.push("Found await import() in generated workflow code");
|
|
||||||
}
|
|
||||||
if (/\bimport\s*\(\s*["'`]/.test(source) && !source.includes("Dynamic import required")) {
|
|
||||||
issues.push("Found undocumented dynamic import() call");
|
|
||||||
}
|
|
||||||
if (!/\bexport\s+default\s+/.test(source)) {
|
|
||||||
issues.push("Missing default export of WorkflowDefinition");
|
|
||||||
}
|
|
||||||
return issues;
|
|
||||||
}
|
|
||||||
async function runLintAndBuild(workflowDir, dry) {
|
|
||||||
const lintRun = await spawnSafe("pnpm", ["run", "check"], {
|
|
||||||
cwd: workflowDir,
|
|
||||||
env: null,
|
|
||||||
timeoutMs: 3e5,
|
|
||||||
dryRun: dry
|
|
||||||
});
|
|
||||||
if (!lintRun.ok) {
|
|
||||||
return {
|
|
||||||
lintPassed: false,
|
|
||||||
buildPassed: false,
|
|
||||||
lintLog: formatSpawnFailure(lintRun.error),
|
|
||||||
buildLog: "",
|
|
||||||
reason: `lint failed: ${formatSpawnFailure(lintRun.error)}`
|
|
||||||
};
|
|
||||||
}
|
|
||||||
const lintLog = lintRun.value.stderr.trim() || lintRun.value.stdout.trim() || "(no output)";
|
|
||||||
const tscRun = await spawnSafe("npx", ["tsc", "--noEmit"], {
|
|
||||||
cwd: workflowDir,
|
|
||||||
env: null,
|
|
||||||
timeoutMs: 3e5,
|
|
||||||
dryRun: dry
|
|
||||||
});
|
|
||||||
if (!tscRun.ok) {
|
|
||||||
return {
|
|
||||||
lintPassed: true,
|
|
||||||
buildPassed: false,
|
|
||||||
lintLog,
|
|
||||||
buildLog: formatSpawnFailure(tscRun.error),
|
|
||||||
reason: `build failed: ${formatSpawnFailure(tscRun.error)}`
|
|
||||||
};
|
|
||||||
}
|
|
||||||
const buildLog = tscRun.value.stderr.trim() || tscRun.value.stdout.trim() || "(no output)";
|
|
||||||
return { lintPassed: true, buildPassed: true, lintLog, buildLog, reason: null };
|
|
||||||
}
|
|
||||||
function lastMetaForRole(messages, role) {
|
|
||||||
for (let i = messages.length - 1; i >= 0; i--) {
|
|
||||||
if (messages[i].role === role) {
|
|
||||||
return messages[i].meta;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
function buildCoderRole({ nerveRoot, workflowsDir }) {
|
|
||||||
return async (start, messages) => {
|
|
||||||
const dry = isDryRun2(start);
|
|
||||||
const plannerMeta = lastMetaForRole(messages, "planner");
|
|
||||||
const previousTester = lastMetaForRole(messages, "tester");
|
|
||||||
const attempt = messages.filter((m) => m.role === "coder").length + 1;
|
|
||||||
if (plannerMeta === null || plannerMeta.workflowName.trim().length === 0) {
|
|
||||||
return {
|
|
||||||
content: "coder cannot continue: missing planner output",
|
|
||||||
meta: {
|
|
||||||
workflowName: "",
|
|
||||||
attempt,
|
|
||||||
files: { indexTs: false, packageJson: false, tsconfigJson: false },
|
|
||||||
lintPassed: false,
|
|
||||||
buildPassed: false,
|
|
||||||
lintLog: "",
|
|
||||||
buildLog: "",
|
|
||||||
cursorOutput: "",
|
|
||||||
reason: "missing planner output"
|
|
||||||
}
|
|
||||||
};
|
|
||||||
}
|
|
||||||
const wfName = plannerMeta.workflowName.trim();
|
|
||||||
const feedback = previousTester !== null && previousTester.passed === false ? `
|
|
||||||
|
|
||||||
Previous tester failure to fix:
|
|
||||||
${previousTester.reason}
|
|
||||||
${previousTester.dryRunLog}
|
|
||||||
` : "";
|
|
||||||
const prompt = coderPrompt({
|
|
||||||
workflowsDir,
|
|
||||||
wfName,
|
|
||||||
planMarkdown: plannerMeta.planMarkdown,
|
|
||||||
plannerStructured: {
|
|
||||||
workflowName: plannerMeta.workflowName,
|
|
||||||
roles: plannerMeta.roles,
|
|
||||||
flowTransitions: plannerMeta.flowTransitions,
|
|
||||||
validationLoopsDesign: plannerMeta.validationLoopsDesign,
|
|
||||||
externalDeps: plannerMeta.externalDeps,
|
|
||||||
dataFlow: plannerMeta.dataFlow
|
|
||||||
},
|
|
||||||
feedback,
|
|
||||||
nerveRoot
|
|
||||||
});
|
|
||||||
const agentRun = await cursorAgent({
|
|
||||||
prompt,
|
|
||||||
mode: "default",
|
|
||||||
cwd: nerveRoot,
|
|
||||||
env: null,
|
|
||||||
timeoutMs: null,
|
|
||||||
dryRun: dry
|
|
||||||
});
|
|
||||||
const workflowDir = join2(workflowsDir, wfName);
|
|
||||||
const files = {
|
|
||||||
indexTs: existsSync2(join2(workflowDir, "index.ts")),
|
|
||||||
packageJson: existsSync2(join2(workflowDir, "package.json")),
|
|
||||||
tsconfigJson: existsSync2(join2(workflowDir, "tsconfig.json"))
|
|
||||||
};
|
|
||||||
const missing = [
|
|
||||||
files.indexTs ? null : "index.ts",
|
|
||||||
files.packageJson ? null : "package.json",
|
|
||||||
files.tsconfigJson ? null : "tsconfig.json"
|
|
||||||
].filter((x) => x !== null);
|
|
||||||
if (!agentRun.ok) {
|
|
||||||
return {
|
|
||||||
content: `coder failed: ${formatSpawnFailure(agentRun.error)}`,
|
|
||||||
meta: {
|
|
||||||
workflowName: wfName,
|
|
||||||
attempt,
|
|
||||||
files,
|
|
||||||
lintPassed: false,
|
|
||||||
buildPassed: false,
|
|
||||||
lintLog: "",
|
|
||||||
buildLog: "",
|
|
||||||
cursorOutput: "",
|
|
||||||
reason: formatSpawnFailure(agentRun.error)
|
|
||||||
}
|
|
||||||
};
|
|
||||||
}
|
|
||||||
if (missing.length > 0) {
|
|
||||||
return {
|
|
||||||
content: `coder failed: missing required files (${missing.join(", ")})`,
|
|
||||||
meta: {
|
|
||||||
workflowName: wfName,
|
|
||||||
attempt,
|
|
||||||
files,
|
|
||||||
lintPassed: false,
|
|
||||||
buildPassed: false,
|
|
||||||
lintLog: "",
|
|
||||||
buildLog: "",
|
|
||||||
cursorOutput: agentRun.value,
|
|
||||||
reason: `missing files: ${missing.join(", ")}`
|
|
||||||
}
|
|
||||||
};
|
|
||||||
}
|
|
||||||
const source = readFileSync2(join2(workflowDir, "index.ts"), "utf-8");
|
|
||||||
const pitfalls = scanGeneratedCodePitfalls(source);
|
|
||||||
if (pitfalls.length > 0) {
|
|
||||||
return {
|
|
||||||
content: `coder static check failed:
|
|
||||||
${pitfalls.join("\n")}`,
|
|
||||||
meta: {
|
|
||||||
workflowName: wfName,
|
|
||||||
attempt,
|
|
||||||
files,
|
|
||||||
lintPassed: false,
|
|
||||||
buildPassed: false,
|
|
||||||
lintLog: pitfalls.join("\n"),
|
|
||||||
buildLog: "",
|
|
||||||
cursorOutput: agentRun.value,
|
|
||||||
reason: pitfalls.join("; ")
|
|
||||||
}
|
|
||||||
};
|
|
||||||
}
|
|
||||||
const check = await runLintAndBuild(workflowDir, dry);
|
|
||||||
const passed = check.lintPassed && check.buildPassed;
|
|
||||||
return {
|
|
||||||
content: passed ? `coder PASS: lint+build ok
|
|
||||||
|
|
||||||
${check.lintLog}
|
|
||||||
|
|
||||||
${check.buildLog}` : `coder FAIL: ${check.reason ?? "unknown error"}`,
|
|
||||||
meta: {
|
|
||||||
workflowName: wfName,
|
|
||||||
attempt,
|
|
||||||
files,
|
|
||||||
lintPassed: check.lintPassed,
|
|
||||||
buildPassed: check.buildPassed,
|
|
||||||
lintLog: check.lintLog,
|
|
||||||
buildLog: check.buildLog,
|
|
||||||
cursorOutput: agentRun.value,
|
|
||||||
reason: check.reason
|
|
||||||
}
|
|
||||||
};
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
// roles/tester/index.ts
|
|
||||||
import { cursorAgent as cursorAgent2, isDryRun as isDryRun3 } from "@uncaged/nerve-workflow-utils";
|
|
||||||
import { z as z3 } from "zod";
|
|
||||||
|
|
||||||
// roles/tester/prompt.ts
|
|
||||||
function testerPrompt({
|
|
||||||
workflowName,
|
|
||||||
plannerSpec,
|
|
||||||
coderOutput,
|
|
||||||
nerveRoot: _nerveRoot
|
|
||||||
}) {
|
|
||||||
return `You are testing a generated Nerve workflow by doing a dry-run review.
|
|
||||||
|
|
||||||
Workflow: ${workflowName}
|
|
||||||
|
|
||||||
Planner specification:
|
|
||||||
${JSON.stringify(plannerSpec, null, 2)}
|
|
||||||
|
|
||||||
Coder output summary:
|
|
||||||
${coderOutput.slice(0, 6e3)}
|
|
||||||
|
|
||||||
Required checks:
|
|
||||||
1) Verify role transitions are coherent and terminates to END.
|
|
||||||
2) Verify generated workflow adheres to planner intent.
|
|
||||||
3) Verify retry loops are explicit for recoverable failures.
|
|
||||||
4) Verify no obvious runtime-breaking issue in generated index.ts.
|
|
||||||
|
|
||||||
Return exactly:
|
|
||||||
PASS|<reason>|<compact markdown log>
|
|
||||||
or
|
|
||||||
FAIL|<reason>|<compact markdown log>`;
|
|
||||||
}
|
|
||||||
|
|
||||||
// roles/tester/index.ts
|
|
||||||
var testerMetaSchema = z3.object({
|
|
||||||
workflowName: z3.string().default(""),
|
|
||||||
attempt: z3.number().default(1),
|
|
||||||
passed: z3.boolean().default(false),
|
|
||||||
dryRunLog: z3.string().default(""),
|
|
||||||
reason: z3.string().default("")
|
|
||||||
});
|
|
||||||
function formatSpawnFailure2(error) {
|
|
||||||
if (error.kind === "spawn_failed") {
|
|
||||||
return error.message;
|
|
||||||
}
|
|
||||||
if (error.kind === "timeout") {
|
|
||||||
return `timeout stdout=${error.stdout.slice(0, 300)} stderr=${error.stderr.slice(0, 300)}`;
|
|
||||||
}
|
|
||||||
return `exit ${error.exitCode} stderr=${error.stderr.slice(0, 500)}`;
|
|
||||||
}
|
|
||||||
function lastMetaForRole2(messages, role) {
|
|
||||||
for (let i = messages.length - 1; i >= 0; i--) {
|
|
||||||
if (messages[i].role === role) {
|
|
||||||
return messages[i].meta;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
function buildTesterRole({ nerveRoot }) {
|
|
||||||
return async (start, messages) => {
|
|
||||||
const dry = isDryRun3(start);
|
|
||||||
const plannerMeta = lastMetaForRole2(messages, "planner");
|
|
||||||
const coderMeta = lastMetaForRole2(messages, "coder");
|
|
||||||
const attempt = messages.filter((m) => m.role === "tester").length + 1;
|
|
||||||
if (plannerMeta === null || coderMeta === null) {
|
|
||||||
return {
|
|
||||||
content: "tester cannot continue: missing planner/coder output",
|
|
||||||
meta: {
|
|
||||||
workflowName: "",
|
|
||||||
attempt,
|
|
||||||
passed: false,
|
|
||||||
dryRunLog: "",
|
|
||||||
reason: "missing planner/coder output"
|
|
||||||
}
|
|
||||||
};
|
|
||||||
}
|
|
||||||
if (!coderMeta.lintPassed || !coderMeta.buildPassed) {
|
|
||||||
return {
|
|
||||||
content: "tester blocked: coder has not passed lint+build",
|
|
||||||
meta: {
|
|
||||||
workflowName: coderMeta.workflowName,
|
|
||||||
attempt,
|
|
||||||
passed: false,
|
|
||||||
dryRunLog: `${coderMeta.lintLog}
|
|
||||||
|
|
||||||
${coderMeta.buildLog}`,
|
|
||||||
reason: "coder did not pass lint+build"
|
|
||||||
}
|
|
||||||
};
|
|
||||||
}
|
|
||||||
if (dry) {
|
|
||||||
return {
|
|
||||||
content: "PASS \u2014 dry-run mode",
|
|
||||||
meta: {
|
|
||||||
workflowName: coderMeta.workflowName,
|
|
||||||
attempt,
|
|
||||||
passed: true,
|
|
||||||
dryRunLog: "[dry-run] tester skipped external checks",
|
|
||||||
reason: "dry-run mode"
|
|
||||||
}
|
|
||||||
};
|
|
||||||
}
|
|
||||||
const prompt = testerPrompt({
|
|
||||||
workflowName: coderMeta.workflowName,
|
|
||||||
plannerSpec: {
|
|
||||||
roles: plannerMeta.roles,
|
|
||||||
flowTransitions: plannerMeta.flowTransitions,
|
|
||||||
validationLoopsDesign: plannerMeta.validationLoopsDesign,
|
|
||||||
externalDeps: plannerMeta.externalDeps,
|
|
||||||
dataFlow: plannerMeta.dataFlow
|
|
||||||
},
|
|
||||||
coderOutput: coderMeta.cursorOutput,
|
|
||||||
nerveRoot
|
|
||||||
});
|
|
||||||
const run = await cursorAgent2({
|
|
||||||
prompt,
|
|
||||||
mode: "ask",
|
|
||||||
cwd: nerveRoot,
|
|
||||||
env: null,
|
|
||||||
timeoutMs: null,
|
|
||||||
dryRun: false
|
|
||||||
});
|
|
||||||
if (!run.ok) {
|
|
||||||
return {
|
|
||||||
content: "tester agent failed",
|
|
||||||
meta: {
|
|
||||||
workflowName: coderMeta.workflowName,
|
|
||||||
attempt,
|
|
||||||
passed: false,
|
|
||||||
dryRunLog: "",
|
|
||||||
reason: `tester agent failed: ${formatSpawnFailure2(run.error)}`
|
|
||||||
}
|
|
||||||
};
|
|
||||||
}
|
|
||||||
const text = run.value.trim();
|
|
||||||
const pass = text.startsWith("PASS|");
|
|
||||||
const fail = text.startsWith("FAIL|");
|
|
||||||
if (!pass && !fail) {
|
|
||||||
return {
|
|
||||||
content: "tester format invalid",
|
|
||||||
meta: {
|
|
||||||
workflowName: coderMeta.workflowName,
|
|
||||||
attempt,
|
|
||||||
passed: false,
|
|
||||||
dryRunLog: text,
|
|
||||||
reason: "tester format invalid"
|
|
||||||
}
|
|
||||||
};
|
|
||||||
}
|
|
||||||
const parts = text.split("|");
|
|
||||||
const reason = parts[1] ?? "no reason";
|
|
||||||
const log = parts.slice(2).join("|").trim();
|
|
||||||
return {
|
|
||||||
content: `${pass ? "PASS" : "FAIL"} \u2014 ${reason}`,
|
|
||||||
meta: {
|
|
||||||
workflowName: coderMeta.workflowName,
|
|
||||||
attempt,
|
|
||||||
passed: pass,
|
|
||||||
dryRunLog: log,
|
|
||||||
reason
|
|
||||||
}
|
|
||||||
};
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
// roles/committer/index.ts
|
|
||||||
import { cursorAgent as cursorAgent3, isDryRun as isDryRun4, spawnSafe as spawnSafe2 } from "@uncaged/nerve-workflow-utils";
|
|
||||||
import { z as z4 } from "zod";
|
|
||||||
|
|
||||||
// roles/committer/prompt.ts
|
|
||||||
function committerPrompt({
|
|
||||||
nerveRoot,
|
|
||||||
workflowName,
|
|
||||||
userPrompt,
|
|
||||||
testerReason
|
|
||||||
}) {
|
|
||||||
return `You are a git committer subagent for Nerve workflow generation.
|
|
||||||
Repository root: ${nerveRoot}
|
|
||||||
|
|
||||||
Goal:
|
|
||||||
- Commit and push generated workflow "${workflowName}".
|
|
||||||
- Handle dirty worktree safely (do not discard unrelated user edits).
|
|
||||||
- Detect default branch automatically.
|
|
||||||
- Create a focused branch for this workflow update.
|
|
||||||
- Stage only workflow files and required config updates.
|
|
||||||
|
|
||||||
Context:
|
|
||||||
- User prompt summary: ${userPrompt.slice(0, 500)}
|
|
||||||
- Tester result: ${testerReason}
|
|
||||||
|
|
||||||
Expected output format:
|
|
||||||
BRANCH=<branch-or-empty>
|
|
||||||
COMMIT=<hash-or-empty>
|
|
||||||
PUSHED=<true|false|unknown>
|
|
||||||
LOG_START
|
|
||||||
<details>
|
|
||||||
LOG_END`;
|
|
||||||
}
|
|
||||||
|
|
||||||
// roles/committer/index.ts
|
|
||||||
var committerMetaSchema = z4.object({
|
|
||||||
invoked: z4.boolean().default(false),
|
|
||||||
success: z4.boolean().default(false),
|
|
||||||
branch: z4.string().nullable().default(null),
|
|
||||||
commitHash: z4.string().nullable().default(null),
|
|
||||||
pushed: z4.boolean().nullable().default(null),
|
|
||||||
log: z4.string().default(""),
|
|
||||||
error: z4.string().nullable().default(null)
|
|
||||||
});
|
|
||||||
function formatSpawnFailure3(error) {
|
|
||||||
if (error.kind === "spawn_failed") {
|
|
||||||
return error.message;
|
|
||||||
}
|
|
||||||
if (error.kind === "timeout") {
|
|
||||||
return `timeout stdout=${error.stdout.slice(0, 300)} stderr=${error.stderr.slice(0, 300)}`;
|
|
||||||
}
|
|
||||||
return `exit ${error.exitCode} stderr=${error.stderr.slice(0, 500)}`;
|
|
||||||
}
|
|
||||||
function lastMetaForRole3(messages, role) {
|
|
||||||
for (let i = messages.length - 1; i >= 0; i--) {
|
|
||||||
if (messages[i].role === role) {
|
|
||||||
return messages[i].meta;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
function inferWorkflowName(messages) {
|
|
||||||
const tester = lastMetaForRole3(messages, "tester");
|
|
||||||
if (tester !== null && tester.workflowName.trim().length > 0) {
|
|
||||||
return tester.workflowName.trim();
|
|
||||||
}
|
|
||||||
const coder = lastMetaForRole3(messages, "coder");
|
|
||||||
if (coder !== null && coder.workflowName.trim().length > 0) {
|
|
||||||
return coder.workflowName.trim();
|
|
||||||
}
|
|
||||||
const planner = lastMetaForRole3(messages, "planner");
|
|
||||||
if (planner !== null && planner.workflowName.trim().length > 0) {
|
|
||||||
return planner.workflowName.trim();
|
|
||||||
}
|
|
||||||
return "";
|
|
||||||
}
|
|
||||||
async function runHermesCommitter(task, nerveRoot) {
|
|
||||||
const commandAttempts = [
|
|
||||||
{ cmd: "hermes-agent", args: ["--cwd", nerveRoot, "--task", task] },
|
|
||||||
{ cmd: "hermes", args: ["agent", "--cwd", nerveRoot, "--task", task] }
|
|
||||||
];
|
|
||||||
for (const candidate of commandAttempts) {
|
|
||||||
const run = await spawnSafe2(candidate.cmd, candidate.args, {
|
|
||||||
cwd: nerveRoot,
|
|
||||||
env: null,
|
|
||||||
timeoutMs: 6e5,
|
|
||||||
dryRun: false
|
|
||||||
});
|
|
||||||
if (!run.ok) {
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
const text = `${run.value.stdout}
|
|
||||||
${run.value.stderr}`;
|
|
||||||
const branch2 = text.match(/^BRANCH=(.*)$/m)?.[1]?.trim() ?? null;
|
|
||||||
const commitHash2 = text.match(/^COMMIT=(.*)$/m)?.[1]?.trim() ?? null;
|
|
||||||
const pushedText = text.match(/^PUSHED=(.*)$/m)?.[1]?.trim().toLowerCase() ?? "unknown";
|
|
||||||
const pushed = pushedText === "true" ? true : pushedText === "false" ? false : null;
|
|
||||||
return {
|
|
||||||
invoked: true,
|
|
||||||
success: true,
|
|
||||||
branch: branch2 && branch2.length > 0 ? branch2 : null,
|
|
||||||
commitHash: commitHash2 && commitHash2.length > 0 ? commitHash2 : null,
|
|
||||||
pushed,
|
|
||||||
log: text.slice(0, 2e4),
|
|
||||||
error: null
|
|
||||||
};
|
|
||||||
}
|
|
||||||
const fallback = await cursorAgent3({
|
|
||||||
prompt: `Run this git committer task in repository ${nerveRoot}:
|
|
||||||
|
|
||||||
${task}`,
|
|
||||||
mode: "default",
|
|
||||||
cwd: nerveRoot,
|
|
||||||
env: null,
|
|
||||||
timeoutMs: null,
|
|
||||||
dryRun: false
|
|
||||||
});
|
|
||||||
if (!fallback.ok) {
|
|
||||||
return {
|
|
||||||
invoked: true,
|
|
||||||
success: false,
|
|
||||||
branch: null,
|
|
||||||
commitHash: null,
|
|
||||||
pushed: null,
|
|
||||||
log: "",
|
|
||||||
error: `hermes and fallback both failed: ${formatSpawnFailure3(fallback.error)}`
|
|
||||||
};
|
|
||||||
}
|
|
||||||
const out = fallback.value;
|
|
||||||
const branch = out.match(/(?:branch|BRANCH)\s*[:=]\s*([^\s]+)/)?.[1] ?? null;
|
|
||||||
const commitHash = out.match(/[a-f0-9]{7,40}/)?.[0] ?? null;
|
|
||||||
return {
|
|
||||||
invoked: true,
|
|
||||||
success: true,
|
|
||||||
branch,
|
|
||||||
commitHash,
|
|
||||||
pushed: out.toLowerCase().includes("push") ? true : null,
|
|
||||||
log: out.slice(0, 2e4),
|
|
||||||
error: null
|
|
||||||
};
|
|
||||||
}
|
|
||||||
function buildCommitterRole({ nerveRoot }) {
|
|
||||||
return async (start, messages) => {
|
|
||||||
const dry = isDryRun4(start);
|
|
||||||
const planner = lastMetaForRole3(messages, "planner");
|
|
||||||
const tester = lastMetaForRole3(messages, "tester");
|
|
||||||
const workflowName = inferWorkflowName(messages);
|
|
||||||
const skipMeta = {
|
|
||||||
invoked: false,
|
|
||||||
success: false,
|
|
||||||
branch: null,
|
|
||||||
commitHash: null,
|
|
||||||
pushed: null,
|
|
||||||
log: "",
|
|
||||||
error: null
|
|
||||||
};
|
|
||||||
if (planner === null || tester === null || workflowName.length === 0) {
|
|
||||||
return {
|
|
||||||
content: "committer skipped: missing planner/tester/workflowName context",
|
|
||||||
meta: { ...skipMeta, error: "missing committer context" }
|
|
||||||
};
|
|
||||||
}
|
|
||||||
if (!tester.passed) {
|
|
||||||
return {
|
|
||||||
content: "committer skipped: tester not passed",
|
|
||||||
meta: { ...skipMeta, error: "tester not passed" }
|
|
||||||
};
|
|
||||||
}
|
|
||||||
if (dry) {
|
|
||||||
return {
|
|
||||||
content: "[dry-run] skipped hermes committer",
|
|
||||||
meta: {
|
|
||||||
invoked: true,
|
|
||||||
success: true,
|
|
||||||
branch: "wf/dry-run",
|
|
||||||
commitHash: null,
|
|
||||||
pushed: null,
|
|
||||||
log: "[dry-run] skipped hermes committer",
|
|
||||||
error: null
|
|
||||||
}
|
|
||||||
};
|
|
||||||
}
|
|
||||||
const task = committerPrompt({
|
|
||||||
nerveRoot,
|
|
||||||
workflowName,
|
|
||||||
userPrompt: planner.userPrompt,
|
|
||||||
testerReason: tester.reason
|
|
||||||
});
|
|
||||||
const committed = await runHermesCommitter(task, nerveRoot);
|
|
||||||
return {
|
|
||||||
content: committed.success ? committed.log : `committer failed: ${committed.error ?? "unknown"}`,
|
|
||||||
meta: committed
|
|
||||||
};
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
// moderator.ts
|
|
||||||
import { END } from "@uncaged/nerve-core";
|
|
||||||
var moderator = (context) => {
|
|
||||||
if (context.steps.length === 0) {
|
|
||||||
return "planner";
|
|
||||||
}
|
|
||||||
const last = context.steps[context.steps.length - 1];
|
|
||||||
if (last.role === "planner") {
|
|
||||||
if (last.meta.workflowName.trim().length > 0) return "coder";
|
|
||||||
const plannerAttempts = context.steps.filter((s) => s.role === "planner").length;
|
|
||||||
return plannerAttempts < 3 ? "planner" : END;
|
|
||||||
}
|
|
||||||
if (last.role === "coder") {
|
|
||||||
if (last.meta.lintPassed && last.meta.buildPassed) {
|
|
||||||
return "tester";
|
|
||||||
}
|
|
||||||
if (last.meta.attempt < 3) {
|
|
||||||
return "coder";
|
|
||||||
}
|
|
||||||
return END;
|
|
||||||
}
|
|
||||||
if (last.role === "tester") {
|
|
||||||
if (last.meta.passed) {
|
|
||||||
return "committer";
|
|
||||||
}
|
|
||||||
if (last.meta.attempt < 3) {
|
|
||||||
return "coder";
|
|
||||||
}
|
|
||||||
return END;
|
|
||||||
}
|
|
||||||
return END;
|
|
||||||
};
|
|
||||||
|
|
||||||
// build.ts
|
|
||||||
function buildWorkflowGenerator({
|
|
||||||
provider,
|
|
||||||
nerveRoot
|
|
||||||
}) {
|
|
||||||
const workflowsDir = join3(nerveRoot, "workflows");
|
|
||||||
return {
|
|
||||||
name: "workflow-generator",
|
|
||||||
roles: {
|
|
||||||
planner: buildPlannerRole({ provider, nerveRoot, workflowsDir }),
|
|
||||||
coder: buildCoderRole({ nerveRoot, workflowsDir }),
|
|
||||||
tester: buildTesterRole({ nerveRoot }),
|
|
||||||
committer: buildCommitterRole({ nerveRoot })
|
|
||||||
},
|
|
||||||
moderator
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
// index.ts
|
|
||||||
var HOME = process.env.HOME ?? "/home/azureuser";
|
|
||||||
var NERVE_ROOT = join4(HOME, ".uncaged-nerve");
|
|
||||||
var apiKey = process.env.DASHSCOPE_API_KEY;
|
|
||||||
var baseUrl = process.env.DASHSCOPE_BASE_URL;
|
|
||||||
var model = process.env.DASHSCOPE_MODEL ?? "qwen-plus";
|
|
||||||
if (!apiKey || !baseUrl) {
|
|
||||||
throw new Error("Set DASHSCOPE_API_KEY and DASHSCOPE_BASE_URL");
|
|
||||||
}
|
|
||||||
var workflow = buildWorkflowGenerator({
|
|
||||||
provider: { apiKey, baseUrl, model },
|
|
||||||
nerveRoot: NERVE_ROOT
|
|
||||||
});
|
|
||||||
var index_default = workflow;
|
|
||||||
export {
|
|
||||||
index_default as default
|
|
||||||
};
|
|
||||||
@ -1,20 +0,0 @@
|
|||||||
import { join } from "node:path";
|
|
||||||
import { buildWorkflowGenerator } from "./build.js";
|
|
||||||
|
|
||||||
const HOME = process.env.HOME ?? "/home/azureuser";
|
|
||||||
const NERVE_ROOT = join(HOME, ".uncaged-nerve");
|
|
||||||
|
|
||||||
const apiKey = process.env.DASHSCOPE_API_KEY;
|
|
||||||
const baseUrl = process.env.DASHSCOPE_BASE_URL;
|
|
||||||
const model = process.env.DASHSCOPE_MODEL ?? "qwen-plus";
|
|
||||||
|
|
||||||
if (!apiKey || !baseUrl) {
|
|
||||||
throw new Error("Set DASHSCOPE_API_KEY and DASHSCOPE_BASE_URL");
|
|
||||||
}
|
|
||||||
|
|
||||||
const workflow = buildWorkflowGenerator({
|
|
||||||
provider: { apiKey, baseUrl, model },
|
|
||||||
nerveRoot: NERVE_ROOT,
|
|
||||||
});
|
|
||||||
|
|
||||||
export default workflow;
|
|
||||||
@ -1,45 +0,0 @@
|
|||||||
import { END } from "@uncaged/nerve-core";
|
|
||||||
import type { Moderator } from "@uncaged/nerve-core";
|
|
||||||
import type { PlannerMeta } from "./roles/planner/index.js";
|
|
||||||
import type { CoderMeta } from "./roles/coder/index.js";
|
|
||||||
import type { TesterMeta } from "./roles/tester/index.js";
|
|
||||||
import type { CommitterMeta } from "./roles/committer/index.js";
|
|
||||||
|
|
||||||
export type WorkflowMeta = {
|
|
||||||
planner: PlannerMeta;
|
|
||||||
coder: CoderMeta;
|
|
||||||
tester: TesterMeta;
|
|
||||||
committer: CommitterMeta;
|
|
||||||
};
|
|
||||||
|
|
||||||
export const moderator: Moderator<WorkflowMeta> = (context) => {
|
|
||||||
if (context.steps.length === 0) {
|
|
||||||
return "planner";
|
|
||||||
}
|
|
||||||
const last = context.steps[context.steps.length - 1];
|
|
||||||
|
|
||||||
if (last.role === "planner") {
|
|
||||||
if (last.meta.workflowName.trim().length > 0) return "coder";
|
|
||||||
const plannerAttempts = context.steps.filter((s) => s.role === "planner").length;
|
|
||||||
return plannerAttempts < 3 ? "planner" : END;
|
|
||||||
}
|
|
||||||
if (last.role === "coder") {
|
|
||||||
if (last.meta.lintPassed && last.meta.buildPassed) {
|
|
||||||
return "tester";
|
|
||||||
}
|
|
||||||
if (last.meta.attempt < 3) {
|
|
||||||
return "coder";
|
|
||||||
}
|
|
||||||
return END;
|
|
||||||
}
|
|
||||||
if (last.role === "tester") {
|
|
||||||
if (last.meta.passed) {
|
|
||||||
return "committer";
|
|
||||||
}
|
|
||||||
if (last.meta.attempt < 3) {
|
|
||||||
return "coder";
|
|
||||||
}
|
|
||||||
return END;
|
|
||||||
}
|
|
||||||
return END;
|
|
||||||
};
|
|
||||||
@ -1,26 +0,0 @@
|
|||||||
{
|
|
||||||
"name": "workflow-generator-workflow",
|
|
||||||
"version": "0.0.1",
|
|
||||||
"private": true,
|
|
||||||
"type": "module",
|
|
||||||
"scripts": {
|
|
||||||
"build": "esbuild index.ts --bundle --platform=node --format=esm --outdir=dist --packages=external"
|
|
||||||
},
|
|
||||||
"dependencies": {
|
|
||||||
"@uncaged/nerve-core": "latest",
|
|
||||||
"@uncaged/nerve-workflow-utils": "latest",
|
|
||||||
"zod": "^4.3.6"
|
|
||||||
},
|
|
||||||
"devDependencies": {
|
|
||||||
"@types/node": "^22.0.0",
|
|
||||||
"esbuild": "^0.27.0",
|
|
||||||
"typescript": "^5.7.0"
|
|
||||||
},
|
|
||||||
"pnpm": {
|
|
||||||
"overrides": {
|
|
||||||
"@uncaged/nerve-daemon": "link:../../../repos/nerve/packages/daemon",
|
|
||||||
"@uncaged/nerve-core": "link:../../../repos/nerve/packages/core",
|
|
||||||
"@uncaged/nerve-workflow-utils": "link:../../../repos/nerve/packages/workflow-utils"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
330
workflows/workflow-generator/pnpm-lock.yaml
generated
330
workflows/workflow-generator/pnpm-lock.yaml
generated
@ -1,330 +0,0 @@
|
|||||||
lockfileVersion: '9.0'
|
|
||||||
|
|
||||||
settings:
|
|
||||||
autoInstallPeers: true
|
|
||||||
excludeLinksFromLockfile: false
|
|
||||||
|
|
||||||
overrides:
|
|
||||||
'@uncaged/nerve-daemon': link:../../../repos/nerve/packages/daemon
|
|
||||||
'@uncaged/nerve-core': link:../../../repos/nerve/packages/core
|
|
||||||
'@uncaged/nerve-workflow-utils': link:../../../repos/nerve/packages/workflow-utils
|
|
||||||
|
|
||||||
importers:
|
|
||||||
|
|
||||||
.:
|
|
||||||
dependencies:
|
|
||||||
'@uncaged/nerve-core':
|
|
||||||
specifier: link:../../../repos/nerve/packages/core
|
|
||||||
version: link:../../../repos/nerve/packages/core
|
|
||||||
'@uncaged/nerve-workflow-utils':
|
|
||||||
specifier: link:../../../repos/nerve/packages/workflow-utils
|
|
||||||
version: link:../../../repos/nerve/packages/workflow-utils
|
|
||||||
zod:
|
|
||||||
specifier: ^4.3.6
|
|
||||||
version: 4.3.6
|
|
||||||
devDependencies:
|
|
||||||
'@types/node':
|
|
||||||
specifier: ^22.0.0
|
|
||||||
version: 22.19.17
|
|
||||||
esbuild:
|
|
||||||
specifier: ^0.27.0
|
|
||||||
version: 0.27.7
|
|
||||||
typescript:
|
|
||||||
specifier: ^5.7.0
|
|
||||||
version: 5.9.3
|
|
||||||
|
|
||||||
packages:
|
|
||||||
|
|
||||||
'@esbuild/aix-ppc64@0.27.7':
|
|
||||||
resolution: {integrity: sha512-EKX3Qwmhz1eMdEJokhALr0YiD0lhQNwDqkPYyPhiSwKrh7/4KRjQc04sZ8db+5DVVnZ1LmbNDI1uAMPEUBnQPg==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [ppc64]
|
|
||||||
os: [aix]
|
|
||||||
|
|
||||||
'@esbuild/android-arm64@0.27.7':
|
|
||||||
resolution: {integrity: sha512-62dPZHpIXzvChfvfLJow3q5dDtiNMkwiRzPylSCfriLvZeq0a1bWChrGx/BbUbPwOrsWKMn8idSllklzBy+dgQ==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [arm64]
|
|
||||||
os: [android]
|
|
||||||
|
|
||||||
'@esbuild/android-arm@0.27.7':
|
|
||||||
resolution: {integrity: sha512-jbPXvB4Yj2yBV7HUfE2KHe4GJX51QplCN1pGbYjvsyCZbQmies29EoJbkEc+vYuU5o45AfQn37vZlyXy4YJ8RQ==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [arm]
|
|
||||||
os: [android]
|
|
||||||
|
|
||||||
'@esbuild/android-x64@0.27.7':
|
|
||||||
resolution: {integrity: sha512-x5VpMODneVDb70PYV2VQOmIUUiBtY3D3mPBG8NxVk5CogneYhkR7MmM3yR/uMdITLrC1ml/NV1rj4bMJuy9MCg==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [x64]
|
|
||||||
os: [android]
|
|
||||||
|
|
||||||
'@esbuild/darwin-arm64@0.27.7':
|
|
||||||
resolution: {integrity: sha512-5lckdqeuBPlKUwvoCXIgI2D9/ABmPq3Rdp7IfL70393YgaASt7tbju3Ac+ePVi3KDH6N2RqePfHnXkaDtY9fkw==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [arm64]
|
|
||||||
os: [darwin]
|
|
||||||
|
|
||||||
'@esbuild/darwin-x64@0.27.7':
|
|
||||||
resolution: {integrity: sha512-rYnXrKcXuT7Z+WL5K980jVFdvVKhCHhUwid+dDYQpH+qu+TefcomiMAJpIiC2EM3Rjtq0sO3StMV/+3w3MyyqQ==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [x64]
|
|
||||||
os: [darwin]
|
|
||||||
|
|
||||||
'@esbuild/freebsd-arm64@0.27.7':
|
|
||||||
resolution: {integrity: sha512-B48PqeCsEgOtzME2GbNM2roU29AMTuOIN91dsMO30t+Ydis3z/3Ngoj5hhnsOSSwNzS+6JppqWsuhTp6E82l2w==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [arm64]
|
|
||||||
os: [freebsd]
|
|
||||||
|
|
||||||
'@esbuild/freebsd-x64@0.27.7':
|
|
||||||
resolution: {integrity: sha512-jOBDK5XEjA4m5IJK3bpAQF9/Lelu/Z9ZcdhTRLf4cajlB+8VEhFFRjWgfy3M1O4rO2GQ/b2dLwCUGpiF/eATNQ==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [x64]
|
|
||||||
os: [freebsd]
|
|
||||||
|
|
||||||
'@esbuild/linux-arm64@0.27.7':
|
|
||||||
resolution: {integrity: sha512-RZPHBoxXuNnPQO9rvjh5jdkRmVizktkT7TCDkDmQ0W2SwHInKCAV95GRuvdSvA7w4VMwfCjUiPwDi0ZO6Nfe9A==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [arm64]
|
|
||||||
os: [linux]
|
|
||||||
|
|
||||||
'@esbuild/linux-arm@0.27.7':
|
|
||||||
resolution: {integrity: sha512-RkT/YXYBTSULo3+af8Ib0ykH8u2MBh57o7q/DAs3lTJlyVQkgQvlrPTnjIzzRPQyavxtPtfg0EopvDyIt0j1rA==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [arm]
|
|
||||||
os: [linux]
|
|
||||||
|
|
||||||
'@esbuild/linux-ia32@0.27.7':
|
|
||||||
resolution: {integrity: sha512-GA48aKNkyQDbd3KtkplYWT102C5sn/EZTY4XROkxONgruHPU72l+gW+FfF8tf2cFjeHaRbWpOYa/uRBz/Xq1Pg==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [ia32]
|
|
||||||
os: [linux]
|
|
||||||
|
|
||||||
'@esbuild/linux-loong64@0.27.7':
|
|
||||||
resolution: {integrity: sha512-a4POruNM2oWsD4WKvBSEKGIiWQF8fZOAsycHOt6JBpZ+JN2n2JH9WAv56SOyu9X5IqAjqSIPTaJkqN8F7XOQ5Q==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [loong64]
|
|
||||||
os: [linux]
|
|
||||||
|
|
||||||
'@esbuild/linux-mips64el@0.27.7':
|
|
||||||
resolution: {integrity: sha512-KabT5I6StirGfIz0FMgl1I+R1H73Gp0ofL9A3nG3i/cYFJzKHhouBV5VWK1CSgKvVaG4q1RNpCTR2LuTVB3fIw==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [mips64el]
|
|
||||||
os: [linux]
|
|
||||||
|
|
||||||
'@esbuild/linux-ppc64@0.27.7':
|
|
||||||
resolution: {integrity: sha512-gRsL4x6wsGHGRqhtI+ifpN/vpOFTQtnbsupUF5R5YTAg+y/lKelYR1hXbnBdzDjGbMYjVJLJTd2OFmMewAgwlQ==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [ppc64]
|
|
||||||
os: [linux]
|
|
||||||
|
|
||||||
'@esbuild/linux-riscv64@0.27.7':
|
|
||||||
resolution: {integrity: sha512-hL25LbxO1QOngGzu2U5xeXtxXcW+/GvMN3ejANqXkxZ/opySAZMrc+9LY/WyjAan41unrR3YrmtTsUpwT66InQ==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [riscv64]
|
|
||||||
os: [linux]
|
|
||||||
|
|
||||||
'@esbuild/linux-s390x@0.27.7':
|
|
||||||
resolution: {integrity: sha512-2k8go8Ycu1Kb46vEelhu1vqEP+UeRVj2zY1pSuPdgvbd5ykAw82Lrro28vXUrRmzEsUV0NzCf54yARIK8r0fdw==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [s390x]
|
|
||||||
os: [linux]
|
|
||||||
|
|
||||||
'@esbuild/linux-x64@0.27.7':
|
|
||||||
resolution: {integrity: sha512-hzznmADPt+OmsYzw1EE33ccA+HPdIqiCRq7cQeL1Jlq2gb1+OyWBkMCrYGBJ+sxVzve2ZJEVeePbLM2iEIZSxA==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [x64]
|
|
||||||
os: [linux]
|
|
||||||
|
|
||||||
'@esbuild/netbsd-arm64@0.27.7':
|
|
||||||
resolution: {integrity: sha512-b6pqtrQdigZBwZxAn1UpazEisvwaIDvdbMbmrly7cDTMFnw/+3lVxxCTGOrkPVnsYIosJJXAsILG9XcQS+Yu6w==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [arm64]
|
|
||||||
os: [netbsd]
|
|
||||||
|
|
||||||
'@esbuild/netbsd-x64@0.27.7':
|
|
||||||
resolution: {integrity: sha512-OfatkLojr6U+WN5EDYuoQhtM+1xco+/6FSzJJnuWiUw5eVcicbyK3dq5EeV/QHT1uy6GoDhGbFpprUiHUYggrw==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [x64]
|
|
||||||
os: [netbsd]
|
|
||||||
|
|
||||||
'@esbuild/openbsd-arm64@0.27.7':
|
|
||||||
resolution: {integrity: sha512-AFuojMQTxAz75Fo8idVcqoQWEHIXFRbOc1TrVcFSgCZtQfSdc1RXgB3tjOn/krRHENUB4j00bfGjyl2mJrU37A==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [arm64]
|
|
||||||
os: [openbsd]
|
|
||||||
|
|
||||||
'@esbuild/openbsd-x64@0.27.7':
|
|
||||||
resolution: {integrity: sha512-+A1NJmfM8WNDv5CLVQYJ5PshuRm/4cI6WMZRg1by1GwPIQPCTs1GLEUHwiiQGT5zDdyLiRM/l1G0Pv54gvtKIg==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [x64]
|
|
||||||
os: [openbsd]
|
|
||||||
|
|
||||||
'@esbuild/openharmony-arm64@0.27.7':
|
|
||||||
resolution: {integrity: sha512-+KrvYb/C8zA9CU/g0sR6w2RBw7IGc5J2BPnc3dYc5VJxHCSF1yNMxTV5LQ7GuKteQXZtspjFbiuW5/dOj7H4Yw==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [arm64]
|
|
||||||
os: [openharmony]
|
|
||||||
|
|
||||||
'@esbuild/sunos-x64@0.27.7':
|
|
||||||
resolution: {integrity: sha512-ikktIhFBzQNt/QDyOL580ti9+5mL/YZeUPKU2ivGtGjdTYoqz6jObj6nOMfhASpS4GU4Q/Clh1QtxWAvcYKamA==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [x64]
|
|
||||||
os: [sunos]
|
|
||||||
|
|
||||||
'@esbuild/win32-arm64@0.27.7':
|
|
||||||
resolution: {integrity: sha512-7yRhbHvPqSpRUV7Q20VuDwbjW5kIMwTHpptuUzV+AA46kiPze5Z7qgt6CLCK3pWFrHeNfDd1VKgyP4O+ng17CA==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [arm64]
|
|
||||||
os: [win32]
|
|
||||||
|
|
||||||
'@esbuild/win32-ia32@0.27.7':
|
|
||||||
resolution: {integrity: sha512-SmwKXe6VHIyZYbBLJrhOoCJRB/Z1tckzmgTLfFYOfpMAx63BJEaL9ExI8x7v0oAO3Zh6D/Oi1gVxEYr5oUCFhw==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [ia32]
|
|
||||||
os: [win32]
|
|
||||||
|
|
||||||
'@esbuild/win32-x64@0.27.7':
|
|
||||||
resolution: {integrity: sha512-56hiAJPhwQ1R4i+21FVF7V8kSD5zZTdHcVuRFMW0hn753vVfQN8xlx4uOPT4xoGH0Z/oVATuR82AiqSTDIpaHg==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
cpu: [x64]
|
|
||||||
os: [win32]
|
|
||||||
|
|
||||||
'@types/node@22.19.17':
|
|
||||||
resolution: {integrity: sha512-wGdMcf+vPYM6jikpS/qhg6WiqSV/OhG+jeeHT/KlVqxYfD40iYJf9/AE1uQxVWFvU7MipKRkRv8NSHiCGgPr8Q==}
|
|
||||||
|
|
||||||
esbuild@0.27.7:
|
|
||||||
resolution: {integrity: sha512-IxpibTjyVnmrIQo5aqNpCgoACA/dTKLTlhMHihVHhdkxKyPO1uBBthumT0rdHmcsk9uMonIWS0m4FljWzILh3w==}
|
|
||||||
engines: {node: '>=18'}
|
|
||||||
hasBin: true
|
|
||||||
|
|
||||||
typescript@5.9.3:
|
|
||||||
resolution: {integrity: sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw==}
|
|
||||||
engines: {node: '>=14.17'}
|
|
||||||
hasBin: true
|
|
||||||
|
|
||||||
undici-types@6.21.0:
|
|
||||||
resolution: {integrity: sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ==}
|
|
||||||
|
|
||||||
zod@4.3.6:
|
|
||||||
resolution: {integrity: sha512-rftlrkhHZOcjDwkGlnUtZZkvaPHCsDATp4pGpuOOMDaTdDDXF91wuVDJoWoPsKX/3YPQ5fHuF3STjcYyKr+Qhg==}
|
|
||||||
|
|
||||||
snapshots:
|
|
||||||
|
|
||||||
'@esbuild/aix-ppc64@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/android-arm64@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/android-arm@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/android-x64@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/darwin-arm64@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/darwin-x64@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/freebsd-arm64@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/freebsd-x64@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/linux-arm64@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/linux-arm@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/linux-ia32@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/linux-loong64@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/linux-mips64el@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/linux-ppc64@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/linux-riscv64@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/linux-s390x@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/linux-x64@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/netbsd-arm64@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/netbsd-x64@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/openbsd-arm64@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/openbsd-x64@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/openharmony-arm64@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/sunos-x64@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/win32-arm64@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/win32-ia32@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@esbuild/win32-x64@0.27.7':
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@types/node@22.19.17':
|
|
||||||
dependencies:
|
|
||||||
undici-types: 6.21.0
|
|
||||||
|
|
||||||
esbuild@0.27.7:
|
|
||||||
optionalDependencies:
|
|
||||||
'@esbuild/aix-ppc64': 0.27.7
|
|
||||||
'@esbuild/android-arm': 0.27.7
|
|
||||||
'@esbuild/android-arm64': 0.27.7
|
|
||||||
'@esbuild/android-x64': 0.27.7
|
|
||||||
'@esbuild/darwin-arm64': 0.27.7
|
|
||||||
'@esbuild/darwin-x64': 0.27.7
|
|
||||||
'@esbuild/freebsd-arm64': 0.27.7
|
|
||||||
'@esbuild/freebsd-x64': 0.27.7
|
|
||||||
'@esbuild/linux-arm': 0.27.7
|
|
||||||
'@esbuild/linux-arm64': 0.27.7
|
|
||||||
'@esbuild/linux-ia32': 0.27.7
|
|
||||||
'@esbuild/linux-loong64': 0.27.7
|
|
||||||
'@esbuild/linux-mips64el': 0.27.7
|
|
||||||
'@esbuild/linux-ppc64': 0.27.7
|
|
||||||
'@esbuild/linux-riscv64': 0.27.7
|
|
||||||
'@esbuild/linux-s390x': 0.27.7
|
|
||||||
'@esbuild/linux-x64': 0.27.7
|
|
||||||
'@esbuild/netbsd-arm64': 0.27.7
|
|
||||||
'@esbuild/netbsd-x64': 0.27.7
|
|
||||||
'@esbuild/openbsd-arm64': 0.27.7
|
|
||||||
'@esbuild/openbsd-x64': 0.27.7
|
|
||||||
'@esbuild/openharmony-arm64': 0.27.7
|
|
||||||
'@esbuild/sunos-x64': 0.27.7
|
|
||||||
'@esbuild/win32-arm64': 0.27.7
|
|
||||||
'@esbuild/win32-ia32': 0.27.7
|
|
||||||
'@esbuild/win32-x64': 0.27.7
|
|
||||||
|
|
||||||
typescript@5.9.3: {}
|
|
||||||
|
|
||||||
undici-types@6.21.0: {}
|
|
||||||
|
|
||||||
zod@4.3.6: {}
|
|
||||||
@ -1,254 +0,0 @@
|
|||||||
import { existsSync, readFileSync } from "node:fs";
|
|
||||||
import { join } from "node:path";
|
|
||||||
import type { Role, RoleResult, WorkflowMessage } from "@uncaged/nerve-core";
|
|
||||||
import type { SpawnError } from "@uncaged/nerve-workflow-utils";
|
|
||||||
import { cursorAgent, isDryRun, spawnSafe } from "@uncaged/nerve-workflow-utils";
|
|
||||||
import { z } from "zod";
|
|
||||||
import type { PlannerMeta } from "../planner/index.js";
|
|
||||||
import type { TesterMeta } from "../tester/index.js";
|
|
||||||
import { coderPrompt } from "./prompt.js";
|
|
||||||
|
|
||||||
// Zod schema for the coder role's step metadata. Every field has a default
// so a partially populated result still parses.
export const coderMetaSchema = z.object({
  // Workflow package the coder worked on; "" until the planner names one.
  workflowName: z.string().default(""),
  // 1-based count of coder invocations in this run.
  attempt: z.number().default(1),
  // Whether each of the three required generated files exists on disk.
  files: z
    .object({
      indexTs: z.boolean().default(false),
      packageJson: z.boolean().default(false),
      tsconfigJson: z.boolean().default(false),
    })
    .default({ indexTs: false, packageJson: false, tsconfigJson: false }),
  // Results of the post-generation lint ("pnpm run check") and
  // typecheck ("npx tsc --noEmit") steps.
  lintPassed: z.boolean().default(false),
  buildPassed: z.boolean().default(false),
  lintLog: z.string().default(""),
  buildLog: z.string().default(""),
  // Raw output captured from the cursor agent invocation.
  cursorOutput: z.string().default(""),
  // Failure description, or null on success.
  reason: z.string().nullable().default(null),
});

// Inferred TypeScript type for a parsed coder step meta.
export type CoderMeta = z.infer<typeof coderMetaSchema>;

// Dependencies injected into buildCoderRole.
export type BuildCoderDeps = {
  nerveRoot: string;
  workflowsDir: string;
};
|
|
||||||
|
|
||||||
function formatSpawnFailure(error: SpawnError): string {
|
|
||||||
if (error.kind === "spawn_failed") {
|
|
||||||
return error.message;
|
|
||||||
}
|
|
||||||
if (error.kind === "timeout") {
|
|
||||||
return `timeout stdout=${error.stdout.slice(0, 300)} stderr=${error.stderr.slice(0, 300)}`;
|
|
||||||
}
|
|
||||||
return `exit ${error.exitCode} stderr=${error.stderr.slice(0, 500)}`;
|
|
||||||
}
|
|
||||||
|
|
||||||
function scanGeneratedCodePitfalls(source: string): string[] {
|
|
||||||
const issues: string[] = [];
|
|
||||||
if (/\bawait\s+import\s*\(/.test(source)) {
|
|
||||||
issues.push("Found await import() in generated workflow code");
|
|
||||||
}
|
|
||||||
if (/\bimport\s*\(\s*["'`]/.test(source) && !source.includes("Dynamic import required")) {
|
|
||||||
issues.push("Found undocumented dynamic import() call");
|
|
||||||
}
|
|
||||||
if (!/\bexport\s+default\s+/.test(source)) {
|
|
||||||
issues.push("Missing default export of WorkflowDefinition");
|
|
||||||
}
|
|
||||||
return issues;
|
|
||||||
}
|
|
||||||
|
|
||||||
async function runLintAndBuild(
|
|
||||||
workflowDir: string,
|
|
||||||
dry: boolean,
|
|
||||||
): Promise<{
|
|
||||||
lintPassed: boolean;
|
|
||||||
buildPassed: boolean;
|
|
||||||
lintLog: string;
|
|
||||||
buildLog: string;
|
|
||||||
reason: string | null;
|
|
||||||
}> {
|
|
||||||
const lintRun = await spawnSafe("pnpm", ["run", "check"], {
|
|
||||||
cwd: workflowDir,
|
|
||||||
env: null,
|
|
||||||
timeoutMs: 300_000,
|
|
||||||
dryRun: dry,
|
|
||||||
});
|
|
||||||
if (!lintRun.ok) {
|
|
||||||
return {
|
|
||||||
lintPassed: false,
|
|
||||||
buildPassed: false,
|
|
||||||
lintLog: formatSpawnFailure(lintRun.error),
|
|
||||||
buildLog: "",
|
|
||||||
reason: `lint failed: ${formatSpawnFailure(lintRun.error)}`,
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
const lintLog = lintRun.value.stderr.trim() || lintRun.value.stdout.trim() || "(no output)";
|
|
||||||
const tscRun = await spawnSafe("npx", ["tsc", "--noEmit"], {
|
|
||||||
cwd: workflowDir,
|
|
||||||
env: null,
|
|
||||||
timeoutMs: 300_000,
|
|
||||||
dryRun: dry,
|
|
||||||
});
|
|
||||||
if (!tscRun.ok) {
|
|
||||||
return {
|
|
||||||
lintPassed: true,
|
|
||||||
buildPassed: false,
|
|
||||||
lintLog,
|
|
||||||
buildLog: formatSpawnFailure(tscRun.error),
|
|
||||||
reason: `build failed: ${formatSpawnFailure(tscRun.error)}`,
|
|
||||||
};
|
|
||||||
}
|
|
||||||
const buildLog = tscRun.value.stderr.trim() || tscRun.value.stdout.trim() || "(no output)";
|
|
||||||
return { lintPassed: true, buildPassed: true, lintLog, buildLog, reason: null };
|
|
||||||
}
|
|
||||||
|
|
||||||
function lastMetaForRole<M>(messages: WorkflowMessage[], role: string): M | null {
|
|
||||||
for (let i = messages.length - 1; i >= 0; i--) {
|
|
||||||
if (messages[i].role === role) {
|
|
||||||
return messages[i].meta as M;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
 * Build the coder role: asks the cursor agent to implement the workflow
 * package the planner designed, then verifies required files exist,
 * static-scans the generated index.ts, and runs lint + typecheck.
 */
export function buildCoderRole({ nerveRoot, workflowsDir }: BuildCoderDeps): Role<CoderMeta> {
  return async (start, messages) => {
    const dry = isDryRun(start);
    const plannerMeta = lastMetaForRole<PlannerMeta>(messages, "planner");
    const previousTester = lastMetaForRole<TesterMeta>(messages, "tester");
    // Attempt number is derived from how many coder steps already ran.
    const attempt = messages.filter((m) => m.role === "coder").length + 1;

    // Without a named plan there is nothing to implement.
    if (plannerMeta === null || plannerMeta.workflowName.trim().length === 0) {
      return {
        content: "coder cannot continue: missing planner output",
        meta: {
          workflowName: "",
          attempt,
          files: { indexTs: false, packageJson: false, tsconfigJson: false },
          lintPassed: false,
          buildPassed: false,
          lintLog: "",
          buildLog: "",
          cursorOutput: "",
          reason: "missing planner output",
        },
      } satisfies RoleResult<CoderMeta>;
    }

    const wfName = plannerMeta.workflowName.trim();
    // On a retry after a tester failure, feed the failure back into the prompt.
    const feedback =
      previousTester !== null && previousTester.passed === false
        ? `\n\nPrevious tester failure to fix:\n${previousTester.reason}\n${previousTester.dryRunLog}\n`
        : "";

    const prompt = coderPrompt({
      workflowsDir,
      wfName,
      planMarkdown: plannerMeta.planMarkdown,
      plannerStructured: {
        workflowName: plannerMeta.workflowName,
        roles: plannerMeta.roles,
        flowTransitions: plannerMeta.flowTransitions,
        validationLoopsDesign: plannerMeta.validationLoopsDesign,
        externalDeps: plannerMeta.externalDeps,
        dataFlow: plannerMeta.dataFlow,
      },
      feedback,
      nerveRoot,
    });

    // Delegate the actual code generation to the cursor agent.
    const agentRun = await cursorAgent({
      prompt,
      mode: "default",
      cwd: nerveRoot,
      env: null,
      timeoutMs: null,
      dryRun: dry,
    });

    // Verify on disk which of the three required files the agent produced.
    const workflowDir = join(workflowsDir, wfName);
    const files = {
      indexTs: existsSync(join(workflowDir, "index.ts")),
      packageJson: existsSync(join(workflowDir, "package.json")),
      tsconfigJson: existsSync(join(workflowDir, "tsconfig.json")),
    };
    const missing = [
      files.indexTs ? null : "index.ts",
      files.packageJson ? null : "package.json",
      files.tsconfigJson ? null : "tsconfig.json",
    ].filter((x) => x !== null) as string[];

    // Agent failure takes precedence over the missing-file report.
    if (!agentRun.ok) {
      return {
        content: `coder failed: ${formatSpawnFailure(agentRun.error)}`,
        meta: {
          workflowName: wfName,
          attempt,
          files,
          lintPassed: false,
          buildPassed: false,
          lintLog: "",
          buildLog: "",
          cursorOutput: "",
          reason: formatSpawnFailure(agentRun.error),
        },
      } satisfies RoleResult<CoderMeta>;
    }

    if (missing.length > 0) {
      return {
        content: `coder failed: missing required files (${missing.join(", ")})`,
        meta: {
          workflowName: wfName,
          attempt,
          files,
          lintPassed: false,
          buildPassed: false,
          lintLog: "",
          buildLog: "",
          cursorOutput: agentRun.value,
          reason: `missing files: ${missing.join(", ")}`,
        },
      } satisfies RoleResult<CoderMeta>;
    }

    // Static scan of the generated entry point before spending time on
    // lint/build (catches dynamic imports and a missing default export).
    const source = readFileSync(join(workflowDir, "index.ts"), "utf-8");
    const pitfalls = scanGeneratedCodePitfalls(source);
    if (pitfalls.length > 0) {
      return {
        content: `coder static check failed:\n${pitfalls.join("\n")}`,
        meta: {
          workflowName: wfName,
          attempt,
          files,
          lintPassed: false,
          buildPassed: false,
          lintLog: pitfalls.join("\n"),
          buildLog: "",
          cursorOutput: agentRun.value,
          reason: pitfalls.join("; "),
        },
      } satisfies RoleResult<CoderMeta>;
    }

    // Final gate: lint ("pnpm run check") and typecheck ("npx tsc --noEmit").
    const check = await runLintAndBuild(workflowDir, dry);
    const passed = check.lintPassed && check.buildPassed;
    return {
      content: passed
        ? `coder PASS: lint+build ok\n\n${check.lintLog}\n\n${check.buildLog}`
        : `coder FAIL: ${check.reason ?? "unknown error"}`,
      meta: {
        workflowName: wfName,
        attempt,
        files,
        lintPassed: check.lintPassed,
        buildPassed: check.buildPassed,
        lintLog: check.lintLog,
        buildLog: check.buildLog,
        cursorOutput: agentRun.value,
        reason: check.reason,
      },
    } satisfies RoleResult<CoderMeta>;
  };
}
|
|
||||||
@ -1,39 +0,0 @@
|
|||||||
// Inputs used to assemble the coder agent prompt.
export type CoderPromptParams = {
  workflowsDir: string;
  wfName: string;
  // The planner's plan rendered as markdown, inlined verbatim.
  planMarkdown: string;
  // Structured planner fields, serialized as pretty-printed JSON.
  plannerStructured: object;
  // Optional tester-failure feedback; "" on the first attempt.
  feedback: string;
  nerveRoot: string;
};

/**
 * Build the instruction prompt for the cursor agent: implement the planned
 * workflow package under workflowsDir/wfName and register it in nerve.yaml.
 */
export function coderPrompt({
  workflowsDir,
  wfName,
  planMarkdown,
  plannerStructured,
  feedback,
  nerveRoot,
}: CoderPromptParams): string {
  return `Implement a Nerve workflow package under ${workflowsDir}/${wfName}/.

Planner output:
${planMarkdown}

Structured planner fields:
${JSON.stringify(plannerStructured, null, 2)}
${feedback}

Required files:
1) ${workflowsDir}/${wfName}/index.ts
2) ${workflowsDir}/${wfName}/package.json
3) ${workflowsDir}/${wfName}/tsconfig.json
4) update ${nerveRoot}/nerve.yaml with workflows.${wfName}

Rules:
- keep WorkflowDefinition<WorkflowMeta> pattern
- no dynamic import()
- use types (not interfaces)
- include retry-aware moderator routing
- write compile-ready TypeScript`;
}
|
|
||||||
@ -1,190 +0,0 @@
|
|||||||
import type { Role, RoleResult, WorkflowMessage } from "@uncaged/nerve-core";
|
|
||||||
import type { SpawnError } from "@uncaged/nerve-workflow-utils";
|
|
||||||
import { cursorAgent, isDryRun, spawnSafe } from "@uncaged/nerve-workflow-utils";
|
|
||||||
import { z } from "zod";
|
|
||||||
import type { PlannerMeta } from "../planner/index.js";
|
|
||||||
import type { TesterMeta } from "../tester/index.js";
|
|
||||||
import { committerPrompt } from "./prompt.js";
|
|
||||||
|
|
||||||
// Zod schema for the committer role's step metadata. Defaults describe the
// "not invoked" state; nullable fields stay null when a value is unknown.
export const committerMetaSchema = z.object({
  // Whether a commit attempt was actually started.
  invoked: z.boolean().default(false),
  success: z.boolean().default(false),
  // Branch name and commit hash parsed from the committer's output, when found.
  branch: z.string().nullable().default(null),
  commitHash: z.string().nullable().default(null),
  // true/false when the output stated it; null when undetermined.
  pushed: z.boolean().nullable().default(null),
  // Captured committer output (truncated by the caller).
  log: z.string().default(""),
  // Failure description, or null on success.
  error: z.string().nullable().default(null),
});

// Inferred TypeScript type for a parsed committer step meta.
export type CommitterMeta = z.infer<typeof committerMetaSchema>;

// Dependencies injected into buildCommitterRole.
export type BuildCommitterDeps = {
  nerveRoot: string;
};
|
|
||||||
|
|
||||||
function formatSpawnFailure(error: SpawnError): string {
|
|
||||||
if (error.kind === "spawn_failed") {
|
|
||||||
return error.message;
|
|
||||||
}
|
|
||||||
if (error.kind === "timeout") {
|
|
||||||
return `timeout stdout=${error.stdout.slice(0, 300)} stderr=${error.stderr.slice(0, 300)}`;
|
|
||||||
}
|
|
||||||
return `exit ${error.exitCode} stderr=${error.stderr.slice(0, 500)}`;
|
|
||||||
}
|
|
||||||
|
|
||||||
function lastMetaForRole<M>(messages: WorkflowMessage[], role: string): M | null {
|
|
||||||
for (let i = messages.length - 1; i >= 0; i--) {
|
|
||||||
if (messages[i].role === role) {
|
|
||||||
return messages[i].meta as M;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
|
|
||||||
function inferWorkflowName(messages: WorkflowMessage[]): string {
|
|
||||||
const tester = lastMetaForRole<TesterMeta>(messages, "tester");
|
|
||||||
if (tester !== null && tester.workflowName.trim().length > 0) {
|
|
||||||
return tester.workflowName.trim();
|
|
||||||
}
|
|
||||||
const coder = lastMetaForRole<{ workflowName: string }>(messages, "coder");
|
|
||||||
if (coder !== null && coder.workflowName.trim().length > 0) {
|
|
||||||
return coder.workflowName.trim();
|
|
||||||
}
|
|
||||||
const planner = lastMetaForRole<PlannerMeta>(messages, "planner");
|
|
||||||
if (planner !== null && planner.workflowName.trim().length > 0) {
|
|
||||||
return planner.workflowName.trim();
|
|
||||||
}
|
|
||||||
return "";
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
 * Execute the git commit task, trying the hermes CLI first (two candidate
 * command spellings) and falling back to the cursor agent when neither
 * hermes invocation succeeds. Parses branch/commit/push info out of the
 * command output.
 */
async function runHermesCommitter(
  task: string,
  nerveRoot: string,
): Promise<CommitterMeta> {
  // Two known spellings of the hermes entry point; first success wins.
  const commandAttempts: Array<{ cmd: string; args: string[] }> = [
    { cmd: "hermes-agent", args: ["--cwd", nerveRoot, "--task", task] },
    { cmd: "hermes", args: ["agent", "--cwd", nerveRoot, "--task", task] },
  ];

  for (const candidate of commandAttempts) {
    const run = await spawnSafe(candidate.cmd, candidate.args, {
      cwd: nerveRoot,
      env: null,
      timeoutMs: 600_000,
      dryRun: false,
    });
    if (!run.ok) {
      // Try the next spelling; failures here are not reported.
      continue;
    }
    const text = `${run.value.stdout}\n${run.value.stderr}`;
    // hermes output is expected to contain KEY=value lines, one per line.
    const branch = text.match(/^BRANCH=(.*)$/m)?.[1]?.trim() ?? null;
    const commitHash = text.match(/^COMMIT=(.*)$/m)?.[1]?.trim() ?? null;
    const pushedText = text.match(/^PUSHED=(.*)$/m)?.[1]?.trim().toLowerCase() ?? "unknown";
    // Anything other than literal true/false maps to "unknown" (null).
    const pushed = pushedText === "true" ? true : pushedText === "false" ? false : null;
    return {
      invoked: true,
      success: true,
      branch: branch && branch.length > 0 ? branch : null,
      commitHash: commitHash && commitHash.length > 0 ? commitHash : null,
      pushed,
      log: text.slice(0, 20_000),
      error: null,
    };
  }

  // Neither hermes spelling worked: hand the task to the cursor agent.
  const fallback = await cursorAgent({
    prompt: `Run this git committer task in repository ${nerveRoot}:\n\n${task}`,
    mode: "default",
    cwd: nerveRoot,
    env: null,
    timeoutMs: null,
    dryRun: false,
  });
  if (!fallback.ok) {
    return {
      invoked: true,
      success: false,
      branch: null,
      commitHash: null,
      pushed: null,
      log: "",
      error: `hermes and fallback both failed: ${formatSpawnFailure(fallback.error)}`,
    };
  }

  // Best-effort extraction from free-form agent output: first "branch: x"
  // style mention and first hex run that looks like a commit hash.
  const out = fallback.value;
  const branch = out.match(/(?:branch|BRANCH)\s*[:=]\s*([^\s]+)/)?.[1] ?? null;
  const commitHash = out.match(/[a-f0-9]{7,40}/)?.[0] ?? null;
  return {
    invoked: true,
    success: true,
    branch,
    commitHash,
    // "push" appearing anywhere in the output is taken as pushed=true;
    // otherwise the push state is unknown (null).
    pushed: out.toLowerCase().includes("push") ? true : null,
    log: out.slice(0, 20_000),
    error: null,
  };
}
|
|
||||||
|
|
||||||
export function buildCommitterRole({ nerveRoot }: BuildCommitterDeps): Role<CommitterMeta> {
|
|
||||||
return async (start, messages) => {
|
|
||||||
const dry = isDryRun(start);
|
|
||||||
const planner = lastMetaForRole<PlannerMeta>(messages, "planner");
|
|
||||||
const tester = lastMetaForRole<TesterMeta>(messages, "tester");
|
|
||||||
const workflowName = inferWorkflowName(messages);
|
|
||||||
|
|
||||||
const skipMeta: CommitterMeta = {
|
|
||||||
invoked: false,
|
|
||||||
success: false,
|
|
||||||
branch: null,
|
|
||||||
commitHash: null,
|
|
||||||
pushed: null,
|
|
||||||
log: "",
|
|
||||||
error: null,
|
|
||||||
};
|
|
||||||
|
|
||||||
if (planner === null || tester === null || workflowName.length === 0) {
|
|
||||||
return {
|
|
||||||
content: "committer skipped: missing planner/tester/workflowName context",
|
|
||||||
meta: { ...skipMeta, error: "missing committer context" },
|
|
||||||
} satisfies RoleResult<CommitterMeta>;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!tester.passed) {
|
|
||||||
return {
|
|
||||||
content: "committer skipped: tester not passed",
|
|
||||||
meta: { ...skipMeta, error: "tester not passed" },
|
|
||||||
} satisfies RoleResult<CommitterMeta>;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (dry) {
|
|
||||||
return {
|
|
||||||
content: "[dry-run] skipped hermes committer",
|
|
||||||
meta: {
|
|
||||||
invoked: true,
|
|
||||||
success: true,
|
|
||||||
branch: "wf/dry-run",
|
|
||||||
commitHash: null,
|
|
||||||
pushed: null,
|
|
||||||
log: "[dry-run] skipped hermes committer",
|
|
||||||
error: null,
|
|
||||||
},
|
|
||||||
} satisfies RoleResult<CommitterMeta>;
|
|
||||||
}
|
|
||||||
|
|
||||||
const task = committerPrompt({
|
|
||||||
nerveRoot,
|
|
||||||
workflowName,
|
|
||||||
userPrompt: planner.userPrompt,
|
|
||||||
testerReason: tester.reason,
|
|
||||||
});
|
|
||||||
|
|
||||||
const committed = await runHermesCommitter(task, nerveRoot);
|
|
||||||
return {
|
|
||||||
content: committed.success
|
|
||||||
? committed.log
|
|
||||||
: `committer failed: ${committed.error ?? "unknown"}`,
|
|
||||||
meta: committed,
|
|
||||||
} satisfies RoleResult<CommitterMeta>;
|
|
||||||
};
|
|
||||||
}
|
|
||||||
@ -1,35 +0,0 @@
|
|||||||
/** Inputs for {@link committerPrompt}. */
export type CommitterPromptParams = {
  /** Path to the git repository root shown to the committer subagent. */
  nerveRoot: string;
  /** Name of the generated workflow being committed. */
  workflowName: string;
  /** Original user request; truncated to 500 chars in the prompt. */
  userPrompt: string;
  /** Tester verdict/reason, included as context. */
  testerReason: string;
};
|
|
||||||
|
|
||||||
export function committerPrompt({
|
|
||||||
nerveRoot,
|
|
||||||
workflowName,
|
|
||||||
userPrompt,
|
|
||||||
testerReason,
|
|
||||||
}: CommitterPromptParams): string {
|
|
||||||
return `You are a git committer subagent for Nerve workflow generation.
|
|
||||||
Repository root: ${nerveRoot}
|
|
||||||
|
|
||||||
Goal:
|
|
||||||
- Commit and push generated workflow "${workflowName}".
|
|
||||||
- Handle dirty worktree safely (do not discard unrelated user edits).
|
|
||||||
- Detect default branch automatically.
|
|
||||||
- Create a focused branch for this workflow update.
|
|
||||||
- Stage only workflow files and required config updates.
|
|
||||||
|
|
||||||
Context:
|
|
||||||
- User prompt summary: ${userPrompt.slice(0, 500)}
|
|
||||||
- Tester result: ${testerReason}
|
|
||||||
|
|
||||||
Expected output format:
|
|
||||||
BRANCH=<branch-or-empty>
|
|
||||||
COMMIT=<hash-or-empty>
|
|
||||||
PUSHED=<true|false|unknown>
|
|
||||||
LOG_START
|
|
||||||
<details>
|
|
||||||
LOG_END`;
|
|
||||||
}
|
|
||||||
@ -1,142 +0,0 @@
|
|||||||
import { existsSync, readFileSync } from "node:fs";
|
|
||||||
import { join } from "node:path";
|
|
||||||
import type { Role, RoleResult } from "@uncaged/nerve-core";
|
|
||||||
import type { LlmProvider } from "@uncaged/nerve-workflow-utils";
|
|
||||||
import { isDryRun, llmExtract, nerveAgentContext, readNerveYaml } from "@uncaged/nerve-workflow-utils";
|
|
||||||
import { z } from "zod";
|
|
||||||
import { plannerPrompt } from "./prompt.js";
|
|
||||||
|
|
||||||
// Shape of a single planned role in the extracted plan. Every field defaults
// to "" (and the object itself defaults to all-empty) so partially extracted
// LLM output still validates instead of failing the whole plan.
const roleSchema = z
  .object({
    name: z.string().default(""),
    goal: z.string().default(""),
    io: z.string().default(""),
  })
  .default({ name: "", goal: "", io: "" });
|
|
||||||
|
|
||||||
export const plannerMetaSchema = z.object({
|
|
||||||
userPrompt: z.string().default(""),
|
|
||||||
workflowName: z
|
|
||||||
.string()
|
|
||||||
.default("")
|
|
||||||
.describe("kebab-case workflow name under workflows/, e.g. issue-fixer"),
|
|
||||||
roles: z.array(roleSchema).default([]),
|
|
||||||
flowTransitions: z.preprocess((v) => (Array.isArray(v) ? v.join("\n") : v), z.string().default("")),
|
|
||||||
validationLoopsDesign: z.preprocess(
|
|
||||||
(v) => (Array.isArray(v) ? v.join("\n") : v),
|
|
||||||
z.string().default(""),
|
|
||||||
),
|
|
||||||
externalDeps: z.preprocess(
|
|
||||||
(v) => (Array.isArray(v) ? v.join(", ") : v),
|
|
||||||
z.string().default(""),
|
|
||||||
),
|
|
||||||
dataFlow: z.preprocess((v) => (Array.isArray(v) ? v.join("\n") : v), z.string().default("")),
|
|
||||||
planMarkdown: z.preprocess(
|
|
||||||
(v) => (Array.isArray(v) ? v.join("\n") : v),
|
|
||||||
z.string().default(""),
|
|
||||||
),
|
|
||||||
});
|
|
||||||
|
|
||||||
/** Structured planner output, inferred from {@link plannerMetaSchema}. */
export type PlannerMeta = z.infer<typeof plannerMetaSchema>;
|
|
||||||
|
|
||||||
/** Dependencies injected into {@link buildPlannerRole}. */
export type BuildPlannerDeps = {
  /** LLM provider used for structured extraction. */
  provider: LlmProvider;
  /** Workspace root; used to read nerve.yaml for prompt context. */
  nerveRoot: string;
  /** Root directory containing workflow packages. */
  workflowsDir: string;
};
|
|
||||||
|
|
||||||
function getNerveYaml(nerveRoot: string): string {
|
|
||||||
const result = readNerveYaml({ nerveRoot });
|
|
||||||
return result.ok ? result.value : "# nerve.yaml unavailable";
|
|
||||||
}
|
|
||||||
|
|
||||||
function getSenseGeneratorReference(workflowsDir: string): string {
|
|
||||||
const p = join(workflowsDir, "sense-generator", "index.ts");
|
|
||||||
if (!existsSync(p)) {
|
|
||||||
return "(missing workflows/sense-generator/index.ts)";
|
|
||||||
}
|
|
||||||
return readFileSync(p, "utf-8");
|
|
||||||
}
|
|
||||||
|
|
||||||
export function buildPlannerRole({
|
|
||||||
provider,
|
|
||||||
nerveRoot,
|
|
||||||
workflowsDir,
|
|
||||||
}: BuildPlannerDeps): Role<PlannerMeta> {
|
|
||||||
return async (start, _messages) => {
|
|
||||||
const dry = isDryRun(start);
|
|
||||||
const userPrompt = start.content;
|
|
||||||
|
|
||||||
const messages = plannerPrompt({
|
|
||||||
nerveAgentContext,
|
|
||||||
userPrompt,
|
|
||||||
nerveRoot,
|
|
||||||
workflowsDir,
|
|
||||||
senseGeneratorReference: getSenseGeneratorReference(workflowsDir),
|
|
||||||
nerveYaml: getNerveYaml(nerveRoot),
|
|
||||||
});
|
|
||||||
|
|
||||||
const extracted = await llmExtract({
|
|
||||||
text: messages.map((m) => m.content).join("\n"),
|
|
||||||
schema: plannerMetaSchema,
|
|
||||||
provider,
|
|
||||||
dryRun: dry,
|
|
||||||
});
|
|
||||||
|
|
||||||
const emptyMeta: PlannerMeta = {
|
|
||||||
userPrompt,
|
|
||||||
workflowName: "",
|
|
||||||
roles: [],
|
|
||||||
flowTransitions: "",
|
|
||||||
validationLoopsDesign: "",
|
|
||||||
externalDeps: "",
|
|
||||||
dataFlow: "",
|
|
||||||
planMarkdown: "",
|
|
||||||
};
|
|
||||||
|
|
||||||
if (!extracted.ok) {
|
|
||||||
return {
|
|
||||||
content: `[planner] llmExtract failed: ${JSON.stringify(extracted.error)}`,
|
|
||||||
meta: emptyMeta,
|
|
||||||
} satisfies RoleResult<PlannerMeta>;
|
|
||||||
}
|
|
||||||
|
|
||||||
const value = extracted.value;
|
|
||||||
const planMarkdown =
|
|
||||||
value.planMarkdown.length > 0
|
|
||||||
? value.planMarkdown
|
|
||||||
: [
|
|
||||||
`# Workflow Plan`,
|
|
||||||
`- workflowName: ${value.workflowName}`,
|
|
||||||
``,
|
|
||||||
`## Roles`,
|
|
||||||
...value.roles.map((r) => `- ${r.name}: ${r.goal} (${r.io})`),
|
|
||||||
``,
|
|
||||||
`## Flow Transitions`,
|
|
||||||
value.flowTransitions,
|
|
||||||
``,
|
|
||||||
`## Validation Loops`,
|
|
||||||
value.validationLoopsDesign,
|
|
||||||
``,
|
|
||||||
`## External Dependencies`,
|
|
||||||
value.externalDeps,
|
|
||||||
``,
|
|
||||||
`## Data Flow`,
|
|
||||||
value.dataFlow,
|
|
||||||
].join("\n");
|
|
||||||
|
|
||||||
return {
|
|
||||||
content: planMarkdown,
|
|
||||||
meta: {
|
|
||||||
userPrompt,
|
|
||||||
workflowName: value.workflowName,
|
|
||||||
roles: value.roles,
|
|
||||||
flowTransitions: value.flowTransitions,
|
|
||||||
validationLoopsDesign: value.validationLoopsDesign,
|
|
||||||
externalDeps: value.externalDeps,
|
|
||||||
dataFlow: value.dataFlow,
|
|
||||||
planMarkdown,
|
|
||||||
},
|
|
||||||
} satisfies RoleResult<PlannerMeta>;
|
|
||||||
};
|
|
||||||
}
|
|
||||||
@ -1,49 +0,0 @@
|
|||||||
import type { LlmMessage } from "@uncaged/nerve-workflow-utils";
|
|
||||||
|
|
||||||
/** Inputs for {@link plannerPrompt}. */
export type PlannerPromptParams = {
  /** Shared Nerve agent context preamble, embedded verbatim. */
  nerveAgentContext: string;
  /** Raw user request to plan for. */
  userPrompt: string;
  /** Workspace root path, shown to the model as the target root. */
  nerveRoot: string;
  /** Root directory of workflow packages. */
  workflowsDir: string;
  /** Reference workflow source; truncated to 18k chars in the prompt. */
  senseGeneratorReference: string;
  /** Current nerve.yaml contents, embedded as a YAML code fence. */
  nerveYaml: string;
};
|
|
||||||
|
|
||||||
export function plannerPrompt({
|
|
||||||
nerveAgentContext,
|
|
||||||
userPrompt,
|
|
||||||
nerveRoot,
|
|
||||||
workflowsDir,
|
|
||||||
senseGeneratorReference,
|
|
||||||
nerveYaml,
|
|
||||||
}: PlannerPromptParams): LlmMessage[] {
|
|
||||||
const content = `Design a Nerve workflow plan from this request.
|
|
||||||
|
|
||||||
${nerveAgentContext}
|
|
||||||
|
|
||||||
User request:
|
|
||||||
${userPrompt}
|
|
||||||
|
|
||||||
Target root: ${nerveRoot}
|
|
||||||
Workflow dir root: ${workflowsDir}
|
|
||||||
|
|
||||||
Reference structure:
|
|
||||||
\`\`\`ts
|
|
||||||
${senseGeneratorReference.slice(0, 18_000)}
|
|
||||||
\`\`\`
|
|
||||||
|
|
||||||
Current nerve.yaml:
|
|
||||||
\`\`\`yaml
|
|
||||||
${nerveYaml}
|
|
||||||
\`\`\`
|
|
||||||
|
|
||||||
Produce a complete markdown plan that includes:
|
|
||||||
- workflow name
|
|
||||||
- roles list
|
|
||||||
- flow/transitions
|
|
||||||
- validation loops design
|
|
||||||
- external deps
|
|
||||||
- data flow`;
|
|
||||||
|
|
||||||
return [{ role: "user", content }];
|
|
||||||
}
|
|
||||||
@ -1,153 +0,0 @@
|
|||||||
import type { Role, RoleResult, WorkflowMessage } from "@uncaged/nerve-core";
|
|
||||||
import type { SpawnError } from "@uncaged/nerve-workflow-utils";
|
|
||||||
import { cursorAgent, isDryRun } from "@uncaged/nerve-workflow-utils";
|
|
||||||
import { z } from "zod";
|
|
||||||
import type { CoderMeta } from "../coder/index.js";
|
|
||||||
import type { PlannerMeta } from "../planner/index.js";
|
|
||||||
import { testerPrompt } from "./prompt.js";
|
|
||||||
|
|
||||||
// Tester verdict attached to each tester message.
export const testerMetaSchema = z.object({
  workflowName: z.string().default(""), // workflow under review
  attempt: z.number().default(1), // 1-based tester attempt count
  passed: z.boolean().default(false), // dry-run review verdict
  dryRunLog: z.string().default(""), // compact log captured from the review
  reason: z.string().default(""), // human-readable pass/fail reason
});
|
|
||||||
|
|
||||||
/** Tester verdict, inferred from {@link testerMetaSchema}. */
export type TesterMeta = z.infer<typeof testerMetaSchema>;
|
|
||||||
|
|
||||||
/** Dependencies injected into {@link buildTesterRole}. */
export type BuildTesterDeps = {
  /** Repository root the review agent runs in. */
  nerveRoot: string;
};
|
|
||||||
|
|
||||||
function formatSpawnFailure(error: SpawnError): string {
|
|
||||||
if (error.kind === "spawn_failed") {
|
|
||||||
return error.message;
|
|
||||||
}
|
|
||||||
if (error.kind === "timeout") {
|
|
||||||
return `timeout stdout=${error.stdout.slice(0, 300)} stderr=${error.stderr.slice(0, 300)}`;
|
|
||||||
}
|
|
||||||
return `exit ${error.exitCode} stderr=${error.stderr.slice(0, 500)}`;
|
|
||||||
}
|
|
||||||
|
|
||||||
function lastMetaForRole<M>(messages: WorkflowMessage[], role: string): M | null {
|
|
||||||
for (let i = messages.length - 1; i >= 0; i--) {
|
|
||||||
if (messages[i].role === role) {
|
|
||||||
return messages[i].meta as M;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
|
|
||||||
export function buildTesterRole({ nerveRoot }: BuildTesterDeps): Role<TesterMeta> {
|
|
||||||
return async (start, messages) => {
|
|
||||||
const dry = isDryRun(start);
|
|
||||||
const plannerMeta = lastMetaForRole<PlannerMeta>(messages, "planner");
|
|
||||||
const coderMeta = lastMetaForRole<CoderMeta>(messages, "coder");
|
|
||||||
const attempt = messages.filter((m) => m.role === "tester").length + 1;
|
|
||||||
|
|
||||||
if (plannerMeta === null || coderMeta === null) {
|
|
||||||
return {
|
|
||||||
content: "tester cannot continue: missing planner/coder output",
|
|
||||||
meta: {
|
|
||||||
workflowName: "",
|
|
||||||
attempt,
|
|
||||||
passed: false,
|
|
||||||
dryRunLog: "",
|
|
||||||
reason: "missing planner/coder output",
|
|
||||||
},
|
|
||||||
} satisfies RoleResult<TesterMeta>;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!coderMeta.lintPassed || !coderMeta.buildPassed) {
|
|
||||||
return {
|
|
||||||
content: "tester blocked: coder has not passed lint+build",
|
|
||||||
meta: {
|
|
||||||
workflowName: coderMeta.workflowName,
|
|
||||||
attempt,
|
|
||||||
passed: false,
|
|
||||||
dryRunLog: `${coderMeta.lintLog}\n\n${coderMeta.buildLog}`,
|
|
||||||
reason: "coder did not pass lint+build",
|
|
||||||
},
|
|
||||||
} satisfies RoleResult<TesterMeta>;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (dry) {
|
|
||||||
return {
|
|
||||||
content: "PASS — dry-run mode",
|
|
||||||
meta: {
|
|
||||||
workflowName: coderMeta.workflowName,
|
|
||||||
attempt,
|
|
||||||
passed: true,
|
|
||||||
dryRunLog: "[dry-run] tester skipped external checks",
|
|
||||||
reason: "dry-run mode",
|
|
||||||
},
|
|
||||||
} satisfies RoleResult<TesterMeta>;
|
|
||||||
}
|
|
||||||
|
|
||||||
const prompt = testerPrompt({
|
|
||||||
workflowName: coderMeta.workflowName,
|
|
||||||
plannerSpec: {
|
|
||||||
roles: plannerMeta.roles,
|
|
||||||
flowTransitions: plannerMeta.flowTransitions,
|
|
||||||
validationLoopsDesign: plannerMeta.validationLoopsDesign,
|
|
||||||
externalDeps: plannerMeta.externalDeps,
|
|
||||||
dataFlow: plannerMeta.dataFlow,
|
|
||||||
},
|
|
||||||
coderOutput: coderMeta.cursorOutput,
|
|
||||||
nerveRoot,
|
|
||||||
});
|
|
||||||
|
|
||||||
const run = await cursorAgent({
|
|
||||||
prompt,
|
|
||||||
mode: "ask",
|
|
||||||
cwd: nerveRoot,
|
|
||||||
env: null,
|
|
||||||
timeoutMs: null,
|
|
||||||
dryRun: false,
|
|
||||||
});
|
|
||||||
|
|
||||||
if (!run.ok) {
|
|
||||||
return {
|
|
||||||
content: "tester agent failed",
|
|
||||||
meta: {
|
|
||||||
workflowName: coderMeta.workflowName,
|
|
||||||
attempt,
|
|
||||||
passed: false,
|
|
||||||
dryRunLog: "",
|
|
||||||
reason: `tester agent failed: ${formatSpawnFailure(run.error)}`,
|
|
||||||
},
|
|
||||||
} satisfies RoleResult<TesterMeta>;
|
|
||||||
}
|
|
||||||
|
|
||||||
const text = run.value.trim();
|
|
||||||
const pass = text.startsWith("PASS|");
|
|
||||||
const fail = text.startsWith("FAIL|");
|
|
||||||
if (!pass && !fail) {
|
|
||||||
return {
|
|
||||||
content: "tester format invalid",
|
|
||||||
meta: {
|
|
||||||
workflowName: coderMeta.workflowName,
|
|
||||||
attempt,
|
|
||||||
passed: false,
|
|
||||||
dryRunLog: text,
|
|
||||||
reason: "tester format invalid",
|
|
||||||
},
|
|
||||||
} satisfies RoleResult<TesterMeta>;
|
|
||||||
}
|
|
||||||
|
|
||||||
const parts = text.split("|");
|
|
||||||
const reason = parts[1] ?? "no reason";
|
|
||||||
const log = parts.slice(2).join("|").trim();
|
|
||||||
return {
|
|
||||||
content: `${pass ? "PASS" : "FAIL"} — ${reason}`,
|
|
||||||
meta: {
|
|
||||||
workflowName: coderMeta.workflowName,
|
|
||||||
attempt,
|
|
||||||
passed: pass,
|
|
||||||
dryRunLog: log,
|
|
||||||
reason,
|
|
||||||
},
|
|
||||||
} satisfies RoleResult<TesterMeta>;
|
|
||||||
};
|
|
||||||
}
|
|
||||||
@ -1,34 +0,0 @@
|
|||||||
/** Inputs for {@link testerPrompt}. */
export type TesterPromptParams = {
  /** Workflow being reviewed. */
  workflowName: string;
  /** Planner specification, embedded as pretty-printed JSON. */
  plannerSpec: object;
  /** Coder output summary; truncated to 6000 chars in the prompt. */
  coderOutput: string;
  /** Repository root — currently unused by the prompt body. */
  nerveRoot: string;
};
|
|
||||||
|
|
||||||
export function testerPrompt({
|
|
||||||
workflowName,
|
|
||||||
plannerSpec,
|
|
||||||
coderOutput,
|
|
||||||
nerveRoot: _nerveRoot,
|
|
||||||
}: TesterPromptParams): string {
|
|
||||||
return `You are testing a generated Nerve workflow by doing a dry-run review.
|
|
||||||
|
|
||||||
Workflow: ${workflowName}
|
|
||||||
|
|
||||||
Planner specification:
|
|
||||||
${JSON.stringify(plannerSpec, null, 2)}
|
|
||||||
|
|
||||||
Coder output summary:
|
|
||||||
${coderOutput.slice(0, 6000)}
|
|
||||||
|
|
||||||
Required checks:
|
|
||||||
1) Verify role transitions are coherent and terminates to END.
|
|
||||||
2) Verify generated workflow adheres to planner intent.
|
|
||||||
3) Verify retry loops are explicit for recoverable failures.
|
|
||||||
4) Verify no obvious runtime-breaking issue in generated index.ts.
|
|
||||||
|
|
||||||
Return exactly:
|
|
||||||
PASS|<reason>|<compact markdown log>
|
|
||||||
or
|
|
||||||
FAIL|<reason>|<compact markdown log>`;
|
|
||||||
}
|
|
||||||
Loading…
x
Reference in New Issue
Block a user