Compare commits

...

71 Commits

Author SHA1 Message Date
a4625a4559 fix: restore CLI-triggered workflows, only remove restart-gateway
The previous commit incorrectly deleted all workflows. Only restart-gateway
should be removed (replaced by direct shell trigger). Other workflows
(solve-issue, extract-knowledge, develop-sense, develop-workflow) are
CLI-triggered and independent of sense coupling.
2026-05-02 13:55:27 +00:00
c71212a0ce refactor: sense triggers shell command directly, remove workflow
- SenseTrigger is now { command: string } — no workflow coupling
- Restart gateway via direct systemctl command instead of workflow
- Remove workflows/ directory and workflow config from nerve.yaml
2026-05-02 13:44:22 +00:00
8186a23ceb chore: remove unused schema and migrations 2026-05-02 09:38:22 +00:00
29d47bd9c4 feat: add restart-gateway workflow, remove unused senses
- Remove 4 data-only senses (linux-system-health, worker-process-metrics,
  hermes-session-message-stats, git-workspace-status) — none triggered workflows
- Refactor hermes-gateway-health sense: add state tracking, trigger
  restart-gateway workflow after 3 consecutive failures (with 5min cooldown)
- Add restart-gateway workflow: restarter role (systemctl restart) +
  verifier role (check service came back)
- Simplify nerve.yaml to single sense + single workflow
2026-05-02 05:38:44 +00:00
436ccf12b3 refactor(solve-issue): flatten role folders to single files
Also clean up leftover knowledge-extraction folder (renamed to extract-knowledge in dc1e96d).

Refs uncaged/nerve#284
2026-04-30 13:05:41 +00:00
2f78c72e4e refactor(solve-issue): flatten role folders to single files
Refs uncaged/nerve#284
2026-04-30 13:04:36 +00:00
dc1e96d8f3 refactor(workflows): rename knowledge-extraction to extract-knowledge
Align WorkflowDefinition name, nerve.yaml, role prompts, and lockfile path with extract-knowledge.

Refs #285

Made-with: Cursor
2026-04-30 13:04:29 +00:00
7432f80d61 refactor(knowledge-extraction): convert questioner and answerer to createRole four-tuple
- questioner: createRole(adapter, questionerPrompt, schema, extract) + queue short-circuit + meta post-processing
- answerer: createRole(adapter, answererPrompt, schema, extract) + empty-questions short-circuit
- build.ts: use createLlmAdapter(extract.provider) as default LLM adapter for questioner/answerer

Refs uncaged/nerve#277
2026-04-30 12:38:58 +00:00
1da41c7f08 chore: remove stale sense index.js from source and tracking
小橘 <xiaoju@shazhou.work>
2026-04-30 09:16:13 +00:00
07be0d3dfa refactor: move all build output to dist/
- senses build to dist/senses/<name>/index.js
- workflows build to dist/workflows/<name>/index.js
- scripts/build.mjs: clean dist/ before build, output to dist/
- .gitignore: simplified to just dist/

小橘 <xiaoju@shazhou.work>
2026-04-30 09:16:04 +00:00
0fdd2d26cc chore: remove build artifacts from git tracking
These are esbuild outputs, now covered by .gitignore.

小橘 <xiaoju@shazhou.work>
2026-04-30 09:11:16 +00:00
cf7e288874 chore: consolidate .gitignore into root
Remove per-workflow .gitignore files, add build output patterns to root.

小橘 <xiaoju@shazhou.work>
2026-04-30 09:06:22 +00:00
f7cf1a1cb2 refactor: single-package workspace with root esbuild build
Merge workflow and sense devDependencies into root, remove per-package package.json and workflow tsconfigs, add scripts/build.mjs for consistent outputs.

Fixes #22
2026-04-30 09:03:05 +00:00
e4fd5d6ba4 refactor: migrate all workflows to RFC-005 ThreadContext signatures
- Role: (start, messages) → (ctx: ThreadContext)
- AgentFn prompt callbacks: (start) → (ctx)
- ModeratorContext → ThreadContext
- 13 files updated across knowledge-extraction and solve-issue workflows

小橘 <xiaoju@shazhou.work>
2026-04-30 08:39:52 +00:00
1c512435de feat: add knowledge-extraction BFS workflow
Three-role workflow (questioner → answerer → explorer) that iterates
over .knowledge/ cards to discover and fill knowledge gaps via BFS.

- questioner: createLlmRole, reads card, asks 3 technical questions
- answerer: spawnSafe nerve knowledge query, judges answers
- explorer: reads code, writes/patches .knowledge cards, runs sync
- moderator: BFS queue from message history, stagnation rule

Closes #266
2026-04-30 02:27:10 +00:00
8774d71d57 feat: update senses to return ComputeResult<T>
Wrap compute return values in { signal, workflow: null }
to match new SenseComputeFn contract.

— 小橘 🍊(NEKO Team)
2026-04-30 00:38:34 +00:00
1d9e574c94 fix: remove unused AbortSignal param from sense compute
— 小橘 🍊(NEKO Team)
2026-04-30 00:23:12 +00:00
252162ea8e refactor: pure sense compute — return data instead of db.insert
All 5 senses updated to new API:
- compute(signal: AbortSignal) => Promise<T | null>
- Export table for runtime-side insert
- Remove drizzle-orm/libsql imports

Refs uncaged/nerve#264

— 小橘 🍊(NEKO Team)
2026-04-30 00:15:03 +00:00
60979aaa6a refactor: migrate develop-sense/develop-workflow to @uncaged/nerve-workflow-meta
Delete local roles, moderator, and build files. Workflow index.ts
now imports factory from package and wires adapters/extract/cwd.

Closes #21

— 小橘 🍊(NEKO Team)
2026-04-29 14:52:25 +00:00
b0cff7e0ed refactor: update reviewer calls to use ReviewerConfig object
API changed from (adapter, extract, cwd) to (adapter, extract, { cwd, ... })

— 小橘 🍊(NEKO Team)
2026-04-29 14:32:23 +00:00
b282dfdb7b Merge pull request 'refactor: use @uncaged/nerve-role-reviewer package' (#20) from feat/use-role-reviewer-package into master 2026-04-29 14:27:36 +00:00
bed5ecb56b refactor: use @uncaged/nerve-role-reviewer package
Replace inline reviewer role with import from package.
Both develop-sense and develop-workflow now share the same code.

— 小橘 🍊(NEKO Team)
2026-04-29 14:26:27 +00:00
6a2dbb7335 Merge pull request 'refactor: use @uncaged/nerve-role-committer package' (#19) from feat/use-role-committer-package into master 2026-04-29 14:20:02 +00:00
174df68368 refactor: use @uncaged/nerve-role-committer package, delete _shared/
- develop-sense/develop-workflow committer → re-export from package
- solve-issue committer → uses decorateRole chain (custom prompt stays)
- Delete _shared/workspace-committer.ts and _shared/ directory

RFC-004 Phase 1 complete
2026-04-29 14:18:12 +00:00
59b8f033ba Merge pull request 'refactor: simplify workspace committer — infer from thread' (#18) from refactor/17-simplify-committer into master 2026-04-29 12:53:34 +00:00
0a9da468f7 refactor: simplify workspace committer — agent infers context from thread
Remove nerveRoot, workflowName, conventionalCommitScopeHint, branchCheckoutExample params.
Signature: createWorkspaceCommitterRole(adapter, extract)
Agent reads thread history to decide branch name, scope, and commit message.

Closes #17
2026-04-29 12:52:56 +00:00
ac47daa42b Merge pull request 'refactor: decouple adapters from workflow factories + createXxxRole' (#16) from refactor/15-decouple-adapters into master 2026-04-29 12:44:18 +00:00
a609dc2486 refactor: derive adapter keys from Meta type instead of manual union 2026-04-29 12:41:10 +00:00
eaddd88109 refactor: add defaultAdapter + typed role union, adapters becomes Partial
- Each workflow factory takes defaultAdapter: AgentFn + adapters?: Partial<Record<RoleUnion, AgentFn>>
- index.ts only overrides roles that differ from default (planner/coder use cursor, rest fallback)
- Cleaner call sites, type-safe role names

Refs #15
2026-04-29 12:38:21 +00:00
1683e41b05 refactor: decouple adapters from workflow factories, roles export createXxxRole
- Rename build* → create* workflow factories
- Workflow factories accept adapters: Record<string, AgentFn>
- Each role file exports createXxxRole(adapter, ...) factory
- _shared/workspace-committer accepts adapter as first param
- All adapter imports moved to index.ts (injection point)
- solve-issue roles also updated

Closes #15
2026-04-29 12:35:07 +00:00
a506e5b36b Merge pull request 'refactor: flatten role folders into single .ts files' (#14) from refactor/13-flatten-role-folders into master 2026-04-29 12:23:34 +00:00
42f943c303 refactor: flatten role folders into single .ts files
Each role's index.ts + prompt.ts merged into a single <role>.ts file.
Committer stays as re-export from _shared.
Import paths updated in build.ts and moderator.ts.

Closes #13
2026-04-29 12:21:41 +00:00
215a8f6566 fix(committer): forbid --author in shared workspace committer prompt 2026-04-29 11:15:27 +00:00
f6e29a5cae Merge chore/10-dedup-committer: dedup workspace committer into _shared 2026-04-29 11:15:10 +00:00
f651389ad8 chore(workflows): deduplicate workspace committer role
Extract shared buildWorkspaceCommitterRole into workflows/_shared/workspace-committer.ts
with parameterized conventionalCommitScopeHint and branchCheckoutExample.

Both develop-sense and develop-workflow committer/index.ts now re-export from the
shared module. Duplicate prompt.ts files removed.

Fixes #10
2026-04-29 11:11:51 +00:00
85fac3158d Merge refactor/9-committer-agent-workflow: committer agent + forbid --author 2026-04-29 11:09:12 +00:00
16bea3b8a7 fix(committer): forbid --author flag, use repo git config identity 2026-04-29 11:09:01 +00:00
03146b210a refactor(solve-issue): align committer/publish prompts and docs with agent role workflow
- publish/prompt.ts: require 'Fixes #N' in Ref section to auto-close issues
- CONVENTIONS.md: update Role Patterns table (committer uses createRole hermesAdapter), fix Meta Convention (committed not success)
- committer/prompt.ts: add defaultBranch guard before branch creation to prevent empty PR diffs
- implement/prompt.ts: strengthen git commit prohibition

Refs #9
2026-04-29 11:06:05 +00:00
小橘 🍊(NEKO Team)
c585e0d8a8 Refactor committer into Hermes agent with branch/commit/push workflow
Add solve-issue committer after implement; replace develop-sense and develop-workflow script roles with createRole(hermesAdapter). Implement prompt no longer does git; publish prompt asks for meaningful PR titles.

Refs #9

Made-with: Cursor
2026-04-29 10:44:28 +00:00
3a2b8a49a3 fix: add nerve-daemon back to workspace deps (needed by CLI thread commands)
小橘 🍊(NEKO Team)
2026-04-29 10:11:12 +00:00
aef9943746 fix(solve-issue): prepare supports local repo path from trigger prompt
If the trigger prompt specifies a local repo path (--repo /path or absolute path),
prepare validates it instead of cloning to ~/Code/. Enables running solve-issue
on repos outside the default ~/Code directory.

小橘 🍊(NEKO Team)
2026-04-29 10:09:34 +00:00
小橘 🍊(NEKO Team)
95df8bc3c2 refactor(workflows): use createRole instead of compileWorkflowSpec
Switch build.ts and solve-issue inner roles to @uncaged/nerve-workflow-utils createRole with LlmExtractorConfig. Remove @uncaged/nerve-daemon from workspace dependencies; keep override for linking. Planner uses createCursorAdapter ask mode; dynamic cwd via start.meta.workdir.

Made-with: Cursor
2026-04-29 10:01:02 +00:00
小橘 🍊(NEKO Team)
70fd064bad Refactor workflows to use compileWorkflowSpec from nerve-daemon
Remove workflows/_shared; wire createLlmExtractFn, zodMeta, and createCursorAdapter(mode ask). Plan/implement/publish compile inner specs via daemon.

Made-with: Cursor
2026-04-29 09:35:13 +00:00
56ce22fb1b Migrate workflows to WorkflowSpec-style roles (RFC-003)
Replace createCursorRole/createHermesRole with adapter + prompt + zod meta.

Add shared compileRoleSpec, cursor ask adapter, nerve.yaml extract defaults.

Refs #248

Made-with: Cursor
2026-04-29 09:23:55 +00:00
66ce30cdfb rename: generate-workflow → develop-workflow, generate-sense → develop-sense
小橘 🍊(NEKO Team)
2026-04-29 00:20:01 +00:00
28ac2e9dad chore: change workflow overflow from drop to queue
小橘 🍊(NEKO Team)
2026-04-29 00:13:14 +00:00
86f02da306 refactor: rename workflows to verb phrases, delete gitea-issue-solver
- workflow-generator → generate-workflow
- sense-generator → generate-sense
- Delete gitea-issue-solver (replaced by solve-issue)

小橘 🍊(NEKO Team)
2026-04-28 23:17:26 +00:00
7313111548 chore: remove nerve.db from tracking, add *.db to gitignore
小橘 🍊(NEKO Team)
2026-04-28 23:08:00 +00:00
64a5fc5301 chore(workflow): auto-generated commit 2026-04-28 22:46:19 +00:00
d786827ac8 chore(workflow): auto-generated commit 2026-04-28 22:37:27 +00:00
d6e95f5c65 chore(workflow): auto-generated commit 2026-04-28 22:24:20 +00:00
95587260f6 chore(workflow): auto-generated commit 2026-04-28 16:02:16 +00:00
57c740cdde Revert "chore(workflow): auto-generated commit"
This reverts commit 75f2768a8c7713879bb2ab564f42f24bc609338e.
2026-04-28 15:49:22 +00:00
75f2768a8c chore(workflow): auto-generated commit 2026-04-28 14:46:13 +00:00
3d9f239230 refactor: moderator uses dual limits — max coder rounds (20) + max total rejections (10)
Either limit triggers END. Simple, no per-rejector budgets.
2026-04-28 14:27:40 +00:00
bbcaf1eba5 refactor: moderator uses per-rejector limits instead of shared coderCount
- coder self-iterations (done=false): max 5
- reviewer rejections: max 3
- tester rejections: max 3
- committer rejections: max 2
Each budget is independent, no longer starved by coder's own passes.
2026-04-28 14:22:04 +00:00
fbcc1ff30c feat: add CONVENTIONS.md, reviewer references it instead of hardcoding rules
- CONVENTIONS.md covers: language paradigm, naming, error handling,
  workflow/sense structure, role patterns, meta convention, git rules
- Reviewer prompt now: cat CONVENTIONS.md + check diff against it
- Single source of truth for all roles
2026-04-28 14:19:20 +00:00
76760c4d29 fix: reviewer — remove 'missing files from planner' check, planner designs roles not files 2026-04-28 14:16:23 +00:00
18e201b49c fix: reviewer prompt — reject or approve, no middle ground 2026-04-28 14:13:33 +00:00
daf07b5746 feat: add reviewer role to all three workflows
- workflow-generator, sense-generator, gitea-issue-solver all now have:
  planner → coder → reviewer → tester → committer → END
- Reviewer uses createHermesRole with git diff/status for static analysis
- Checks: garbage files, secrets, debug code, unrelated changes
- Planner prompt now requires Role Behavior sections for every role
- Coder prompt now emphasizes reading initial user prompt for specifics
2026-04-28 13:56:37 +00:00
bd89dcaff6 chore(workflow): auto-generated commit 2026-04-28 13:33:14 +00:00
994de1e7ff chore(workflow): auto-generated commit 2026-04-28 13:20:29 +00:00
e8765abac6 fix: pass nerveRoot to tester prompts for correct path resolution
Hermes agent cwd is not necessarily the nerve workspace root.
Inject nerveRoot into tester prompts so all file paths and commands
use absolute paths to the workspace directory.
2026-04-28 13:03:31 +00:00
ef7d83ad0a chore(sense): auto-generated commit 2026-04-28 13:02:12 +00:00
495d8d1b60 chore(workflow): auto-generated commit 2026-04-28 12:55:08 +00:00
0fab8a68c3 chore: declare pnpm workspace, remove per-package lockfiles and pnpm config
- Add pnpm-workspace.yaml (senses/*, workflows/*)
- Add root build script: pnpm -r build
- Remove pnpm.onlyBuiltDependencies from sense package.json
- Remove pnpm.overrides from workflow package.json
- Remove per-package pnpm-lock.yaml and node_modules
2026-04-28 11:11:41 +00:00
7fb161cf96 fix: tester prompts — build before list, remove non-existent dry-run command
- Both workflow/sense tester: build first, then validate, then list
- workflow-generator: remove nerve workflow dry-run (doesn't exist)
- sense-generator: add build step before sense list/trigger
2026-04-28 11:04:31 +00:00
6778ba5246 refactor: clarify coder done semantics + allow multi-step iterations
- done=true means all files created, build+lint pass
- done=false means progress made, continue next iteration
- Updated both sense-generator and workflow-generator coder prompts
2026-04-28 11:02:25 +00:00
d638623456 refactor(workflow-generator): simplify meta to routing booleans + log-to-file
- planner: { ready }, coder: { done }, tester: { passed }, committer: { success }
- planner/coder: createCursorRole, tester: createHermesRole
- committer: direct spawn, output to .log file
- moderator: coder loop (max 5), committer fail → coder
- bundle 24kb → 8.7kb

Fixes #5
2026-04-28 10:22:57 +00:00
bf77e3452a chore: gitignore dist/ for workflow-generator 2026-04-28 08:50:47 +00:00
a469f30b42 refactor(workflow-generator): multi-file DIP + Role Factory + esbuild bundle
- Split 500-line monolith into roles/{planner,coder,tester,committer}/
- Each role: index.ts (build function) + prompt.ts (pure function)
- Use createCursorRole/createLlmRole/createHermesRole factories
- DIP: env vars read in index.ts, injected via build.ts
- esbuild bundle to dist/index.js (24kb)
- Moderator logic preserved: planner→coder→tester→committer with retries

Fixes xiaoju/nerve-workspace#3
2026-04-28 08:48:23 +00:00
77 changed files with 1986 additions and 5112 deletions

2
.gitignore vendored
View File

@@ -4,3 +4,5 @@ logs/
nerve.pid
nerve.sock
false/
*.db
dist/

154
CONVENTIONS.md Normal file
View File

@@ -0,0 +1,154 @@
# Nerve Workspace Conventions
This document defines coding and workflow conventions for the nerve-workspace (`~/.uncaged-nerve`).
All roles (planner, coder, reviewer, tester) should reference this file.
## Language & Paradigm
### Functional-first
Use `function` + `type`, not `class` + `interface`.
```typescript
// ✅ Good
type Signal = { senseId: string; value: unknown; ts: number };
function createSignal(senseId: string, value: unknown): Signal { ... }
// ❌ Bad
class Signal implements ISignal { ... }
```
### Rules
| Rule | Description |
|------|-------------|
| `type` over `interface` | All type definitions use `type` |
| `function` over `class` | Pure functions + closures, no class |
| No `this` | Functions must not depend on `this` context |
| No inheritance | No `extends`, `implements`, `abstract` |
| Composition over inheritance | Use function composition |
| No optional properties | Use `T \| null` instead of `?:` |
| No dynamic `import()` | Always static top-level `import` |
| `async/await` only | Never `.then()` chains |
### Exceptions
Classes allowed when required by a library (e.g. Drizzle `sqliteTable`) or Error subclasses.
## Naming
| Type | Style | Example |
|------|-------|---------|
| Files | kebab-case | `signal-bus.ts` |
| Types | PascalCase | `SignalBus` |
| Functions/variables | camelCase | `createSignalBus` |
| Constants | UPPER_SNAKE | `MAX_RETRY_COUNT` |
## Error Handling
- Use `Result<T, E>` for expected failures
- `throw` only for unrecoverable bugs
- No try-catch for flow control
```typescript
type Result<T, E = Error> = { ok: true; value: T } | { ok: false; error: E };
```
## Workflow Structure
Each workflow follows the multi-file pattern:
```
workflows/<name>/
index.ts — WorkflowDefinition default export (thin entry point)
build.ts — factory function with dependency injection
moderator.ts — moderator function + WorkflowMeta type
roles/
<role>/
index.ts — build function + meta schema
prompt.ts — prompt pure function (string template)
package.json — with esbuild build script
tsconfig.json
```
### Role Implementation Patterns
| Pattern | When to use | Example |
|---------|-------------|---------|
| `createCursorRole` | Needs file system access (code generation, planning) | planner, coder |
| `createHermesRole` | Needs shell + tools (testing, reviewing) | tester, reviewer |
| `createLlmRole` | Pure LLM reasoning, no tools | analysis roles |
| `createRole(hermesAdapter, …)` | Agent role with LLM + shell (branch/commit/push from thread context) | solve-issue committer, publish |
| Direct `Role<Meta>` | No LLM needed, scripted logic | thin wrappers only |
### Meta Convention
Meta is a **routing signal only** — one boolean per role:
- `{ ready: boolean }` — planner
- `{ done: boolean }` — coder
- `{ approved: boolean }` — reviewer
- `{ passed: boolean }` — tester
- `{ committed: boolean }` — committer (solve-issue: branch created, pushed)
- `{ success: boolean }` — publish (PR opened)
### Standard Flow
```
planner → coder → reviewer → tester → committer → END
```
- Reviewer rejection → back to coder (within MAX_CODER_ITERATIONS)
- Tester failure → back to coder (within MAX_CODER_ITERATIONS)
- Committer failure → back to coder (within MAX_CODER_ITERATIONS)
## Sense Structure
```
senses/<name>/
src/
index.ts — compute() function + schema
schema.ts — Drizzle table definition
migrations/ — SQLite migrations
package.json — with esbuild build script
```
## Toolchain
| Tool | Purpose |
|------|---------|
| **pnpm** | Package manager (workspace mode) |
| **TypeScript** | Type checking |
| **esbuild** | Bundling (each workflow/sense bundles independently) |
### Commands
```bash
pnpm build # build all packages
pnpm -r build # same, explicit recursive
cd workflows/<name> && pnpm build # build one workflow
```
## Git & Commit Convention
```
<type>(<scope>): <description>
type: feat | fix | refactor | docs | chore | test
scope: workflow | sense | core | ...
```
### What NOT to commit
- `node_modules/`
- `dist/` (build outputs, generated by esbuild)
- `.DS_Store`
- pnpm cache artifacts (e.g. `false/` directories from `--no-cache` misuse)
- Secrets, API keys, tokens
- Unrelated file changes outside the task scope
## Dependencies
Shared packages from the nerve monorepo:
- `@uncaged/nerve-core` — types, END constant, WorkflowDefinition
- `@uncaged/nerve-workflow-utils` — role factories, spawnSafe, llmExtract, cursorAgent
- `zod` — schema definitions for meta extraction

View File

View File

@@ -1,33 +1,26 @@
# nerve.yaml — Nerve workspace configuration
extract:
provider: dashscope
model: qwen-plus
senses:
linux-system-health:
group: system
interval: 30s
throttle: 10s
timeout: 15s
hermes-gateway-health:
group: system
interval: 2m
throttle: 30s
timeout: 30s
hermes-session-message-stats:
group: hermes
interval: 15m
throttle: 30s
timeout: 60s
worker-process-metrics:
group: system
interval: 1m
throttle: 15s
timeout: 5s
workflows:
sense-generator:
develop-sense:
concurrency: 1
overflow: drop
workflow-generator:
overflow: queue
develop-workflow:
concurrency: 1
overflow: drop
gitea-issue-solver:
overflow: queue
solve-issue:
concurrency: 1
overflow: drop
overflow: queue
extract-knowledge:
concurrency: 1
overflow: queue

View File

@@ -3,24 +3,39 @@
"version": "0.0.1",
"private": true,
"type": "module",
"scripts": {
"build": "node scripts/build.mjs"
},
"dependencies": {
"@uncaged/nerve-adapter-cursor": "link:../repos/nerve/packages/adapter-cursor",
"@uncaged/nerve-adapter-hermes": "link:../repos/nerve/packages/adapter-hermes",
"@uncaged/nerve-core": "latest",
"@uncaged/nerve-daemon": "latest",
"@uncaged/nerve-daemon": "link:../repos/nerve/packages/daemon",
"@uncaged/nerve-role-committer": "link:../repos/nerve/packages/role-committer",
"@uncaged/nerve-role-reviewer": "link:../repos/nerve/packages/role-reviewer",
"@uncaged/nerve-workflow-meta": "link:../repos/nerve/packages/workflow-meta",
"@uncaged/nerve-workflow-utils": "link:../repos/nerve/packages/workflow-utils",
"drizzle-orm": "latest",
"zod": "^4.3.6"
},
"devDependencies": {
"drizzle-kit": "latest"
"@types/node": "^22.0.0",
"drizzle-kit": "latest",
"esbuild": "^0.27.0",
"typescript": "^5.7.0"
},
"pnpm": {
"onlyBuiltDependencies": [
"esbuild"
],
"overrides": {
"@uncaged/nerve-adapter-cursor": "link:../repos/nerve/packages/adapter-cursor",
"@uncaged/nerve-adapter-hermes": "link:../repos/nerve/packages/adapter-hermes",
"@uncaged/nerve-daemon": "link:../repos/nerve/packages/daemon",
"@uncaged/nerve-core": "link:../repos/nerve/packages/core",
"@uncaged/nerve-workflow-utils": "link:../repos/nerve/packages/workflow-utils"
"@uncaged/nerve-workflow-utils": "link:../repos/nerve/packages/workflow-utils",
"@uncaged/nerve-role-committer": "link:../repos/nerve/packages/role-committer",
"@uncaged/nerve-workflow-meta": "link:../repos/nerve/packages/workflow-meta"
}
}
}

225
pnpm-lock.yaml generated
View File

@@ -5,20 +5,39 @@ settings:
excludeLinksFromLockfile: false
overrides:
'@uncaged/nerve-adapter-cursor': link:../repos/nerve/packages/adapter-cursor
'@uncaged/nerve-adapter-hermes': link:../repos/nerve/packages/adapter-hermes
'@uncaged/nerve-daemon': link:../repos/nerve/packages/daemon
'@uncaged/nerve-core': link:../repos/nerve/packages/core
'@uncaged/nerve-workflow-utils': link:../repos/nerve/packages/workflow-utils
'@uncaged/nerve-role-committer': link:../repos/nerve/packages/role-committer
'@uncaged/nerve-workflow-meta': link:../repos/nerve/packages/workflow-meta
importers:
.:
dependencies:
'@uncaged/nerve-adapter-cursor':
specifier: link:../repos/nerve/packages/adapter-cursor
version: link:../repos/nerve/packages/adapter-cursor
'@uncaged/nerve-adapter-hermes':
specifier: link:../repos/nerve/packages/adapter-hermes
version: link:../repos/nerve/packages/adapter-hermes
'@uncaged/nerve-core':
specifier: link:../repos/nerve/packages/core
version: link:../repos/nerve/packages/core
'@uncaged/nerve-daemon':
specifier: link:../repos/nerve/packages/daemon
version: link:../repos/nerve/packages/daemon
'@uncaged/nerve-role-committer':
specifier: link:../repos/nerve/packages/role-committer
version: link:../repos/nerve/packages/role-committer
'@uncaged/nerve-role-reviewer':
specifier: link:../repos/nerve/packages/role-reviewer
version: link:../repos/nerve/packages/role-reviewer
'@uncaged/nerve-workflow-meta':
specifier: link:../repos/nerve/packages/workflow-meta
version: link:../repos/nerve/packages/workflow-meta
'@uncaged/nerve-workflow-utils':
specifier: link:../repos/nerve/packages/workflow-utils
version: link:../repos/nerve/packages/workflow-utils
@@ -29,9 +48,196 @@ importers:
specifier: ^4.3.6
version: 4.3.6
devDependencies:
'@types/node':
specifier: ^22.0.0
version: 22.19.17
drizzle-kit:
specifier: latest
version: 0.31.10
esbuild:
specifier: ^0.27.0
version: 0.27.7
typescript:
specifier: ^5.7.0
version: 5.9.3
senses/git-workspace-status:
devDependencies:
'@types/node':
specifier: ^22.0.0
version: 22.19.17
esbuild:
specifier: ^0.27.0
version: 0.27.7
typescript:
specifier: ^5.7.0
version: 5.9.3
senses/hermes-gateway-health:
devDependencies:
'@types/node':
specifier: ^22.0.0
version: 22.19.17
esbuild:
specifier: ^0.27.0
version: 0.27.7
typescript:
specifier: ^5.7.0
version: 5.9.3
senses/hermes-session-message-stats:
devDependencies:
'@types/node':
specifier: ^22.0.0
version: 22.19.17
esbuild:
specifier: ^0.27.0
version: 0.27.7
typescript:
specifier: ^5.7.0
version: 5.9.3
senses/linux-system-health:
devDependencies:
'@types/node':
specifier: ^22.0.0
version: 22.19.17
esbuild:
specifier: ^0.27.0
version: 0.27.7
typescript:
specifier: ^5.7.0
version: 5.9.3
senses/worker-process-metrics:
devDependencies:
'@types/node':
specifier: ^22.0.0
version: 22.19.17
esbuild:
specifier: ^0.27.0
version: 0.27.7
typescript:
specifier: ^5.7.0
version: 5.9.3
workflows/develop-sense:
dependencies:
'@uncaged/nerve-adapter-cursor':
specifier: link:../../../repos/nerve/packages/adapter-cursor
version: link:../../../repos/nerve/packages/adapter-cursor
'@uncaged/nerve-adapter-hermes':
specifier: link:../../../repos/nerve/packages/adapter-hermes
version: link:../../../repos/nerve/packages/adapter-hermes
'@uncaged/nerve-core':
specifier: link:../../../repos/nerve/packages/core
version: link:../../../repos/nerve/packages/core
'@uncaged/nerve-workflow-meta':
specifier: link:../../../repos/nerve/packages/workflow-meta
version: link:../../../repos/nerve/packages/workflow-meta
'@uncaged/nerve-workflow-utils':
specifier: link:../../../repos/nerve/packages/workflow-utils
version: link:../../../repos/nerve/packages/workflow-utils
zod:
specifier: ^4.3.6
version: 4.3.6
devDependencies:
'@types/node':
specifier: ^22.0.0
version: 22.19.17
esbuild:
specifier: ^0.27.0
version: 0.27.7
typescript:
specifier: ^5.7.0
version: 5.9.3
workflows/develop-workflow:
dependencies:
'@uncaged/nerve-adapter-cursor':
specifier: link:../../../repos/nerve/packages/adapter-cursor
version: link:../../../repos/nerve/packages/adapter-cursor
'@uncaged/nerve-adapter-hermes':
specifier: link:../../../repos/nerve/packages/adapter-hermes
version: link:../../../repos/nerve/packages/adapter-hermes
'@uncaged/nerve-core':
specifier: link:../../../repos/nerve/packages/core
version: link:../../../repos/nerve/packages/core
'@uncaged/nerve-workflow-meta':
specifier: link:../../../repos/nerve/packages/workflow-meta
version: link:../../../repos/nerve/packages/workflow-meta
'@uncaged/nerve-workflow-utils':
specifier: link:../../../repos/nerve/packages/workflow-utils
version: link:../../../repos/nerve/packages/workflow-utils
zod:
specifier: ^4.3.6
version: 4.3.6
devDependencies:
'@types/node':
specifier: ^22.0.0
version: 22.19.17
esbuild:
specifier: ^0.27.0
version: 0.27.7
typescript:
specifier: ^5.7.0
version: 5.9.3
workflows/extract-knowledge:
dependencies:
'@uncaged/nerve-adapter-cursor':
specifier: link:../../../repos/nerve/packages/adapter-cursor
version: link:../../../repos/nerve/packages/adapter-cursor
'@uncaged/nerve-adapter-hermes':
specifier: link:../../../repos/nerve/packages/adapter-hermes
version: link:../../../repos/nerve/packages/adapter-hermes
'@uncaged/nerve-core':
specifier: link:../../../repos/nerve/packages/core
version: link:../../../repos/nerve/packages/core
'@uncaged/nerve-workflow-utils':
specifier: link:../../../repos/nerve/packages/workflow-utils
version: link:../../../repos/nerve/packages/workflow-utils
zod:
specifier: ^4.3.6
version: 4.3.6
devDependencies:
'@types/node':
specifier: ^22.0.0
version: 22.19.17
esbuild:
specifier: ^0.27.0
version: 0.27.7
typescript:
specifier: ^5.7.0
version: 5.9.3
workflows/solve-issue:
dependencies:
'@uncaged/nerve-adapter-cursor':
specifier: link:../../../repos/nerve/packages/adapter-cursor
version: link:../../../repos/nerve/packages/adapter-cursor
'@uncaged/nerve-adapter-hermes':
specifier: link:../../../repos/nerve/packages/adapter-hermes
version: link:../../../repos/nerve/packages/adapter-hermes
'@uncaged/nerve-core':
specifier: link:../../../repos/nerve/packages/core
version: link:../../../repos/nerve/packages/core
'@uncaged/nerve-workflow-utils':
specifier: link:../../../repos/nerve/packages/workflow-utils
version: link:../../../repos/nerve/packages/workflow-utils
zod:
specifier: ^4.3.6
version: 4.3.6
devDependencies:
'@types/node':
specifier: ^22.0.0
version: 22.19.17
esbuild:
specifier: ^0.27.0
version: 0.27.7
typescript:
specifier: ^5.7.0
version: 5.9.3
packages:
@@ -490,6 +696,9 @@ packages:
cpu: [x64]
os: [win32]
'@types/node@22.19.17':
resolution: {integrity: sha512-wGdMcf+vPYM6jikpS/qhg6WiqSV/OhG+jeeHT/KlVqxYfD40iYJf9/AE1uQxVWFvU7MipKRkRv8NSHiCGgPr8Q==}
base64-js@1.5.1:
resolution: {integrity: sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==}
@@ -750,6 +959,14 @@ packages:
tunnel-agent@0.6.0:
resolution: {integrity: sha512-McnNiV1l8RYeY8tBgEpuodCC1mLUdbSN+CYBL7kJsJNInOP8UjDDEwdk6Mw60vdLLrr5NHKZhMAOSrR2NZuQ+w==}
typescript@5.9.3:
resolution: {integrity: sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw==}
engines: {node: '>=14.17'}
hasBin: true
undici-types@6.21.0:
resolution: {integrity: sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ==}
util-deprecate@1.0.2:
resolution: {integrity: sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==}
@@ -995,6 +1212,10 @@ snapshots:
'@esbuild/win32-x64@0.27.7':
optional: true
'@types/node@22.19.17':
dependencies:
undici-types: 6.21.0
base64-js@1.5.1:
optional: true
@@ -1286,6 +1507,10 @@ snapshots:
safe-buffer: 5.2.1
optional: true
typescript@5.9.3: {}
undici-types@6.21.0: {}
util-deprecate@1.0.2:
optional: true

46
scripts/build.mjs Normal file
View File

@@ -0,0 +1,46 @@
import * as esbuild from "esbuild";
import fs from "node:fs";
import path from "node:path";
import { fileURLToPath } from "node:url";
// Workspace root: one level above the scripts/ directory containing this file.
const root = path.join(path.dirname(fileURLToPath(import.meta.url)), "..");
// All build output is written under dist/ at the workspace root.
const dist = path.join(root, "dist");
// Shared esbuild options for every bundle: ESM output targeting Node,
// with all package imports kept external (resolved at runtime, not inlined).
const opts = {
bundle: true,
platform: "node",
format: "esm",
packages: "external",
};
/**
 * List immediate subdirectories of `dir`, skipping entries whose name
 * starts with "." or "_". Returns [] when `dir` does not exist.
 * Each result is { name, full } where `full` is the joined path.
 */
function listDirs(dir) {
  if (!fs.existsSync(dir)) return [];
  const result = [];
  for (const name of fs.readdirSync(dir)) {
    if (name.startsWith(".") || name.startsWith("_")) continue;
    const full = path.join(dir, name);
    if (fs.statSync(full).isDirectory()) result.push({ name, full });
  }
  return result;
}
/**
 * Build every sense and workflow bundle into dist/, mirroring the source
 * layout: senses/<name>/src/index.ts -> dist/senses/<name>/index.js and
 * workflows/<name>/index.ts -> dist/workflows/<name>/index.js.
 * Directories lacking the expected entry file are skipped silently.
 */
async function main() {
  // Start from a clean output tree.
  fs.rmSync(dist, { recursive: true, force: true });
  const groups = [
    { kind: "senses", entryOf: (dir) => path.join(dir, "src", "index.ts") },
    { kind: "workflows", entryOf: (dir) => path.join(dir, "index.ts") },
  ];
  for (const { kind, entryOf } of groups) {
    for (const { name, full } of listDirs(path.join(root, kind))) {
      const entry = entryOf(full);
      if (!fs.existsSync(entry)) continue;
      const outfile = path.join(dist, kind, name, "index.js");
      fs.mkdirSync(path.dirname(outfile), { recursive: true });
      await esbuild.build({ ...opts, entryPoints: [entry], outfile });
    }
  }
}
await main();

View File

@ -1,374 +0,0 @@
// src/index.ts
import { execFile } from "node:child_process";
// src/schema.ts
import { integer, real, sqliteTable, text } from "drizzle-orm/sqlite-core";
// Drizzle table for one health probe sample per row; written by compute()
// on every sense tick.
var hermesGatewayHealth = sqliteTable("hermes_gateway_health", {
id: integer("id").primaryKey({ autoIncrement: true }),
// Sample timestamp (ms since epoch).
ts: integer("ts").notNull(),
// 1 when systemd reports active/running AND the main PID exists, else 0.
alive: integer("alive").notNull(),
mainPid: integer("main_pid").notNull(),
// Resident set size of the gateway process, in bytes (0 when not alive).
rssBytes: integer("rss_bytes").notNull(),
cpuPercent: real("cpu_percent").notNull(),
uptimeSec: integer("uptime_sec").notNull(),
activeSessions: integer("active_sessions").notNull(),
childProcessCount: integer("child_process_count").notNull(),
// HTTP probe outcome: 1 for a final status in 200-399, else 0.
httpOk: integer("http_ok").notNull(),
httpStatusCode: integer("http_status_code").notNull(),
httpLatencyMs: integer("http_latency_ms").notNull(),
// Truncated error description ("" on success).
httpError: text("http_error").notNull()
});
// src/index.ts
// Subprocess deadline; kept under the typical sense timeout so probes fail
// before the sense itself is killed.
var EXEC_TIMEOUT_MS = 25e3;
// HTTP probe deadline: at most 23s and always 2s under EXEC_TIMEOUT_MS.
var HTTP_TIMEOUT_MS = Math.min(23e3, EXEC_TIMEOUT_MS - 2e3);
// Cap on stored HTTP error strings, to keep rows small.
var HTTP_ERROR_MAX_LEN = 256;
// Resolve the gateway probe URL from the environment; "" when unset.
// HERMES_GATEWAY_HEALTH_URL takes precedence over NERVE_HERMES_GATEWAY_URL.
function gatewayProbeUrl() {
  let raw = process.env.HERMES_GATEWAY_HEALTH_URL;
  if (raw === undefined || raw === null) raw = process.env.NERVE_HERMES_GATEWAY_URL;
  if (raw === undefined || raw === null) raw = "";
  return String(raw).trim();
}
// Normalize an arbitrary thrown value into a short error string: prefer a
// truthy `code` property, then `message`, then the value itself. Never
// returns "" and never exceeds HTTP_ERROR_MAX_LEN characters.
function truncateHttpError(err) {
  let raw;
  if (err && typeof err === "object" && "code" in err && err.code) {
    raw = String(err.code);
  } else {
    raw = String(err?.message ?? err ?? "error");
  }
  const trimmed = raw.trim() || "error";
  if (trimmed.length > HTTP_ERROR_MAX_LEN) {
    return trimmed.slice(0, HTTP_ERROR_MAX_LEN);
  }
  return trimmed;
}
/**
 * GET `url` and classify the result. Success (httpOk=1) means a final
 * status in 200-399 after following redirects. On a missing URL, timeout,
 * or network error, httpOk=0 and httpError carries a truncated description.
 */
async function probeGatewayHttp(url) {
  if (!url) {
    return { httpOk: 0, httpStatusCode: 0, httpLatencyMs: 0, httpError: "missing_url" };
  }
  const started = Date.now();
  try {
    const res = await fetch(url, {
      method: "GET",
      signal: AbortSignal.timeout(HTTP_TIMEOUT_MS),
      redirect: "follow"
    });
    const latency = Date.now() - started;
    const status = res.status;
    const success = status >= 200 && status < 400;
    return {
      httpOk: success ? 1 : 0,
      httpStatusCode: status,
      httpLatencyMs: latency,
      httpError: success ? "" : truncateHttpError({ message: `HTTP ${status}` })
    };
  } catch (err) {
    return {
      httpOk: 0,
      httpStatusCode: 0,
      httpLatencyMs: Date.now() - started,
      httpError: truncateHttpError(err)
    };
  }
}
/**
 * Parse ps(1) `etime` output ([[dd-]hh:]mm:ss, or plain seconds) into
 * whole seconds. Returns 0 for empty or unparseable input.
 */
function etimeToSeconds(etime) {
  let rest = String(etime).trim();
  if (!rest) return 0;
  let days = 0;
  const dash = rest.indexOf("-");
  if (dash !== -1) {
    const parsed = Number.parseInt(rest.slice(0, dash), 10);
    if (Number.isFinite(parsed)) days = parsed;
    rest = rest.slice(dash + 1);
  }
  const fields = rest.split(":").map((f) => Number.parseInt(String(f).trim(), 10));
  if (fields.some((n) => !Number.isFinite(n))) return 0;
  // Weight each clock field by its size in seconds.
  const weightsByLen = { 3: [3600, 60, 1], 2: [60, 1], 1: [1] };
  const weights = weightsByLen[fields.length];
  if (!weights) return 0;
  let seconds = days * 86400;
  for (let i = 0; i < fields.length; i++) seconds += fields[i] * weights[i];
  return Math.trunc(seconds);
}
/**
 * Run `file` with `args`, capturing UTF-8 stdout/stderr. Never rejects:
 * failures are reported through the resolved result.
 *
 * - exitCode: the child's exit code (0 on success; -1 when the process
 *   failed without a numeric exit code, e.g. spawn failure or signal).
 * - errCode: spawn error string (e.g. "ENOENT") when the command could not
 *   be run at all, otherwise undefined.
 *
 * Fix: execFile's callback error exposes the non-zero exit code as the
 * numeric `err.code` — `err.status` is never set — so the previous
 * `err.status` read collapsed every real exit code to -1.
 */
function execFileUtf8(file, args, opts = {}) {
  return new Promise((resolve) => {
    execFile(
      file,
      args,
      {
        encoding: "utf8",
        maxBuffer: 8 * 1024 * 1024,
        timeout: EXEC_TIMEOUT_MS,
        ...opts
      },
      (err, stdout, stderr) => {
        let exitCode = 0;
        let errCode;
        if (err) {
          exitCode = typeof err.code === "number" ? err.code : -1;
          errCode = typeof err.code === "string" ? err.code : undefined;
        }
        resolve({
          exitCode,
          errCode,
          stdout: String(stdout ?? ""),
          stderr: String(stderr ?? "")
        });
      }
    );
  });
}
// Extract "Main PID: <n>" from `systemctl status` output; 0 when the line
// is absent or the number is unparseable.
function parseMainPidFromStatus(statusText) {
  const match = /Main PID:\s*(\d+)/i.exec(statusText);
  if (!match) return 0;
  return Math.trunc(Number.parseInt(match[1], 10)) || 0;
}
// Find the "Active: <state> (<substate>)" line in `systemctl status`
// output. Reports whether the unit state is "active" and whether the
// substate mentions "running"; both false when no parseable line exists.
function parseActiveLineFromStatus(statusText) {
  const detail = /Active:\s*(\S+)\s*\(([^)]*)\)/i;
  for (const line of statusText.split("\n")) {
    if (!/^\s*Active:/i.test(line)) continue;
    const m = detail.exec(line);
    if (m) {
      return {
        active: m[1].toLowerCase() === "active",
        subRunning: m[2].toLowerCase().includes("running")
      };
    }
  }
  return { active: false, subRunning: false };
}
// Parse `systemctl show -p MainPID -p ActiveState -p SubState` key=value
// output. Later duplicate keys overwrite earlier ones.
function parseSystemctlShow(showText) {
  const result = { mainPid: 0, active: false, subRunning: false };
  for (const rawLine of showText.split("\n")) {
    const line = rawLine.trim();
    if (line.startsWith("MainPID=")) {
      result.mainPid = Math.trunc(Number.parseInt(line.slice("MainPID=".length), 10)) || 0;
    } else if (line.startsWith("ActiveState=")) {
      result.active = line.slice("ActiveState=".length).trim().toLowerCase() === "active";
    } else if (line.startsWith("SubState=")) {
      result.subRunning = line.slice("SubState=".length).trim().toLowerCase() === "running";
    }
  }
  return result;
}
/**
 * Determine the hermes-gateway unit's main PID and whether systemd sees it
 * as active+running. Starts from `systemctl --user status`; when that
 * yields no PID or a non-running state, falls back to the machine-readable
 * `systemctl show` and merges any positive findings.
 */
async function readSystemdState() {
  const status = await execFileUtf8("systemctl", [
    "--user",
    "--no-pager",
    "status",
    "hermes-gateway"
  ]);
  const statusText = `${status.stdout}\n${status.stderr}`.trim();
  let mainPid = parseMainPidFromStatus(statusText);
  let { active, subRunning } = parseActiveLineFromStatus(statusText);
  if (mainPid <= 0 || !active || !subRunning) {
    const show = await execFileUtf8("systemctl", [
      "--user",
      "--no-pager",
      "show",
      "hermes-gateway",
      "-p",
      "MainPID",
      "-p",
      "ActiveState",
      "-p",
      "SubState"
    ]);
    const parsed = parseSystemctlShow(`${show.stdout}\n${show.stderr}`);
    if (mainPid <= 0 && parsed.mainPid > 0) mainPid = parsed.mainPid;
    active = active || parsed.active;
    subRunning = subRunning || parsed.subRunning;
  }
  return { mainPid, systemdActiveRunning: active && subRunning };
}
// True when a process with `mainPid` is visible to ps(1). A missing ps
// binary (ENOENT) counts as "not running".
async function processExists(mainPid) {
  if (mainPid <= 0) return false;
  const result = await execFileUtf8("ps", ["-p", String(mainPid), "-o", "pid="]);
  return result.errCode !== "ENOENT" && result.stdout.trim().length > 0;
}
// Sample memory/CPU/uptime for `mainPid` via ps(1). Tries the `etimes`
// field (plain seconds) first and falls back to `etime` ([[dd-]hh:]mm:ss)
// for ps implementations that lack `etimes`. Returns all zeros when the
// PID is invalid, ps is missing, or the output is unparseable.
async function readPsMetrics(mainPid) {
if (mainPid <= 0) {
return { rssBytes: 0, cpuPercent: 0, uptimeSec: 0 };
}
// First attempt: rss (KiB), %cpu, etimes (elapsed seconds).
let r = await execFileUtf8("ps", [
"-p",
String(mainPid),
"-o",
"rss=,%cpu=,etimes="
]);
let line = r.stdout.trim().replace(/\s+/g, " ");
if (r.errCode === "ENOENT" || !line) {
return { rssBytes: 0, cpuPercent: 0, uptimeSec: 0 };
}
let parts = line.split(" ").filter(Boolean);
if (parts.length < 3) {
// Fallback: same fields but `etime`, which needs clock-format parsing.
r = await execFileUtf8("ps", [
"-p",
String(mainPid),
"-o",
"rss=,%cpu=,etime="
]);
line = r.stdout.trim().replace(/\s+/g, " ");
parts = line.split(" ").filter(Boolean);
if (parts.length < 3) {
return { rssBytes: 0, cpuPercent: 0, uptimeSec: 0 };
}
const rssKiB2 = Number(parts[0]);
const cpu2 = Number(parts[1]);
// Re-join in case the elapsed-time field was split; parsed as clock form.
const uptimeSec2 = etimeToSeconds(parts.slice(2).join(" "));
const rssBytes2 = Number.isFinite(rssKiB2) ? Math.trunc(rssKiB2 * 1024) : 0;
const cpuPercent2 = Number.isFinite(cpu2) ? Math.round(cpu2 * 100) / 100 : 0;
return { rssBytes: rssBytes2, cpuPercent: cpuPercent2, uptimeSec: uptimeSec2 };
}
// Fast path: rss KiB -> bytes; %cpu rounded to two decimals; etimes as-is.
const rssKiB = Number(parts[0]);
const cpu = Number(parts[1]);
const etimes = Number(parts[2]);
const rssBytes = Number.isFinite(rssKiB) ? Math.trunc(rssKiB * 1024) : 0;
const cpuPercent = Number.isFinite(cpu) ? Math.round(cpu * 100) / 100 : 0;
const uptimeSec = Number.isFinite(etimes) ? Math.trunc(etimes) : 0;
return { rssBytes, cpuPercent, uptimeSec };
}
// Pull an "Active sessions: N" count (with "Total sessions: N" as a last
// resort) out of `hermes sessions stats` output; 0 when nothing matches.
function parseActiveSessionsFromHermesStats(statsText) {
  const source = String(statsText);
  const patterns = [
    /^\s*Active\s+sessions?:\s*(\d+)/gim,
    /^\s*active\s+sessions?:\s*(\d+)/gim,
    /^\s*Total\s+sessions?:\s*(\d+)/gim
  ];
  for (const pattern of patterns) {
    pattern.lastIndex = 0;
    const hit = pattern.exec(source);
    if (hit === null) continue;
    const count = Math.trunc(Number.parseInt(hit[1], 10));
    return Number.isFinite(count) ? count : 0;
  }
  return 0;
}
// Count active sessions by shelling out to `hermes sessions stats`.
// Best-effort: a missing binary or any failure yields 0.
async function readActiveSessions() {
  try {
    const result = await execFileUtf8("hermes", ["sessions", "stats"]);
    if (result.errCode === "ENOENT") return 0;
    const combined = `${result.stdout}\n${result.stderr}`;
    return parseActiveSessionsFromHermesStats(combined);
  } catch {
    return 0;
  }
}
// Count direct child processes of `mainPid` via `ps --ppid`.
// Best-effort: a missing ps binary or any error yields 0.
async function countDirectChildren(mainPid) {
  if (mainPid <= 0) return 0;
  try {
    const result = await execFileUtf8("ps", [
      "--no-headers",
      "-o",
      "pid",
      "--ppid",
      String(mainPid)
    ]);
    if (result.errCode === "ENOENT") return 0;
    let count = 0;
    for (const line of result.stdout.split("\n")) {
      if (line.trim()) count++;
    }
    return count;
  } catch {
    return 0;
  }
}
// Sense entrypoint: probe hermes-gateway health (systemd state, process
// metrics, session count, HTTP reachability), persist one row, and return
// the same values as the sense reading. Every probe is individually
// best-effort so a single failing command cannot abort the sample.
async function compute(db, _peers) {
const ts = Date.now();
// systemd view: main PID plus active/running state.
let mainPid = 0;
let systemdActiveRunning = false;
try {
const st = await readSystemdState();
mainPid = st.mainPid;
systemdActiveRunning = st.systemdActiveRunning;
} catch {
mainPid = 0;
systemdActiveRunning = false;
}
// Cross-check that the PID actually exists via ps.
let psOk = false;
try {
psOk = await processExists(mainPid);
} catch {
psOk = false;
}
let rssBytes = 0;
let cpuPercent = 0;
let uptimeSec = 0;
if (psOk) {
try {
const m = await readPsMetrics(mainPid);
rssBytes = m.rssBytes;
cpuPercent = m.cpuPercent;
uptimeSec = m.uptimeSec;
} catch {
rssBytes = 0;
cpuPercent = 0;
uptimeSec = 0;
}
}
// Alive requires all three: systemd running, a known PID, and ps sees it.
const alive = systemdActiveRunning && mainPid > 0 && psOk ? 1 : 0;
let activeSessions = 0;
try {
activeSessions = await readActiveSessions();
} catch {
activeSessions = 0;
}
let childProcessCount = 0;
if (alive && mainPid > 0) {
try {
childProcessCount = await countDirectChildren(mainPid);
} catch {
childProcessCount = 0;
}
}
// HTTP reachability probe; a missing URL surfaces as httpError.
let httpOk = 0;
let httpStatusCode = 0;
let httpLatencyMs = 0;
let httpError = "";
try {
const h = await probeGatewayHttp(gatewayProbeUrl());
httpOk = h.httpOk;
httpStatusCode = h.httpStatusCode;
httpLatencyMs = h.httpLatencyMs;
httpError = h.httpError;
} catch {
httpOk = 0;
httpStatusCode = 0;
httpLatencyMs = 0;
httpError = "probe_failed";
}
const storedMainPid = mainPid > 0 ? mainPid : 0;
// Zero per-process metrics when the gateway is down so stale numbers are
// never persisted.
const row = {
ts,
alive,
mainPid: storedMainPid,
rssBytes: alive ? rssBytes : 0,
cpuPercent: alive ? cpuPercent : 0,
uptimeSec: alive ? uptimeSec : 0,
activeSessions,
childProcessCount: alive ? childProcessCount : 0,
httpOk,
httpStatusCode,
httpLatencyMs,
httpError
};
await db.insert(hermesGatewayHealth).values(row);
// The sense reading mirrors the stored row exactly.
return {
ts: row.ts,
alive: row.alive,
mainPid: row.mainPid,
rssBytes: row.rssBytes,
cpuPercent: row.cpuPercent,
uptimeSec: row.uptimeSec,
activeSessions: row.activeSessions,
childProcessCount: row.childProcessCount,
httpOk: row.httpOk,
httpStatusCode: row.httpStatusCode,
httpLatencyMs: row.httpLatencyMs,
httpError: row.httpError
};
}
export {
compute
};

View File

@ -1,14 +0,0 @@
-- Migration: 0001_init
-- Creates the hermes_gateway_health table for hermes-gateway-health sense.
-- One row per probe sample, inserted by the sense's compute() each tick.
CREATE TABLE IF NOT EXISTS hermes_gateway_health (
id INTEGER PRIMARY KEY AUTOINCREMENT,
ts INTEGER NOT NULL,                  -- sample time, ms since epoch
alive INTEGER NOT NULL,               -- 1 when the gateway process is up
main_pid INTEGER NOT NULL,            -- systemd MainPID (0 when unknown)
rss_bytes INTEGER NOT NULL,           -- resident memory of the main process
cpu_percent REAL NOT NULL,            -- ps %cpu, rounded to 2 decimals
uptime_sec INTEGER NOT NULL,          -- process uptime in seconds
active_sessions INTEGER NOT NULL,     -- from `hermes sessions stats`
child_process_count INTEGER NOT NULL  -- direct children of main_pid
);

View File

@ -1,7 +0,0 @@
-- Migration: 0002_add_http_probe
-- HTTP reachability columns for hermes-gateway-health sense.
-- Defaults backfill rows recorded before the HTTP probe existed.
ALTER TABLE hermes_gateway_health ADD COLUMN http_ok INTEGER NOT NULL DEFAULT 0;          -- 1 when status was 200-399
ALTER TABLE hermes_gateway_health ADD COLUMN http_status_code INTEGER NOT NULL DEFAULT 0; -- 0 when the request failed
ALTER TABLE hermes_gateway_health ADD COLUMN http_latency_ms INTEGER NOT NULL DEFAULT 0;
ALTER TABLE hermes_gateway_health ADD COLUMN http_error TEXT NOT NULL DEFAULT '';         -- truncated error, '' on success

View File

@ -1,17 +0,0 @@
{
"name": "sense-hermes-gateway-health",
"version": "0.0.1",
"private": true,
"type": "module",
"scripts": {
"build": "esbuild src/index.ts --bundle --platform=node --format=esm --outfile=index.js --packages=external"
},
"devDependencies": {
"@types/node": "^22.0.0",
"esbuild": "^0.27.0",
"typescript": "^5.7.0"
},
"pnpm": {
"onlyBuiltDependencies": ["esbuild"]
}
}

View File

@ -1,310 +0,0 @@
lockfileVersion: '9.0'
settings:
autoInstallPeers: true
excludeLinksFromLockfile: false
importers:
.:
devDependencies:
'@types/node':
specifier: ^22.0.0
version: 22.19.17
esbuild:
specifier: ^0.27.0
version: 0.27.7
typescript:
specifier: ^5.7.0
version: 5.9.3
packages:
'@esbuild/aix-ppc64@0.27.7':
resolution: {integrity: sha512-EKX3Qwmhz1eMdEJokhALr0YiD0lhQNwDqkPYyPhiSwKrh7/4KRjQc04sZ8db+5DVVnZ1LmbNDI1uAMPEUBnQPg==}
engines: {node: '>=18'}
cpu: [ppc64]
os: [aix]
'@esbuild/android-arm64@0.27.7':
resolution: {integrity: sha512-62dPZHpIXzvChfvfLJow3q5dDtiNMkwiRzPylSCfriLvZeq0a1bWChrGx/BbUbPwOrsWKMn8idSllklzBy+dgQ==}
engines: {node: '>=18'}
cpu: [arm64]
os: [android]
'@esbuild/android-arm@0.27.7':
resolution: {integrity: sha512-jbPXvB4Yj2yBV7HUfE2KHe4GJX51QplCN1pGbYjvsyCZbQmies29EoJbkEc+vYuU5o45AfQn37vZlyXy4YJ8RQ==}
engines: {node: '>=18'}
cpu: [arm]
os: [android]
'@esbuild/android-x64@0.27.7':
resolution: {integrity: sha512-x5VpMODneVDb70PYV2VQOmIUUiBtY3D3mPBG8NxVk5CogneYhkR7MmM3yR/uMdITLrC1ml/NV1rj4bMJuy9MCg==}
engines: {node: '>=18'}
cpu: [x64]
os: [android]
'@esbuild/darwin-arm64@0.27.7':
resolution: {integrity: sha512-5lckdqeuBPlKUwvoCXIgI2D9/ABmPq3Rdp7IfL70393YgaASt7tbju3Ac+ePVi3KDH6N2RqePfHnXkaDtY9fkw==}
engines: {node: '>=18'}
cpu: [arm64]
os: [darwin]
'@esbuild/darwin-x64@0.27.7':
resolution: {integrity: sha512-rYnXrKcXuT7Z+WL5K980jVFdvVKhCHhUwid+dDYQpH+qu+TefcomiMAJpIiC2EM3Rjtq0sO3StMV/+3w3MyyqQ==}
engines: {node: '>=18'}
cpu: [x64]
os: [darwin]
'@esbuild/freebsd-arm64@0.27.7':
resolution: {integrity: sha512-B48PqeCsEgOtzME2GbNM2roU29AMTuOIN91dsMO30t+Ydis3z/3Ngoj5hhnsOSSwNzS+6JppqWsuhTp6E82l2w==}
engines: {node: '>=18'}
cpu: [arm64]
os: [freebsd]
'@esbuild/freebsd-x64@0.27.7':
resolution: {integrity: sha512-jOBDK5XEjA4m5IJK3bpAQF9/Lelu/Z9ZcdhTRLf4cajlB+8VEhFFRjWgfy3M1O4rO2GQ/b2dLwCUGpiF/eATNQ==}
engines: {node: '>=18'}
cpu: [x64]
os: [freebsd]
'@esbuild/linux-arm64@0.27.7':
resolution: {integrity: sha512-RZPHBoxXuNnPQO9rvjh5jdkRmVizktkT7TCDkDmQ0W2SwHInKCAV95GRuvdSvA7w4VMwfCjUiPwDi0ZO6Nfe9A==}
engines: {node: '>=18'}
cpu: [arm64]
os: [linux]
'@esbuild/linux-arm@0.27.7':
resolution: {integrity: sha512-RkT/YXYBTSULo3+af8Ib0ykH8u2MBh57o7q/DAs3lTJlyVQkgQvlrPTnjIzzRPQyavxtPtfg0EopvDyIt0j1rA==}
engines: {node: '>=18'}
cpu: [arm]
os: [linux]
'@esbuild/linux-ia32@0.27.7':
resolution: {integrity: sha512-GA48aKNkyQDbd3KtkplYWT102C5sn/EZTY4XROkxONgruHPU72l+gW+FfF8tf2cFjeHaRbWpOYa/uRBz/Xq1Pg==}
engines: {node: '>=18'}
cpu: [ia32]
os: [linux]
'@esbuild/linux-loong64@0.27.7':
resolution: {integrity: sha512-a4POruNM2oWsD4WKvBSEKGIiWQF8fZOAsycHOt6JBpZ+JN2n2JH9WAv56SOyu9X5IqAjqSIPTaJkqN8F7XOQ5Q==}
engines: {node: '>=18'}
cpu: [loong64]
os: [linux]
'@esbuild/linux-mips64el@0.27.7':
resolution: {integrity: sha512-KabT5I6StirGfIz0FMgl1I+R1H73Gp0ofL9A3nG3i/cYFJzKHhouBV5VWK1CSgKvVaG4q1RNpCTR2LuTVB3fIw==}
engines: {node: '>=18'}
cpu: [mips64el]
os: [linux]
'@esbuild/linux-ppc64@0.27.7':
resolution: {integrity: sha512-gRsL4x6wsGHGRqhtI+ifpN/vpOFTQtnbsupUF5R5YTAg+y/lKelYR1hXbnBdzDjGbMYjVJLJTd2OFmMewAgwlQ==}
engines: {node: '>=18'}
cpu: [ppc64]
os: [linux]
'@esbuild/linux-riscv64@0.27.7':
resolution: {integrity: sha512-hL25LbxO1QOngGzu2U5xeXtxXcW+/GvMN3ejANqXkxZ/opySAZMrc+9LY/WyjAan41unrR3YrmtTsUpwT66InQ==}
engines: {node: '>=18'}
cpu: [riscv64]
os: [linux]
'@esbuild/linux-s390x@0.27.7':
resolution: {integrity: sha512-2k8go8Ycu1Kb46vEelhu1vqEP+UeRVj2zY1pSuPdgvbd5ykAw82Lrro28vXUrRmzEsUV0NzCf54yARIK8r0fdw==}
engines: {node: '>=18'}
cpu: [s390x]
os: [linux]
'@esbuild/linux-x64@0.27.7':
resolution: {integrity: sha512-hzznmADPt+OmsYzw1EE33ccA+HPdIqiCRq7cQeL1Jlq2gb1+OyWBkMCrYGBJ+sxVzve2ZJEVeePbLM2iEIZSxA==}
engines: {node: '>=18'}
cpu: [x64]
os: [linux]
'@esbuild/netbsd-arm64@0.27.7':
resolution: {integrity: sha512-b6pqtrQdigZBwZxAn1UpazEisvwaIDvdbMbmrly7cDTMFnw/+3lVxxCTGOrkPVnsYIosJJXAsILG9XcQS+Yu6w==}
engines: {node: '>=18'}
cpu: [arm64]
os: [netbsd]
'@esbuild/netbsd-x64@0.27.7':
resolution: {integrity: sha512-OfatkLojr6U+WN5EDYuoQhtM+1xco+/6FSzJJnuWiUw5eVcicbyK3dq5EeV/QHT1uy6GoDhGbFpprUiHUYggrw==}
engines: {node: '>=18'}
cpu: [x64]
os: [netbsd]
'@esbuild/openbsd-arm64@0.27.7':
resolution: {integrity: sha512-AFuojMQTxAz75Fo8idVcqoQWEHIXFRbOc1TrVcFSgCZtQfSdc1RXgB3tjOn/krRHENUB4j00bfGjyl2mJrU37A==}
engines: {node: '>=18'}
cpu: [arm64]
os: [openbsd]
'@esbuild/openbsd-x64@0.27.7':
resolution: {integrity: sha512-+A1NJmfM8WNDv5CLVQYJ5PshuRm/4cI6WMZRg1by1GwPIQPCTs1GLEUHwiiQGT5zDdyLiRM/l1G0Pv54gvtKIg==}
engines: {node: '>=18'}
cpu: [x64]
os: [openbsd]
'@esbuild/openharmony-arm64@0.27.7':
resolution: {integrity: sha512-+KrvYb/C8zA9CU/g0sR6w2RBw7IGc5J2BPnc3dYc5VJxHCSF1yNMxTV5LQ7GuKteQXZtspjFbiuW5/dOj7H4Yw==}
engines: {node: '>=18'}
cpu: [arm64]
os: [openharmony]
'@esbuild/sunos-x64@0.27.7':
resolution: {integrity: sha512-ikktIhFBzQNt/QDyOL580ti9+5mL/YZeUPKU2ivGtGjdTYoqz6jObj6nOMfhASpS4GU4Q/Clh1QtxWAvcYKamA==}
engines: {node: '>=18'}
cpu: [x64]
os: [sunos]
'@esbuild/win32-arm64@0.27.7':
resolution: {integrity: sha512-7yRhbHvPqSpRUV7Q20VuDwbjW5kIMwTHpptuUzV+AA46kiPze5Z7qgt6CLCK3pWFrHeNfDd1VKgyP4O+ng17CA==}
engines: {node: '>=18'}
cpu: [arm64]
os: [win32]
'@esbuild/win32-ia32@0.27.7':
resolution: {integrity: sha512-SmwKXe6VHIyZYbBLJrhOoCJRB/Z1tckzmgTLfFYOfpMAx63BJEaL9ExI8x7v0oAO3Zh6D/Oi1gVxEYr5oUCFhw==}
engines: {node: '>=18'}
cpu: [ia32]
os: [win32]
'@esbuild/win32-x64@0.27.7':
resolution: {integrity: sha512-56hiAJPhwQ1R4i+21FVF7V8kSD5zZTdHcVuRFMW0hn753vVfQN8xlx4uOPT4xoGH0Z/oVATuR82AiqSTDIpaHg==}
engines: {node: '>=18'}
cpu: [x64]
os: [win32]
'@types/node@22.19.17':
resolution: {integrity: sha512-wGdMcf+vPYM6jikpS/qhg6WiqSV/OhG+jeeHT/KlVqxYfD40iYJf9/AE1uQxVWFvU7MipKRkRv8NSHiCGgPr8Q==}
esbuild@0.27.7:
resolution: {integrity: sha512-IxpibTjyVnmrIQo5aqNpCgoACA/dTKLTlhMHihVHhdkxKyPO1uBBthumT0rdHmcsk9uMonIWS0m4FljWzILh3w==}
engines: {node: '>=18'}
hasBin: true
typescript@5.9.3:
resolution: {integrity: sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw==}
engines: {node: '>=14.17'}
hasBin: true
undici-types@6.21.0:
resolution: {integrity: sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ==}
snapshots:
'@esbuild/aix-ppc64@0.27.7':
optional: true
'@esbuild/android-arm64@0.27.7':
optional: true
'@esbuild/android-arm@0.27.7':
optional: true
'@esbuild/android-x64@0.27.7':
optional: true
'@esbuild/darwin-arm64@0.27.7':
optional: true
'@esbuild/darwin-x64@0.27.7':
optional: true
'@esbuild/freebsd-arm64@0.27.7':
optional: true
'@esbuild/freebsd-x64@0.27.7':
optional: true
'@esbuild/linux-arm64@0.27.7':
optional: true
'@esbuild/linux-arm@0.27.7':
optional: true
'@esbuild/linux-ia32@0.27.7':
optional: true
'@esbuild/linux-loong64@0.27.7':
optional: true
'@esbuild/linux-mips64el@0.27.7':
optional: true
'@esbuild/linux-ppc64@0.27.7':
optional: true
'@esbuild/linux-riscv64@0.27.7':
optional: true
'@esbuild/linux-s390x@0.27.7':
optional: true
'@esbuild/linux-x64@0.27.7':
optional: true
'@esbuild/netbsd-arm64@0.27.7':
optional: true
'@esbuild/netbsd-x64@0.27.7':
optional: true
'@esbuild/openbsd-arm64@0.27.7':
optional: true
'@esbuild/openbsd-x64@0.27.7':
optional: true
'@esbuild/openharmony-arm64@0.27.7':
optional: true
'@esbuild/sunos-x64@0.27.7':
optional: true
'@esbuild/win32-arm64@0.27.7':
optional: true
'@esbuild/win32-ia32@0.27.7':
optional: true
'@esbuild/win32-x64@0.27.7':
optional: true
'@types/node@22.19.17':
dependencies:
undici-types: 6.21.0
esbuild@0.27.7:
optionalDependencies:
'@esbuild/aix-ppc64': 0.27.7
'@esbuild/android-arm': 0.27.7
'@esbuild/android-arm64': 0.27.7
'@esbuild/android-x64': 0.27.7
'@esbuild/darwin-arm64': 0.27.7
'@esbuild/darwin-x64': 0.27.7
'@esbuild/freebsd-arm64': 0.27.7
'@esbuild/freebsd-x64': 0.27.7
'@esbuild/linux-arm': 0.27.7
'@esbuild/linux-arm64': 0.27.7
'@esbuild/linux-ia32': 0.27.7
'@esbuild/linux-loong64': 0.27.7
'@esbuild/linux-mips64el': 0.27.7
'@esbuild/linux-ppc64': 0.27.7
'@esbuild/linux-riscv64': 0.27.7
'@esbuild/linux-s390x': 0.27.7
'@esbuild/linux-x64': 0.27.7
'@esbuild/netbsd-arm64': 0.27.7
'@esbuild/netbsd-x64': 0.27.7
'@esbuild/openbsd-arm64': 0.27.7
'@esbuild/openbsd-x64': 0.27.7
'@esbuild/openharmony-arm64': 0.27.7
'@esbuild/sunos-x64': 0.27.7
'@esbuild/win32-arm64': 0.27.7
'@esbuild/win32-ia32': 0.27.7
'@esbuild/win32-x64': 0.27.7
typescript@5.9.3: {}
undici-types@6.21.0: {}

View File

@ -1,7 +1,4 @@
import { execFile } from "node:child_process";
import type { LibSQLDatabase } from "drizzle-orm/libsql";
import { hermesGatewayHealth } from "./schema.ts";
/** Keep subprocess deadlines slightly under typical sense timeout (30s). */
const EXEC_TIMEOUT_MS = 25_000;
@ -10,6 +7,22 @@ const HTTP_TIMEOUT_MS = Math.min(23_000, EXEC_TIMEOUT_MS - 2000);
const HTTP_ERROR_MAX_LEN = 256;
/** How many consecutive failures before triggering a restart. */
const FAILURE_THRESHOLD = 3;
type SenseState = {
consecutiveFailures: number;
lastRestartTs: number;
/** Minimum ms between restart attempts to avoid restart loops. */
restartCooldownMs: number;
};
export const initialState: SenseState = {
consecutiveFailures: 0,
lastRestartTs: 0,
restartCooldownMs: 300_000, // 5 minutes
};
function gatewayProbeUrl(): string {
const u =
process.env.HERMES_GATEWAY_HEALTH_URL ??
@ -27,17 +40,13 @@ function truncateHttpError(err: unknown): string {
return s.length > HTTP_ERROR_MAX_LEN ? s.slice(0, HTTP_ERROR_MAX_LEN) : s;
}
interface HttpProbeResult {
type HttpProbeResult = {
httpOk: number;
httpStatusCode: number;
httpLatencyMs: number;
httpError: string;
}
};
/**
* GET the gateway URL; success = HTTP 200399.
* URL must be set via HERMES_GATEWAY_HEALTH_URL or NERVE_HERMES_GATEWAY_URL.
*/
async function probeGatewayHttp(url: string): Promise<HttpProbeResult> {
if (!url) {
return {
@ -75,10 +84,6 @@ async function probeGatewayHttp(url: string): Promise<HttpProbeResult> {
}
}
/**
* When `ps` lacks `etimes` (wall-clock seconds since start), parse `etime`
* ([[dd-]hh:]mm:ss) into seconds. See ps(1) `etime` field description.
*/
function etimeToSeconds(etime: string): number {
let s = String(etime).trim();
if (!s) return 0;
@ -103,12 +108,12 @@ function etimeToSeconds(etime: string): number {
return 0;
}
interface ExecResult {
type ExecResult = {
exitCode: number;
errCode: string | undefined;
stdout: string;
stderr: string;
}
};
function execFileUtf8(file: string, args: string[], opts: Record<string, unknown> = {}): Promise<ExecResult> {
return new Promise((resolve) => {
@ -217,11 +222,11 @@ async function processExists(mainPid: number): Promise<boolean> {
return r.stdout.trim().length > 0;
}
interface PsMetrics {
type PsMetrics = {
rssBytes: number;
cpuPercent: number;
uptimeSec: number;
}
};
async function readPsMetrics(mainPid: number): Promise<PsMetrics> {
if (mainPid <= 0) {
@ -266,61 +271,12 @@ async function readPsMetrics(mainPid: number): Promise<PsMetrics> {
return { rssBytes, cpuPercent, uptimeSec };
}
function parseActiveSessionsFromHermesStats(text: string): number {
const src = String(text);
const patterns = [
/^\s*Active\s+sessions?:\s*(\d+)/gim,
/^\s*active\s+sessions?:\s*(\d+)/gim,
/^\s*Total\s+sessions?:\s*(\d+)/gim,
];
for (const re of patterns) {
re.lastIndex = 0;
const m = re.exec(src);
if (m) {
const n = Math.trunc(Number.parseInt(m[1], 10));
return Number.isFinite(n) ? n : 0;
}
}
return 0;
}
async function readActiveSessions(): Promise<number> {
try {
const r = await execFileUtf8("hermes", ["sessions", "stats"]);
if (r.errCode === "ENOENT") return 0;
return parseActiveSessionsFromHermesStats(`${r.stdout}\n${r.stderr}`);
} catch {
return 0;
}
}
async function countDirectChildren(mainPid: number): Promise<number> {
if (mainPid <= 0) return 0;
try {
const r = await execFileUtf8("ps", [
"--no-headers",
"-o",
"pid",
"--ppid",
String(mainPid),
]);
if (r.errCode === "ENOENT") return 0;
const lines = r.stdout
.split("\n")
.map((l) => l.trim())
.filter(Boolean);
return lines.length;
} catch {
return 0;
}
}
export async function compute(db: LibSQLDatabase, _peers: unknown) {
const ts = Date.now();
export async function compute(prevState: SenseState) {
const now = Date.now();
// --- probe gateway ---
let mainPid = 0;
let systemdActiveRunning = false;
try {
const st = await readSystemdState();
mainPid = st.mainPid;
@ -355,22 +311,6 @@ export async function compute(db: LibSQLDatabase, _peers: unknown) {
const alive = systemdActiveRunning && mainPid > 0 && psOk ? 1 : 0;
let activeSessions = 0;
try {
activeSessions = await readActiveSessions();
} catch {
activeSessions = 0;
}
let childProcessCount = 0;
if (alive && mainPid > 0) {
try {
childProcessCount = await countDirectChildren(mainPid);
} catch {
childProcessCount = 0;
}
}
let httpOk = 0;
let httpStatusCode = 0;
let httpLatencyMs = 0;
@ -388,37 +328,42 @@ export async function compute(db: LibSQLDatabase, _peers: unknown) {
httpError = "probe_failed";
}
const storedMainPid = mainPid > 0 ? mainPid : 0;
// --- decide health ---
const healthy = alive === 1 && httpOk === 1;
const row = {
ts,
// --- state machine: track consecutive failures ---
const consecutiveFailures = healthy ? 0 : prevState.consecutiveFailures + 1;
const lastRestartTs = prevState.lastRestartTs;
const cooldown = prevState.restartCooldownMs;
const cooldownElapsed = now - lastRestartTs >= cooldown;
// --- trigger restart? ---
const shouldRestart =
consecutiveFailures >= FAILURE_THRESHOLD && cooldownElapsed;
const nextState: SenseState = {
consecutiveFailures,
lastRestartTs: shouldRestart ? now : lastRestartTs,
restartCooldownMs: cooldown,
};
const signal = {
ts: now,
alive,
mainPid: storedMainPid,
mainPid: mainPid > 0 ? mainPid : 0,
rssBytes: alive ? rssBytes : 0,
cpuPercent: alive ? cpuPercent : 0,
uptimeSec: alive ? uptimeSec : 0,
activeSessions,
childProcessCount: alive ? childProcessCount : 0,
httpOk,
httpStatusCode,
httpLatencyMs,
httpError,
consecutiveFailures,
};
await db.insert(hermesGatewayHealth).values(row);
const trigger = shouldRestart
? { command: "systemctl --user restart hermes-gateway" }
: null;
return {
ts: row.ts,
alive: row.alive,
mainPid: row.mainPid,
rssBytes: row.rssBytes,
cpuPercent: row.cpuPercent,
uptimeSec: row.uptimeSec,
activeSessions: row.activeSessions,
childProcessCount: row.childProcessCount,
httpOk: row.httpOk,
httpStatusCode: row.httpStatusCode,
httpLatencyMs: row.httpLatencyMs,
httpError: row.httpError,
};
return { state: nextState, signal, trigger };
}

View File

@ -1,17 +0,0 @@
import { integer, real, sqliteTable, text } from "drizzle-orm/sqlite-core";
export const hermesGatewayHealth = sqliteTable("hermes_gateway_health", {
id: integer("id").primaryKey({ autoIncrement: true }),
ts: integer("ts").notNull(),
alive: integer("alive").notNull(),
mainPid: integer("main_pid").notNull(),
rssBytes: integer("rss_bytes").notNull(),
cpuPercent: real("cpu_percent").notNull(),
uptimeSec: integer("uptime_sec").notNull(),
activeSessions: integer("active_sessions").notNull(),
childProcessCount: integer("child_process_count").notNull(),
httpOk: integer("http_ok").notNull(),
httpStatusCode: integer("http_status_code").notNull(),
httpLatencyMs: integer("http_latency_ms").notNull(),
httpError: text("http_error").notNull(),
});

View File

@ -1,118 +0,0 @@
// src/index.ts
import { createReadStream } from "node:fs";
import { readdir } from "node:fs/promises";
import { homedir } from "node:os";
import { join } from "node:path";
import { createInterface } from "node:readline";
// src/schema.ts
import { integer, sqliteTable } from "drizzle-orm/sqlite-core";
// Drizzle table: one aggregated message-count sample per row, covering the
// trailing measurement window ending at `ts`.
var hermesSessionMessageStats = sqliteTable("hermes_session_message_stats", {
id: integer("id").primaryKey({ autoIncrement: true }),
// Sample timestamp (ms since epoch); end of the measurement window.
ts: integer("ts").notNull(),
totalUserMessages: integer("total_user_messages").notNull(),
totalAssistantMessages: integer("total_assistant_messages").notNull(),
totalToolMessages: integer("total_tool_messages").notNull(),
// Sum of the three role counters above.
totalMessages: integer("total_messages").notNull(),
// Session files with at least one in-window message.
activeSessions: integer("active_sessions").notNull(),
measurementWindowSeconds: integer("measurement_window_seconds").notNull()
});
// src/index.ts
// Measurement window: trailing 15 minutes (900000 ms), stored per row in
// seconds for consumers of the table.
var MEASUREMENT_WINDOW_MS = 9e5;
var MEASUREMENT_WINDOW_SECONDS = 900;
/**
 * Stream one session .jsonl file and count messages whose `timestamp`
 * falls inside [cutoffMs, nowMs], bucketed by role (user/assistant/tool,
 * case-insensitive). Blank lines, malformed JSON, unknown roles, and
 * out-of-window records are skipped.
 *
 * Returns { user, assistant, tool, fileHadActivity } where
 * fileHadActivity is true when at least one in-window message was counted.
 */
async function aggregateJsonlFile(filePath, cutoffMs, nowMs) {
  let user = 0;
  let assistant = 0;
  let tool = 0;
  let fileHadActivity = false;
  const input = createReadStream(filePath, { encoding: "utf8" });
  const rl = createInterface({ input, crlfDelay: Infinity });
  try {
    for await (const line of rl) {
      const trimmed = line.trim();
      if (!trimmed) continue;
      let obj;
      try {
        obj = JSON.parse(trimmed);
      } catch {
        continue; // tolerate malformed JSON lines
      }
      if (typeof obj !== "object" || obj === null || typeof obj.role !== "string" || typeof obj.timestamp !== "string") {
        continue;
      }
      const record = obj;
      const t = Date.parse(record.timestamp);
      if (!Number.isFinite(t) || t < cutoffMs || t > nowMs) continue;
      const roleNorm = record.role.trim().toLowerCase();
      if (roleNorm === "user") {
        user++;
        fileHadActivity = true;
      } else if (roleNorm === "assistant") {
        assistant++;
        fileHadActivity = true;
      } else if (roleNorm === "tool") {
        tool++;
        fileHadActivity = true;
      }
    }
  } finally {
    rl.close();
    // Fix: rl.close() alone does not release the underlying read stream, so
    // an early exit (e.g. a read error) leaked the file descriptor until GC.
    input.destroy();
  }
  return { user, assistant, tool, fileHadActivity };
}
async function compute(db, _peers) {
const nowMs = Date.now();
const cutoffMs = nowMs - MEASUREMENT_WINDOW_MS;
const ts = nowMs;
let totalUserMessages = 0;
let totalAssistantMessages = 0;
let totalToolMessages = 0;
let activeSessions = 0;
const sessionsDir = join(homedir(), ".hermes", "sessions");
let files = [];
try {
const entries = await readdir(sessionsDir, { withFileTypes: true });
files = entries.filter((e) => e.isFile() && e.name.endsWith(".jsonl")).map((e) => join(sessionsDir, e.name));
} catch (err) {
if (err && typeof err === "object" && "code" in err && err.code === "ENOENT") {
files = [];
} else {
throw err;
}
}
for (const filePath of files) {
const { user, assistant, tool, fileHadActivity } = await aggregateJsonlFile(
filePath,
cutoffMs,
nowMs
);
totalUserMessages += user;
totalAssistantMessages += assistant;
totalToolMessages += tool;
if (fileHadActivity) activeSessions++;
}
const totalMessages = totalUserMessages + totalAssistantMessages + totalToolMessages;
const row = {
ts,
totalUserMessages,
totalAssistantMessages,
totalToolMessages,
totalMessages,
activeSessions,
measurementWindowSeconds: MEASUREMENT_WINDOW_SECONDS
};
await db.insert(hermesSessionMessageStats).values(row);
return {
ts: row.ts,
totalUserMessages: row.totalUserMessages,
totalAssistantMessages: row.totalAssistantMessages,
totalToolMessages: row.totalToolMessages,
totalMessages: row.totalMessages,
activeSessions: row.activeSessions,
measurementWindowSeconds: row.measurementWindowSeconds
};
}
export {
compute
};

View File

@ -1,13 +0,0 @@
-- Migration: 0001_init
-- Creates the hermes_session_message_stats table for hermes-session-message-stats sense.
-- One row per sample; counts cover the trailing measurement window.
CREATE TABLE IF NOT EXISTS hermes_session_message_stats (
id INTEGER PRIMARY KEY AUTOINCREMENT,
ts INTEGER NOT NULL,                          -- sample time, ms since epoch
total_user_messages INTEGER NOT NULL,
total_assistant_messages INTEGER NOT NULL,
total_tool_messages INTEGER NOT NULL,
total_messages INTEGER NOT NULL,              -- sum of the three role counts
active_sessions INTEGER NOT NULL,             -- session files with in-window activity
measurement_window_seconds INTEGER NOT NULL   -- window length in seconds (900 = 15 min)
);

View File

@ -1,17 +0,0 @@
{
"name": "sense-hermes-session-message-stats",
"version": "0.0.1",
"private": true,
"type": "module",
"scripts": {
"build": "esbuild src/index.ts --bundle --platform=node --format=esm --outfile=index.js --packages=external"
},
"devDependencies": {
"@types/node": "^22.0.0",
"esbuild": "^0.27.0",
"typescript": "^5.7.0"
},
"pnpm": {
"onlyBuiltDependencies": ["esbuild"]
}
}

View File

@ -1,310 +0,0 @@
lockfileVersion: '9.0'
settings:
autoInstallPeers: true
excludeLinksFromLockfile: false
importers:
.:
devDependencies:
'@types/node':
specifier: ^22.0.0
version: 22.19.17
esbuild:
specifier: ^0.27.0
version: 0.27.7
typescript:
specifier: ^5.7.0
version: 5.9.3
packages:
'@esbuild/aix-ppc64@0.27.7':
resolution: {integrity: sha512-EKX3Qwmhz1eMdEJokhALr0YiD0lhQNwDqkPYyPhiSwKrh7/4KRjQc04sZ8db+5DVVnZ1LmbNDI1uAMPEUBnQPg==}
engines: {node: '>=18'}
cpu: [ppc64]
os: [aix]
'@esbuild/android-arm64@0.27.7':
resolution: {integrity: sha512-62dPZHpIXzvChfvfLJow3q5dDtiNMkwiRzPylSCfriLvZeq0a1bWChrGx/BbUbPwOrsWKMn8idSllklzBy+dgQ==}
engines: {node: '>=18'}
cpu: [arm64]
os: [android]
'@esbuild/android-arm@0.27.7':
resolution: {integrity: sha512-jbPXvB4Yj2yBV7HUfE2KHe4GJX51QplCN1pGbYjvsyCZbQmies29EoJbkEc+vYuU5o45AfQn37vZlyXy4YJ8RQ==}
engines: {node: '>=18'}
cpu: [arm]
os: [android]
'@esbuild/android-x64@0.27.7':
resolution: {integrity: sha512-x5VpMODneVDb70PYV2VQOmIUUiBtY3D3mPBG8NxVk5CogneYhkR7MmM3yR/uMdITLrC1ml/NV1rj4bMJuy9MCg==}
engines: {node: '>=18'}
cpu: [x64]
os: [android]
'@esbuild/darwin-arm64@0.27.7':
resolution: {integrity: sha512-5lckdqeuBPlKUwvoCXIgI2D9/ABmPq3Rdp7IfL70393YgaASt7tbju3Ac+ePVi3KDH6N2RqePfHnXkaDtY9fkw==}
engines: {node: '>=18'}
cpu: [arm64]
os: [darwin]
'@esbuild/darwin-x64@0.27.7':
resolution: {integrity: sha512-rYnXrKcXuT7Z+WL5K980jVFdvVKhCHhUwid+dDYQpH+qu+TefcomiMAJpIiC2EM3Rjtq0sO3StMV/+3w3MyyqQ==}
engines: {node: '>=18'}
cpu: [x64]
os: [darwin]
'@esbuild/freebsd-arm64@0.27.7':
resolution: {integrity: sha512-B48PqeCsEgOtzME2GbNM2roU29AMTuOIN91dsMO30t+Ydis3z/3Ngoj5hhnsOSSwNzS+6JppqWsuhTp6E82l2w==}
engines: {node: '>=18'}
cpu: [arm64]
os: [freebsd]
'@esbuild/freebsd-x64@0.27.7':
resolution: {integrity: sha512-jOBDK5XEjA4m5IJK3bpAQF9/Lelu/Z9ZcdhTRLf4cajlB+8VEhFFRjWgfy3M1O4rO2GQ/b2dLwCUGpiF/eATNQ==}
engines: {node: '>=18'}
cpu: [x64]
os: [freebsd]
'@esbuild/linux-arm64@0.27.7':
resolution: {integrity: sha512-RZPHBoxXuNnPQO9rvjh5jdkRmVizktkT7TCDkDmQ0W2SwHInKCAV95GRuvdSvA7w4VMwfCjUiPwDi0ZO6Nfe9A==}
engines: {node: '>=18'}
cpu: [arm64]
os: [linux]
'@esbuild/linux-arm@0.27.7':
resolution: {integrity: sha512-RkT/YXYBTSULo3+af8Ib0ykH8u2MBh57o7q/DAs3lTJlyVQkgQvlrPTnjIzzRPQyavxtPtfg0EopvDyIt0j1rA==}
engines: {node: '>=18'}
cpu: [arm]
os: [linux]
'@esbuild/linux-ia32@0.27.7':
resolution: {integrity: sha512-GA48aKNkyQDbd3KtkplYWT102C5sn/EZTY4XROkxONgruHPU72l+gW+FfF8tf2cFjeHaRbWpOYa/uRBz/Xq1Pg==}
engines: {node: '>=18'}
cpu: [ia32]
os: [linux]
'@esbuild/linux-loong64@0.27.7':
resolution: {integrity: sha512-a4POruNM2oWsD4WKvBSEKGIiWQF8fZOAsycHOt6JBpZ+JN2n2JH9WAv56SOyu9X5IqAjqSIPTaJkqN8F7XOQ5Q==}
engines: {node: '>=18'}
cpu: [loong64]
os: [linux]
'@esbuild/linux-mips64el@0.27.7':
resolution: {integrity: sha512-KabT5I6StirGfIz0FMgl1I+R1H73Gp0ofL9A3nG3i/cYFJzKHhouBV5VWK1CSgKvVaG4q1RNpCTR2LuTVB3fIw==}
engines: {node: '>=18'}
cpu: [mips64el]
os: [linux]
'@esbuild/linux-ppc64@0.27.7':
resolution: {integrity: sha512-gRsL4x6wsGHGRqhtI+ifpN/vpOFTQtnbsupUF5R5YTAg+y/lKelYR1hXbnBdzDjGbMYjVJLJTd2OFmMewAgwlQ==}
engines: {node: '>=18'}
cpu: [ppc64]
os: [linux]
'@esbuild/linux-riscv64@0.27.7':
resolution: {integrity: sha512-hL25LbxO1QOngGzu2U5xeXtxXcW+/GvMN3ejANqXkxZ/opySAZMrc+9LY/WyjAan41unrR3YrmtTsUpwT66InQ==}
engines: {node: '>=18'}
cpu: [riscv64]
os: [linux]
'@esbuild/linux-s390x@0.27.7':
resolution: {integrity: sha512-2k8go8Ycu1Kb46vEelhu1vqEP+UeRVj2zY1pSuPdgvbd5ykAw82Lrro28vXUrRmzEsUV0NzCf54yARIK8r0fdw==}
engines: {node: '>=18'}
cpu: [s390x]
os: [linux]
'@esbuild/linux-x64@0.27.7':
resolution: {integrity: sha512-hzznmADPt+OmsYzw1EE33ccA+HPdIqiCRq7cQeL1Jlq2gb1+OyWBkMCrYGBJ+sxVzve2ZJEVeePbLM2iEIZSxA==}
engines: {node: '>=18'}
cpu: [x64]
os: [linux]
'@esbuild/netbsd-arm64@0.27.7':
resolution: {integrity: sha512-b6pqtrQdigZBwZxAn1UpazEisvwaIDvdbMbmrly7cDTMFnw/+3lVxxCTGOrkPVnsYIosJJXAsILG9XcQS+Yu6w==}
engines: {node: '>=18'}
cpu: [arm64]
os: [netbsd]
'@esbuild/netbsd-x64@0.27.7':
resolution: {integrity: sha512-OfatkLojr6U+WN5EDYuoQhtM+1xco+/6FSzJJnuWiUw5eVcicbyK3dq5EeV/QHT1uy6GoDhGbFpprUiHUYggrw==}
engines: {node: '>=18'}
cpu: [x64]
os: [netbsd]
'@esbuild/openbsd-arm64@0.27.7':
resolution: {integrity: sha512-AFuojMQTxAz75Fo8idVcqoQWEHIXFRbOc1TrVcFSgCZtQfSdc1RXgB3tjOn/krRHENUB4j00bfGjyl2mJrU37A==}
engines: {node: '>=18'}
cpu: [arm64]
os: [openbsd]
'@esbuild/openbsd-x64@0.27.7':
resolution: {integrity: sha512-+A1NJmfM8WNDv5CLVQYJ5PshuRm/4cI6WMZRg1by1GwPIQPCTs1GLEUHwiiQGT5zDdyLiRM/l1G0Pv54gvtKIg==}
engines: {node: '>=18'}
cpu: [x64]
os: [openbsd]
'@esbuild/openharmony-arm64@0.27.7':
resolution: {integrity: sha512-+KrvYb/C8zA9CU/g0sR6w2RBw7IGc5J2BPnc3dYc5VJxHCSF1yNMxTV5LQ7GuKteQXZtspjFbiuW5/dOj7H4Yw==}
engines: {node: '>=18'}
cpu: [arm64]
os: [openharmony]
'@esbuild/sunos-x64@0.27.7':
resolution: {integrity: sha512-ikktIhFBzQNt/QDyOL580ti9+5mL/YZeUPKU2ivGtGjdTYoqz6jObj6nOMfhASpS4GU4Q/Clh1QtxWAvcYKamA==}
engines: {node: '>=18'}
cpu: [x64]
os: [sunos]
'@esbuild/win32-arm64@0.27.7':
resolution: {integrity: sha512-7yRhbHvPqSpRUV7Q20VuDwbjW5kIMwTHpptuUzV+AA46kiPze5Z7qgt6CLCK3pWFrHeNfDd1VKgyP4O+ng17CA==}
engines: {node: '>=18'}
cpu: [arm64]
os: [win32]
'@esbuild/win32-ia32@0.27.7':
resolution: {integrity: sha512-SmwKXe6VHIyZYbBLJrhOoCJRB/Z1tckzmgTLfFYOfpMAx63BJEaL9ExI8x7v0oAO3Zh6D/Oi1gVxEYr5oUCFhw==}
engines: {node: '>=18'}
cpu: [ia32]
os: [win32]
'@esbuild/win32-x64@0.27.7':
resolution: {integrity: sha512-56hiAJPhwQ1R4i+21FVF7V8kSD5zZTdHcVuRFMW0hn753vVfQN8xlx4uOPT4xoGH0Z/oVATuR82AiqSTDIpaHg==}
engines: {node: '>=18'}
cpu: [x64]
os: [win32]
'@types/node@22.19.17':
resolution: {integrity: sha512-wGdMcf+vPYM6jikpS/qhg6WiqSV/OhG+jeeHT/KlVqxYfD40iYJf9/AE1uQxVWFvU7MipKRkRv8NSHiCGgPr8Q==}
esbuild@0.27.7:
resolution: {integrity: sha512-IxpibTjyVnmrIQo5aqNpCgoACA/dTKLTlhMHihVHhdkxKyPO1uBBthumT0rdHmcsk9uMonIWS0m4FljWzILh3w==}
engines: {node: '>=18'}
hasBin: true
typescript@5.9.3:
resolution: {integrity: sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw==}
engines: {node: '>=14.17'}
hasBin: true
undici-types@6.21.0:
resolution: {integrity: sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ==}
snapshots:
'@esbuild/aix-ppc64@0.27.7':
optional: true
'@esbuild/android-arm64@0.27.7':
optional: true
'@esbuild/android-arm@0.27.7':
optional: true
'@esbuild/android-x64@0.27.7':
optional: true
'@esbuild/darwin-arm64@0.27.7':
optional: true
'@esbuild/darwin-x64@0.27.7':
optional: true
'@esbuild/freebsd-arm64@0.27.7':
optional: true
'@esbuild/freebsd-x64@0.27.7':
optional: true
'@esbuild/linux-arm64@0.27.7':
optional: true
'@esbuild/linux-arm@0.27.7':
optional: true
'@esbuild/linux-ia32@0.27.7':
optional: true
'@esbuild/linux-loong64@0.27.7':
optional: true
'@esbuild/linux-mips64el@0.27.7':
optional: true
'@esbuild/linux-ppc64@0.27.7':
optional: true
'@esbuild/linux-riscv64@0.27.7':
optional: true
'@esbuild/linux-s390x@0.27.7':
optional: true
'@esbuild/linux-x64@0.27.7':
optional: true
'@esbuild/netbsd-arm64@0.27.7':
optional: true
'@esbuild/netbsd-x64@0.27.7':
optional: true
'@esbuild/openbsd-arm64@0.27.7':
optional: true
'@esbuild/openbsd-x64@0.27.7':
optional: true
'@esbuild/openharmony-arm64@0.27.7':
optional: true
'@esbuild/sunos-x64@0.27.7':
optional: true
'@esbuild/win32-arm64@0.27.7':
optional: true
'@esbuild/win32-ia32@0.27.7':
optional: true
'@esbuild/win32-x64@0.27.7':
optional: true
'@types/node@22.19.17':
dependencies:
undici-types: 6.21.0
esbuild@0.27.7:
optionalDependencies:
'@esbuild/aix-ppc64': 0.27.7
'@esbuild/android-arm': 0.27.7
'@esbuild/android-arm64': 0.27.7
'@esbuild/android-x64': 0.27.7
'@esbuild/darwin-arm64': 0.27.7
'@esbuild/darwin-x64': 0.27.7
'@esbuild/freebsd-arm64': 0.27.7
'@esbuild/freebsd-x64': 0.27.7
'@esbuild/linux-arm': 0.27.7
'@esbuild/linux-arm64': 0.27.7
'@esbuild/linux-ia32': 0.27.7
'@esbuild/linux-loong64': 0.27.7
'@esbuild/linux-mips64el': 0.27.7
'@esbuild/linux-ppc64': 0.27.7
'@esbuild/linux-riscv64': 0.27.7
'@esbuild/linux-s390x': 0.27.7
'@esbuild/linux-x64': 0.27.7
'@esbuild/netbsd-arm64': 0.27.7
'@esbuild/netbsd-x64': 0.27.7
'@esbuild/openbsd-arm64': 0.27.7
'@esbuild/openbsd-x64': 0.27.7
'@esbuild/openharmony-arm64': 0.27.7
'@esbuild/sunos-x64': 0.27.7
'@esbuild/win32-arm64': 0.27.7
'@esbuild/win32-ia32': 0.27.7
'@esbuild/win32-x64': 0.27.7
typescript@5.9.3: {}
undici-types@6.21.0: {}

View File

@ -1,128 +0,0 @@
import { createReadStream } from "node:fs";
import { readdir } from "node:fs/promises";
import { homedir } from "node:os";
import { join } from "node:path";
import { createInterface } from "node:readline";
import type { LibSQLDatabase } from "drizzle-orm/libsql";
import { hermesSessionMessageStats } from "./schema.ts";
// Aggregation window: 15 minutes, kept in both milliseconds (for the
// timestamp-cutoff math) and seconds (stored verbatim in each stats row).
const MEASUREMENT_WINDOW_MS = 900_000;
const MEASUREMENT_WINDOW_SECONDS = 900;
// Per-file tally produced by aggregateJsonlFile().
interface MessageCounts {
  user: number; // messages whose role normalized to "user"
  assistant: number; // messages whose role normalized to "assistant"
  tool: number; // messages whose role normalized to "tool"
  fileHadActivity: boolean; // true when any in-window message was counted
}
/**
 * Count user/assistant/tool messages recorded in one session JSONL file.
 *
 * Each line is expected to be a JSON object with a string `role` and a
 * string `timestamp`; blank lines, unparseable lines, lines without those
 * fields, and lines outside [cutoffMs, nowMs] are skipped silently.
 *
 * @param filePath absolute path of the .jsonl session file
 * @param cutoffMs inclusive lower bound (epoch ms) for counted messages
 * @param nowMs    inclusive upper bound (epoch ms) for counted messages
 * @returns per-role counts plus whether the file had any in-window activity
 */
async function aggregateJsonlFile(filePath: string, cutoffMs: number, nowMs: number): Promise<MessageCounts> {
  const counts: MessageCounts = { user: 0, assistant: 0, tool: 0, fileHadActivity: false };
  const stream = createReadStream(filePath, { encoding: "utf8" });
  const reader = createInterface({ input: stream, crlfDelay: Infinity });
  try {
    for await (const rawLine of reader) {
      const text = rawLine.trim();
      if (text === "") continue;
      let parsed: unknown;
      try {
        parsed = JSON.parse(text);
      } catch {
        continue; // not JSON — ignore the line
      }
      if (typeof parsed !== "object" || parsed === null) continue;
      const candidate = parsed as Record<string, unknown>;
      if (typeof candidate.role !== "string" || typeof candidate.timestamp !== "string") continue;
      const when = Date.parse(candidate.timestamp);
      if (!Number.isFinite(when) || when < cutoffMs || when > nowMs) continue;
      // Role comparison is case- and whitespace-insensitive.
      switch (candidate.role.trim().toLowerCase()) {
        case "user":
          counts.user++;
          counts.fileHadActivity = true;
          break;
        case "assistant":
          counts.assistant++;
          counts.fileHadActivity = true;
          break;
        case "tool":
          counts.tool++;
          counts.fileHadActivity = true;
          break;
      }
    }
  } finally {
    reader.close();
  }
  return counts;
}
/**
 * Snapshot message activity across all Hermes session JSONL files found in
 * ~/.hermes/sessions, persist one aggregate row, and return the row data.
 *
 * A missing sessions directory is treated as "no sessions"; any other
 * readdir error propagates to the caller.
 *
 * @param db     libSQL drizzle handle used for the insert
 * @param _peers unused peer-sense data
 */
export async function compute(db: LibSQLDatabase, _peers: unknown) {
  const nowMs = Date.now();
  const cutoffMs = nowMs - MEASUREMENT_WINDOW_MS;
  const sessionsDir = join(homedir(), ".hermes", "sessions");

  // Gather candidate .jsonl files; ENOENT simply means no data yet.
  const files: string[] = [];
  try {
    const entries = await readdir(sessionsDir, { withFileTypes: true });
    for (const entry of entries) {
      if (entry.isFile() && entry.name.endsWith(".jsonl")) {
        files.push(join(sessionsDir, entry.name));
      }
    }
  } catch (err) {
    const code = (err as NodeJS.ErrnoException | null)?.code;
    if (code !== "ENOENT") throw err;
  }

  // Sum per-role counts across files; a file with any in-window message
  // counts as one active session.
  const totals = { user: 0, assistant: 0, tool: 0 };
  let activeSessions = 0;
  for (const filePath of files) {
    const counts = await aggregateJsonlFile(filePath, cutoffMs, nowMs);
    totals.user += counts.user;
    totals.assistant += counts.assistant;
    totals.tool += counts.tool;
    if (counts.fileHadActivity) activeSessions++;
  }

  const row = {
    ts: nowMs,
    totalUserMessages: totals.user,
    totalAssistantMessages: totals.assistant,
    totalToolMessages: totals.tool,
    totalMessages: totals.user + totals.assistant + totals.tool,
    activeSessions,
    measurementWindowSeconds: MEASUREMENT_WINDOW_SECONDS,
  };
  await db.insert(hermesSessionMessageStats).values(row);
  // Return a copy of exactly what was stored.
  return { ...row };
}

View File

@ -1,12 +0,0 @@
import { integer, sqliteTable } from "drizzle-orm/sqlite-core";
// Drizzle mapping of the hermes_session_message_stats table: one row per
// aggregated snapshot of Hermes session message activity.
export const hermesSessionMessageStats = sqliteTable("hermes_session_message_stats", {
  id: integer("id").primaryKey({ autoIncrement: true }), // surrogate key
  ts: integer("ts").notNull(), // snapshot time, epoch ms (written via Date.now())
  totalUserMessages: integer("total_user_messages").notNull(),
  totalAssistantMessages: integer("total_assistant_messages").notNull(),
  totalToolMessages: integer("total_tool_messages").notNull(),
  totalMessages: integer("total_messages").notNull(), // sum of the three role counters
  activeSessions: integer("active_sessions").notNull(), // files with in-window activity
  measurementWindowSeconds: integer("measurement_window_seconds").notNull(), // 900 in the sense code
});

View File

@ -1,112 +0,0 @@
// src/index.ts
import { loadavg, totalmem, freemem, uptime } from "node:os";
import { execSync } from "node:child_process";
import { readFile } from "node:fs/promises";
// src/schema.ts
import { integer, real, sqliteTable } from "drizzle-orm/sqlite-core";
// Drizzle table for host-health snapshots; one row is inserted per
// compute() run, keyed by the capture timestamp.
var snapshots = sqliteTable("snapshots", {
  ts: integer("ts").primaryKey(), // capture time (epoch ms) doubles as the key
  cpuLoad1m: real("cpu_load_1m").notNull(), // loadavg() 1-minute average
  cpuLoad5m: real("cpu_load_5m").notNull(),
  cpuLoad15m: real("cpu_load_15m").notNull(),
  memTotalMB: integer("mem_total_mb").notNull(),
  memUsedMB: integer("mem_used_mb").notNull(),
  memUsedPct: real("mem_used_pct").notNull(), // percent, 2 decimals
  diskTotalGB: real("disk_total_gb").notNull(), // root filesystem, from df
  diskUsedGB: real("disk_used_gb").notNull(),
  diskUsedPct: real("disk_used_pct").notNull(),
  uptimeSec: integer("uptime_sec").notNull(), // host uptime, rounded seconds
  // TCP socket stats (merged from linux-tcp-socket-stats); nullable because
  // they were added after the initial migration.
  socketsUsed: integer("sockets_used"),
  tcpInuse: integer("tcp_inuse"),
  tcpOrphan: integer("tcp_orphan"),
  tcpTw: integer("tcp_tw"),
  tcpAlloc: integer("tcp_alloc"),
  tcpMemPages: integer("tcp_mem_pages")
});
// src/index.ts
var SOCKSTAT_PATH = "/proc/net/sockstat";
// Parse /proc/net/sockstat text into socket/TCP counters.
// Only "sockets:" and "TCP:" lines are consumed; anything else is ignored
// and absent or unparseable fields fall back to 0.
function parseSockstat(content) {
  const result = { socketsUsed: 0, tcpInuse: 0, tcpOrphan: 0, tcpTw: 0, tcpAlloc: 0, tcpMemPages: 0 };
  for (const rawLine of content.split("\n")) {
    const line = rawLine.trim();
    if (line.startsWith("sockets:")) {
      // Format: "sockets: used N" — locate the value right after "used".
      const fields = line.split(/\s+/);
      const usedIdx = fields.indexOf("used");
      if (usedIdx >= 0 && usedIdx + 1 < fields.length) {
        result.socketsUsed = Number.parseInt(fields[usedIdx + 1], 10) || 0;
      }
    } else if (line.startsWith("TCP:")) {
      // Format: "TCP: key1 v1 key2 v2 ..." — fold into a key→number map.
      const fields = line.split(/\s+/);
      const kv = {};
      for (let i = 1; i + 1 < fields.length; i += 2) {
        kv[fields[i]] = Number.parseInt(fields[i + 1], 10) || 0;
      }
      result.tcpInuse = kv.inuse ?? 0;
      result.tcpOrphan = kv.orphan ?? 0;
      result.tcpTw = kv.tw ?? 0;
      result.tcpAlloc = kv.alloc ?? 0;
      result.tcpMemPages = kv.mem ?? 0;
    }
  }
  return result;
}
// Capture one host-health snapshot (CPU load, memory, disk, TCP sockets),
// persist it to the snapshots table, and return the reading. Disk and
// sockstat collection are best-effort: on failure their values stay 0.
async function compute(db, _peers) {
  // CPU load averages over 1/5/15 minutes.
  const [cpuLoad1m, cpuLoad5m, cpuLoad15m] = loadavg();

  // Memory: used = total - free, reported in MB plus a 2-decimal percent.
  const totalBytes = totalmem();
  const usedBytes = totalBytes - freemem();
  const memTotalMB = Math.round(totalBytes / 1024 / 1024);
  const memUsedMB = Math.round(usedBytes / 1024 / 1024);
  const memUsedPct = Math.round(usedBytes / totalBytes * 1e4) / 100;

  // Disk: parse the root-filesystem line from df (byte units), round to GB.
  let diskTotalGB = 0;
  let diskUsedGB = 0;
  let diskUsedPct = 0;
  try {
    const dfLine = execSync("df -B1 / | tail -1", { encoding: "utf-8" }).trim();
    const fields = dfLine.split(/\s+/);
    const totalDisk = Number(fields[1]);
    const usedDisk = Number(fields[2]);
    diskTotalGB = Math.round(totalDisk / 1024 / 1024 / 1024 * 100) / 100;
    diskUsedGB = Math.round(usedDisk / 1024 / 1024 / 1024 * 100) / 100;
    diskUsedPct = totalDisk > 0 ? Math.round(usedDisk / totalDisk * 1e4) / 100 : 0;
  } catch {
    // df unavailable — leave disk stats at zero
  }

  // TCP: read /proc/net/sockstat; keep zeros when unreadable (non-Linux).
  let tcp = { socketsUsed: 0, tcpInuse: 0, tcpOrphan: 0, tcpTw: 0, tcpAlloc: 0, tcpMemPages: 0 };
  try {
    tcp = parseSockstat(await readFile(SOCKSTAT_PATH, "utf8"));
  } catch {
    // /proc may not exist on this platform
  }

  const ts = Date.now();
  const uptimeSec = Math.round(uptime());
  await db.insert(snapshots).values({
    ts,
    cpuLoad1m,
    cpuLoad5m,
    cpuLoad15m,
    memTotalMB,
    memUsedMB,
    memUsedPct,
    diskTotalGB,
    diskUsedGB,
    diskUsedPct,
    uptimeSec,
    socketsUsed: tcp.socketsUsed,
    tcpInuse: tcp.tcpInuse,
    tcpOrphan: tcp.tcpOrphan,
    tcpTw: tcp.tcpTw,
    tcpAlloc: tcp.tcpAlloc,
    tcpMemPages: tcp.tcpMemPages
  });
  return {
    cpu: { load1m: cpuLoad1m, load5m: cpuLoad5m, load15m: cpuLoad15m },
    memory: { totalMB: memTotalMB, usedMB: memUsedMB, usedPct: memUsedPct },
    disk: { totalGB: diskTotalGB, usedGB: diskUsedGB, usedPct: diskUsedPct },
    tcp: { socketsUsed: tcp.socketsUsed, inuse: tcp.tcpInuse, orphan: tcp.tcpOrphan, tw: tcp.tcpTw, alloc: tcp.tcpAlloc, memPages: tcp.tcpMemPages },
    uptimeSec
  };
}
export {
compute
};

View File

@ -1,16 +0,0 @@
-- Migration: 0001_init
-- Creates the snapshots table for linux-system-health sense.
-- One row per compute() run; the capture timestamp is the primary key.
CREATE TABLE IF NOT EXISTS snapshots (
ts INTEGER PRIMARY KEY, -- capture time, epoch milliseconds
cpu_load_1m REAL NOT NULL, -- 1-minute load average
cpu_load_5m REAL NOT NULL, -- 5-minute load average
cpu_load_15m REAL NOT NULL, -- 15-minute load average
mem_total_mb INTEGER NOT NULL, -- total physical memory, MB
mem_used_mb INTEGER NOT NULL, -- used memory (total - free), MB
mem_used_pct REAL NOT NULL, -- used memory percent, 2 decimals
disk_total_gb REAL NOT NULL, -- root filesystem size, GB (from df)
disk_used_gb REAL NOT NULL, -- root filesystem used, GB
disk_used_pct REAL NOT NULL, -- root filesystem used percent
uptime_sec INTEGER NOT NULL -- host uptime, rounded seconds
);

View File

@ -1,6 +0,0 @@
-- Adds TCP socket-stat columns (merged from the former linux-tcp-socket-stats
-- sense) to snapshots. Columns are nullable so pre-existing rows stay valid.
ALTER TABLE snapshots ADD COLUMN sockets_used INTEGER; -- "sockets: used N" from /proc/net/sockstat
ALTER TABLE snapshots ADD COLUMN tcp_inuse INTEGER;
ALTER TABLE snapshots ADD COLUMN tcp_orphan INTEGER;
ALTER TABLE snapshots ADD COLUMN tcp_tw INTEGER;
ALTER TABLE snapshots ADD COLUMN tcp_alloc INTEGER;
ALTER TABLE snapshots ADD COLUMN tcp_mem_pages INTEGER; -- TCP "mem" counter (pages)

View File

@ -1,17 +0,0 @@
{
"name": "sense-linux-system-health",
"version": "0.0.1",
"private": true,
"type": "module",
"scripts": {
"build": "esbuild src/index.ts --bundle --platform=node --format=esm --outfile=index.js --packages=external"
},
"devDependencies": {
"@types/node": "^22.0.0",
"esbuild": "^0.27.0",
"typescript": "^5.7.0"
},
"pnpm": {
"onlyBuiltDependencies": ["esbuild"]
}
}

View File

@ -1,310 +0,0 @@
lockfileVersion: '9.0'
settings:
autoInstallPeers: true
excludeLinksFromLockfile: false
importers:
.:
devDependencies:
'@types/node':
specifier: ^22.0.0
version: 22.19.17
esbuild:
specifier: ^0.27.0
version: 0.27.7
typescript:
specifier: ^5.7.0
version: 5.9.3
packages:
'@esbuild/aix-ppc64@0.27.7':
resolution: {integrity: sha512-EKX3Qwmhz1eMdEJokhALr0YiD0lhQNwDqkPYyPhiSwKrh7/4KRjQc04sZ8db+5DVVnZ1LmbNDI1uAMPEUBnQPg==}
engines: {node: '>=18'}
cpu: [ppc64]
os: [aix]
'@esbuild/android-arm64@0.27.7':
resolution: {integrity: sha512-62dPZHpIXzvChfvfLJow3q5dDtiNMkwiRzPylSCfriLvZeq0a1bWChrGx/BbUbPwOrsWKMn8idSllklzBy+dgQ==}
engines: {node: '>=18'}
cpu: [arm64]
os: [android]
'@esbuild/android-arm@0.27.7':
resolution: {integrity: sha512-jbPXvB4Yj2yBV7HUfE2KHe4GJX51QplCN1pGbYjvsyCZbQmies29EoJbkEc+vYuU5o45AfQn37vZlyXy4YJ8RQ==}
engines: {node: '>=18'}
cpu: [arm]
os: [android]
'@esbuild/android-x64@0.27.7':
resolution: {integrity: sha512-x5VpMODneVDb70PYV2VQOmIUUiBtY3D3mPBG8NxVk5CogneYhkR7MmM3yR/uMdITLrC1ml/NV1rj4bMJuy9MCg==}
engines: {node: '>=18'}
cpu: [x64]
os: [android]
'@esbuild/darwin-arm64@0.27.7':
resolution: {integrity: sha512-5lckdqeuBPlKUwvoCXIgI2D9/ABmPq3Rdp7IfL70393YgaASt7tbju3Ac+ePVi3KDH6N2RqePfHnXkaDtY9fkw==}
engines: {node: '>=18'}
cpu: [arm64]
os: [darwin]
'@esbuild/darwin-x64@0.27.7':
resolution: {integrity: sha512-rYnXrKcXuT7Z+WL5K980jVFdvVKhCHhUwid+dDYQpH+qu+TefcomiMAJpIiC2EM3Rjtq0sO3StMV/+3w3MyyqQ==}
engines: {node: '>=18'}
cpu: [x64]
os: [darwin]
'@esbuild/freebsd-arm64@0.27.7':
resolution: {integrity: sha512-B48PqeCsEgOtzME2GbNM2roU29AMTuOIN91dsMO30t+Ydis3z/3Ngoj5hhnsOSSwNzS+6JppqWsuhTp6E82l2w==}
engines: {node: '>=18'}
cpu: [arm64]
os: [freebsd]
'@esbuild/freebsd-x64@0.27.7':
resolution: {integrity: sha512-jOBDK5XEjA4m5IJK3bpAQF9/Lelu/Z9ZcdhTRLf4cajlB+8VEhFFRjWgfy3M1O4rO2GQ/b2dLwCUGpiF/eATNQ==}
engines: {node: '>=18'}
cpu: [x64]
os: [freebsd]
'@esbuild/linux-arm64@0.27.7':
resolution: {integrity: sha512-RZPHBoxXuNnPQO9rvjh5jdkRmVizktkT7TCDkDmQ0W2SwHInKCAV95GRuvdSvA7w4VMwfCjUiPwDi0ZO6Nfe9A==}
engines: {node: '>=18'}
cpu: [arm64]
os: [linux]
'@esbuild/linux-arm@0.27.7':
resolution: {integrity: sha512-RkT/YXYBTSULo3+af8Ib0ykH8u2MBh57o7q/DAs3lTJlyVQkgQvlrPTnjIzzRPQyavxtPtfg0EopvDyIt0j1rA==}
engines: {node: '>=18'}
cpu: [arm]
os: [linux]
'@esbuild/linux-ia32@0.27.7':
resolution: {integrity: sha512-GA48aKNkyQDbd3KtkplYWT102C5sn/EZTY4XROkxONgruHPU72l+gW+FfF8tf2cFjeHaRbWpOYa/uRBz/Xq1Pg==}
engines: {node: '>=18'}
cpu: [ia32]
os: [linux]
'@esbuild/linux-loong64@0.27.7':
resolution: {integrity: sha512-a4POruNM2oWsD4WKvBSEKGIiWQF8fZOAsycHOt6JBpZ+JN2n2JH9WAv56SOyu9X5IqAjqSIPTaJkqN8F7XOQ5Q==}
engines: {node: '>=18'}
cpu: [loong64]
os: [linux]
'@esbuild/linux-mips64el@0.27.7':
resolution: {integrity: sha512-KabT5I6StirGfIz0FMgl1I+R1H73Gp0ofL9A3nG3i/cYFJzKHhouBV5VWK1CSgKvVaG4q1RNpCTR2LuTVB3fIw==}
engines: {node: '>=18'}
cpu: [mips64el]
os: [linux]
'@esbuild/linux-ppc64@0.27.7':
resolution: {integrity: sha512-gRsL4x6wsGHGRqhtI+ifpN/vpOFTQtnbsupUF5R5YTAg+y/lKelYR1hXbnBdzDjGbMYjVJLJTd2OFmMewAgwlQ==}
engines: {node: '>=18'}
cpu: [ppc64]
os: [linux]
'@esbuild/linux-riscv64@0.27.7':
resolution: {integrity: sha512-hL25LbxO1QOngGzu2U5xeXtxXcW+/GvMN3ejANqXkxZ/opySAZMrc+9LY/WyjAan41unrR3YrmtTsUpwT66InQ==}
engines: {node: '>=18'}
cpu: [riscv64]
os: [linux]
'@esbuild/linux-s390x@0.27.7':
resolution: {integrity: sha512-2k8go8Ycu1Kb46vEelhu1vqEP+UeRVj2zY1pSuPdgvbd5ykAw82Lrro28vXUrRmzEsUV0NzCf54yARIK8r0fdw==}
engines: {node: '>=18'}
cpu: [s390x]
os: [linux]
'@esbuild/linux-x64@0.27.7':
resolution: {integrity: sha512-hzznmADPt+OmsYzw1EE33ccA+HPdIqiCRq7cQeL1Jlq2gb1+OyWBkMCrYGBJ+sxVzve2ZJEVeePbLM2iEIZSxA==}
engines: {node: '>=18'}
cpu: [x64]
os: [linux]
'@esbuild/netbsd-arm64@0.27.7':
resolution: {integrity: sha512-b6pqtrQdigZBwZxAn1UpazEisvwaIDvdbMbmrly7cDTMFnw/+3lVxxCTGOrkPVnsYIosJJXAsILG9XcQS+Yu6w==}
engines: {node: '>=18'}
cpu: [arm64]
os: [netbsd]
'@esbuild/netbsd-x64@0.27.7':
resolution: {integrity: sha512-OfatkLojr6U+WN5EDYuoQhtM+1xco+/6FSzJJnuWiUw5eVcicbyK3dq5EeV/QHT1uy6GoDhGbFpprUiHUYggrw==}
engines: {node: '>=18'}
cpu: [x64]
os: [netbsd]
'@esbuild/openbsd-arm64@0.27.7':
resolution: {integrity: sha512-AFuojMQTxAz75Fo8idVcqoQWEHIXFRbOc1TrVcFSgCZtQfSdc1RXgB3tjOn/krRHENUB4j00bfGjyl2mJrU37A==}
engines: {node: '>=18'}
cpu: [arm64]
os: [openbsd]
'@esbuild/openbsd-x64@0.27.7':
resolution: {integrity: sha512-+A1NJmfM8WNDv5CLVQYJ5PshuRm/4cI6WMZRg1by1GwPIQPCTs1GLEUHwiiQGT5zDdyLiRM/l1G0Pv54gvtKIg==}
engines: {node: '>=18'}
cpu: [x64]
os: [openbsd]
'@esbuild/openharmony-arm64@0.27.7':
resolution: {integrity: sha512-+KrvYb/C8zA9CU/g0sR6w2RBw7IGc5J2BPnc3dYc5VJxHCSF1yNMxTV5LQ7GuKteQXZtspjFbiuW5/dOj7H4Yw==}
engines: {node: '>=18'}
cpu: [arm64]
os: [openharmony]
'@esbuild/sunos-x64@0.27.7':
resolution: {integrity: sha512-ikktIhFBzQNt/QDyOL580ti9+5mL/YZeUPKU2ivGtGjdTYoqz6jObj6nOMfhASpS4GU4Q/Clh1QtxWAvcYKamA==}
engines: {node: '>=18'}
cpu: [x64]
os: [sunos]
'@esbuild/win32-arm64@0.27.7':
resolution: {integrity: sha512-7yRhbHvPqSpRUV7Q20VuDwbjW5kIMwTHpptuUzV+AA46kiPze5Z7qgt6CLCK3pWFrHeNfDd1VKgyP4O+ng17CA==}
engines: {node: '>=18'}
cpu: [arm64]
os: [win32]
'@esbuild/win32-ia32@0.27.7':
resolution: {integrity: sha512-SmwKXe6VHIyZYbBLJrhOoCJRB/Z1tckzmgTLfFYOfpMAx63BJEaL9ExI8x7v0oAO3Zh6D/Oi1gVxEYr5oUCFhw==}
engines: {node: '>=18'}
cpu: [ia32]
os: [win32]
'@esbuild/win32-x64@0.27.7':
resolution: {integrity: sha512-56hiAJPhwQ1R4i+21FVF7V8kSD5zZTdHcVuRFMW0hn753vVfQN8xlx4uOPT4xoGH0Z/oVATuR82AiqSTDIpaHg==}
engines: {node: '>=18'}
cpu: [x64]
os: [win32]
'@types/node@22.19.17':
resolution: {integrity: sha512-wGdMcf+vPYM6jikpS/qhg6WiqSV/OhG+jeeHT/KlVqxYfD40iYJf9/AE1uQxVWFvU7MipKRkRv8NSHiCGgPr8Q==}
esbuild@0.27.7:
resolution: {integrity: sha512-IxpibTjyVnmrIQo5aqNpCgoACA/dTKLTlhMHihVHhdkxKyPO1uBBthumT0rdHmcsk9uMonIWS0m4FljWzILh3w==}
engines: {node: '>=18'}
hasBin: true
typescript@5.9.3:
resolution: {integrity: sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw==}
engines: {node: '>=14.17'}
hasBin: true
undici-types@6.21.0:
resolution: {integrity: sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ==}
snapshots:
'@esbuild/aix-ppc64@0.27.7':
optional: true
'@esbuild/android-arm64@0.27.7':
optional: true
'@esbuild/android-arm@0.27.7':
optional: true
'@esbuild/android-x64@0.27.7':
optional: true
'@esbuild/darwin-arm64@0.27.7':
optional: true
'@esbuild/darwin-x64@0.27.7':
optional: true
'@esbuild/freebsd-arm64@0.27.7':
optional: true
'@esbuild/freebsd-x64@0.27.7':
optional: true
'@esbuild/linux-arm64@0.27.7':
optional: true
'@esbuild/linux-arm@0.27.7':
optional: true
'@esbuild/linux-ia32@0.27.7':
optional: true
'@esbuild/linux-loong64@0.27.7':
optional: true
'@esbuild/linux-mips64el@0.27.7':
optional: true
'@esbuild/linux-ppc64@0.27.7':
optional: true
'@esbuild/linux-riscv64@0.27.7':
optional: true
'@esbuild/linux-s390x@0.27.7':
optional: true
'@esbuild/linux-x64@0.27.7':
optional: true
'@esbuild/netbsd-arm64@0.27.7':
optional: true
'@esbuild/netbsd-x64@0.27.7':
optional: true
'@esbuild/openbsd-arm64@0.27.7':
optional: true
'@esbuild/openbsd-x64@0.27.7':
optional: true
'@esbuild/openharmony-arm64@0.27.7':
optional: true
'@esbuild/sunos-x64@0.27.7':
optional: true
'@esbuild/win32-arm64@0.27.7':
optional: true
'@esbuild/win32-ia32@0.27.7':
optional: true
'@esbuild/win32-x64@0.27.7':
optional: true
'@types/node@22.19.17':
dependencies:
undici-types: 6.21.0
esbuild@0.27.7:
optionalDependencies:
'@esbuild/aix-ppc64': 0.27.7
'@esbuild/android-arm': 0.27.7
'@esbuild/android-arm64': 0.27.7
'@esbuild/android-x64': 0.27.7
'@esbuild/darwin-arm64': 0.27.7
'@esbuild/darwin-x64': 0.27.7
'@esbuild/freebsd-arm64': 0.27.7
'@esbuild/freebsd-x64': 0.27.7
'@esbuild/linux-arm': 0.27.7
'@esbuild/linux-arm64': 0.27.7
'@esbuild/linux-ia32': 0.27.7
'@esbuild/linux-loong64': 0.27.7
'@esbuild/linux-mips64el': 0.27.7
'@esbuild/linux-ppc64': 0.27.7
'@esbuild/linux-riscv64': 0.27.7
'@esbuild/linux-s390x': 0.27.7
'@esbuild/linux-x64': 0.27.7
'@esbuild/netbsd-arm64': 0.27.7
'@esbuild/netbsd-x64': 0.27.7
'@esbuild/openbsd-arm64': 0.27.7
'@esbuild/openbsd-x64': 0.27.7
'@esbuild/openharmony-arm64': 0.27.7
'@esbuild/sunos-x64': 0.27.7
'@esbuild/win32-arm64': 0.27.7
'@esbuild/win32-ia32': 0.27.7
'@esbuild/win32-x64': 0.27.7
typescript@5.9.3: {}
undici-types@6.21.0: {}

View File

@ -1,96 +0,0 @@
import { loadavg, totalmem, freemem, uptime } from "node:os";
import { execSync } from "node:child_process";
import { readFile } from "node:fs/promises";
import type { LibSQLDatabase } from "drizzle-orm/libsql";
import { snapshots } from "./schema.ts";
// Linux kernel socket statistics file read by compute().
const SOCKSTAT_PATH = "/proc/net/sockstat";
// Counters extracted from /proc/net/sockstat by parseSockstat().
interface SockstatResult {
  socketsUsed: number; // "sockets: used N" — total sockets in use
  tcpInuse: number; // TCP "inuse" counter
  tcpOrphan: number; // TCP "orphan" counter
  tcpTw: number; // TCP "tw" counter (presumably TIME-WAIT sockets — confirm against proc(5))
  tcpAlloc: number; // TCP "alloc" counter
  tcpMemPages: number; // TCP "mem" counter (pages)
}
// Parse the textual contents of /proc/net/sockstat into numeric counters.
// Only "sockets:" and "TCP:" lines are consumed; anything else is ignored
// and absent or unparseable fields fall back to 0.
function parseSockstat(content: string): SockstatResult {
  const result: SockstatResult = { socketsUsed: 0, tcpInuse: 0, tcpOrphan: 0, tcpTw: 0, tcpAlloc: 0, tcpMemPages: 0 };
  for (const rawLine of content.split("\n")) {
    const line = rawLine.trim();
    if (line.startsWith("sockets:")) {
      // Format: "sockets: used N" — take the value right after "used".
      const fields = line.split(/\s+/);
      const usedIdx = fields.indexOf("used");
      if (usedIdx >= 0 && usedIdx + 1 < fields.length) {
        result.socketsUsed = Number.parseInt(fields[usedIdx + 1], 10) || 0;
      }
    } else if (line.startsWith("TCP:")) {
      // Format: "TCP: key1 v1 key2 v2 ..." — fold into a key→number map.
      const fields = line.split(/\s+/);
      const kv: Record<string, number> = {};
      for (let i = 1; i + 1 < fields.length; i += 2) {
        kv[fields[i]] = Number.parseInt(fields[i + 1], 10) || 0;
      }
      result.tcpInuse = kv.inuse ?? 0;
      result.tcpOrphan = kv.orphan ?? 0;
      result.tcpTw = kv.tw ?? 0;
      result.tcpAlloc = kv.alloc ?? 0;
      result.tcpMemPages = kv.mem ?? 0;
    }
  }
  return result;
}
/**
 * Capture one host-health snapshot (CPU load, memory, disk, TCP sockets),
 * store it in the snapshots table, and return the reading.
 * Disk and sockstat collection are best-effort: failures leave zeros.
 *
 * @param db     libSQL drizzle handle used for the insert
 * @param _peers unused peer-sense data
 */
export async function compute(db: LibSQLDatabase, _peers: unknown) {
  // CPU load averages over 1/5/15 minutes.
  const [cpuLoad1m, cpuLoad5m, cpuLoad15m] = loadavg();

  // Memory: used = total - free, reported in MB plus a 2-decimal percent.
  const totalBytes = totalmem();
  const usedBytes = totalBytes - freemem();
  const memTotalMB = Math.round(totalBytes / 1024 / 1024);
  const memUsedMB = Math.round(usedBytes / 1024 / 1024);
  const memUsedPct = Math.round((usedBytes / totalBytes) * 10000) / 100;

  // Disk: parse the root-filesystem line from df (byte units), round to GB.
  let diskTotalGB = 0;
  let diskUsedGB = 0;
  let diskUsedPct = 0;
  try {
    const dfLine = execSync("df -B1 / | tail -1", { encoding: "utf-8" }).trim();
    const fields = dfLine.split(/\s+/);
    const totalDisk = Number(fields[1]);
    const usedDisk = Number(fields[2]);
    diskTotalGB = Math.round(totalDisk / 1024 / 1024 / 1024 * 100) / 100;
    diskUsedGB = Math.round(usedDisk / 1024 / 1024 / 1024 * 100) / 100;
    diskUsedPct = totalDisk > 0 ? Math.round((usedDisk / totalDisk) * 10000) / 100 : 0;
  } catch {
    // df unavailable — leave disk stats at zero
  }

  // TCP: read /proc/net/sockstat; keep zeros when unreadable (non-Linux).
  let tcp: SockstatResult = { socketsUsed: 0, tcpInuse: 0, tcpOrphan: 0, tcpTw: 0, tcpAlloc: 0, tcpMemPages: 0 };
  try {
    tcp = parseSockstat(await readFile(SOCKSTAT_PATH, "utf8"));
  } catch {
    // /proc may not exist on this platform
  }

  const ts = Date.now();
  const uptimeSec = Math.round(uptime());
  await db.insert(snapshots).values({
    ts,
    cpuLoad1m,
    cpuLoad5m,
    cpuLoad15m,
    memTotalMB,
    memUsedMB,
    memUsedPct,
    diskTotalGB,
    diskUsedGB,
    diskUsedPct,
    uptimeSec,
    socketsUsed: tcp.socketsUsed,
    tcpInuse: tcp.tcpInuse,
    tcpOrphan: tcp.tcpOrphan,
    tcpTw: tcp.tcpTw,
    tcpAlloc: tcp.tcpAlloc,
    tcpMemPages: tcp.tcpMemPages,
  });
  return {
    cpu: { load1m: cpuLoad1m, load5m: cpuLoad5m, load15m: cpuLoad15m },
    memory: { totalMB: memTotalMB, usedMB: memUsedMB, usedPct: memUsedPct },
    disk: { totalGB: diskTotalGB, usedGB: diskUsedGB, usedPct: diskUsedPct },
    tcp: { socketsUsed: tcp.socketsUsed, inuse: tcp.tcpInuse, orphan: tcp.tcpOrphan, tw: tcp.tcpTw, alloc: tcp.tcpAlloc, memPages: tcp.tcpMemPages },
    uptimeSec,
  };
}

View File

@ -1,22 +0,0 @@
import { integer, real, sqliteTable, text } from "drizzle-orm/sqlite-core";
// Drizzle schema for the linux-system-health snapshots table; one row per
// compute() run, keyed by the capture timestamp.
export const snapshots = sqliteTable("snapshots", {
  ts: integer("ts").primaryKey(), // capture time (epoch ms) doubles as the key
  cpuLoad1m: real("cpu_load_1m").notNull(), // loadavg() 1-minute average
  cpuLoad5m: real("cpu_load_5m").notNull(),
  cpuLoad15m: real("cpu_load_15m").notNull(),
  memTotalMB: integer("mem_total_mb").notNull(),
  memUsedMB: integer("mem_used_mb").notNull(),
  memUsedPct: real("mem_used_pct").notNull(), // percent, 2 decimals
  diskTotalGB: real("disk_total_gb").notNull(), // root filesystem, from df
  diskUsedGB: real("disk_used_gb").notNull(),
  diskUsedPct: real("disk_used_pct").notNull(),
  uptimeSec: integer("uptime_sec").notNull(), // host uptime, rounded seconds
  // TCP socket stats (merged from linux-tcp-socket-stats); nullable because
  // they were added in a later migration.
  socketsUsed: integer("sockets_used"),
  tcpInuse: integer("tcp_inuse"),
  tcpOrphan: integer("tcp_orphan"),
  tcpTw: integer("tcp_tw"),
  tcpAlloc: integer("tcp_alloc"),
  tcpMemPages: integer("tcp_mem_pages"),
});

View File

@ -1,44 +0,0 @@
// src/schema.ts
import { integer, real, sqliteTable } from "drizzle-orm/sqlite-core";
// Drizzle table for per-worker process metrics; one row per compute() run,
// keyed by the capture timestamp.
var workerProcessMetrics = sqliteTable("worker_process_metrics", {
  ts: integer("ts").primaryKey(), // capture time (epoch ms) doubles as the key
  pid: integer("pid").notNull(), // process.pid of the sampled worker
  uptimeSec: real("uptime_sec").notNull(), // process.uptime() at capture
  heapUsedMB: real("heap_used_mb").notNull(), // memoryUsage().heapUsed, MB (2 decimals)
  rssMB: real("rss_mb").notNull(), // memoryUsage().rss, MB (2 decimals)
  externalMB: real("external_mb").notNull() // memoryUsage().external, MB (2 decimals)
});
// src/index.ts
// Round a number to two decimal places.
function round2(n) {
  const scaled = Math.round(n * 100);
  return scaled / 100;
}
// Record a snapshot of this worker process's identity, uptime, and memory
// footprint, insert it into worker_process_metrics, and return the row.
async function compute(db, _peers) {
  const mem = process.memoryUsage();
  const row = {
    ts: Date.now(), // capture time, epoch ms
    pid: process.pid,
    uptimeSec: process.uptime(),
    // Byte counts converted to MB and rounded to two decimals.
    heapUsedMB: round2(mem.heapUsed / 1024 / 1024),
    rssMB: round2(mem.rss / 1024 / 1024),
    externalMB: round2(mem.external / 1024 / 1024)
  };
  await db.insert(workerProcessMetrics).values(row);
  // Return a copy of exactly what was stored.
  return { ...row };
}
export {
compute
};

View File

@ -1,11 +0,0 @@
-- Migration: 0001_init
-- Creates the worker_process_metrics table for worker-process-metrics sense.
-- One row per sample of the worker's own process stats.
CREATE TABLE IF NOT EXISTS worker_process_metrics (
ts INTEGER PRIMARY KEY, -- capture time, epoch milliseconds
pid INTEGER NOT NULL, -- sampled process id
uptime_sec REAL NOT NULL, -- process uptime at capture, seconds
heap_used_mb REAL NOT NULL, -- V8 heap used, MB rounded to 2 decimals
rss_mb REAL NOT NULL, -- resident set size, MB rounded to 2 decimals
external_mb REAL NOT NULL -- off-heap external memory, MB rounded to 2 decimals
);

View File

@ -1,17 +0,0 @@
{
"name": "sense-worker-process-metrics",
"version": "0.0.1",
"private": true,
"type": "module",
"scripts": {
"build": "esbuild src/index.ts --bundle --platform=node --format=esm --outfile=index.js --packages=external"
},
"devDependencies": {
"@types/node": "^22.0.0",
"esbuild": "^0.27.0",
"typescript": "^5.7.0"
},
"pnpm": {
"onlyBuiltDependencies": ["esbuild"]
}
}

View File

@ -1,310 +0,0 @@
lockfileVersion: '9.0'
settings:
autoInstallPeers: true
excludeLinksFromLockfile: false
importers:
.:
devDependencies:
'@types/node':
specifier: ^22.0.0
version: 22.19.17
esbuild:
specifier: ^0.27.0
version: 0.27.7
typescript:
specifier: ^5.7.0
version: 5.9.3
packages:
'@esbuild/aix-ppc64@0.27.7':
resolution: {integrity: sha512-EKX3Qwmhz1eMdEJokhALr0YiD0lhQNwDqkPYyPhiSwKrh7/4KRjQc04sZ8db+5DVVnZ1LmbNDI1uAMPEUBnQPg==}
engines: {node: '>=18'}
cpu: [ppc64]
os: [aix]
'@esbuild/android-arm64@0.27.7':
resolution: {integrity: sha512-62dPZHpIXzvChfvfLJow3q5dDtiNMkwiRzPylSCfriLvZeq0a1bWChrGx/BbUbPwOrsWKMn8idSllklzBy+dgQ==}
engines: {node: '>=18'}
cpu: [arm64]
os: [android]
'@esbuild/android-arm@0.27.7':
resolution: {integrity: sha512-jbPXvB4Yj2yBV7HUfE2KHe4GJX51QplCN1pGbYjvsyCZbQmies29EoJbkEc+vYuU5o45AfQn37vZlyXy4YJ8RQ==}
engines: {node: '>=18'}
cpu: [arm]
os: [android]
'@esbuild/android-x64@0.27.7':
resolution: {integrity: sha512-x5VpMODneVDb70PYV2VQOmIUUiBtY3D3mPBG8NxVk5CogneYhkR7MmM3yR/uMdITLrC1ml/NV1rj4bMJuy9MCg==}
engines: {node: '>=18'}
cpu: [x64]
os: [android]
'@esbuild/darwin-arm64@0.27.7':
resolution: {integrity: sha512-5lckdqeuBPlKUwvoCXIgI2D9/ABmPq3Rdp7IfL70393YgaASt7tbju3Ac+ePVi3KDH6N2RqePfHnXkaDtY9fkw==}
engines: {node: '>=18'}
cpu: [arm64]
os: [darwin]
'@esbuild/darwin-x64@0.27.7':
resolution: {integrity: sha512-rYnXrKcXuT7Z+WL5K980jVFdvVKhCHhUwid+dDYQpH+qu+TefcomiMAJpIiC2EM3Rjtq0sO3StMV/+3w3MyyqQ==}
engines: {node: '>=18'}
cpu: [x64]
os: [darwin]
'@esbuild/freebsd-arm64@0.27.7':
resolution: {integrity: sha512-B48PqeCsEgOtzME2GbNM2roU29AMTuOIN91dsMO30t+Ydis3z/3Ngoj5hhnsOSSwNzS+6JppqWsuhTp6E82l2w==}
engines: {node: '>=18'}
cpu: [arm64]
os: [freebsd]
'@esbuild/freebsd-x64@0.27.7':
resolution: {integrity: sha512-jOBDK5XEjA4m5IJK3bpAQF9/Lelu/Z9ZcdhTRLf4cajlB+8VEhFFRjWgfy3M1O4rO2GQ/b2dLwCUGpiF/eATNQ==}
engines: {node: '>=18'}
cpu: [x64]
os: [freebsd]
'@esbuild/linux-arm64@0.27.7':
resolution: {integrity: sha512-RZPHBoxXuNnPQO9rvjh5jdkRmVizktkT7TCDkDmQ0W2SwHInKCAV95GRuvdSvA7w4VMwfCjUiPwDi0ZO6Nfe9A==}
engines: {node: '>=18'}
cpu: [arm64]
os: [linux]
'@esbuild/linux-arm@0.27.7':
resolution: {integrity: sha512-RkT/YXYBTSULo3+af8Ib0ykH8u2MBh57o7q/DAs3lTJlyVQkgQvlrPTnjIzzRPQyavxtPtfg0EopvDyIt0j1rA==}
engines: {node: '>=18'}
cpu: [arm]
os: [linux]
'@esbuild/linux-ia32@0.27.7':
resolution: {integrity: sha512-GA48aKNkyQDbd3KtkplYWT102C5sn/EZTY4XROkxONgruHPU72l+gW+FfF8tf2cFjeHaRbWpOYa/uRBz/Xq1Pg==}
engines: {node: '>=18'}
cpu: [ia32]
os: [linux]
'@esbuild/linux-loong64@0.27.7':
resolution: {integrity: sha512-a4POruNM2oWsD4WKvBSEKGIiWQF8fZOAsycHOt6JBpZ+JN2n2JH9WAv56SOyu9X5IqAjqSIPTaJkqN8F7XOQ5Q==}
engines: {node: '>=18'}
cpu: [loong64]
os: [linux]
'@esbuild/linux-mips64el@0.27.7':
resolution: {integrity: sha512-KabT5I6StirGfIz0FMgl1I+R1H73Gp0ofL9A3nG3i/cYFJzKHhouBV5VWK1CSgKvVaG4q1RNpCTR2LuTVB3fIw==}
engines: {node: '>=18'}
cpu: [mips64el]
os: [linux]
'@esbuild/linux-ppc64@0.27.7':
resolution: {integrity: sha512-gRsL4x6wsGHGRqhtI+ifpN/vpOFTQtnbsupUF5R5YTAg+y/lKelYR1hXbnBdzDjGbMYjVJLJTd2OFmMewAgwlQ==}
engines: {node: '>=18'}
cpu: [ppc64]
os: [linux]
'@esbuild/linux-riscv64@0.27.7':
resolution: {integrity: sha512-hL25LbxO1QOngGzu2U5xeXtxXcW+/GvMN3ejANqXkxZ/opySAZMrc+9LY/WyjAan41unrR3YrmtTsUpwT66InQ==}
engines: {node: '>=18'}
cpu: [riscv64]
os: [linux]
'@esbuild/linux-s390x@0.27.7':
resolution: {integrity: sha512-2k8go8Ycu1Kb46vEelhu1vqEP+UeRVj2zY1pSuPdgvbd5ykAw82Lrro28vXUrRmzEsUV0NzCf54yARIK8r0fdw==}
engines: {node: '>=18'}
cpu: [s390x]
os: [linux]
'@esbuild/linux-x64@0.27.7':
resolution: {integrity: sha512-hzznmADPt+OmsYzw1EE33ccA+HPdIqiCRq7cQeL1Jlq2gb1+OyWBkMCrYGBJ+sxVzve2ZJEVeePbLM2iEIZSxA==}
engines: {node: '>=18'}
cpu: [x64]
os: [linux]
'@esbuild/netbsd-arm64@0.27.7':
resolution: {integrity: sha512-b6pqtrQdigZBwZxAn1UpazEisvwaIDvdbMbmrly7cDTMFnw/+3lVxxCTGOrkPVnsYIosJJXAsILG9XcQS+Yu6w==}
engines: {node: '>=18'}
cpu: [arm64]
os: [netbsd]
'@esbuild/netbsd-x64@0.27.7':
resolution: {integrity: sha512-OfatkLojr6U+WN5EDYuoQhtM+1xco+/6FSzJJnuWiUw5eVcicbyK3dq5EeV/QHT1uy6GoDhGbFpprUiHUYggrw==}
engines: {node: '>=18'}
cpu: [x64]
os: [netbsd]
'@esbuild/openbsd-arm64@0.27.7':
resolution: {integrity: sha512-AFuojMQTxAz75Fo8idVcqoQWEHIXFRbOc1TrVcFSgCZtQfSdc1RXgB3tjOn/krRHENUB4j00bfGjyl2mJrU37A==}
engines: {node: '>=18'}
cpu: [arm64]
os: [openbsd]
'@esbuild/openbsd-x64@0.27.7':
resolution: {integrity: sha512-+A1NJmfM8WNDv5CLVQYJ5PshuRm/4cI6WMZRg1by1GwPIQPCTs1GLEUHwiiQGT5zDdyLiRM/l1G0Pv54gvtKIg==}
engines: {node: '>=18'}
cpu: [x64]
os: [openbsd]
'@esbuild/openharmony-arm64@0.27.7':
resolution: {integrity: sha512-+KrvYb/C8zA9CU/g0sR6w2RBw7IGc5J2BPnc3dYc5VJxHCSF1yNMxTV5LQ7GuKteQXZtspjFbiuW5/dOj7H4Yw==}
engines: {node: '>=18'}
cpu: [arm64]
os: [openharmony]
'@esbuild/sunos-x64@0.27.7':
resolution: {integrity: sha512-ikktIhFBzQNt/QDyOL580ti9+5mL/YZeUPKU2ivGtGjdTYoqz6jObj6nOMfhASpS4GU4Q/Clh1QtxWAvcYKamA==}
engines: {node: '>=18'}
cpu: [x64]
os: [sunos]
'@esbuild/win32-arm64@0.27.7':
resolution: {integrity: sha512-7yRhbHvPqSpRUV7Q20VuDwbjW5kIMwTHpptuUzV+AA46kiPze5Z7qgt6CLCK3pWFrHeNfDd1VKgyP4O+ng17CA==}
engines: {node: '>=18'}
cpu: [arm64]
os: [win32]
'@esbuild/win32-ia32@0.27.7':
resolution: {integrity: sha512-SmwKXe6VHIyZYbBLJrhOoCJRB/Z1tckzmgTLfFYOfpMAx63BJEaL9ExI8x7v0oAO3Zh6D/Oi1gVxEYr5oUCFhw==}
engines: {node: '>=18'}
cpu: [ia32]
os: [win32]
'@esbuild/win32-x64@0.27.7':
resolution: {integrity: sha512-56hiAJPhwQ1R4i+21FVF7V8kSD5zZTdHcVuRFMW0hn753vVfQN8xlx4uOPT4xoGH0Z/oVATuR82AiqSTDIpaHg==}
engines: {node: '>=18'}
cpu: [x64]
os: [win32]
'@types/node@22.19.17':
resolution: {integrity: sha512-wGdMcf+vPYM6jikpS/qhg6WiqSV/OhG+jeeHT/KlVqxYfD40iYJf9/AE1uQxVWFvU7MipKRkRv8NSHiCGgPr8Q==}
esbuild@0.27.7:
resolution: {integrity: sha512-IxpibTjyVnmrIQo5aqNpCgoACA/dTKLTlhMHihVHhdkxKyPO1uBBthumT0rdHmcsk9uMonIWS0m4FljWzILh3w==}
engines: {node: '>=18'}
hasBin: true
typescript@5.9.3:
resolution: {integrity: sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw==}
engines: {node: '>=14.17'}
hasBin: true
undici-types@6.21.0:
resolution: {integrity: sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ==}
snapshots:
'@esbuild/aix-ppc64@0.27.7':
optional: true
'@esbuild/android-arm64@0.27.7':
optional: true
'@esbuild/android-arm@0.27.7':
optional: true
'@esbuild/android-x64@0.27.7':
optional: true
'@esbuild/darwin-arm64@0.27.7':
optional: true
'@esbuild/darwin-x64@0.27.7':
optional: true
'@esbuild/freebsd-arm64@0.27.7':
optional: true
'@esbuild/freebsd-x64@0.27.7':
optional: true
'@esbuild/linux-arm64@0.27.7':
optional: true
'@esbuild/linux-arm@0.27.7':
optional: true
'@esbuild/linux-ia32@0.27.7':
optional: true
'@esbuild/linux-loong64@0.27.7':
optional: true
'@esbuild/linux-mips64el@0.27.7':
optional: true
'@esbuild/linux-ppc64@0.27.7':
optional: true
'@esbuild/linux-riscv64@0.27.7':
optional: true
'@esbuild/linux-s390x@0.27.7':
optional: true
'@esbuild/linux-x64@0.27.7':
optional: true
'@esbuild/netbsd-arm64@0.27.7':
optional: true
'@esbuild/netbsd-x64@0.27.7':
optional: true
'@esbuild/openbsd-arm64@0.27.7':
optional: true
'@esbuild/openbsd-x64@0.27.7':
optional: true
'@esbuild/openharmony-arm64@0.27.7':
optional: true
'@esbuild/sunos-x64@0.27.7':
optional: true
'@esbuild/win32-arm64@0.27.7':
optional: true
'@esbuild/win32-ia32@0.27.7':
optional: true
'@esbuild/win32-x64@0.27.7':
optional: true
'@types/node@22.19.17':
dependencies:
undici-types: 6.21.0
esbuild@0.27.7:
optionalDependencies:
'@esbuild/aix-ppc64': 0.27.7
'@esbuild/android-arm': 0.27.7
'@esbuild/android-arm64': 0.27.7
'@esbuild/android-x64': 0.27.7
'@esbuild/darwin-arm64': 0.27.7
'@esbuild/darwin-x64': 0.27.7
'@esbuild/freebsd-arm64': 0.27.7
'@esbuild/freebsd-x64': 0.27.7
'@esbuild/linux-arm': 0.27.7
'@esbuild/linux-arm64': 0.27.7
'@esbuild/linux-ia32': 0.27.7
'@esbuild/linux-loong64': 0.27.7
'@esbuild/linux-mips64el': 0.27.7
'@esbuild/linux-ppc64': 0.27.7
'@esbuild/linux-riscv64': 0.27.7
'@esbuild/linux-s390x': 0.27.7
'@esbuild/linux-x64': 0.27.7
'@esbuild/netbsd-arm64': 0.27.7
'@esbuild/netbsd-x64': 0.27.7
'@esbuild/openbsd-arm64': 0.27.7
'@esbuild/openbsd-x64': 0.27.7
'@esbuild/openharmony-arm64': 0.27.7
'@esbuild/sunos-x64': 0.27.7
'@esbuild/win32-arm64': 0.27.7
'@esbuild/win32-ia32': 0.27.7
'@esbuild/win32-x64': 0.27.7
typescript@5.9.3: {}
undici-types@6.21.0: {}

View File

@ -1,36 +0,0 @@
import type { LibSQLDatabase } from "drizzle-orm/libsql";
import { workerProcessMetrics } from "./schema.ts";
/** Round `n` to two decimal places (Math.round semantics). */
function round2(n: number): number {
  const hundredths = Math.round(n * 100);
  return hundredths / 100;
}
/**
 * Collect one sample of this worker's uptime and memory metrics, insert it
 * into `worker_process_metrics`, and return the stored values as a new object.
 */
export async function compute(db: LibSQLDatabase, _peers: unknown) {
  const mem = process.memoryUsage();
  const toMB = (bytes: number) => round2(bytes / 1024 / 1024);
  const row = {
    ts: Date.now(),
    pid: process.pid,
    uptimeSec: process.uptime(),
    heapUsedMB: toMB(mem.heapUsed),
    rssMB: toMB(mem.rss),
    externalMB: toMB(mem.external),
  };
  await db.insert(workerProcessMetrics).values(row);
  // Copy so callers cannot alias the inserted row object.
  return { ...row };
}

View File

@ -1,10 +0,0 @@
import { integer, real, sqliteTable } from "drizzle-orm/sqlite-core";
// Drizzle schema for the worker-process-metrics sense.
// Mirrors migrations/0001_init.sql; `ts` (epoch ms) is the primary key.
export const workerProcessMetrics = sqliteTable("worker_process_metrics", {
ts: integer("ts").primaryKey(), // sample timestamp, epoch milliseconds
pid: integer("pid").notNull(), // worker process id
uptimeSec: real("uptime_sec").notNull(), // process.uptime(), seconds
heapUsedMB: real("heap_used_mb").notNull(), // V8 heap used, MB
rssMB: real("rss_mb").notNull(), // resident set size, MB
externalMB: real("external_mb").notNull(), // external (off-heap) memory, MB
});

View File

@ -7,7 +7,13 @@
"strict": true,
"skipLibCheck": true,
"noEmit": true,
"allowImportingTsExtensions": true,
"types": ["node"]
},
"include": ["./**/*.ts"]
"include": [
"senses/**/*.ts",
"workflows/**/*.ts",
"scripts/**/*.ts",
"workflows/_shared/**/*.ts"
]
}

View File

@ -0,0 +1,33 @@
// Entry point: configure and export the develop-sense workflow.
import { join } from "node:path";
import { createCursorAdapter, cursorAdapter } from "@uncaged/nerve-adapter-cursor";
import { hermesAdapter } from "@uncaged/nerve-adapter-hermes";
import { createDevelopSenseWorkflow } from "@uncaged/nerve-workflow-meta";
// Nerve root directory (workflow cwd below).
const HOME = process.env.HOME ?? "/home/azureuser";
const NERVE_ROOT = join(HOME, ".uncaged-nerve");
// DashScope settings for LLM extraction; model defaults to qwen-plus.
const apiKey = process.env.DASHSCOPE_API_KEY;
const baseUrl = process.env.DASHSCOPE_BASE_URL;
const model = process.env.DASHSCOPE_MODEL ?? "qwen-plus";
// Fail fast at load time when required credentials are missing.
if (!apiKey || !baseUrl) {
throw new Error("Set DASHSCOPE_API_KEY and DASHSCOPE_BASE_URL");
}
// Per-invocation timeout for cursor-backed roles (5 minutes).
const CURSOR_TIMEOUT_MS = 300_000;
// hermes is the default agent; planner is overridden with a cursor "ask" agent,
// coder with the stock cursor adapter.
const workflow = createDevelopSenseWorkflow({
defaultAdapter: hermesAdapter,
adapters: {
planner: createCursorAdapter({
type: "cursor",
mode: "ask",
model: "auto",
timeout: CURSOR_TIMEOUT_MS,
}),
coder: cursorAdapter,
},
extract: { provider: { apiKey, baseUrl, model } },
cwd: NERVE_ROOT,
});
export default workflow;

View File

@ -0,0 +1,34 @@
// Entry point: configure and export the develop-workflow workflow.
import { join } from "node:path";
import { createCursorAdapter, cursorAdapter } from "@uncaged/nerve-adapter-cursor";
import { hermesAdapter } from "@uncaged/nerve-adapter-hermes";
import { createDevelopWorkflowWorkflow } from "@uncaged/nerve-workflow-meta";
// Nerve root directory (passed as nerveRoot below).
const HOME = process.env.HOME ?? "/home/azureuser";
const NERVE_ROOT = join(HOME, ".uncaged-nerve");
// DashScope settings for LLM extraction; model defaults to qwen-plus.
const apiKey = process.env.DASHSCOPE_API_KEY;
const baseUrl = process.env.DASHSCOPE_BASE_URL;
const model = process.env.DASHSCOPE_MODEL ?? "qwen-plus";
// Fail fast at load time when required credentials are missing.
if (!apiKey || !baseUrl) {
throw new Error("Set DASHSCOPE_API_KEY and DASHSCOPE_BASE_URL");
}
// Per-invocation timeout for cursor-backed roles (5 minutes).
const CURSOR_TIMEOUT_MS = 300_000;
// hermes is the default agent; planner is overridden with a cursor "ask" agent,
// coder with the stock cursor adapter.
const workflow = createDevelopWorkflowWorkflow({
defaultAdapter: hermesAdapter,
adapters: {
planner: createCursorAdapter({
type: "cursor",
mode: "ask",
model: "auto",
timeout: CURSOR_TIMEOUT_MS,
}),
coder: cursorAdapter,
},
extract: { provider: { apiKey, baseUrl, model } },
nerveRoot: NERVE_ROOT,
});
export default workflow;

View File

@ -0,0 +1,33 @@
import type { AgentFn, WorkflowDefinition } from "@uncaged/nerve-core";
import type { LlmExtractorConfig } from "@uncaged/nerve-workflow-utils";
import { createLlmAdapter } from "@uncaged/nerve-workflow-utils";
import { moderator } from "./moderator.js";
import type { WorkflowMeta } from "./moderator.js";
import { createAnswererRole } from "./roles/answerer.js";
import { createExplorerRole } from "./roles/explorer.js";
import { createQuestionerRole } from "./roles/questioner.js";
export type CreateKnowledgeExtractionDeps = {
// Fallback agent for roles without an explicit override (used by explorer).
defaultAdapter: AgentFn;
// Optional per-role agent overrides.
adapters?: Partial<Record<keyof WorkflowMeta, AgentFn>>;
// LLM provider config used both for structured-meta extraction and as the
// default adapter for questioner/answerer.
extract: LlmExtractorConfig;
};
/** Assemble the extract-knowledge workflow (questioner, answerer, explorer). */
export function createKnowledgeExtractionWorkflow({
defaultAdapter,
adapters,
extract,
}: CreateKnowledgeExtractionDeps): WorkflowDefinition<WorkflowMeta> {
// Resolve a role's agent: explicit override, else the workflow default.
const a = (role: keyof WorkflowMeta) => adapters?.[role] ?? defaultAdapter;
const llmAdapter = createLlmAdapter(extract.provider);
return {
name: "extract-knowledge",
roles: {
// Note the asymmetry: questioner/answerer fall back to the direct LLM
// adapter, while explorer falls back to defaultAdapter.
questioner: createQuestionerRole(adapters?.questioner ?? llmAdapter, { extract }),
answerer: createAnswererRole(adapters?.answerer ?? llmAdapter, { extract }),
explorer: createExplorerRole(a("explorer"), { extract }),
},
moderator,
};
}

View File

@ -0,0 +1,30 @@
import { join } from "node:path";
import { createCursorAdapter } from "@uncaged/nerve-adapter-cursor";
import { hermesAdapter } from "@uncaged/nerve-adapter-hermes";
import { createKnowledgeExtractionWorkflow } from "./build.js";
import { resolveDashScopeProvider } from "../solve-issue/lib/provider.js";
// Nerve root used to resolve provider configuration.
const HOME = process.env.HOME ?? "/home/azureuser";
const NERVE_ROOT = join(HOME, ".uncaged-nerve");
// DashScope provider from env (or cfg-stored equivalents); null means unset.
const provider = await resolveDashScopeProvider(NERVE_ROOT);
if (provider === null) {
throw new Error("Set DASHSCOPE_API_KEY and DASHSCOPE_BASE_URL (or cfg get equivalents)");
}
// Per-invocation timeout for the cursor-backed explorer (5 minutes).
const CURSOR_TIMEOUT_MS = 300_000;
// Default agent is hermes; only the explorer role is overridden with a cursor agent.
const workflow = createKnowledgeExtractionWorkflow({
defaultAdapter: hermesAdapter,
adapters: {
explorer: createCursorAdapter({
type: "cursor",
model: "claude-sonnet-4",
timeout: CURSOR_TIMEOUT_MS,
}),
},
extract: { provider },
});
export default workflow;

View File

@ -0,0 +1,74 @@
import type { Dirent } from "node:fs";
import { readdir } from "node:fs/promises";
import { join } from "node:path";
import type { StartStep, WorkflowMessage } from "@uncaged/nerve-core";
import type { ExplorerMeta } from "../roles/explorer.js";
import type { QuestionerMeta } from "../roles/questioner.js";
/**
 * Recursively collect `.md` files under `rootDir`, returned as `/`-joined
 * paths relative to `base`. A missing or unreadable directory yields [].
 */
async function walkMarkdownFiles(rootDir: string, base: string): Promise<string[]> {
  let entries: Dirent[];
  try {
    entries = (await readdir(rootDir, { withFileTypes: true })) as Dirent[];
  } catch {
    // Treat unreadable/missing directories as empty rather than failing.
    return [];
  }
  const found: string[] = [];
  for (const entry of entries) {
    const rel = base ? `${base}/${entry.name}` : entry.name;
    const abs = join(rootDir, entry.name);
    if (entry.isDirectory()) {
      const nested = await walkMarkdownFiles(abs, rel);
      found.push(...nested);
    } else if (entry.isFile() && entry.name.endsWith(".md")) {
      // Normalize Windows separators so callers always see `/`.
      found.push(rel.replace(/\\/g, "/"));
    }
  }
  return found;
}
/** Enumerate all markdown files under `.knowledge/` as repo-relative paths; seed line first if present. */
export async function bootstrapKnowledgeQueue(cwd: string, startContent: string): Promise<string[]> {
  const files = await walkMarkdownFiles(join(cwd, ".knowledge"), "");
  const paths = files.map((f) => `.knowledge/${f}`);
  // The seed is the first non-empty trimmed line of the trigger prompt.
  const seed = startContent.trim().split(/\r?\n/u)[0]?.trim() ?? "";
  if (seed.length === 0) {
    // No seed: deterministic alphabetical queue.
    return [...paths].sort();
  }
  // Seed first, followed by every other known card. This single expression
  // covers all three seeded cases of the original (empty paths, seed already
  // listed, seed not listed) with identical results.
  return [seed, ...paths.filter((p) => p !== seed)];
}
/** Index of the last message with the given role, or -1 when absent. */
function lastIndexOfRole(messages: WorkflowMessage[], role: string): number {
  let found = -1;
  for (let i = 0; i < messages.length; i++) {
    if (messages[i].role === role) {
      found = i;
    }
  }
  return found;
}
/** Next queue for questioner: bootstrap, or continue after answerer / explorer. */
export async function resolveQueueForQuestioner(
  start: StartStep,
  messages: WorkflowMessage[],
  cwd: string,
): Promise<string[]> {
  const qIndex = lastIndexOfRole(messages, "questioner");
  if (qIndex < 0) {
    // No questioner turn yet: build the queue from disk plus optional seed.
    return bootstrapKnowledgeQueue(cwd, start.content);
  }
  const questionerMeta = messages[qIndex].meta as QuestionerMeta;
  const afterQuestioner = messages.slice(qIndex + 1);
  const explorerMsg = afterQuestioner.find((m) => m.role === "explorer");
  if (!explorerMsg) {
    return questionerMeta.remaining_queue;
  }
  // An explorer ran since the last questioner turn: append cards it created.
  const explorerMeta = explorerMsg.meta as ExplorerMeta;
  return [...questionerMeta.remaining_queue, ...explorerMeta.new_cards];
}

View File

@ -0,0 +1,21 @@
import type { StartStep } from "@uncaged/nerve-core";
type StartMetaWithWorkdir = StartStep["meta"] & { workdir?: string | null };
/**
 * Resolve the target repo working directory.
 * Priority: start.meta.workdir → prompt second line (if absolute path) → process.cwd().
 */
export function resolveWorkdir(start: StartStep): string {
  const meta = start.meta as StartMetaWithWorkdir;
  if (meta.workdir) {
    return meta.workdir;
  }
  // The prompt may carry the workdir on its second line: "seed\n/abs/path".
  const [, secondLine] = start.content.split(/\r?\n/);
  const candidate = secondLine?.trim() ?? "";
  return candidate.startsWith("/") ? candidate : process.cwd();
}

View File

@ -0,0 +1,84 @@
import { END } from "@uncaged/nerve-core";
import type { Moderator, ThreadContext } from "@uncaged/nerve-core";
import type { AnswererMeta } from "./roles/answerer.js";
import type { ExplorerMeta } from "./roles/explorer.js";
import type { QuestionerMeta } from "./roles/questioner.js";
/** Per-role meta types recorded on extract-knowledge workflow steps. */
export type WorkflowMeta = {
questioner: QuestionerMeta;
answerer: AnswererMeta;
explorer: ExplorerMeta;
};
// Step list as the moderator sees it.
type Steps = ThreadContext<WorkflowMeta>["steps"];
/** Meta of the most recent questioner step, or undefined when none exists. */
function lastQuestionerRemaining(steps: Steps): QuestionerMeta | undefined {
  for (let i = steps.length; i-- > 0; ) {
    const step = steps[i];
    if (step.role === "questioner") {
      return step.meta;
    }
  }
  return undefined;
}
/** End when the last two explorer invocations both added no new cards (issue #266 stagnation rule). */
function lastTwoExplorerRunsBothEmpty(steps: Steps): boolean {
  const explorerMetas = steps
    .filter((s) => s.role === "explorer")
    .map((s) => s.meta as ExplorerMeta);
  if (explorerMetas.length < 2) {
    return false;
  }
  return explorerMetas.slice(-2).every((m) => m.new_cards.length === 0);
}
/** Queue to continue with when the explorer turn was skipped: the last questioner's remaining queue. */
function queueAfterSkippedExplorer(steps: Steps): string[] {
  const questionerMeta = lastQuestionerRemaining(steps);
  if (!questionerMeta) {
    return [];
  }
  return questionerMeta.remaining_queue;
}
/** Queue after an explorer step: questioner's remaining queue plus the explorer's new cards. */
function queueAfterExplorerStep(steps: Steps): string[] {
  const last = steps[steps.length - 1];
  if (last === undefined || last.role !== "explorer") {
    return [];
  }
  const questionerMeta = lastQuestionerRemaining(steps);
  if (questionerMeta === undefined) {
    return [];
  }
  const explorerMeta = last.meta as ExplorerMeta;
  return questionerMeta.remaining_queue.concat(explorerMeta.new_cards);
}
/**
 * Turn scheduler: questioner → answerer → (explorer when something was
 * unanswered) → questioner, until the queue empties or explorer stagnates.
 */
export const moderator: Moderator<WorkflowMeta> = (context) => {
  const { steps } = context;
  const last = steps[steps.length - 1];
  if (!last) {
    // Nothing has run yet: start with the questioner.
    return "questioner";
  }
  switch (last.role) {
    case "questioner":
      return "answerer";
    case "answerer": {
      const answererMeta = last.meta as AnswererMeta;
      if (answererMeta.has_unanswered) {
        return "explorer";
      }
      // Everything answered: continue only while cards remain.
      return queueAfterSkippedExplorer(steps).length > 0 ? "questioner" : END;
    }
    case "explorer": {
      // Stagnation rule: two consecutive explorer runs without new cards.
      if (lastTwoExplorerRunsBothEmpty(steps)) {
        return END;
      }
      return queueAfterExplorerStep(steps).length > 0 ? "questioner" : END;
    }
    default:
      return END;
  }
};

View File

@ -0,0 +1,102 @@
import type { AgentFn, Role, ThreadContext, WorkflowMessage } from "@uncaged/nerve-core";
import type { LlmExtractorConfig } from "@uncaged/nerve-workflow-utils";
import { createRole, nerveCommandEnv, spawnSafe } from "@uncaged/nerve-workflow-utils";
import { z } from "zod";
import { resolveWorkdir } from "../lib/workdir.js";
import type { QuestionerMeta } from "./questioner.js";
// Zod schema for the answerer's extracted meta: one result per question id.
export const answererMetaSchema = z.object({
results: z.array(
z.object({
id: z.string(), // question id from the questioner (e.g. q1)
found: z.boolean(), // true when the knowledge base answers the question
source: z.string(), // supporting source reported by the model — presumably an excerpt/path; verify against prompt usage
note: z.string(), // short free-form note
}),
),
has_unanswered: z.boolean(), // true when any question remains unanswered
});
export type AnswererMeta = z.infer<typeof answererMetaSchema>;
export type CreateAnswererRoleDeps = {
extract: LlmExtractorConfig; // LLM extractor/provider configuration
};
/** Meta of the most recent questioner message, or undefined when none exists. */
function lastQuestionerMeta(messages: WorkflowMessage[]): QuestionerMeta | undefined {
  const idx = messages.map((m) => m.role).lastIndexOf("questioner");
  return idx === -1 ? undefined : (messages[idx].meta as QuestionerMeta);
}
/**
 * Build the answerer prompt: run `nerve knowledge query` once per questioner
 * question and embed each CLI result (or failure detail) as a markdown block.
 * Throws when no questioner questions exist — the wrapper created by
 * createAnswererRole short-circuits that case before reaching this function.
 */
export async function answererPrompt(ctx: ThreadContext): Promise<string> {
const messages = ctx.steps as unknown as WorkflowMessage[];
const cwd = resolveWorkdir(ctx.start);
const qm = lastQuestionerMeta(messages);
if (!qm || qm.questions.length === 0) {
throw new Error("answerer: prompt invoked without questioner questions — wrapped role should short-circuit");
}
const blocks: string[] = [];
for (const q of qm.questions) {
// Dry-run mode skips the external CLI entirely.
if ((ctx.start.meta as Record<string, unknown>).dryRun) {
blocks.push(`### ${q.id}\n[dryRun] skipped nerve knowledge query\n`);
continue;
}
// Query the knowledge base from the repo root; 120s cap per question.
const res = await spawnSafe(
"nerve",
["knowledge", "query", q.question],
{
cwd,
env: nerveCommandEnv(),
timeoutMs: 120_000,
dryRun: false,
abortSignal: null,
},
);
if (res.ok) {
blocks.push(`### ${q.id} (${q.domain})\nQuestion: ${q.question}\n---\n${res.value.stdout}\n`);
} else {
// Fold the typed spawn error into a human-readable detail string.
const err = res.error;
const detail =
err.kind === "non_zero_exit"
? `exit ${err.exitCode}\n${err.stderr}`
: err.kind === "timeout"
? `timeout\n${err.stderr}`
: err.kind === "spawn_failed"
? err.message
: "aborted";
blocks.push(`### ${q.id}\nnerve knowledge query failed: ${detail}\n`);
}
}
// Instructions first, then one block per question.
return [
"You are the **answerer**. You MUST NOT read repository source code — only the CLI retrieval excerpts below.",
"For each question id, decide whether the knowledge base already answers it.",
"Set found=true only when the excerpt supports a confident answer; otherwise found=false.",
"Set has_unanswered=true if any question remains unanswered by the knowledge base.",
"",
...blocks,
].join("\n");
}
/**
 * Build the answerer role. Wraps the extracted role so that, when the
 * questioner produced no questions, the CLI lookup and LLM call are skipped
 * and an empty, fully-answered result is returned directly.
 */
export function createAnswererRole(adapter: AgentFn, { extract }: CreateAnswererRoleDeps): Role<AnswererMeta> {
const inner = createRole(adapter, answererPrompt, answererMetaSchema, extract);
return async (ctx: ThreadContext) => {
const messages = ctx.steps as unknown as WorkflowMessage[];
const qm = lastQuestionerMeta(messages);
// Short-circuit: nothing to answer.
if (!qm || qm.questions.length === 0) {
return {
content: "answerer: no questions from questioner; skipping CLI lookup.",
meta: { results: [], has_unanswered: false },
};
}
return inner(ctx);
};
}

View File

@ -0,0 +1,93 @@
import type { AgentFn, Role, ThreadContext, WorkflowMessage } from "@uncaged/nerve-core";
import type { LlmExtractorConfig } from "@uncaged/nerve-workflow-utils";
import { createRole } from "@uncaged/nerve-workflow-utils";
import { z } from "zod";
import { resolveWorkdir } from "../lib/workdir.js";
import type { AnswererMeta } from "./answerer.js";
import type { QuestionerMeta } from "./questioner.js";
// Zod schema for explorer meta: edits made to existing cards plus new card paths.
export const explorerMetaSchema = z.object({
patches: z.array(
z.object({
card: z.string(), // card that was edited
section: z.string(), // short heading or path hint for the edit
}),
),
new_cards: z.array(z.string()), // repo-relative paths of newly created cards
});
export type ExplorerMeta = z.infer<typeof explorerMetaSchema>;
export type CreateExplorerRoleDeps = {
extract: LlmExtractorConfig; // LLM extractor/provider configuration
};
/** Meta of the most recent message with the given role, or undefined when none exists. */
function lastMeta<M>(messages: WorkflowMessage[], role: string): M | undefined {
  let result: M | undefined;
  for (const message of messages) {
    if (message.role === role) {
      result = message.meta as M;
    }
  }
  return result;
}
/**
 * Build the explorer prompt: thread id, repo root, the current card, and the
 * ids of questions the answerer marked found=false. The prompt instructs the
 * agent to edit `.knowledge/` and run `nerve knowledge sync` after writes.
 */
export function explorerPrompt(ctx: ThreadContext): string {
const messages = ctx.steps as unknown as WorkflowMessage[];
const threadId = ctx.start.meta.threadId;
const qm = lastMeta<QuestionerMeta>(messages, "questioner");
const am = lastMeta<AnswererMeta>(messages, "answerer");
const cwd = resolveWorkdir(ctx.start);
// Ids whose answerer result came back found=false; empty when no answerer ran.
const unanswered =
am?.results.filter((r) => !r.found).map((r) => r.id) ?? [];
return `You are the **explorer** in an extract-knowledge workflow.
## Context
- Thread: \`nerve thread ${threadId}\`
- Working directory (repo root for paths): ${cwd}
- Current knowledge card (questioner): ${qm?.card ?? "(unknown)"}
## Unanswered question ids
${JSON.stringify(unanswered)}
Use the prior answerer results in the thread to map ids to full question text when you read messages above.
## Task
For each unanswered question, **read the codebase** as needed, then either:
- Add a new markdown file under \`.knowledge/\`, or
- Patch an existing card (prefer updating the card listed above when appropriate).
After any write or patch to \`.knowledge\`, run:
\`\`\`bash
nerve knowledge sync
\`\`\`
from this repo root (${cwd}), and fix failures until sync succeeds.
## Output meta
Report \`patches\` as { card, section } entries for cards you edited (section is a short heading or path hint).
Report \`new_cards\` as repo-relative paths for brand-new files you created (e.g. \`.knowledge/new-topic.md\`).
Do not claim work you did not perform.`;
}
/** Explorer role: agent-backed, prompt built by explorerPrompt, meta validated by explorerMetaSchema. */
export function createExplorerRole(
  adapter: AgentFn,
  { extract }: CreateExplorerRoleDeps,
): Role<ExplorerMeta> {
  const buildPrompt = async (ctx: ThreadContext) => explorerPrompt(ctx);
  return createRole(adapter, buildPrompt, explorerMetaSchema, extract);
}

View File

@ -0,0 +1,108 @@
import { readFile } from "node:fs/promises";
import { join } from "node:path";
import type { AgentFn, Role, ThreadContext, WorkflowMessage } from "@uncaged/nerve-core";
import type { LlmExtractorConfig } from "@uncaged/nerve-workflow-utils";
import { createRole } from "@uncaged/nerve-workflow-utils";
import { z } from "zod";
import { resolveQueueForQuestioner } from "../lib/knowledge-queue.js";
import { resolveWorkdir } from "../lib/workdir.js";
// Extraction schema: the questioner must emit exactly five questions.
const questionerExtractSchema = z.object({
questions: z
.array(
z.object({
id: z.string(), // stable id, e.g. q1..q5
question: z.string(), // question text
domain: z.string(), // short domain label, e.g. routing, storage
}),
)
.length(5),
});
// Full questioner meta: extracted questions plus queue bookkeeping added by the wrapper.
export type QuestionerMeta = {
/** Empty when no .knowledge cards and no work to do. */
card: string;
questions: { id: string; question: string; domain: string }[];
remaining_queue: string[]; // card paths still to process after this one
};
export type CreateQuestionerRoleDeps = {
extract: LlmExtractorConfig; // LLM extractor/provider configuration
};
/**
 * System section of the questioner prompt.
 * Fix: the final rule had lost its clause separator ("...files or tools reason
 * only from..."), producing a garbled instruction; restore the dash so the
 * rule reads as two clauses.
 */
function questionerSystem(): string {
  return `You are the **questioner** in an extract-knowledge workflow.
Read the given markdown knowledge card. Propose exactly **five** technical questions that are **not** already answered or covered by that card.
Rules:
- Questions must be concrete and technical.
- Each question needs a stable string id (e.g. q1, q2, q3, q4, q5), a short domain label (e.g. routing, storage), and the question text.
- Do not assume access to other files or tools — reason only from the card content shown.`;
}
/** Build the user section of the questioner prompt: card path, remaining-queue hint, card body. */
function questionerUser(card: string, cardBody: string, remainingHint: string[]): string {
  const lines = [
    `Current card path: ${card}`,
    `Remaining queue after this card (paths, may be empty): ${JSON.stringify(remainingHint)}`,
    "--- Card content ---",
    cardBody,
  ];
  return lines.join("\n");
}
/**
 * Build the questioner prompt for the head of the card queue.
 * Throws when the queue is empty or the card file cannot be read — the
 * wrapper in createQuestionerRole handles the empty-queue case before the LLM
 * is ever invoked.
 */
export async function questionerPrompt(ctx: ThreadContext): Promise<string> {
const messages = ctx.steps as unknown as WorkflowMessage[];
const cwd = resolveWorkdir(ctx.start);
const queue = await resolveQueueForQuestioner(ctx.start, messages, cwd);
if (queue.length === 0) {
throw new Error(
"questioner: prompt invoked with empty queue — wrapped role should short-circuit before LLM",
);
}
// Head of the queue is the current card; the rest is passed as a hint.
const card = queue[0]!;
const remaining_queue = queue.slice(1);
let cardBody: string;
try {
cardBody = await readFile(join(cwd, card), "utf8");
} catch (e) {
const msg = e instanceof Error ? e.message : String(e);
throw new Error(`questioner: failed to read ${card}: ${msg}`);
}
return `${questionerSystem()}\n\n${questionerUser(card, cardBody, remaining_queue)}`;
}
/**
 * Build the questioner role. Wraps the extracted role so that an empty card
 * queue short-circuits without calling the LLM, and so the card/queue
 * bookkeeping (resolved again here) is merged into the extracted meta.
 */
export function createQuestionerRole(adapter: AgentFn, { extract }: CreateQuestionerRoleDeps): Role<QuestionerMeta> {
const inner = createRole(adapter, questionerPrompt, questionerExtractSchema, extract);
return async (ctx: ThreadContext) => {
const messages = ctx.steps as unknown as WorkflowMessage[];
const cwd = resolveWorkdir(ctx.start);
// Same queue resolution questionerPrompt performs; used for meta bookkeeping.
const queue = await resolveQueueForQuestioner(ctx.start, messages, cwd);
if (queue.length === 0) {
// Nothing to process: return an empty meta instead of invoking the LLM.
return {
content:
"questioner: no `.knowledge` markdown files found and no seed path in the trigger prompt; queue is empty.",
meta: {
card: "",
questions: [],
remaining_queue: [],
},
};
}
const card = queue[0]!;
const remaining_queue = queue.slice(1);
const r = await inner(ctx);
// Merge extracted questions with the current card and the rest of the queue.
return {
content: r.content,
meta: {
card,
questions: r.meta.questions,
remaining_queue,
},
};
};
}

File diff suppressed because it is too large Load Diff

View File

@ -1,22 +0,0 @@
{
"name": "gitea-issue-solver-workflow",
"version": "0.0.1",
"private": true,
"type": "module",
"dependencies": {
"@uncaged/nerve-core": "latest",
"@uncaged/nerve-workflow-utils": "latest",
"zod": "^4.3.6"
},
"devDependencies": {
"@types/node": "^22.0.0",
"typescript": "^5.7.0"
},
"pnpm": {
"overrides": {
"@uncaged/nerve-daemon": "link:../../../repos/nerve/packages/daemon",
"@uncaged/nerve-core": "link:../../../repos/nerve/packages/core",
"@uncaged/nerve-workflow-utils": "link:../../../repos/nerve/packages/workflow-utils"
}
}
}

View File

@ -1,59 +0,0 @@
lockfileVersion: '9.0'
settings:
autoInstallPeers: true
excludeLinksFromLockfile: false
overrides:
'@uncaged/nerve-daemon': link:../../../repos/nerve/packages/daemon
'@uncaged/nerve-core': link:../../../repos/nerve/packages/core
'@uncaged/nerve-workflow-utils': link:../../../repos/nerve/packages/workflow-utils
importers:
.:
dependencies:
'@uncaged/nerve-core':
specifier: link:../../../repos/nerve/packages/core
version: link:../../../repos/nerve/packages/core
'@uncaged/nerve-workflow-utils':
specifier: link:../../../repos/nerve/packages/workflow-utils
version: link:../../../repos/nerve/packages/workflow-utils
zod:
specifier: ^4.3.6
version: 4.3.6
devDependencies:
'@types/node':
specifier: ^22.0.0
version: 22.19.17
typescript:
specifier: ^5.7.0
version: 5.9.3
packages:
'@types/node@22.19.17':
resolution: {integrity: sha512-wGdMcf+vPYM6jikpS/qhg6WiqSV/OhG+jeeHT/KlVqxYfD40iYJf9/AE1uQxVWFvU7MipKRkRv8NSHiCGgPr8Q==}
typescript@5.9.3:
resolution: {integrity: sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw==}
engines: {node: '>=14.17'}
hasBin: true
undici-types@6.21.0:
resolution: {integrity: sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ==}
zod@4.3.6:
resolution: {integrity: sha512-rftlrkhHZOcjDwkGlnUtZZkvaPHCsDATp4pGpuOOMDaTdDDXF91wuVDJoWoPsKX/3YPQ5fHuF3STjcYyKr+Qhg==}
snapshots:
'@types/node@22.19.17':
dependencies:
undici-types: 6.21.0
typescript@5.9.3: {}
undici-types@6.21.0: {}
zod@4.3.6: {}

View File

@ -1,13 +0,0 @@
{
"compilerOptions": {
"target": "ES2022",
"lib": ["ES2022"],
"module": "NodeNext",
"moduleResolution": "NodeNext",
"strict": true,
"skipLibCheck": true,
"noEmit": true,
"types": ["node"]
},
"include": ["./**/*.ts"]
}

View File

@ -1 +0,0 @@
dist/

View File

@ -1,27 +0,0 @@
import type { WorkflowDefinition } from "@uncaged/nerve-core";
import type { LlmProvider } from "@uncaged/nerve-workflow-utils";
import { buildPlannerRole } from "./roles/planner/index.js";
import { buildCoderRole } from "./roles/coder/index.js";
import { buildTesterRole } from "./roles/tester/index.js";
import { moderator } from "./moderator.js";
import type { SenseMeta } from "./moderator.js";
export type BuildSenseGeneratorDeps = {
provider: LlmProvider; // LLM provider shared by all three roles
cwd: string; // working directory passed to planner and coder
};
/** Assemble the sense-generator workflow: planner, coder, tester under `moderator`. */
export function buildSenseGenerator({
provider,
cwd,
}: BuildSenseGeneratorDeps): WorkflowDefinition<SenseMeta> {
return {
name: "sense-generator",
roles: {
planner: buildPlannerRole({ provider, cwd }),
coder: buildCoderRole({ provider, cwd }),
tester: buildTesterRole({ provider }), // tester takes no cwd
},
moderator,
};
}

View File

@ -1,19 +0,0 @@
// Entry point: configure and export the sense-generator workflow.
import { join } from "node:path";
import { buildSenseGenerator } from "./build.js";
// Nerve root used as the workflow working directory.
const HOME = process.env.HOME ?? "/home/azureuser";
const NERVE_ROOT = join(HOME, ".uncaged-nerve");
// DashScope settings; model defaults to qwen-plus.
const apiKey = process.env.DASHSCOPE_API_KEY;
const baseUrl = process.env.DASHSCOPE_BASE_URL;
const model = process.env.DASHSCOPE_MODEL ?? "qwen-plus";
// Fail fast at load time when required credentials are missing.
if (!apiKey || !baseUrl) {
throw new Error("Set DASHSCOPE_API_KEY and DASHSCOPE_BASE_URL");
}
const workflow = buildSenseGenerator({
provider: { apiKey, baseUrl, model },
cwd: NERVE_ROOT,
});
export default workflow;

View File

@ -1,27 +0,0 @@
import { END } from "@uncaged/nerve-core";
import type { Moderator } from "@uncaged/nerve-core";
import type { PlannerMeta } from "./roles/planner/index.js";
import type { CoderMeta } from "./roles/coder/index.js";
import type { TesterMeta } from "./roles/tester/index.js";
export type SenseMeta = {
planner: PlannerMeta;
coder: CoderMeta;
tester: TesterMeta;
};
/** Count how many steps in the transcript were taken by the named role. */
function countRole(steps: { role: string }[], name: string): number {
  let total = 0;
  for (const step of steps) {
    if (step.role === name) total += 1;
  }
  return total;
}
/**
 * Sequence planner -> coder -> tester. A failed tester run sends control
 * back to the coder, but only for up to 3 tester attempts total; a passing
 * tester run (or exhausted retries) ends the workflow.
 */
export const moderator: Moderator<SenseMeta> = (context) => {
  const { steps } = context;
  if (steps.length === 0) return "planner";
  const last = steps[steps.length - 1];
  if (last.role === "planner") return "coder";
  if (last.role === "coder") return "tester";
  if (last.role === "tester") {
    if (last.meta.passed) return END;
    const testerAttempts = countRole(steps, "tester");
    return testerAttempts < 3 ? "coder" : END;
  }
  return END;
};

View File

@ -1,26 +0,0 @@
{
"name": "sense-generator-workflow",
"version": "0.0.1",
"private": true,
"type": "module",
"scripts": {
"build": "esbuild index.ts --bundle --platform=node --format=esm --outdir=dist --packages=external"
},
"dependencies": {
"@uncaged/nerve-core": "latest",
"@uncaged/nerve-workflow-utils": "latest",
"zod": "^4.3.6"
},
"devDependencies": {
"@types/node": "^22.0.0",
"esbuild": "^0.27.0",
"typescript": "^5.7.0"
},
"pnpm": {
"overrides": {
"@uncaged/nerve-daemon": "link:../../../repos/nerve/packages/daemon",
"@uncaged/nerve-core": "link:../../../repos/nerve/packages/core",
"@uncaged/nerve-workflow-utils": "link:../../../repos/nerve/packages/workflow-utils"
}
}
}

View File

@ -1,330 +0,0 @@
lockfileVersion: '9.0'
settings:
autoInstallPeers: true
excludeLinksFromLockfile: false
overrides:
'@uncaged/nerve-daemon': link:../../../repos/nerve/packages/daemon
'@uncaged/nerve-core': link:../../../repos/nerve/packages/core
'@uncaged/nerve-workflow-utils': link:../../../repos/nerve/packages/workflow-utils
importers:
.:
dependencies:
'@uncaged/nerve-core':
specifier: link:../../../repos/nerve/packages/core
version: link:../../../repos/nerve/packages/core
'@uncaged/nerve-workflow-utils':
specifier: link:../../../repos/nerve/packages/workflow-utils
version: link:../../../repos/nerve/packages/workflow-utils
zod:
specifier: ^4.3.6
version: 4.3.6
devDependencies:
'@types/node':
specifier: ^22.0.0
version: 22.19.17
esbuild:
specifier: ^0.27.0
version: 0.27.7
typescript:
specifier: ^5.7.0
version: 5.9.3
packages:
'@esbuild/aix-ppc64@0.27.7':
resolution: {integrity: sha512-EKX3Qwmhz1eMdEJokhALr0YiD0lhQNwDqkPYyPhiSwKrh7/4KRjQc04sZ8db+5DVVnZ1LmbNDI1uAMPEUBnQPg==}
engines: {node: '>=18'}
cpu: [ppc64]
os: [aix]
'@esbuild/android-arm64@0.27.7':
resolution: {integrity: sha512-62dPZHpIXzvChfvfLJow3q5dDtiNMkwiRzPylSCfriLvZeq0a1bWChrGx/BbUbPwOrsWKMn8idSllklzBy+dgQ==}
engines: {node: '>=18'}
cpu: [arm64]
os: [android]
'@esbuild/android-arm@0.27.7':
resolution: {integrity: sha512-jbPXvB4Yj2yBV7HUfE2KHe4GJX51QplCN1pGbYjvsyCZbQmies29EoJbkEc+vYuU5o45AfQn37vZlyXy4YJ8RQ==}
engines: {node: '>=18'}
cpu: [arm]
os: [android]
'@esbuild/android-x64@0.27.7':
resolution: {integrity: sha512-x5VpMODneVDb70PYV2VQOmIUUiBtY3D3mPBG8NxVk5CogneYhkR7MmM3yR/uMdITLrC1ml/NV1rj4bMJuy9MCg==}
engines: {node: '>=18'}
cpu: [x64]
os: [android]
'@esbuild/darwin-arm64@0.27.7':
resolution: {integrity: sha512-5lckdqeuBPlKUwvoCXIgI2D9/ABmPq3Rdp7IfL70393YgaASt7tbju3Ac+ePVi3KDH6N2RqePfHnXkaDtY9fkw==}
engines: {node: '>=18'}
cpu: [arm64]
os: [darwin]
'@esbuild/darwin-x64@0.27.7':
resolution: {integrity: sha512-rYnXrKcXuT7Z+WL5K980jVFdvVKhCHhUwid+dDYQpH+qu+TefcomiMAJpIiC2EM3Rjtq0sO3StMV/+3w3MyyqQ==}
engines: {node: '>=18'}
cpu: [x64]
os: [darwin]
'@esbuild/freebsd-arm64@0.27.7':
resolution: {integrity: sha512-B48PqeCsEgOtzME2GbNM2roU29AMTuOIN91dsMO30t+Ydis3z/3Ngoj5hhnsOSSwNzS+6JppqWsuhTp6E82l2w==}
engines: {node: '>=18'}
cpu: [arm64]
os: [freebsd]
'@esbuild/freebsd-x64@0.27.7':
resolution: {integrity: sha512-jOBDK5XEjA4m5IJK3bpAQF9/Lelu/Z9ZcdhTRLf4cajlB+8VEhFFRjWgfy3M1O4rO2GQ/b2dLwCUGpiF/eATNQ==}
engines: {node: '>=18'}
cpu: [x64]
os: [freebsd]
'@esbuild/linux-arm64@0.27.7':
resolution: {integrity: sha512-RZPHBoxXuNnPQO9rvjh5jdkRmVizktkT7TCDkDmQ0W2SwHInKCAV95GRuvdSvA7w4VMwfCjUiPwDi0ZO6Nfe9A==}
engines: {node: '>=18'}
cpu: [arm64]
os: [linux]
'@esbuild/linux-arm@0.27.7':
resolution: {integrity: sha512-RkT/YXYBTSULo3+af8Ib0ykH8u2MBh57o7q/DAs3lTJlyVQkgQvlrPTnjIzzRPQyavxtPtfg0EopvDyIt0j1rA==}
engines: {node: '>=18'}
cpu: [arm]
os: [linux]
'@esbuild/linux-ia32@0.27.7':
resolution: {integrity: sha512-GA48aKNkyQDbd3KtkplYWT102C5sn/EZTY4XROkxONgruHPU72l+gW+FfF8tf2cFjeHaRbWpOYa/uRBz/Xq1Pg==}
engines: {node: '>=18'}
cpu: [ia32]
os: [linux]
'@esbuild/linux-loong64@0.27.7':
resolution: {integrity: sha512-a4POruNM2oWsD4WKvBSEKGIiWQF8fZOAsycHOt6JBpZ+JN2n2JH9WAv56SOyu9X5IqAjqSIPTaJkqN8F7XOQ5Q==}
engines: {node: '>=18'}
cpu: [loong64]
os: [linux]
'@esbuild/linux-mips64el@0.27.7':
resolution: {integrity: sha512-KabT5I6StirGfIz0FMgl1I+R1H73Gp0ofL9A3nG3i/cYFJzKHhouBV5VWK1CSgKvVaG4q1RNpCTR2LuTVB3fIw==}
engines: {node: '>=18'}
cpu: [mips64el]
os: [linux]
'@esbuild/linux-ppc64@0.27.7':
resolution: {integrity: sha512-gRsL4x6wsGHGRqhtI+ifpN/vpOFTQtnbsupUF5R5YTAg+y/lKelYR1hXbnBdzDjGbMYjVJLJTd2OFmMewAgwlQ==}
engines: {node: '>=18'}
cpu: [ppc64]
os: [linux]
'@esbuild/linux-riscv64@0.27.7':
resolution: {integrity: sha512-hL25LbxO1QOngGzu2U5xeXtxXcW+/GvMN3ejANqXkxZ/opySAZMrc+9LY/WyjAan41unrR3YrmtTsUpwT66InQ==}
engines: {node: '>=18'}
cpu: [riscv64]
os: [linux]
'@esbuild/linux-s390x@0.27.7':
resolution: {integrity: sha512-2k8go8Ycu1Kb46vEelhu1vqEP+UeRVj2zY1pSuPdgvbd5ykAw82Lrro28vXUrRmzEsUV0NzCf54yARIK8r0fdw==}
engines: {node: '>=18'}
cpu: [s390x]
os: [linux]
'@esbuild/linux-x64@0.27.7':
resolution: {integrity: sha512-hzznmADPt+OmsYzw1EE33ccA+HPdIqiCRq7cQeL1Jlq2gb1+OyWBkMCrYGBJ+sxVzve2ZJEVeePbLM2iEIZSxA==}
engines: {node: '>=18'}
cpu: [x64]
os: [linux]
'@esbuild/netbsd-arm64@0.27.7':
resolution: {integrity: sha512-b6pqtrQdigZBwZxAn1UpazEisvwaIDvdbMbmrly7cDTMFnw/+3lVxxCTGOrkPVnsYIosJJXAsILG9XcQS+Yu6w==}
engines: {node: '>=18'}
cpu: [arm64]
os: [netbsd]
'@esbuild/netbsd-x64@0.27.7':
resolution: {integrity: sha512-OfatkLojr6U+WN5EDYuoQhtM+1xco+/6FSzJJnuWiUw5eVcicbyK3dq5EeV/QHT1uy6GoDhGbFpprUiHUYggrw==}
engines: {node: '>=18'}
cpu: [x64]
os: [netbsd]
'@esbuild/openbsd-arm64@0.27.7':
resolution: {integrity: sha512-AFuojMQTxAz75Fo8idVcqoQWEHIXFRbOc1TrVcFSgCZtQfSdc1RXgB3tjOn/krRHENUB4j00bfGjyl2mJrU37A==}
engines: {node: '>=18'}
cpu: [arm64]
os: [openbsd]
'@esbuild/openbsd-x64@0.27.7':
resolution: {integrity: sha512-+A1NJmfM8WNDv5CLVQYJ5PshuRm/4cI6WMZRg1by1GwPIQPCTs1GLEUHwiiQGT5zDdyLiRM/l1G0Pv54gvtKIg==}
engines: {node: '>=18'}
cpu: [x64]
os: [openbsd]
'@esbuild/openharmony-arm64@0.27.7':
resolution: {integrity: sha512-+KrvYb/C8zA9CU/g0sR6w2RBw7IGc5J2BPnc3dYc5VJxHCSF1yNMxTV5LQ7GuKteQXZtspjFbiuW5/dOj7H4Yw==}
engines: {node: '>=18'}
cpu: [arm64]
os: [openharmony]
'@esbuild/sunos-x64@0.27.7':
resolution: {integrity: sha512-ikktIhFBzQNt/QDyOL580ti9+5mL/YZeUPKU2ivGtGjdTYoqz6jObj6nOMfhASpS4GU4Q/Clh1QtxWAvcYKamA==}
engines: {node: '>=18'}
cpu: [x64]
os: [sunos]
'@esbuild/win32-arm64@0.27.7':
resolution: {integrity: sha512-7yRhbHvPqSpRUV7Q20VuDwbjW5kIMwTHpptuUzV+AA46kiPze5Z7qgt6CLCK3pWFrHeNfDd1VKgyP4O+ng17CA==}
engines: {node: '>=18'}
cpu: [arm64]
os: [win32]
'@esbuild/win32-ia32@0.27.7':
resolution: {integrity: sha512-SmwKXe6VHIyZYbBLJrhOoCJRB/Z1tckzmgTLfFYOfpMAx63BJEaL9ExI8x7v0oAO3Zh6D/Oi1gVxEYr5oUCFhw==}
engines: {node: '>=18'}
cpu: [ia32]
os: [win32]
'@esbuild/win32-x64@0.27.7':
resolution: {integrity: sha512-56hiAJPhwQ1R4i+21FVF7V8kSD5zZTdHcVuRFMW0hn753vVfQN8xlx4uOPT4xoGH0Z/oVATuR82AiqSTDIpaHg==}
engines: {node: '>=18'}
cpu: [x64]
os: [win32]
'@types/node@22.19.17':
resolution: {integrity: sha512-wGdMcf+vPYM6jikpS/qhg6WiqSV/OhG+jeeHT/KlVqxYfD40iYJf9/AE1uQxVWFvU7MipKRkRv8NSHiCGgPr8Q==}
esbuild@0.27.7:
resolution: {integrity: sha512-IxpibTjyVnmrIQo5aqNpCgoACA/dTKLTlhMHihVHhdkxKyPO1uBBthumT0rdHmcsk9uMonIWS0m4FljWzILh3w==}
engines: {node: '>=18'}
hasBin: true
typescript@5.9.3:
resolution: {integrity: sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw==}
engines: {node: '>=14.17'}
hasBin: true
undici-types@6.21.0:
resolution: {integrity: sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ==}
zod@4.3.6:
resolution: {integrity: sha512-rftlrkhHZOcjDwkGlnUtZZkvaPHCsDATp4pGpuOOMDaTdDDXF91wuVDJoWoPsKX/3YPQ5fHuF3STjcYyKr+Qhg==}
snapshots:
'@esbuild/aix-ppc64@0.27.7':
optional: true
'@esbuild/android-arm64@0.27.7':
optional: true
'@esbuild/android-arm@0.27.7':
optional: true
'@esbuild/android-x64@0.27.7':
optional: true
'@esbuild/darwin-arm64@0.27.7':
optional: true
'@esbuild/darwin-x64@0.27.7':
optional: true
'@esbuild/freebsd-arm64@0.27.7':
optional: true
'@esbuild/freebsd-x64@0.27.7':
optional: true
'@esbuild/linux-arm64@0.27.7':
optional: true
'@esbuild/linux-arm@0.27.7':
optional: true
'@esbuild/linux-ia32@0.27.7':
optional: true
'@esbuild/linux-loong64@0.27.7':
optional: true
'@esbuild/linux-mips64el@0.27.7':
optional: true
'@esbuild/linux-ppc64@0.27.7':
optional: true
'@esbuild/linux-riscv64@0.27.7':
optional: true
'@esbuild/linux-s390x@0.27.7':
optional: true
'@esbuild/linux-x64@0.27.7':
optional: true
'@esbuild/netbsd-arm64@0.27.7':
optional: true
'@esbuild/netbsd-x64@0.27.7':
optional: true
'@esbuild/openbsd-arm64@0.27.7':
optional: true
'@esbuild/openbsd-x64@0.27.7':
optional: true
'@esbuild/openharmony-arm64@0.27.7':
optional: true
'@esbuild/sunos-x64@0.27.7':
optional: true
'@esbuild/win32-arm64@0.27.7':
optional: true
'@esbuild/win32-ia32@0.27.7':
optional: true
'@esbuild/win32-x64@0.27.7':
optional: true
'@types/node@22.19.17':
dependencies:
undici-types: 6.21.0
esbuild@0.27.7:
optionalDependencies:
'@esbuild/aix-ppc64': 0.27.7
'@esbuild/android-arm': 0.27.7
'@esbuild/android-arm64': 0.27.7
'@esbuild/android-x64': 0.27.7
'@esbuild/darwin-arm64': 0.27.7
'@esbuild/darwin-x64': 0.27.7
'@esbuild/freebsd-arm64': 0.27.7
'@esbuild/freebsd-x64': 0.27.7
'@esbuild/linux-arm': 0.27.7
'@esbuild/linux-arm64': 0.27.7
'@esbuild/linux-ia32': 0.27.7
'@esbuild/linux-loong64': 0.27.7
'@esbuild/linux-mips64el': 0.27.7
'@esbuild/linux-ppc64': 0.27.7
'@esbuild/linux-riscv64': 0.27.7
'@esbuild/linux-s390x': 0.27.7
'@esbuild/linux-x64': 0.27.7
'@esbuild/netbsd-arm64': 0.27.7
'@esbuild/netbsd-x64': 0.27.7
'@esbuild/openbsd-arm64': 0.27.7
'@esbuild/openbsd-x64': 0.27.7
'@esbuild/openharmony-arm64': 0.27.7
'@esbuild/sunos-x64': 0.27.7
'@esbuild/win32-arm64': 0.27.7
'@esbuild/win32-ia32': 0.27.7
'@esbuild/win32-x64': 0.27.7
typescript@5.9.3: {}
undici-types@6.21.0: {}
zod@4.3.6: {}

View File

@ -1,23 +0,0 @@
import type { LlmProvider } from "@uncaged/nerve-workflow-utils";
import { createCursorRole } from "@uncaged/nerve-workflow-utils";
import { coderPrompt } from "./prompt.js";
import { z } from "zod";
/** Signal extracted from the coder's reply. */
export const coderMetaSchema = z.object({
  filesCreated: z.boolean().describe("true if the sense files were created"),
});

export type CoderMeta = z.infer<typeof coderMetaSchema>;

export type BuildCoderDeps = {
  provider: LlmProvider;
  cwd: string;
};

/**
 * Build the coder role: a Cursor agent in "default" mode that implements
 * the planned sense, with CoderMeta extracted from its reply via the LLM
 * provider.
 */
export function buildCoderRole(deps: BuildCoderDeps) {
  const { provider, cwd } = deps;
  return createCursorRole<CoderMeta>({
    cwd,
    mode: "default",
    prompt: async (threadId) => coderPrompt({ threadId }),
    extract: { provider, schema: coderMetaSchema },
  });
}

View File

@ -1,43 +0,0 @@
/**
 * Prompt for the coder role. Directs the agent to read the planner's design
 * from the workflow thread, follow the nerve-dev skill conventions, create
 * the sense file tree under senses/<name>/, and bundle it with esbuild.
 * @param threadId - workflow thread id interpolated into `nerve thread` commands
 */
export function coderPrompt({ threadId }: { threadId: string }): string {
return `Read the workflow thread for the planner's sense design: \`nerve thread ${threadId}\`
Read the nerve-dev skill for sense file structure and conventions: \`cat node_modules/@uncaged/nerve-skills/nerve-dev/SKILL.md\`
Implement the sense following the patterns from existing senses and the skill guide.
File structure for each sense:
- \`senses/<name>/src/index.ts\` — TypeScript source with proper types; import schema as \`./schema.ts\`
- \`senses/<name>/src/schema.ts\` — Drizzle schema (TypeScript)
- \`senses/<name>/migrations/\` — Drizzle migration files (at sense root, not inside src/)
- \`senses/<name>/package.json\` — with esbuild build script (see below)
- \`senses/<name>/index.js\` — bundled output generated by \`pnpm build\` (do NOT edit by hand)
package.json template for each sense:
\`\`\`json
{
"name": "sense-<name>",
"version": "0.0.1",
"private": true,
"type": "module",
"scripts": {
"build": "esbuild src/index.ts --bundle --platform=node --format=esm --outfile=index.js --packages=external"
},
"devDependencies": {
"@types/node": "^22.0.0",
"esbuild": "^0.27.0",
"typescript": "^5.7.0"
},
"pnpm": {
"onlyBuiltDependencies": ["esbuild"]
}
}
\`\`\`
After creating all files, run inside the sense directory:
\`\`\`
pnpm install --no-cache && pnpm build
\`\`\`
This generates the bundled \`index.js\` at the sense root that the daemon loads.
Then update nerve.yaml and run any required migrations.`;
}

View File

@ -1,23 +0,0 @@
import type { LlmProvider } from "@uncaged/nerve-workflow-utils";
import { createCursorRole } from "@uncaged/nerve-workflow-utils";
import { plannerPrompt } from "./prompt.js";
import { z } from "zod";
/** Signal extracted from the planner's reply. */
export const plannerMetaSchema = z.object({
  senseName: z.string().describe("kebab-case sense name from the plan"),
});

export type PlannerMeta = z.infer<typeof plannerMetaSchema>;

export type BuildPlannerDeps = {
  provider: LlmProvider;
  cwd: string;
};

/**
 * Build the planner role: a Cursor agent in read-only "ask" mode that
 * designs the sense, with PlannerMeta extracted from its reply via the LLM
 * provider.
 */
export function buildPlannerRole(deps: BuildPlannerDeps) {
  const { provider, cwd } = deps;
  return createCursorRole<PlannerMeta>({
    cwd,
    mode: "ask",
    prompt: async (threadId) => plannerPrompt({ threadId }),
    extract: { provider, schema: plannerMetaSchema },
  });
}

View File

@ -1,17 +0,0 @@
/**
 * Prompt for the planner role. Asks for a markdown design plan (name,
 * fields, compute logic, trigger config) for a new sense — plan only, no
 * code.
 * @param threadId - workflow thread id interpolated into the `nerve thread` command
 */
export function plannerPrompt({ threadId }: { threadId: string }): string {
return `You are planning a new Nerve sense.
Read the workflow thread for the user's request: \`nerve thread ${threadId}\`
Read the nerve-dev skill for sense conventions: \`cat node_modules/@uncaged/nerve-skills/nerve-dev/SKILL.md\`
Also look at existing senses in the \`senses/\` directory for patterns.
Pick a good kebab-case name for this sense. Produce a PLAN (not code) in markdown:
## Sense Design
### Name kebab-case
### Fields name, type (integer/real/text), description
### Compute Logic step-by-step, specific Node.js APIs or shell commands
### Trigger Config group, interval, throttle, timeout
Output ONLY the plan. Be precise and implementation-ready.`;
}

View File

@ -1,20 +0,0 @@
import type { LlmProvider } from "@uncaged/nerve-workflow-utils";
import { createHermesRole } from "@uncaged/nerve-workflow-utils";
import { testerPrompt } from "./prompt.js";
import { z } from "zod";
/** Signal extracted from the tester's reply. */
export const testerMetaSchema = z.object({
  passed: z.boolean().describe("true if all e2e checks passed"),
});

export type TesterMeta = z.infer<typeof testerMetaSchema>;

export type BuildTesterDeps = {
  provider: LlmProvider;
};

/**
 * Build the tester role: a Hermes agent that verifies the generated sense
 * end-to-end, with TesterMeta extracted from its reply via the LLM provider.
 */
export function buildTesterRole(deps: BuildTesterDeps) {
  const { provider } = deps;
  return createHermesRole<TesterMeta>({
    prompt: async (threadId) => testerPrompt({ threadId }),
    extract: { provider, schema: testerMetaSchema },
  });
}

View File

@ -1,16 +0,0 @@
/**
 * Prompt for the tester role. Walks the agent through the full sense
 * lifecycle check: files exist, nerve.yaml entry, list/trigger/query via the
 * nerve CLI, and log inspection on failure.
 * @param threadId - workflow thread id interpolated into the `nerve thread` command
 */
export function testerPrompt({ threadId }: { threadId: string }): string {
return `You are testing a newly created Nerve sense end-to-end.
Read the workflow thread for context: \`nerve thread ${threadId}\`
Read the nerve-dev skill for expected file structure: \`cat node_modules/@uncaged/nerve-skills/nerve-dev/SKILL.md\`
Verify the full lifecycle:
1. Check all required sense files exist
2. Check nerve.yaml has the sense config
3. Run \`nerve sense list\` — confirm the sense appears
4. Run \`nerve sense trigger <sense-name>\` — should complete without error
5. Run \`nerve sense query <sense-name>\` — retry up to 20s until rows appear
6. If any step fails, run \`nerve logs\` and include relevant errors
Output a clear summary: what you checked, what passed, what failed, and why.`;
}

View File

@ -1,14 +0,0 @@
{
"compilerOptions": {
"target": "ES2022",
"lib": ["ES2022"],
"module": "NodeNext",
"moduleResolution": "NodeNext",
"strict": true,
"skipLibCheck": true,
"noEmit": false,
"declaration": false,
"types": ["node"]
},
"include": ["./**/*.ts"]
}

View File

@ -0,0 +1,43 @@
import type { AgentFn, WorkflowDefinition } from "@uncaged/nerve-core";
import type { LlmExtractorConfig } from "@uncaged/nerve-workflow-utils";
import { moderator } from "./moderator.js";
import type { WorkflowMeta } from "./moderator.js";
import { createCommitterRole } from "./roles/committer.js";
import { createImplementRole } from "./roles/implement.js";
import { createPlanRole } from "./roles/plan.js";
import { createPrepareRole } from "./roles/prepare.js";
import { createPublishRole } from "./roles/publish.js";
import { createReadIssueRole } from "./roles/read-issue.js";
import { createReviewRole } from "./roles/review.js";
import { createTestRole } from "./roles/test.js";
export type CreateSolveIssueDeps = {
  defaultAdapter: AgentFn;
  adapters?: Partial<Record<keyof WorkflowMeta, AgentFn>>;
  nerveRoot: string;
  extract: LlmExtractorConfig;
};

/**
 * Assemble the solve-issue workflow. Each role runs on its per-role adapter
 * override when one is supplied in `adapters`, otherwise on `defaultAdapter`.
 */
export function createSolveIssueWorkflow(deps: CreateSolveIssueDeps): WorkflowDefinition<WorkflowMeta> {
  const { defaultAdapter, adapters, nerveRoot, extract } = deps;
  // Resolve the adapter for a role: explicit override first, default second.
  const pick = (role: keyof WorkflowMeta) => adapters?.[role] ?? defaultAdapter;
  return {
    name: "solve-issue",
    roles: {
      "read-issue": createReadIssueRole(pick("read-issue"), extract),
      prepare: createPrepareRole(pick("prepare"), extract),
      plan: createPlanRole(pick("plan"), { extract, nerveRoot }),
      implement: createImplementRole(pick("implement"), { extract, nerveRoot }),
      committer: createCommitterRole(pick("committer"), extract),
      review: createReviewRole(pick("review"), extract, nerveRoot),
      test: createTestRole(pick("test"), extract),
      publish: createPublishRole(pick("publish"), { extract, nerveRoot }),
    },
    moderator,
  };
}

View File

@ -0,0 +1,37 @@
import { join } from "node:path";
import { createCursorAdapter } from "@uncaged/nerve-adapter-cursor";
import { hermesAdapter } from "@uncaged/nerve-adapter-hermes";
import { createSolveIssueWorkflow } from "./build.js";
import { resolveDashScopeProvider } from "./lib/provider.js";
// Nerve workspace root: ~/.uncaged-nerve (falls back to the VM default user's home).
const HOME = process.env.HOME ?? "/home/azureuser";
const NERVE_ROOT = join(HOME, ".uncaged-nerve");
// Resolve DashScope credentials from env or `cfg get`; fail fast if absent.
const provider = await resolveDashScopeProvider(NERVE_ROOT);
if (provider === null) {
throw new Error("Set DASHSCOPE_API_KEY and DASHSCOPE_BASE_URL (or cfg get equivalents)");
}
// Per-invocation cap for Cursor adapter runs: 5 minutes.
const CURSOR_TIMEOUT_MS = 300_000;
// Hermes handles most roles; plan (read-only "ask" mode) and implement run on Cursor.
const workflow = createSolveIssueWorkflow({
defaultAdapter: hermesAdapter,
adapters: {
plan: createCursorAdapter({
type: "cursor",
mode: "ask",
model: "auto",
timeout: CURSOR_TIMEOUT_MS,
}),
implement: createCursorAdapter({
type: "cursor",
model: "auto",
timeout: CURSOR_TIMEOUT_MS,
}),
},
nerveRoot: NERVE_ROOT,
extract: { provider },
});
export default workflow;

View File

@ -0,0 +1,26 @@
import type { LlmProvider } from "@uncaged/nerve-workflow-utils";
import { spawnSafe } from "@uncaged/nerve-workflow-utils";
/**
 * Read a single key via the `cfg` CLI run inside the nerve root.
 * Returns null when the command fails, times out (10s), or prints nothing.
 */
export async function cfgGet(nerveRoot: string, key: string): Promise<string | null> {
  const result = await spawnSafe("cfg", ["get", key], {
    cwd: nerveRoot,
    env: null,
    timeoutMs: 10_000,
    abortSignal: null,
  });
  if (!result.ok) return null;
  const trimmed = result.value.stdout.trim();
  return trimmed === "" ? null : trimmed;
}
/**
 * Resolve DashScope provider config. Environment variables win; `cfg get`
 * is the fallback; model defaults to "qwen-plus". Returns null when either
 * apiKey or baseUrl is unavailable.
 */
export async function resolveDashScopeProvider(nerveRoot: string): Promise<LlmProvider | null> {
  const lookup = async (key: string): Promise<string | null> =>
    process.env[key] ?? (await cfgGet(nerveRoot, key));
  const apiKey = await lookup("DASHSCOPE_API_KEY");
  const baseUrl = await lookup("DASHSCOPE_BASE_URL");
  const model = (await lookup("DASHSCOPE_MODEL")) ?? "qwen-plus";
  if (!apiKey || !baseUrl) return null;
  return { apiKey, baseUrl, model };
}

View File

@ -0,0 +1,86 @@
import { join } from "node:path";
import type { RoleStep, WorkflowMessage } from "@uncaged/nerve-core";
type SolveIssueParse = {
host: string;
owner: string;
repo: string;
number: number;
};
type SolveIssueRepo = {
path: string;
defaultBranch: string;
packageManager: string;
};
const HOME = process.env.HOME ?? "/home/azureuser";
/**
 * Pull key/value pairs out of a `---MARKER---` section in free-form text.
 * The section runs from the marker to the next line starting with `---` (or
 * end of text); each `key: value` line (letters-only keys) becomes an entry.
 * Returns null when the marker is absent or the section has no parsable pairs.
 */
function extractMarkedSection(text: string, marker: string): Record<string, string> | null {
  // Escape regex metacharacters so the marker is matched literally.
  const literal = marker.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
  const section = text.match(new RegExp(`---${literal}---\\s*([\\s\\S]*?)(?:\\n---|$)`));
  if (section === null) return null;
  const entries: Record<string, string> = {};
  for (const line of section[1].split("\n")) {
    const pair = /^([a-zA-Z]+):\s*(.+)$/.exec(line);
    if (pair !== null) entries[pair[1]] = pair[2].trim();
  }
  return Object.keys(entries).length > 0 ? entries : null;
}
/**
 * Parse the `---SOLVE_ISSUE_PARSE---` marker into issue coordinates.
 * Requires non-empty host/owner/repo and a positive finite issue number.
 */
function parseSolveIssueParse(text: string): SolveIssueParse | null {
  const fields = extractMarkedSection(text, "SOLVE_ISSUE_PARSE");
  if (fields === null) return null;
  const host = fields.host ?? "";
  const owner = fields.owner ?? "";
  const repo = fields.repo ?? "";
  const issueNumber = Number(fields.number ?? "");
  const valid =
    host !== "" && owner !== "" && repo !== "" && Number.isFinite(issueNumber) && issueNumber > 0;
  return valid ? { host, owner, repo, number: issueNumber } : null;
}
/**
 * Parse the `---SOLVE_ISSUE_REPO---` marker into local repo info.
 * `path` is required; defaultBranch and packageManager fall back to
 * "main" and "pnpm".
 */
function parseSolveIssueRepo(text: string): SolveIssueRepo | null {
  const fields = extractMarkedSection(text, "SOLVE_ISSUE_REPO");
  if (fields === null) return null;
  if (!fields.path) return null;
  return {
    path: fields.path,
    defaultBranch: fields.defaultBranch ?? "main",
    packageManager: fields.packageManager ?? "pnpm",
  };
}
/** Prefer explicit prepare marker; else ~/Code/<owner>/<repo> from read-issue parse block. */
export function resolveRepoCwd(messages: WorkflowMessage[]): string | null {
for (let i = messages.length - 1; i >= 0; i--) {
if (messages[i].role === "prepare") {
const repo = parseSolveIssueRepo(messages[i].content);
if (repo !== null) {
return repo.path;
}
}
}
for (let i = messages.length - 1; i >= 0; i--) {
if (messages[i].role === "read-issue") {
const parsed = parseSolveIssueParse(messages[i].content);
if (parsed !== null) {
return join(HOME, "Code", parsed.owner, parsed.repo);
}
}
}
return null;
}

View File

@ -0,0 +1,99 @@
import { END } from "@uncaged/nerve-core";
import type { Moderator } from "@uncaged/nerve-core";
import type { ReadIssueMeta } from "./roles/read-issue.js";
import type { PrepareMeta } from "./roles/prepare.js";
import type { PlanMeta } from "./roles/plan.js";
import type { ImplementMeta } from "./roles/implement.js";
import type { CommitterMeta } from "./roles/committer.js";
import type { ReviewMeta } from "./roles/review.js";
import type { TestMeta } from "./roles/test.js";
import type { PublishMeta } from "./roles/publish.js";
export type WorkflowMeta = {
"read-issue": ReadIssueMeta;
prepare: PrepareMeta;
plan: PlanMeta;
implement: ImplementMeta;
committer: CommitterMeta;
review: ReviewMeta;
test: TestMeta;
publish: PublishMeta;
};
// Retry budgets: total implement rounds and total gate rejections allowed.
const MAX_IMPLEMENT_ROUNDS = 20;
const MAX_TOTAL_REJECTIONS = 10;

/** Number of implement steps taken so far in the transcript. */
function implementRounds(steps: { role: string }[]): number {
  let rounds = 0;
  for (const step of steps) {
    if (step.role === "implement") rounds += 1;
  }
  return rounds;
}
/**
 * Count gate steps that rejected the work: review not approved, test not
 * passed, committer not committed, publish not successful. Other roles never
 * count as rejections.
 */
function totalRejections(steps: { role: string; meta: unknown }[]): number {
  // Maps each gate role to the boolean meta flag that signals success.
  const gateFlags: Record<string, string> = {
    review: "approved",
    test: "passed",
    committer: "committed",
    publish: "success",
  };
  let rejections = 0;
  for (const step of steps) {
    const flag = gateFlags[step.role];
    if (flag !== undefined && !(step.meta as Record<string, boolean>)[flag]) {
      rejections += 1;
    }
  }
  return rejections;
}
/** True while both the implement-round and rejection budgets have headroom. */
function canRetryImplement(steps: { role: string; meta: unknown }[]): boolean {
  const roundsRemain = implementRounds(steps) < MAX_IMPLEMENT_ROUNDS;
  const rejectionsRemain = totalRejections(steps) < MAX_TOTAL_REJECTIONS;
  return roundsRemain && rejectionsRemain;
}
/**
 * Drive the solve-issue pipeline:
 * read-issue -> prepare -> plan -> implement -> committer -> review -> test -> publish.
 * The first three stages abort the workflow when not ready; every later gate
 * failure loops back to implement while the retry budgets allow it.
 */
export const moderator: Moderator<WorkflowMeta> = (context) => {
  const { steps } = context;
  if (steps.length === 0) return "read-issue";
  const last = steps[steps.length - 1];
  const retryOrEnd = () => (canRetryImplement(steps) ? "implement" : END);
  switch (last.role) {
    case "read-issue":
      return last.meta.ready ? "prepare" : END;
    case "prepare":
      return last.meta.ready ? "plan" : END;
    case "plan":
      return last.meta.ready ? "implement" : END;
    case "implement":
      return last.meta.done ? "committer" : retryOrEnd();
    case "committer":
      return last.meta.committed ? "review" : retryOrEnd();
    case "review":
      return last.meta.approved ? "test" : retryOrEnd();
    case "test":
      return last.meta.passed ? "publish" : retryOrEnd();
    case "publish":
      return last.meta.success ? END : retryOrEnd();
    default:
      return END;
  }
};

View File

@ -0,0 +1,57 @@
import type { AgentFn, Role, ThreadContext } from "@uncaged/nerve-core";
import type { LlmExtractorConfig } from "@uncaged/nerve-workflow-utils";
import { createRole, decorateRole, withDryRun, onFail } from "@uncaged/nerve-workflow-utils";
import { z } from "zod";
/**
 * Prompt for the committer role. Instructs the agent to read the thread
 * markers, verify it is on the default branch with uncommitted changes,
 * create a fix/feat branch, commit with a conventional message, push, and
 * report `committed` as a trailing JSON line.
 * @param threadId - workflow thread id interpolated into the `nerve thread show` command
 */
function committerPrompt({ threadId }: { threadId: string }): string {
return `You are the committer agent. The **implement** step finished with a passing build; your job is to branch, commit, and push.
1. Read the workflow thread: \`nerve thread show ${threadId}\` — understand what was planned, implemented, and reviewed.
2. In the thread, locate \`---SOLVE_ISSUE_PARSE---\` and \`---SOLVE_ISSUE_REPO---\`. From them you need issue **number**, **title** (for the branch slug), repo **path**, and **defaultBranch**.
3. \`cd\` to the repo **path** from the markers. Optionally read \`CONVENTIONS.md\` in that repo root if present.
4. Run \`git rev-parse --abbrev-ref HEAD\` and compare with **defaultBranch** from the markers. Implement leaves changes uncommitted on the default branch — you should be on that branch with a dirty working tree. If you are not on the default branch, or the tree is clean when you expected changes, set **committed** to false and explain.
5. Run \`git status\`. If there is nothing to commit, set **committed** to false and explain.
6. Create a feature branch (do not commit directly on the default branch if it would mix unrelated work):
- Name: \`fix/<number>-<short-slug>\` for fixes, or \`feat/<number>-<short-slug>\` if the issue is clearly a feature.
- **slug**: lowercase, hyphens only, short (from issue title words).
- Example: \`git checkout -b fix/42-auth-timeout\`
7. \`git add -A\`
8. Write a **conventional commit** message describing what changed and why, using the thread context.
9. \`git commit -m "<message>"\` — do NOT pass \`--author\`, use repo git config.
10. \`git push -u origin <branch-name>\`
**committed=true** only if branch was created, commit succeeded, and **push** succeeded.
End your reply with a JSON line:
\`\`\`json
{ "committed": true }
\`\`\`
or
\`\`\`json
{ "committed": false }
\`\`\``;
}
/** Signal extracted from the committer's reply. */
export const committerMetaSchema = z.object({
  committed: z
    .boolean()
    .describe("true if branch created, changes committed, and pushed successfully"),
});

export type CommitterMeta = z.infer<typeof committerMetaSchema>;

/**
 * Build the committer role from the base createRole four-tuple, then wrap it
 * with decorators carrying fixed metas — committed=true for the dry-run path
 * and committed=false for the failure path (decorator semantics per
 * nerve-workflow-utils; confirm there if in doubt).
 */
export function createCommitterRole(
  adapter: AgentFn,
  extract: LlmExtractorConfig,
): Role<CommitterMeta> {
  const base = createRole(
    adapter,
    async (ctx: ThreadContext) => committerPrompt({ threadId: ctx.start.meta.threadId }),
    committerMetaSchema,
    extract,
  );
  const decorators = [
    withDryRun({ label: "committer", meta: { committed: true } as CommitterMeta }),
    onFail({ label: "committer", meta: { committed: false } as CommitterMeta }),
  ];
  return decorateRole(base, decorators) as Role<CommitterMeta>;
}

View File

@ -0,0 +1,86 @@
import type { AgentFn, Role, RoleResult, ThreadContext, WorkflowMessage } from "@uncaged/nerve-core";
import type { LlmExtractorConfig } from "@uncaged/nerve-workflow-utils";
import { createRole } from "@uncaged/nerve-workflow-utils";
import { z } from "zod";
import { resolveRepoCwd } from "../lib/repo-context.js";
/**
 * Prompt for the implement role. Instructs the agent to apply the planned
 * changes in the target repo, run the build, leave the work uncommitted
 * (the committer step owns git), and report `done` as trailing JSON.
 * @param threadId - workflow thread id for the `nerve thread show` command
 * @param nerveRoot - nerve workspace root containing CONVENTIONS.md
 */
function buildImplementPrompt({ threadId, nerveRoot }: { threadId: string; nerveRoot: string }): string {
return `You are the **implement** agent. You apply code changes for the issue.
Read workflow context (plan, reviewer/test feedback): \`nerve thread show ${threadId}\`
Read Nerve workspace conventions: \`cat ${nerveRoot}/CONVENTIONS.md\`
Your cwd is the target repository.
## Requirements
1. Implement the planned changes; address reviewer/tester feedback from the thread if any.
2. Run the project **build** (\`pnpm build\`, \`npm run build\`, etc.) and fix issues until build passes.
3. Multi-step: if you cannot finish this round, explain why and set **done** to false.
Do **not** run \`git checkout -b\`, \`git add\`, \`git commit\`, or \`git push\`. **Never** create commits on any branch — branching and commits are handled by the **committer** step after you finish.
Then close with JSON:
\`\`\`json
{ "done": true }
\`\`\`
or \`{ "done": false }\` matching whether implementation is complete.
**done=true** only when changes are complete **and** build passes in this round.`;
}
/** Signal extracted from the implement agent's reply. */
export const implementMetaSchema = z.object({
  done: z.boolean().describe("true when changes are complete and build passes this round"),
});

export type ImplementMeta = z.infer<typeof implementMetaSchema>;

export type CreateImplementRoleDeps = {
  extract: LlmExtractorConfig;
  nerveRoot: string;
};

/**
 * Build the implement role. Resolves the target repo path from thread
 * markers, injects it as `workdir` into the start meta, and runs the inner
 * createRole-based agent there. Missing markers or a thrown error resolve
 * to done=false instead of propagating.
 */
export function createImplementRole(
  adapter: AgentFn,
  { extract, nerveRoot }: CreateImplementRoleDeps,
): Role<ImplementMeta> {
  return async (ctx: ThreadContext): Promise<RoleResult<ImplementMeta>> => {
    const workdir = resolveRepoCwd(ctx.steps as unknown as WorkflowMessage[]);
    if (workdir === null) {
      return {
        content: "implement cannot run: missing repo path in thread markers",
        meta: { done: false },
      };
    }
    const role = createRole(
      adapter,
      async (scoped: ThreadContext) =>
        buildImplementPrompt({ threadId: scoped.start.meta.threadId, nerveRoot }),
      implementMetaSchema,
      extract,
    );
    // Rebuild the context with the resolved repo path as the working dir.
    const scopedCtx: ThreadContext = {
      ...ctx,
      start: { ...ctx.start, meta: { ...ctx.start.meta, workdir } },
    };
    try {
      return await role(scopedCtx);
    } catch (error) {
      const reason = error instanceof Error ? error.message : String(error);
      return { content: `implement failed: ${reason}`, meta: { done: false } };
    }
  };
}

View File

@ -0,0 +1,88 @@
import type { AgentFn, Role, RoleResult, ThreadContext, WorkflowMessage } from "@uncaged/nerve-core";
import type { LlmExtractorConfig } from "@uncaged/nerve-workflow-utils";
import { createRole } from "@uncaged/nerve-workflow-utils";
import { z } from "zod";
import { resolveRepoCwd } from "../lib/repo-context.js";
/**
 * Prompt for the plan role (analysis only, ask mode). Instructs the agent
 * to produce a markdown implementation plan — problem, strategy, target
 * files, explicit test commands, risks — and report `ready` as trailing JSON.
 * @param threadId - workflow thread id for the `nerve thread show` command
 * @param nerveRoot - nerve workspace root containing CONVENTIONS.md
 */
function buildPlanPrompt({ threadId, nerveRoot }: { threadId: string; nerveRoot: string }): string {
return `You are the **plan** agent (analysis only — ask mode). You produce an implementation plan for fixing the issue.
Read workflow context: \`nerve thread show ${threadId}\`
Read Nerve workspace conventions (coding rules for agents): \`cat ${nerveRoot}/CONVENTIONS.md\`
In the **target repository** (your cwd), skim relevant files and read \`CONVENTIONS.md\` **if it exists** there.
## Output
Write an implementation plan in **markdown** with:
1. Problem understanding
2. Change strategy
3. Target files (paths)
4. **Test commands** to run (explicit shell commands, e.g. \`pnpm test\`, \`pnpm vitest run\`)
5. Risks
End your reply with a JSON code block (meta signal):
\`\`\`json
{ "ready": true }
\`\`\`
Use \`{ "ready": false }\` if the plan cannot be made actionable.
**ready=true** only when the plan is clear and actionable.`;
}
/** Signal extracted from the plan agent's reply. */
export const planMetaSchema = z.object({
  ready: z.boolean().describe("true if plan is clear and actionable"),
});

export type PlanMeta = z.infer<typeof planMetaSchema>;

export type CreatePlanRoleDeps = {
  extract: LlmExtractorConfig;
  nerveRoot: string;
};

/**
 * Build the plan role. Resolves the target repo path from thread markers,
 * injects it as `workdir` into the start meta, and runs the inner
 * createRole-based agent there. Missing markers or a thrown error resolve
 * to ready=false instead of propagating.
 */
export function createPlanRole(
  adapter: AgentFn,
  { extract, nerveRoot }: CreatePlanRoleDeps,
): Role<PlanMeta> {
  return async (ctx: ThreadContext): Promise<RoleResult<PlanMeta>> => {
    const workdir = resolveRepoCwd(ctx.steps as unknown as WorkflowMessage[]);
    if (workdir === null) {
      return {
        content: "plan cannot run: missing ---SOLVE_ISSUE_REPO--- or ---SOLVE_ISSUE_PARSE--- in thread",
        meta: { ready: false },
      };
    }
    const role = createRole(
      adapter,
      async (scoped: ThreadContext) =>
        buildPlanPrompt({ threadId: scoped.start.meta.threadId, nerveRoot }),
      planMetaSchema,
      extract,
    );
    // Rebuild the context with the resolved repo path as the working dir.
    const scopedCtx: ThreadContext = {
      ...ctx,
      start: { ...ctx.start, meta: { ...ctx.start.meta, workdir } },
    };
    try {
      return await role(scopedCtx);
    } catch (error) {
      const reason = error instanceof Error ? error.message : String(error);
      return { content: `plan failed: ${reason}`, meta: { ready: false } };
    }
  };
}

View File

@ -0,0 +1,73 @@
import type { AgentFn, Role, ThreadContext } from "@uncaged/nerve-core";
import type { LlmExtractorConfig } from "@uncaged/nerve-workflow-utils";
import { createRole } from "@uncaged/nerve-workflow-utils";
import { z } from "zod";
/**
 * Builds the prompt text for the **prepare** agent. The returned string is
 * runtime data sent to the agent verbatim: it instructs the agent to locate or
 * clone the target repo, clean/update it, install dependencies, run a baseline
 * build, emit a ---SOLVE_ISSUE_REPO--- marker block, and finish with a
 * `{ "ready": boolean }` JSON block (matched by prepareMetaSchema).
 * @param threadId workflow thread id, interpolated into `nerve thread show`.
 */
function preparePrompt({ threadId }: { threadId: string }): string {
return `You are the **prepare** agent. You ensure the target repository is ready for work.
Read prior messages / thread for issue markers: \`nerve thread show ${threadId}\`
## Goal
Find **owner**, **repo**, and **host** from \`---SOLVE_ISSUE_PARSE---\` in the thread (from read-issue).
Check the **initial user prompt** (the trigger message) for a local repo path. The user may specify it like:
- \`--repo /path/to/repo\`
- \`repo: /path/to/repo\`
- or just mention an absolute path to the local clone
## Steps
### If a local path is provided in the trigger prompt:
1. Verify \`<path>/.git\` exists — if not, fail with \`ready: false\`
2. \`cd "<path>" && git fetch --all\`
3. Ensure working tree clean: if \`git status --porcelain\` is non-empty, \`git stash push -u -m "solve-issue stash"\`
4. Detect default branch (\`main\` or \`master\`) and \`git checkout <default> && git pull --ff-only\`
5. Use this path as REPOPATH
### If no local path is provided:
1. Let \`REPOPATH=$HOME/Code/<owner>/<repo>\` (expand \`$HOME\`)
2. \`mkdir -p "$HOME/Code/<owner>"\`
3. If \`REPOPATH/.git\` is missing: \`git clone https://<host>/<owner>/<repo>.git "$REPOPATH"\`
Else: \`cd "$REPOPATH" && git fetch --all && git pull --ff-only\`
4. Ensure working tree clean: if \`git status --porcelain\` is non-empty, \`git stash push -u -m "solve-issue stash"\`
5. Detect default branch and \`git checkout <default>\`
### Then (both paths):
6. Detect package manager: \`pnpm-lock.yaml\` → pnpm, \`yarn.lock\` → yarn, \`package-lock.json\` → npm; run install (\`pnpm install --no-frozen-lockfile\` / \`npm ci\` or \`npm install\` / \`yarn\`).
7. If \`package.json\` has a \`build\` script, run the build (\`pnpm build\`, etc.) and fix nothing — only verify baseline passes.
## Required marker block
Emit **exactly**:
\`\`\`
---SOLVE_ISSUE_REPO---
path: <absolute path to REPOPATH>
defaultBranch: <main or master>
packageManager: <pnpm|npm|yarn>
---
\`\`\`
End with:
\`\`\`json
{ "ready": true }
\`\`\`
or \`{ "ready": false }\` if the repo is invalid, or install/build baseline failed.
**ready=true** only when the repo exists at \`path\`, is clean, dependencies installed, and baseline build succeeded (or no build script).`;
}
// Meta signal the prepare agent must emit as its trailing JSON block.
export const prepareMetaSchema = z.object({
ready: z.boolean().describe("true if repo is ready and baseline build ok"),
});
export type PrepareMeta = z.infer<typeof prepareMetaSchema>;
/**
 * Creates the "prepare" role by delegating to the generic createRole agent.
 * Only the prompt (derived from the thread id in the start step meta) is
 * specific to this role.
 */
export function createPrepareRole(adapter: AgentFn, extract: LlmExtractorConfig): Role<PrepareMeta> {
  const promptFor = async (ctx: ThreadContext): Promise<string> =>
    preparePrompt({ threadId: ctx.start.meta.threadId });
  return createRole(adapter, promptFor, prepareMetaSchema, extract);
}

View File

@ -0,0 +1,110 @@
import { mkdirSync, writeFileSync } from "node:fs";
import { dirname, join } from "node:path";
import type { AgentFn, Role, RoleResult, ThreadContext } from "@uncaged/nerve-core";
import type { LlmExtractorConfig } from "@uncaged/nerve-workflow-utils";
import { createRole, isDryRun } from "@uncaged/nerve-workflow-utils";
import { z } from "zod";
/**
 * Builds the prompt text for the **publish** agent. The returned string is
 * runtime data sent to the agent verbatim: it instructs the agent to push the
 * current branch (if needed), compose a PR title/body, create the PR with the
 * `tea` CLI, and finish with a `{ "success": boolean }` JSON block (matched by
 * publishMetaSchema).
 * @param threadId workflow thread id, interpolated into `nerve thread show`.
 * @param nerveRoot absolute Nerve workspace path, used to locate CONVENTIONS.md.
 */
function buildPublishPrompt({ threadId, nerveRoot }: { threadId: string; nerveRoot: string }): string {
return `You are the **publish** agent (Hermes). Test has passed. Open a pull request for the current branch using the **tea** CLI.
## Context
- Read the full workflow thread: \`nerve thread show ${threadId}\`
- Nerve workspace conventions (for tone/consistency, optional): \`cat ${nerveRoot}/CONVENTIONS.md\`
## Repo and issue (from the thread)
Find \`---SOLVE_ISSUE_PARSE---\` and \`---SOLVE_ISSUE_REPO---\` in prior messages. You need:
- \`path\` — clone checkout directory (this is your working copy)
- \`host\`, \`owner\`, \`repo\`, \`number\` for the issue
- \`defaultBranch\` (for PR base) from SOLVE_ISSUE_REPO
**Issue link** for the Ref section: \`https://<host>/<owner>/<repo>/issues/<number>\`
## Steps (in order)
1. \`cd\` to the **repo \`path\`**. Run \`git rev-parse --abbrev-ref HEAD\` to get the current branch name. The **committer** step should already have pushed this branch; run \`git push -u origin <that-branch>\` only if the branch is not yet on the remote.
2. Choose a **PR title** that reflects the real change (not a generic \`fix: issue #N\`): derive it from the issue title, plan, and thread summary (keep it concise; Conventional Commits style is fine, e.g. \`fix(auth): handle session expiry\`).
3. Write a **PR body** in Markdown with exactly these sections, in this order, each with a \`##\` heading (fill with concise content based on the thread: plan, implement, review, test):
- **## What** one short paragraph: what this PR does
- **## Why** one short paragraph: motivation / issue
- **## Changes** bullet list of notable changes
- **## Ref** include one line \`Fixes #<number>\` (same \`number\` from SOLVE_ISSUE_PARSE; closes/links the issue where supported) **and** the issue URL \`https://<host>/<owner>/<repo>/issues/<number>\`
4. Create the PR with **tea** (not curl/fetch to Gitea):
- \`tea pr create --repo <owner>/<repo> --base <defaultBranch> --head <branch> --title "<your meaningful title>" --body <your markdown body>\`
- You may use a heredoc or a temp file for \`--body\` if the shell requires it; keep the four sections in the body.
5. Confirm the PR was created (tea prints a URL or PR number in typical setups).
**success=true** only if both **push** and **tea** PR creation succeed. If any step fails, set **success=false** and say why.
End your reply with a JSON line:
\`\`\`json
{ "success": true }
\`\`\`
or
\`\`\`json
{ "success": false }
\`\`\``;
}
// Meta signal the publish agent must emit as its trailing JSON block.
export const publishMetaSchema = z.object({
success: z.boolean().describe("true if git push and tea pr create both succeeded"),
});
export type PublishMeta = z.infer<typeof publishMetaSchema>;
// Dependencies injected by the workflow wiring into createPublishRole.
export type CreatePublishRoleDeps = {
extract: LlmExtractorConfig;
nerveRoot: string;
};
/** Log file location under <nerveRoot>/logs; timestamped so runs never clobber each other. */
function logPath(nerveRoot: string): string {
  const fileName = `solve-issue-publish-${Date.now()}.log`;
  return join(nerveRoot, "logs", fileName);
}
/**
 * Creates the "publish" role: delegates to the generic createRole agent with
 * the publish prompt, writes a log file under <nerveRoot>/logs, and reports
 * `{ success: boolean }` meta. In dry-run mode the push/PR work is skipped and
 * success is reported. Agent failures are caught and logged, never thrown.
 */
export function createPublishRole(
  adapter: AgentFn,
  { extract, nerveRoot }: CreatePublishRoleDeps,
): Role<PublishMeta> {
  const innerRole = createRole(
    adapter,
    async (ctx: ThreadContext) =>
      buildPublishPrompt({ threadId: ctx.start.meta.threadId, nerveRoot }),
    publishMetaSchema,
    extract,
  );
  return async (ctx: ThreadContext): Promise<RoleResult<PublishMeta>> => {
    const file = logPath(nerveRoot);
    // dirname() names the log directory directly instead of join(file, "..")
    // path arithmetic.
    mkdirSync(dirname(file), { recursive: true });
    if (isDryRun(ctx.start)) {
      const msg = "[dry-run] publish skipped (no git push / PR)";
      writeFileSync(file, `${msg}\n`, "utf-8");
      return {
        content: `[dry-run] publish skipped — log: ${file}`,
        meta: { success: true },
      };
    }
    // NOTE(review): workdir is set to nerveRoot (not the repo checkout); the
    // prompt tells the agent to cd into the repo path itself — confirm intended.
    const innerCtx: ThreadContext = {
      ...ctx,
      start: {
        ...ctx.start,
        meta: { ...ctx.start.meta, workdir: nerveRoot },
      },
    };
    try {
      return await innerRole(innerCtx);
    } catch (e) {
      // Persist the failure to the log file and surface it via role meta.
      const msg = e instanceof Error ? e.message : String(e);
      const body = `publish failed: ${msg}\n`;
      writeFileSync(file, body, "utf-8");
      return {
        content: `publish failed: ${msg}\nLog: ${file}`,
        meta: { success: false },
      };
    }
  };
}

View File

@ -0,0 +1,53 @@
import type { AgentFn, Role, ThreadContext } from "@uncaged/nerve-core";
import type { LlmExtractorConfig } from "@uncaged/nerve-workflow-utils";
import { createRole } from "@uncaged/nerve-workflow-utils";
import { z } from "zod";
/**
 * Builds the prompt text for the **read-issue** agent. The returned string is
 * runtime data sent to the agent verbatim: it instructs the agent to parse the
 * issue URL, fetch the issue with the `tea` CLI, emit a ---SOLVE_ISSUE_PARSE---
 * marker block, and finish with a `{ "ready": boolean }` JSON block (matched by
 * readIssueMetaSchema).
 * @param threadId workflow thread id, interpolated into `nerve thread show`.
 */
function readIssuePrompt({ threadId }: { threadId: string }): string {
return `You are the **read-issue** agent. You fetch Gitea issue content via the \`tea\` CLI.
Read the workflow thread start prompt for the issue URL (same run): \`nerve thread show ${threadId}\`
## Steps
1. From the **initial user prompt** (issue URL), extract **host**, **owner**, **repo**, and **issue number**. Supported shape:
\`https://<host>/<owner>/<repo>/issues/<number>\`
2. Run:
\`tea issue show <number> --repo <owner>/<repo> --comments\`
(Add \`--json\` if helpful for parsing.)
3. In your reply, include **structured issue text**: title, body, labels, and each comment (author + body + time).
4. You **must** emit this marker block **exactly** (fill in real values):
\`\`\`
---SOLVE_ISSUE_PARSE---
host: <host>
owner: <owner>
repo: <repo>
number: <number>
---
\`\`\`
5. End with JSON meta (verbatim block):
\`\`\`json
{ "ready": true }
\`\`\`
Use \`{ "ready": false }\` if you could not fetch or parse the issue.
**ready=true** only if the issue was fetched successfully and the marker block is correct.`;
}
// Meta signal the read-issue agent must emit as its trailing JSON block.
export const readIssueMetaSchema = z.object({
ready: z.boolean().describe("true if issue content was fetched and markers are present"),
});
export type ReadIssueMeta = z.infer<typeof readIssueMetaSchema>;
/**
 * Creates the "read-issue" role by delegating to the generic createRole agent.
 * Only the prompt (derived from the thread id in the start step meta) is
 * specific to this role.
 */
export function createReadIssueRole(adapter: AgentFn, extract: LlmExtractorConfig): Role<ReadIssueMeta> {
  const promptFor = (ctx: ThreadContext): Promise<string> =>
    Promise.resolve(readIssuePrompt({ threadId: ctx.start.meta.threadId }));
  return createRole(adapter, promptFor, readIssueMetaSchema, extract);
}

View File

@ -0,0 +1,59 @@
import type { AgentFn, Role, ThreadContext } from "@uncaged/nerve-core";
import type { LlmExtractorConfig } from "@uncaged/nerve-workflow-utils";
import { createRole } from "@uncaged/nerve-workflow-utils";
import { z } from "zod";
/**
 * Builds the prompt text for the **review** agent (runs between implement and
 * test). The returned string is runtime data sent to the agent verbatim: it
 * instructs the agent to inspect the git diff in the target repo and finish
 * with an `{ "approved": boolean }` JSON block (matched by reviewMetaSchema).
 * @param threadId workflow thread id, interpolated into `nerve thread show`.
 * @param nerveRoot absolute Nerve workspace path, used to locate CONVENTIONS.md.
 */
function reviewPrompt({ threadId, nerveRoot }: { threadId: string; nerveRoot: string }): string {
return `You are a **code reviewer** (Hermes). You run after implement and before test.
Read Nerve workspace conventions: \`cat ${nerveRoot}/CONVENTIONS.md\`
Read workflow context: \`nerve thread show ${threadId}\`
Find **repo path** from \`---SOLVE_ISSUE_REPO--- path:\` in the thread (prepare step). \`cd\` there before any git commands.
## Static analysis
Run:
1. \`cd <repo-path> && git diff --stat\`
2. \`cd <repo-path> && git diff\`
3. \`cd <repo-path> && git status --short\`
## Checklist
Reject (**approved: false**) if you find:
- Garbage files, secrets/credentials, unrelated changes
- Violations of CONVENTIONS.md (e.g. \`interface\` vs \`type\`, dynamic \`import()\`)
Approve (**approved: true**) if the diff is clean and focused.
End with:
\`\`\`json
{ "approved": true }
\`\`\`
or
\`\`\`json
{ "approved": false }
\`\`\``;
}
// Meta signal the review agent must emit as its trailing JSON block.
export const reviewMetaSchema = z.object({
approved: z.boolean().describe("true if diff is clean and ready for tests"),
});
export type ReviewMeta = z.infer<typeof reviewMetaSchema>;
/**
 * Creates the "review" role by delegating to the generic createRole agent.
 * The prompt is built from the thread id (start step meta) and the workspace
 * root passed in by the workflow wiring.
 */
export function createReviewRole(
  adapter: AgentFn,
  extract: LlmExtractorConfig,
  nerveRoot: string,
): Role<ReviewMeta> {
  const buildPrompt = async (ctx: ThreadContext): Promise<string> => {
    const threadId = ctx.start.meta.threadId;
    return reviewPrompt({ threadId, nerveRoot });
  };
  return createRole(adapter, buildPrompt, reviewMetaSchema, extract);
}

View File

@ -0,0 +1,40 @@
import type { AgentFn, Role, ThreadContext } from "@uncaged/nerve-core";
import type { LlmExtractorConfig } from "@uncaged/nerve-workflow-utils";
import { createRole } from "@uncaged/nerve-workflow-utils";
import { z } from "zod";
/**
 * Builds the prompt text for the **test** agent. The returned string is
 * runtime data sent to the agent verbatim: it instructs the agent to run the
 * plan's test commands in the repo checkout and finish with a
 * `{ "passed": boolean }` JSON block (matched by testMetaSchema).
 * @param threadId workflow thread id, interpolated into `nerve thread show`.
 */
function testPrompt({ threadId }: { threadId: string }): string {
return `You are the **test** agent (Hermes). You execute automated tests for the change.
Read workflow context: \`nerve thread show ${threadId}\`
Find **repo path** from \`---SOLVE_ISSUE_REPO--- path:\` in the thread.
From the **plan** step output, locate **Test commands** (explicit shell commands). Run each command with cwd = repo path, in order.
If the plan lists **no** test commands, try **pnpm test**, then **npm test** if pnpm is unavailable; if neither applies, explain skip.
Collect stdout/stderr snippets on failure.
End with JSON only:
\`\`\`json
{ "passed": true }
\`\`\`
or \`{ "passed": false }\`
**passed=true** only if every executed command exited 0 (or skip was justified with no failing command).`;
}
// Meta signal the test agent must emit as its trailing JSON block.
export const testMetaSchema = z.object({
passed: z.boolean().describe("true if all test commands passed"),
});
export type TestMeta = z.infer<typeof testMetaSchema>;
/**
 * Creates the "test" role by delegating to the generic createRole agent.
 * Only the prompt (derived from the thread id in the start step meta) is
 * specific to this role.
 */
export function createTestRole(adapter: AgentFn, extract: LlmExtractorConfig): Role<TestMeta> {
  const prompt = async ({ start }: ThreadContext): Promise<string> =>
    testPrompt({ threadId: start.meta.threadId });
  return createRole(adapter, prompt, testMetaSchema, extract);
}

View File

@ -1,807 +0,0 @@
import { existsSync, readFileSync } from "node:fs";
import { join } from "node:path";
import type { RoleResult, StartStep, WorkflowDefinition, WorkflowMessage } from "@uncaged/nerve-core";
import { END } from "@uncaged/nerve-core";
import type { SpawnError } from "@uncaged/nerve-workflow-utils";
import {
cursorAgent,
isDryRun,
llmExtract,
nerveAgentContext,
readNerveYaml,
spawnSafe,
} from "@uncaged/nerve-workflow-utils";
import { z } from "zod";
// Workspace layout: everything lives under ~/.uncaged-nerve.
const HOME = process.env.HOME ?? "/home/azureuser";
const NERVE_ROOT = join(HOME, ".uncaged-nerve");
const WORKFLOWS_DIR = join(NERVE_ROOT, "workflows");
// One role entry in the planner's generated workflow specification.
type PlannerRole = {
name: string;
goal: string;
io: string;
};
// Per-role metadata attached to each workflow message; later roles read
// earlier roles' entries via lastMetaForRole.
type WorkflowMeta = {
planner: {
userPrompt: string;
workflowName: string;
roles: PlannerRole[];
flowTransitions: string;
validationLoopsDesign: string;
externalDeps: string;
dataFlow: string;
planMarkdown: string;
};
coder: {
workflowName: string;
attempt: number;
files: { indexTs: boolean; packageJson: boolean; tsconfigJson: boolean };
lintPassed: boolean;
buildPassed: boolean;
lintLog: string;
buildLog: string;
cursorOutput: string;
reason: string | null;
};
tester: {
workflowName: string;
attempt: number;
passed: boolean;
dryRunLog: string;
reason: string;
};
committer: {
invoked: boolean;
success: boolean;
branch: string | null;
commitHash: string | null;
pushed: boolean | null;
log: string;
error: string | null;
};
};
// Lenient role shape: every field defaults to "" so partial LLM output still parses.
const roleSchema = z
.object({
name: z.string().default(""),
goal: z.string().default(""),
io: z.string().default(""),
})
.default({ name: "", goal: "", io: "" });
// Extraction schema for the planner's LLM output. String-ish fields are
// preprocessed so an array-valued answer is joined into a single string.
const plannerExtractSchema = z.object({
workflowName: z
.string()
.default("")
.describe("kebab-case workflow name under workflows/, e.g. issue-fixer"),
roles: z.array(roleSchema).default([]),
flowTransitions: z.preprocess((v) => (Array.isArray(v) ? v.join("\n") : v), z.string().default("")),
validationLoopsDesign: z.preprocess((v) => (Array.isArray(v) ? v.join("\n") : v), z.string().default("")),
externalDeps: z.preprocess((v) => (Array.isArray(v) ? v.join(", ") : v), z.string().default("")),
dataFlow: z.preprocess((v) => (Array.isArray(v) ? v.join("\n") : v), z.string().default("")),
planMarkdown: z.preprocess((v) => (Array.isArray(v) ? v.join("\n") : v), z.string().default("")),
});
/** Reads nerve.yaml from the workspace root; falls back to a placeholder comment so callers always get embeddable YAML text. */
function getNerveYaml(): string {
  const result = readNerveYaml({ nerveRoot: NERVE_ROOT });
  if (!result.ok) {
    return "# nerve.yaml unavailable";
  }
  return result.value;
}
/** Loads the sense-generator workflow source as a reference example; returns a placeholder string when the file is absent. */
function buildSenseGeneratorReference(): string {
  const referencePath = join(WORKFLOWS_DIR, "sense-generator", "index.ts");
  return existsSync(referencePath)
    ? readFileSync(referencePath, "utf-8")
    : "(missing workflows/sense-generator/index.ts)";
}
/** Renders a SpawnError as a one-line summary, truncating captured output for logs. */
function formatSpawnFailure(error: SpawnError): string {
  switch (error.kind) {
    case "spawn_failed":
      return error.message;
    case "timeout":
      return `timeout stdout=${error.stdout.slice(0, 300)} stderr=${error.stderr.slice(0, 300)}`;
    default:
      return `exit ${error.exitCode} stderr=${error.stderr.slice(0, 500)}`;
  }
}
/**
 * Best-effort lookup of a key via the `cfg get` CLI.
 * Any spawn failure or empty output is treated as "value not set" (null).
 */
async function cfgGet(key: string): Promise<string | null> {
  const result = await spawnSafe("cfg", ["get", key], {
    cwd: NERVE_ROOT,
    env: null,
    timeoutMs: 10_000,
  });
  if (!result.ok) {
    return null;
  }
  const value = result.value.stdout.trim();
  if (value.length === 0) {
    return null;
  }
  return value;
}
/**
 * Resolves DashScope credentials: environment variables win, the cfg store is
 * the fallback, and the model defaults to "qwen-plus". Returns null when the
 * API key or base URL is missing/empty.
 */
async function resolveDashScopeProvider(): Promise<{
  baseUrl: string;
  apiKey: string;
  model: string;
} | null> {
  const fromEnvOrCfg = async (name: string): Promise<string | null> =>
    process.env[name] ?? (await cfgGet(name));
  const apiKey = await fromEnvOrCfg("DASHSCOPE_API_KEY");
  const baseUrl = await fromEnvOrCfg("DASHSCOPE_BASE_URL");
  const model = (await fromEnvOrCfg("DASHSCOPE_MODEL")) ?? "qwen-plus";
  if (!apiKey || !baseUrl) {
    return null;
  }
  return { apiKey, baseUrl, model };
}
/** Returns the meta of the most recent message for the given role, or null if the role never spoke. */
function lastMetaForRole<M>(messages: WorkflowMessage[], role: string): M | null {
  // Walk backwards so the latest matching message wins.
  let idx = messages.length;
  while (idx-- > 0) {
    const message = messages[idx];
    if (message.role === role) {
      return message.meta as M;
    }
  }
  return null;
}
/**
 * Cheap regex screens over generated workflow source for the most common
 * generation mistakes. Returns one message per detected pitfall, in a fixed
 * order (await-import, undocumented dynamic import, missing default export).
 */
function scanGeneratedCodePitfalls(source: string): string[] {
  const checks: Array<{ failed: boolean; message: string }> = [
    {
      failed: /\bawait\s+import\s*\(/.test(source),
      message: "Found await import() in generated workflow code",
    },
    {
      // A dynamic import is tolerated only when explicitly documented in-source.
      failed: /\bimport\s*\(\s*["'`]/.test(source) && !source.includes("Dynamic import required"),
      message: "Found undocumented dynamic import() call",
    },
    {
      failed: !/\bexport\s+default\s+/.test(source),
      message: "Missing default export of WorkflowDefinition",
    },
  ];
  return checks.filter((check) => check.failed).map((check) => check.message);
}
/**
 * Infers the workflow name from role metadata in the thread. Later pipeline
 * stages carry the most authoritative name, so tester is checked first, then
 * coder, then planner; returns "" when no stage produced a non-blank name.
 */
function inferWorkflowName(messages: WorkflowMessage[]): string {
  const rolesByPriority = ["tester", "coder", "planner"];
  for (const role of rolesByPriority) {
    const meta = lastMetaForRole<{ workflowName: string }>(messages, role);
    if (meta !== null) {
      const name = meta.workflowName.trim();
      if (name.length > 0) {
        return name;
      }
    }
  }
  return "";
}
/**
 * Runs the generated workflow package's quality gates in order: lint
 * ("pnpm run check") then type-check ("npx tsc --noEmit"), short-circuiting on
 * the first failure. Returns per-gate pass flags, captured logs, and a
 * human-readable failure reason (null when both gates pass).
 * `dry` is forwarded to spawnSafe's dryRun option.
 */
async function runLintAndBuild(
workflowDir: string,
dry: boolean,
): Promise<{
lintPassed: boolean;
buildPassed: boolean;
lintLog: string;
buildLog: string;
reason: string | null;
}> {
// Gate 1: project lint script.
const lintRun = await spawnSafe("pnpm", ["run", "check"], {
cwd: workflowDir,
env: null,
timeoutMs: 300_000,
dryRun: dry,
});
if (!lintRun.ok) {
return {
lintPassed: false,
buildPassed: false,
lintLog: formatSpawnFailure(lintRun.error),
buildLog: "",
reason: `lint failed: ${formatSpawnFailure(lintRun.error)}`,
};
}
// Prefer stderr, then stdout, for the log snippet.
const lintLog = lintRun.value.stderr.trim() || lintRun.value.stdout.trim() || "(no output)";
// Gate 2: TypeScript compile check without emitting files.
const tscRun = await spawnSafe("npx", ["tsc", "--noEmit"], {
cwd: workflowDir,
env: null,
timeoutMs: 300_000,
dryRun: dry,
});
if (!tscRun.ok) {
return {
lintPassed: true,
buildPassed: false,
lintLog,
buildLog: formatSpawnFailure(tscRun.error),
reason: `build failed: ${formatSpawnFailure(tscRun.error)}`,
};
}
const buildLog = tscRun.value.stderr.trim() || tscRun.value.stdout.trim() || "(no output)";
return { lintPassed: true, buildPassed: true, lintLog, buildLog, reason: null };
}
/**
 * Asks a cursor agent (ask mode) to review the generated workflow against the
 * planner's specification. The agent must answer in a strict
 * "PASS|reason|log" / "FAIL|reason|log" format which is parsed here; any other
 * shape is treated as a failure. In dry-run mode the external call is skipped
 * and a pass is reported.
 */
async function runTesterDryRun(
workflowName: string,
planner: WorkflowMeta["planner"],
coder: WorkflowMeta["coder"],
dry: boolean,
): Promise<{ passed: boolean; reason: string; log: string }> {
if (dry) {
return {
passed: true,
reason: "dry-run mode",
log: "[dry-run] tester skipped external checks",
};
}
const prompt = `You are testing a generated Nerve workflow by doing a dry-run review.
Workflow: ${workflowName}
Planner specification:
${JSON.stringify(
{
roles: planner.roles,
flowTransitions: planner.flowTransitions,
validationLoopsDesign: planner.validationLoopsDesign,
externalDeps: planner.externalDeps,
dataFlow: planner.dataFlow,
},
null,
2,
)}
Coder output summary:
${coder.cursorOutput.slice(0, 6000)}
Required checks:
1) Verify role transitions are coherent and terminates to END.
2) Verify generated workflow adheres to planner intent.
3) Verify retry loops are explicit for recoverable failures.
4) Verify no obvious runtime-breaking issue in generated index.ts.
Return exactly:
PASS|<reason>|<compact markdown log>
or
FAIL|<reason>|<compact markdown log>`;
const run = await cursorAgent({
prompt,
mode: "ask",
cwd: NERVE_ROOT,
env: null,
timeoutMs: null,
dryRun: false,
});
if (!run.ok) {
return {
passed: false,
reason: `tester agent failed: ${formatSpawnFailure(run.error)}`,
log: "",
};
}
// Parse the strict PASS|/FAIL| pipe-delimited reply.
const text = run.value.trim();
const pass = text.startsWith("PASS|");
const fail = text.startsWith("FAIL|");
if (!pass && !fail) {
return { passed: false, reason: "tester format invalid", log: text };
}
const parts = text.split("|");
const reason = parts[1] ?? "no reason";
// The log may itself contain "|", so re-join everything after the reason.
const log = parts.slice(2).join("|").trim();
return { passed: pass, reason, log };
}
/**
 * Delegates commit/push of the generated workflow to a "hermes" agent CLI,
 * trying `hermes-agent` first, then `hermes agent`, then falling back to a
 * cursor agent. The hermes reply is parsed via the BRANCH=/COMMIT=/PUSHED=
 * line protocol; the cursor fallback is parsed heuristically. In dry-run mode
 * no command is executed and a synthetic success is returned.
 */
async function runHermesCommitter(
workflowName: string,
userPrompt: string,
testerReason: string,
dry: boolean,
): Promise<{
invoked: boolean;
success: boolean;
branch: string | null;
commitHash: string | null;
pushed: boolean | null;
log: string;
error: string | null;
}> {
const task = `You are a git committer subagent for Nerve workflow generation.
Repository root: ${NERVE_ROOT}
Goal:
- Commit and push generated workflow "${workflowName}".
- Handle dirty worktree safely (do not discard unrelated user edits).
- Detect default branch automatically.
- Create a focused branch for this workflow update.
- Stage only workflow files and required config updates.
Context:
- User prompt summary: ${userPrompt.slice(0, 500)}
- Tester result: ${testerReason}
Expected output format:
BRANCH=<branch-or-empty>
COMMIT=<hash-or-empty>
PUSHED=<true|false|unknown>
LOG_START
<details>
LOG_END`;
if (dry) {
return {
invoked: true,
success: true,
branch: "wf/dry-run",
commitHash: null,
pushed: null,
log: "[dry-run] skipped hermes committer",
error: null,
};
}
// Try the hermes CLI under its two known invocation shapes.
const commandAttempts: Array<{ cmd: string; args: string[] }> = [
{ cmd: "hermes-agent", args: ["--cwd", NERVE_ROOT, "--task", task] },
{ cmd: "hermes", args: ["agent", "--cwd", NERVE_ROOT, "--task", task] },
];
for (const candidate of commandAttempts) {
const run = await spawnSafe(candidate.cmd, candidate.args, {
cwd: NERVE_ROOT,
env: null,
timeoutMs: 600_000,
dryRun: false,
});
if (!run.ok) {
continue;
}
// Parse the line-protocol reply (BRANCH=/COMMIT=/PUSHED=).
const text = `${run.value.stdout}\n${run.value.stderr}`;
const branch = text.match(/^BRANCH=(.*)$/m)?.[1]?.trim() ?? null;
const commitHash = text.match(/^COMMIT=(.*)$/m)?.[1]?.trim() ?? null;
const pushedText = text.match(/^PUSHED=(.*)$/m)?.[1]?.trim().toLowerCase() ?? "unknown";
const pushed = pushedText === "true" ? true : pushedText === "false" ? false : null;
return {
invoked: true,
success: true,
branch: branch && branch.length > 0 ? branch : null,
commitHash: commitHash && commitHash.length > 0 ? commitHash : null,
pushed,
log: text.slice(0, 20_000),
error: null,
};
}
// Neither hermes invocation worked: fall back to a cursor agent run.
const fallback = await cursorAgent({
prompt: `Run this git committer task in repository ${NERVE_ROOT}:\n\n${task}`,
mode: "default",
cwd: NERVE_ROOT,
env: null,
timeoutMs: null,
dryRun: false,
});
if (!fallback.ok) {
return {
invoked: true,
success: false,
branch: null,
commitHash: null,
pushed: null,
log: "",
error: `hermes and fallback both failed: ${formatSpawnFailure(fallback.error)}`,
};
}
// Heuristic parse of free-form agent output — best effort only.
const out = fallback.value;
const branch = out.match(/(?:branch|BRANCH)\s*[:=]\s*([^\s]+)/)?.[1] ?? null;
const commitHash = out.match(/[a-f0-9]{7,40}/)?.[0] ?? null;
return {
invoked: true,
success: true,
branch,
commitHash,
pushed: out.toLowerCase().includes("push") ? true : null,
log: out.slice(0, 20_000),
error: null,
};
}
/**
 * workflow-generator: plans, implements, dry-run-tests, and commits a new
 * Nerve workflow package from a free-form user request.
 * Role order (driven by the moderator): planner -> coder -> tester ->
 * committer, with bounded retries (3 attempts per stage) before ending.
 */
const workflow: WorkflowDefinition<WorkflowMeta> = {
name: "workflow-generator",
roles: {
// planner: turns the user prompt into a structured workflow plan via
// llmExtract against the DashScope provider.
async planner(
start: StartStep,
_messages: WorkflowMessage[],
): Promise<RoleResult<WorkflowMeta["planner"]>> {
const dry = isDryRun(start);
const provider = await resolveDashScopeProvider();
const userPrompt = start.content;
// Without provider credentials, emit an empty meta (moderator will retry/end).
if (provider === null) {
return {
content: "Cannot run planner: missing DASHSCOPE_API_KEY or DASHSCOPE_BASE_URL.",
meta: {
userPrompt,
workflowName: "",
roles: [],
flowTransitions: "",
validationLoopsDesign: "",
externalDeps: "",
dataFlow: "",
planMarkdown: "",
},
};
}
const planningText = `Design a Nerve workflow plan from this request.
${nerveAgentContext}
User request:
${userPrompt}
Target root: ${NERVE_ROOT}
Workflow dir root: ${WORKFLOWS_DIR}
Reference structure:
\`\`\`ts
${buildSenseGeneratorReference().slice(0, 18_000)}
\`\`\`
Current nerve.yaml:
\`\`\`yaml
${getNerveYaml()}
\`\`\`
Produce a complete markdown plan that includes:
- workflow name
- roles list
- flow/transitions
- validation loops design
- external deps
- data flow`;
const extracted = await llmExtract({
text: planningText,
schema: plannerExtractSchema,
provider,
dryRun: dry,
});
if (!extracted.ok) {
return {
content: `[planner] llmExtract failed: ${JSON.stringify(extracted.error)}`,
meta: {
userPrompt,
workflowName: "",
roles: [],
flowTransitions: "",
validationLoopsDesign: "",
externalDeps: "",
dataFlow: "",
planMarkdown: "",
},
};
}
const value = extracted.value;
// If the LLM did not provide a full markdown plan, synthesize one from fields.
const planMarkdown =
value.planMarkdown.length > 0
? value.planMarkdown
: [
`# Workflow Plan`,
`- workflowName: ${value.workflowName}`,
``,
`## Roles`,
...value.roles.map((r) => `- ${r.name}: ${r.goal} (${r.io})`),
``,
`## Flow Transitions`,
value.flowTransitions,
``,
`## Validation Loops`,
value.validationLoopsDesign,
``,
`## External Dependencies`,
value.externalDeps,
``,
`## Data Flow`,
value.dataFlow,
].join("\n");
return {
content: planMarkdown,
meta: {
userPrompt,
workflowName: value.workflowName,
roles: value.roles,
flowTransitions: value.flowTransitions,
validationLoopsDesign: value.validationLoopsDesign,
externalDeps: value.externalDeps,
dataFlow: value.dataFlow,
planMarkdown,
},
};
},
// coder: asks the cursor agent to implement the planned package, then
// verifies required files exist, screens static pitfalls, and runs lint+build.
async coder(start: StartStep, messages: WorkflowMessage[]): Promise<RoleResult<WorkflowMeta["coder"]>> {
const dry = isDryRun(start);
const plannerMeta = lastMetaForRole<WorkflowMeta["planner"]>(messages, "planner");
const previousTester = lastMetaForRole<WorkflowMeta["tester"]>(messages, "tester");
// attempt counts prior coder messages, so retries are numbered 1, 2, 3.
const attempt = messages.filter((m) => m.role === "coder").length + 1;
if (plannerMeta === null || plannerMeta.workflowName.trim().length === 0) {
return {
content: "coder cannot continue: missing planner output",
meta: {
workflowName: "",
attempt,
files: { indexTs: false, packageJson: false, tsconfigJson: false },
lintPassed: false,
buildPassed: false,
lintLog: "",
buildLog: "",
cursorOutput: "",
reason: "missing planner output",
},
};
}
const wfName = plannerMeta.workflowName.trim();
// On retry, feed the previous tester failure back into the coding prompt.
const feedback =
previousTester !== null && previousTester.passed === false
? `\n\nPrevious tester failure to fix:\n${previousTester.reason}\n${previousTester.dryRunLog}\n`
: "";
const codingPrompt = `Implement a Nerve workflow package under ${WORKFLOWS_DIR}/${wfName}/.
Planner output:
${plannerMeta.planMarkdown}
Structured planner fields:
${JSON.stringify(
{
workflowName: plannerMeta.workflowName,
roles: plannerMeta.roles,
flowTransitions: plannerMeta.flowTransitions,
validationLoopsDesign: plannerMeta.validationLoopsDesign,
externalDeps: plannerMeta.externalDeps,
dataFlow: plannerMeta.dataFlow,
},
null,
2,
)}
${feedback}
Required files:
1) ${WORKFLOWS_DIR}/${wfName}/index.ts
2) ${WORKFLOWS_DIR}/${wfName}/package.json
3) ${WORKFLOWS_DIR}/${wfName}/tsconfig.json
4) update ${NERVE_ROOT}/nerve.yaml with workflows.${wfName}
Rules:
- keep WorkflowDefinition<WorkflowMeta> pattern
- no dynamic import()
- use types (not interfaces)
- include retry-aware moderator routing
- write compile-ready TypeScript`;
const agentRun = await cursorAgent({
prompt: codingPrompt,
mode: "default",
cwd: NERVE_ROOT,
env: null,
timeoutMs: null,
dryRun: dry,
});
// Verify on disk what the agent claims to have written.
const workflowDir = join(WORKFLOWS_DIR, wfName);
const files = {
indexTs: existsSync(join(workflowDir, "index.ts")),
packageJson: existsSync(join(workflowDir, "package.json")),
tsconfigJson: existsSync(join(workflowDir, "tsconfig.json")),
};
const missing = [
files.indexTs ? null : "index.ts",
files.packageJson ? null : "package.json",
files.tsconfigJson ? null : "tsconfig.json",
].filter((x) => x !== null) as string[];
if (!agentRun.ok) {
return {
content: `coder failed: ${formatSpawnFailure(agentRun.error)}`,
meta: {
workflowName: wfName,
attempt,
files,
lintPassed: false,
buildPassed: false,
lintLog: "",
buildLog: "",
cursorOutput: "",
reason: formatSpawnFailure(agentRun.error),
},
};
}
if (missing.length > 0) {
return {
content: `coder failed: missing required files (${missing.join(", ")})`,
meta: {
workflowName: wfName,
attempt,
files,
lintPassed: false,
buildPassed: false,
lintLog: "",
buildLog: "",
cursorOutput: agentRun.value,
reason: `missing files: ${missing.join(", ")}`,
},
};
}
// Static pitfall screen before spending time on lint/build.
const source = readFileSync(join(workflowDir, "index.ts"), "utf-8");
const pitfalls = scanGeneratedCodePitfalls(source);
if (pitfalls.length > 0) {
return {
content: `coder static check failed:\n${pitfalls.join("\n")}`,
meta: {
workflowName: wfName,
attempt,
files,
lintPassed: false,
buildPassed: false,
lintLog: pitfalls.join("\n"),
buildLog: "",
cursorOutput: agentRun.value,
reason: pitfalls.join("; "),
},
};
}
const check = await runLintAndBuild(workflowDir, dry);
const passed = check.lintPassed && check.buildPassed;
return {
content: passed
? `coder PASS: lint+build ok\n\n${check.lintLog}\n\n${check.buildLog}`
: `coder FAIL: ${check.reason ?? "unknown error"}`,
meta: {
workflowName: wfName,
attempt,
files,
lintPassed: check.lintPassed,
buildPassed: check.buildPassed,
lintLog: check.lintLog,
buildLog: check.buildLog,
cursorOutput: agentRun.value,
reason: check.reason,
},
};
},
// tester: agent-driven dry-run review of the generated workflow's coherence.
async tester(start: StartStep, messages: WorkflowMessage[]): Promise<RoleResult<WorkflowMeta["tester"]>> {
const dry = isDryRun(start);
const plannerMeta = lastMetaForRole<WorkflowMeta["planner"]>(messages, "planner");
const coderMeta = lastMetaForRole<WorkflowMeta["coder"]>(messages, "coder");
const attempt = messages.filter((m) => m.role === "tester").length + 1;
if (plannerMeta === null || coderMeta === null) {
return {
content: "tester cannot continue: missing planner/coder output",
meta: {
workflowName: "",
attempt,
passed: false,
dryRunLog: "",
reason: "missing planner/coder output",
},
};
}
// Guard: never review code that has not passed lint+build.
if (!coderMeta.lintPassed || !coderMeta.buildPassed) {
return {
content: "tester blocked: coder has not passed lint+build",
meta: {
workflowName: coderMeta.workflowName,
attempt,
passed: false,
dryRunLog: `${coderMeta.lintLog}\n\n${coderMeta.buildLog}`,
reason: "coder did not pass lint+build",
},
};
}
const dryRun = await runTesterDryRun(coderMeta.workflowName, plannerMeta, coderMeta, dry);
return {
content: `${dryRun.passed ? "PASS" : "FAIL"}${dryRun.reason}`,
meta: {
workflowName: coderMeta.workflowName,
attempt,
passed: dryRun.passed,
dryRunLog: dryRun.log,
reason: dryRun.reason,
},
};
},
// committer: hands off branch/commit/push to the hermes agent (with cursor
// fallback), only after the tester passed.
async committer(
start: StartStep,
messages: WorkflowMessage[],
): Promise<RoleResult<WorkflowMeta["committer"]>> {
const dry = isDryRun(start);
const planner = lastMetaForRole<WorkflowMeta["planner"]>(messages, "planner");
const tester = lastMetaForRole<WorkflowMeta["tester"]>(messages, "tester");
const workflowName = inferWorkflowName(messages);
if (planner === null || tester === null || workflowName.length === 0) {
return {
content: "committer skipped: missing planner/tester/workflowName context",
meta: {
invoked: false,
success: false,
branch: null,
commitHash: null,
pushed: null,
log: "",
error: "missing committer context",
},
};
}
if (!tester.passed) {
return {
content: "committer skipped: tester not passed",
meta: {
invoked: false,
success: false,
branch: null,
commitHash: null,
pushed: null,
log: "",
error: "tester not passed",
},
};
}
const committed = await runHermesCommitter(
workflowName,
planner.userPrompt,
tester.reason,
dry,
);
return {
content: committed.success ? committed.log : `committer failed: ${committed.error ?? "unknown"}`,
meta: committed,
};
},
},
// moderator: routes based on the last message's role/meta. Each stage gets at
// most 3 attempts; tester failures route back to coder for a fix.
moderator(context) {
if (context.steps.length === 0) {
return "planner";
}
const last = context.steps[context.steps.length - 1];
if (last.role === "planner") {
if (last.meta.workflowName.trim().length > 0) return "coder";
const plannerAttempts = context.steps.filter((s) => s.role === "planner").length;
return plannerAttempts < 3 ? "planner" : END;
}
if (last.role === "coder") {
if (last.meta.lintPassed && last.meta.buildPassed) {
return "tester";
}
if (last.meta.attempt < 3) {
return "coder";
}
return END;
}
if (last.role === "tester") {
if (last.meta.passed) {
return "committer";
}
if (last.meta.attempt < 3) {
return "coder";
}
return END;
}
return END;
},
};
export default workflow;

View File

@@ -1,22 +0,0 @@
{
"name": "workflow-generator-workflow",
"version": "0.0.1",
"private": true,
"type": "module",
"dependencies": {
"@uncaged/nerve-core": "latest",
"@uncaged/nerve-workflow-utils": "latest",
"zod": "^4.3.6"
},
"devDependencies": {
"@types/node": "^22.0.0",
"typescript": "^5.7.0"
},
"pnpm": {
"overrides": {
"@uncaged/nerve-daemon": "link:../../../repos/nerve/packages/daemon",
"@uncaged/nerve-core": "link:../../../repos/nerve/packages/core",
"@uncaged/nerve-workflow-utils": "link:../../../repos/nerve/packages/workflow-utils"
}
}
}

View File

@@ -1,59 +0,0 @@
lockfileVersion: '9.0'
settings:
autoInstallPeers: true
excludeLinksFromLockfile: false
overrides:
'@uncaged/nerve-daemon': link:../../../repos/nerve/packages/daemon
'@uncaged/nerve-core': link:../../../repos/nerve/packages/core
'@uncaged/nerve-workflow-utils': link:../../../repos/nerve/packages/workflow-utils
importers:
.:
dependencies:
'@uncaged/nerve-core':
specifier: link:../../../repos/nerve/packages/core
version: link:../../../repos/nerve/packages/core
'@uncaged/nerve-workflow-utils':
specifier: link:../../../repos/nerve/packages/workflow-utils
version: link:../../../repos/nerve/packages/workflow-utils
zod:
specifier: ^4.3.6
version: 4.3.6
devDependencies:
'@types/node':
specifier: ^22.0.0
version: 22.19.17
typescript:
specifier: ^5.7.0
version: 5.9.3
packages:
'@types/node@22.19.17':
resolution: {integrity: sha512-wGdMcf+vPYM6jikpS/qhg6WiqSV/OhG+jeeHT/KlVqxYfD40iYJf9/AE1uQxVWFvU7MipKRkRv8NSHiCGgPr8Q==}
typescript@5.9.3:
resolution: {integrity: sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw==}
engines: {node: '>=14.17'}
hasBin: true
undici-types@6.21.0:
resolution: {integrity: sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ==}
zod@4.3.6:
resolution: {integrity: sha512-rftlrkhHZOcjDwkGlnUtZZkvaPHCsDATp4pGpuOOMDaTdDDXF91wuVDJoWoPsKX/3YPQ5fHuF3STjcYyKr+Qhg==}
snapshots:
'@types/node@22.19.17':
dependencies:
undici-types: 6.21.0
typescript@5.9.3: {}
undici-types@6.21.0: {}
zod@4.3.6: {}