Compare commits
No commits in common. "master" and "feat/use-role-reviewer-package" have entirely different histories.
master
...
feat/use-r
1
.gitignore
vendored
1
.gitignore
vendored
@ -5,4 +5,3 @@ nerve.pid
|
|||||||
nerve.sock
|
nerve.sock
|
||||||
false/
|
false/
|
||||||
*.db
|
*.db
|
||||||
dist/
|
|
||||||
|
|||||||
23
nerve.yaml
23
nerve.yaml
@ -5,11 +5,31 @@ extract:
|
|||||||
model: qwen-plus
|
model: qwen-plus
|
||||||
|
|
||||||
senses:
|
senses:
|
||||||
|
linux-system-health:
|
||||||
|
group: system
|
||||||
|
interval: 30s
|
||||||
|
throttle: 10s
|
||||||
|
timeout: 15s
|
||||||
hermes-gateway-health:
|
hermes-gateway-health:
|
||||||
group: system
|
group: system
|
||||||
interval: 2m
|
interval: 2m
|
||||||
throttle: 30s
|
throttle: 30s
|
||||||
timeout: 30s
|
timeout: 30s
|
||||||
|
hermes-session-message-stats:
|
||||||
|
group: hermes
|
||||||
|
interval: 15m
|
||||||
|
throttle: 30s
|
||||||
|
timeout: 60s
|
||||||
|
worker-process-metrics:
|
||||||
|
group: system
|
||||||
|
interval: 1m
|
||||||
|
throttle: 15s
|
||||||
|
timeout: 5s
|
||||||
|
git-workspace-status:
|
||||||
|
group: workspace
|
||||||
|
interval: 2m
|
||||||
|
throttle: 30s
|
||||||
|
timeout: 15s
|
||||||
|
|
||||||
workflows:
|
workflows:
|
||||||
develop-sense:
|
develop-sense:
|
||||||
@ -21,6 +41,3 @@ workflows:
|
|||||||
solve-issue:
|
solve-issue:
|
||||||
concurrency: 1
|
concurrency: 1
|
||||||
overflow: queue
|
overflow: queue
|
||||||
extract-knowledge:
|
|
||||||
concurrency: 1
|
|
||||||
overflow: queue
|
|
||||||
|
|||||||
11
package.json
11
package.json
@ -4,7 +4,7 @@
|
|||||||
"private": true,
|
"private": true,
|
||||||
"type": "module",
|
"type": "module",
|
||||||
"scripts": {
|
"scripts": {
|
||||||
"build": "node scripts/build.mjs"
|
"build": "pnpm -r build"
|
||||||
},
|
},
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"@uncaged/nerve-adapter-cursor": "link:../repos/nerve/packages/adapter-cursor",
|
"@uncaged/nerve-adapter-cursor": "link:../repos/nerve/packages/adapter-cursor",
|
||||||
@ -13,16 +13,12 @@
|
|||||||
"@uncaged/nerve-daemon": "link:../repos/nerve/packages/daemon",
|
"@uncaged/nerve-daemon": "link:../repos/nerve/packages/daemon",
|
||||||
"@uncaged/nerve-role-committer": "link:../repos/nerve/packages/role-committer",
|
"@uncaged/nerve-role-committer": "link:../repos/nerve/packages/role-committer",
|
||||||
"@uncaged/nerve-role-reviewer": "link:../repos/nerve/packages/role-reviewer",
|
"@uncaged/nerve-role-reviewer": "link:../repos/nerve/packages/role-reviewer",
|
||||||
"@uncaged/nerve-workflow-meta": "link:../repos/nerve/packages/workflow-meta",
|
|
||||||
"@uncaged/nerve-workflow-utils": "link:../repos/nerve/packages/workflow-utils",
|
"@uncaged/nerve-workflow-utils": "link:../repos/nerve/packages/workflow-utils",
|
||||||
"drizzle-orm": "latest",
|
"drizzle-orm": "latest",
|
||||||
"zod": "^4.3.6"
|
"zod": "^4.3.6"
|
||||||
},
|
},
|
||||||
"devDependencies": {
|
"devDependencies": {
|
||||||
"@types/node": "^22.0.0",
|
"drizzle-kit": "latest"
|
||||||
"drizzle-kit": "latest",
|
|
||||||
"esbuild": "^0.27.0",
|
|
||||||
"typescript": "^5.7.0"
|
|
||||||
},
|
},
|
||||||
"pnpm": {
|
"pnpm": {
|
||||||
"onlyBuiltDependencies": [
|
"onlyBuiltDependencies": [
|
||||||
@ -34,8 +30,7 @@
|
|||||||
"@uncaged/nerve-daemon": "link:../repos/nerve/packages/daemon",
|
"@uncaged/nerve-daemon": "link:../repos/nerve/packages/daemon",
|
||||||
"@uncaged/nerve-core": "link:../repos/nerve/packages/core",
|
"@uncaged/nerve-core": "link:../repos/nerve/packages/core",
|
||||||
"@uncaged/nerve-workflow-utils": "link:../repos/nerve/packages/workflow-utils",
|
"@uncaged/nerve-workflow-utils": "link:../repos/nerve/packages/workflow-utils",
|
||||||
"@uncaged/nerve-role-committer": "link:../repos/nerve/packages/role-committer",
|
"@uncaged/nerve-role-committer": "link:../repos/nerve/packages/role-committer"
|
||||||
"@uncaged/nerve-workflow-meta": "link:../repos/nerve/packages/workflow-meta"
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
47
pnpm-lock.yaml
generated
47
pnpm-lock.yaml
generated
@ -11,7 +11,6 @@ overrides:
|
|||||||
'@uncaged/nerve-core': link:../repos/nerve/packages/core
|
'@uncaged/nerve-core': link:../repos/nerve/packages/core
|
||||||
'@uncaged/nerve-workflow-utils': link:../repos/nerve/packages/workflow-utils
|
'@uncaged/nerve-workflow-utils': link:../repos/nerve/packages/workflow-utils
|
||||||
'@uncaged/nerve-role-committer': link:../repos/nerve/packages/role-committer
|
'@uncaged/nerve-role-committer': link:../repos/nerve/packages/role-committer
|
||||||
'@uncaged/nerve-workflow-meta': link:../repos/nerve/packages/workflow-meta
|
|
||||||
|
|
||||||
importers:
|
importers:
|
||||||
|
|
||||||
@ -35,9 +34,6 @@ importers:
|
|||||||
'@uncaged/nerve-role-reviewer':
|
'@uncaged/nerve-role-reviewer':
|
||||||
specifier: link:../repos/nerve/packages/role-reviewer
|
specifier: link:../repos/nerve/packages/role-reviewer
|
||||||
version: link:../repos/nerve/packages/role-reviewer
|
version: link:../repos/nerve/packages/role-reviewer
|
||||||
'@uncaged/nerve-workflow-meta':
|
|
||||||
specifier: link:../repos/nerve/packages/workflow-meta
|
|
||||||
version: link:../repos/nerve/packages/workflow-meta
|
|
||||||
'@uncaged/nerve-workflow-utils':
|
'@uncaged/nerve-workflow-utils':
|
||||||
specifier: link:../repos/nerve/packages/workflow-utils
|
specifier: link:../repos/nerve/packages/workflow-utils
|
||||||
version: link:../repos/nerve/packages/workflow-utils
|
version: link:../repos/nerve/packages/workflow-utils
|
||||||
@ -48,18 +44,9 @@ importers:
|
|||||||
specifier: ^4.3.6
|
specifier: ^4.3.6
|
||||||
version: 4.3.6
|
version: 4.3.6
|
||||||
devDependencies:
|
devDependencies:
|
||||||
'@types/node':
|
|
||||||
specifier: ^22.0.0
|
|
||||||
version: 22.19.17
|
|
||||||
drizzle-kit:
|
drizzle-kit:
|
||||||
specifier: latest
|
specifier: latest
|
||||||
version: 0.31.10
|
version: 0.31.10
|
||||||
esbuild:
|
|
||||||
specifier: ^0.27.0
|
|
||||||
version: 0.27.7
|
|
||||||
typescript:
|
|
||||||
specifier: ^5.7.0
|
|
||||||
version: 5.9.3
|
|
||||||
|
|
||||||
senses/git-workspace-status:
|
senses/git-workspace-status:
|
||||||
devDependencies:
|
devDependencies:
|
||||||
@ -132,9 +119,6 @@ importers:
|
|||||||
'@uncaged/nerve-core':
|
'@uncaged/nerve-core':
|
||||||
specifier: link:../../../repos/nerve/packages/core
|
specifier: link:../../../repos/nerve/packages/core
|
||||||
version: link:../../../repos/nerve/packages/core
|
version: link:../../../repos/nerve/packages/core
|
||||||
'@uncaged/nerve-workflow-meta':
|
|
||||||
specifier: link:../../../repos/nerve/packages/workflow-meta
|
|
||||||
version: link:../../../repos/nerve/packages/workflow-meta
|
|
||||||
'@uncaged/nerve-workflow-utils':
|
'@uncaged/nerve-workflow-utils':
|
||||||
specifier: link:../../../repos/nerve/packages/workflow-utils
|
specifier: link:../../../repos/nerve/packages/workflow-utils
|
||||||
version: link:../../../repos/nerve/packages/workflow-utils
|
version: link:../../../repos/nerve/packages/workflow-utils
|
||||||
@ -153,37 +137,6 @@ importers:
|
|||||||
version: 5.9.3
|
version: 5.9.3
|
||||||
|
|
||||||
workflows/develop-workflow:
|
workflows/develop-workflow:
|
||||||
dependencies:
|
|
||||||
'@uncaged/nerve-adapter-cursor':
|
|
||||||
specifier: link:../../../repos/nerve/packages/adapter-cursor
|
|
||||||
version: link:../../../repos/nerve/packages/adapter-cursor
|
|
||||||
'@uncaged/nerve-adapter-hermes':
|
|
||||||
specifier: link:../../../repos/nerve/packages/adapter-hermes
|
|
||||||
version: link:../../../repos/nerve/packages/adapter-hermes
|
|
||||||
'@uncaged/nerve-core':
|
|
||||||
specifier: link:../../../repos/nerve/packages/core
|
|
||||||
version: link:../../../repos/nerve/packages/core
|
|
||||||
'@uncaged/nerve-workflow-meta':
|
|
||||||
specifier: link:../../../repos/nerve/packages/workflow-meta
|
|
||||||
version: link:../../../repos/nerve/packages/workflow-meta
|
|
||||||
'@uncaged/nerve-workflow-utils':
|
|
||||||
specifier: link:../../../repos/nerve/packages/workflow-utils
|
|
||||||
version: link:../../../repos/nerve/packages/workflow-utils
|
|
||||||
zod:
|
|
||||||
specifier: ^4.3.6
|
|
||||||
version: 4.3.6
|
|
||||||
devDependencies:
|
|
||||||
'@types/node':
|
|
||||||
specifier: ^22.0.0
|
|
||||||
version: 22.19.17
|
|
||||||
esbuild:
|
|
||||||
specifier: ^0.27.0
|
|
||||||
version: 0.27.7
|
|
||||||
typescript:
|
|
||||||
specifier: ^5.7.0
|
|
||||||
version: 5.9.3
|
|
||||||
|
|
||||||
workflows/extract-knowledge:
|
|
||||||
dependencies:
|
dependencies:
|
||||||
'@uncaged/nerve-adapter-cursor':
|
'@uncaged/nerve-adapter-cursor':
|
||||||
specifier: link:../../../repos/nerve/packages/adapter-cursor
|
specifier: link:../../../repos/nerve/packages/adapter-cursor
|
||||||
|
|||||||
3
pnpm-workspace.yaml
Normal file
3
pnpm-workspace.yaml
Normal file
@ -0,0 +1,3 @@
|
|||||||
|
packages:
|
||||||
|
- "workflows/*"
|
||||||
|
- "senses/*"
|
||||||
@ -1,46 +0,0 @@
|
|||||||
import * as esbuild from "esbuild";
|
|
||||||
import fs from "node:fs";
|
|
||||||
import path from "node:path";
|
|
||||||
import { fileURLToPath } from "node:url";
|
|
||||||
|
|
||||||
const root = path.join(path.dirname(fileURLToPath(import.meta.url)), "..");
|
|
||||||
const dist = path.join(root, "dist");
|
|
||||||
|
|
||||||
const opts = {
|
|
||||||
bundle: true,
|
|
||||||
platform: "node",
|
|
||||||
format: "esm",
|
|
||||||
packages: "external",
|
|
||||||
};
|
|
||||||
|
|
||||||
function listDirs(dir) {
|
|
||||||
if (!fs.existsSync(dir)) return [];
|
|
||||||
return fs
|
|
||||||
.readdirSync(dir)
|
|
||||||
.filter((name) => !name.startsWith(".") && !name.startsWith("_"))
|
|
||||||
.map((name) => ({ name, full: path.join(dir, name) }))
|
|
||||||
.filter(({ full }) => fs.statSync(full).isDirectory());
|
|
||||||
}
|
|
||||||
|
|
||||||
async function main() {
|
|
||||||
// Clean dist/
|
|
||||||
fs.rmSync(dist, { recursive: true, force: true });
|
|
||||||
|
|
||||||
for (const { name, full } of listDirs(path.join(root, "senses"))) {
|
|
||||||
const entry = path.join(full, "src", "index.ts");
|
|
||||||
if (!fs.existsSync(entry)) continue;
|
|
||||||
const outfile = path.join(dist, "senses", name, "index.js");
|
|
||||||
fs.mkdirSync(path.dirname(outfile), { recursive: true });
|
|
||||||
await esbuild.build({ ...opts, entryPoints: [entry], outfile });
|
|
||||||
}
|
|
||||||
|
|
||||||
for (const { name, full } of listDirs(path.join(root, "workflows"))) {
|
|
||||||
const entry = path.join(full, "index.ts");
|
|
||||||
if (!fs.existsSync(entry)) continue;
|
|
||||||
const outfile = path.join(dist, "workflows", name, "index.js");
|
|
||||||
fs.mkdirSync(path.dirname(outfile), { recursive: true });
|
|
||||||
await esbuild.build({ ...opts, entryPoints: [entry], outfile });
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
await main();
|
|
||||||
122
senses/git-workspace-status/index.js
Normal file
122
senses/git-workspace-status/index.js
Normal file
@ -0,0 +1,122 @@
|
|||||||
|
// src/index.ts
|
||||||
|
import { execFileSync } from "node:child_process";
|
||||||
|
import { resolve } from "node:path";
|
||||||
|
|
||||||
|
// src/schema.ts
|
||||||
|
import { integer, sqliteTable, text } from "drizzle-orm/sqlite-core";
|
||||||
|
var snapshots = sqliteTable("snapshots", {
|
||||||
|
ts: integer("ts").primaryKey(),
|
||||||
|
branch: text("branch").notNull(),
|
||||||
|
headShort: text("head_short").notNull(),
|
||||||
|
porcelainLines: integer("porcelain_lines").notNull(),
|
||||||
|
hasUpstream: integer("has_upstream").notNull(),
|
||||||
|
aheadCount: integer("ahead_count").notNull(),
|
||||||
|
behindCount: integer("behind_count").notNull(),
|
||||||
|
/** Empty string when the snapshot succeeded; otherwise a short error summary. */
|
||||||
|
gitError: text("git_error").notNull()
|
||||||
|
});
|
||||||
|
|
||||||
|
// src/index.ts
|
||||||
|
var GIT_TIMEOUT_MS = 15e3;
|
||||||
|
function workspaceRoot() {
|
||||||
|
const raw = process.env.GIT_WORKSPACE_ROOT;
|
||||||
|
return raw ? resolve(raw) : resolve(process.cwd());
|
||||||
|
}
|
||||||
|
function gitErrorMessage(err) {
|
||||||
|
if (err instanceof Error) {
|
||||||
|
const m = err.message.trim();
|
||||||
|
return m.length > 200 ? `${m.slice(0, 197)}...` : m;
|
||||||
|
}
|
||||||
|
return String(err);
|
||||||
|
}
|
||||||
|
function runGit(cwd, args) {
|
||||||
|
return execFileSync("git", args, {
|
||||||
|
cwd,
|
||||||
|
encoding: "utf8",
|
||||||
|
timeout: GIT_TIMEOUT_MS,
|
||||||
|
maxBuffer: 2 * 1024 * 1024
|
||||||
|
}).trimEnd();
|
||||||
|
}
|
||||||
|
function countPorcelainLines(output) {
|
||||||
|
if (!output) return 0;
|
||||||
|
return output.split("\n").filter((line) => line.length > 0).length;
|
||||||
|
}
|
||||||
|
async function compute(db, _peers) {
|
||||||
|
const root = workspaceRoot();
|
||||||
|
const ts = Date.now();
|
||||||
|
let branch = "";
|
||||||
|
let headShort = "";
|
||||||
|
let porcelainLines = 0;
|
||||||
|
let hasUpstream = 0;
|
||||||
|
let aheadCount = 0;
|
||||||
|
let behindCount = 0;
|
||||||
|
let gitError = "";
|
||||||
|
try {
|
||||||
|
const inside = runGit(root, ["rev-parse", "--is-inside-work-tree"]).trim();
|
||||||
|
if (inside !== "true") {
|
||||||
|
gitError = "not a git work tree";
|
||||||
|
await db.insert(snapshots).values({
|
||||||
|
ts,
|
||||||
|
branch,
|
||||||
|
headShort,
|
||||||
|
porcelainLines,
|
||||||
|
hasUpstream,
|
||||||
|
aheadCount,
|
||||||
|
behindCount,
|
||||||
|
gitError
|
||||||
|
});
|
||||||
|
return {
|
||||||
|
workspaceRoot: root,
|
||||||
|
branch,
|
||||||
|
headShort,
|
||||||
|
porcelainLines,
|
||||||
|
hasUpstream: false,
|
||||||
|
aheadCount,
|
||||||
|
behindCount,
|
||||||
|
gitError
|
||||||
|
};
|
||||||
|
}
|
||||||
|
branch = runGit(root, ["rev-parse", "--abbrev-ref", "HEAD"]);
|
||||||
|
headShort = runGit(root, ["rev-parse", "--short", "HEAD"]);
|
||||||
|
porcelainLines = countPorcelainLines(runGit(root, ["status", "--porcelain"]));
|
||||||
|
try {
|
||||||
|
runGit(root, ["rev-parse", "--abbrev-ref", "@{upstream}"]);
|
||||||
|
hasUpstream = 1;
|
||||||
|
const lb = runGit(root, ["rev-list", "--left-right", "--count", "HEAD...@{upstream}"]);
|
||||||
|
const parts = lb.split(/[\t\s]+/).filter(Boolean);
|
||||||
|
if (parts.length >= 2) {
|
||||||
|
aheadCount = Number.parseInt(parts[0], 10) || 0;
|
||||||
|
behindCount = Number.parseInt(parts[1], 10) || 0;
|
||||||
|
}
|
||||||
|
} catch {
|
||||||
|
hasUpstream = 0;
|
||||||
|
aheadCount = 0;
|
||||||
|
behindCount = 0;
|
||||||
|
}
|
||||||
|
} catch (e) {
|
||||||
|
gitError = gitErrorMessage(e);
|
||||||
|
}
|
||||||
|
await db.insert(snapshots).values({
|
||||||
|
ts,
|
||||||
|
branch,
|
||||||
|
headShort,
|
||||||
|
porcelainLines,
|
||||||
|
hasUpstream,
|
||||||
|
aheadCount,
|
||||||
|
behindCount,
|
||||||
|
gitError
|
||||||
|
});
|
||||||
|
return {
|
||||||
|
workspaceRoot: root,
|
||||||
|
branch,
|
||||||
|
headShort,
|
||||||
|
porcelainLines,
|
||||||
|
hasUpstream: hasUpstream === 1,
|
||||||
|
aheadCount,
|
||||||
|
behindCount,
|
||||||
|
gitError: gitError || void 0
|
||||||
|
};
|
||||||
|
}
|
||||||
|
export {
|
||||||
|
compute
|
||||||
|
};
|
||||||
13
senses/git-workspace-status/migrations/0001_init.sql
Normal file
13
senses/git-workspace-status/migrations/0001_init.sql
Normal file
@ -0,0 +1,13 @@
|
|||||||
|
-- Migration: 0001_init
|
||||||
|
-- Creates the snapshots table for git-workspace-status sense.
|
||||||
|
|
||||||
|
CREATE TABLE IF NOT EXISTS snapshots (
|
||||||
|
ts INTEGER PRIMARY KEY,
|
||||||
|
branch TEXT NOT NULL,
|
||||||
|
head_short TEXT NOT NULL,
|
||||||
|
porcelain_lines INTEGER NOT NULL,
|
||||||
|
has_upstream INTEGER NOT NULL,
|
||||||
|
ahead_count INTEGER NOT NULL,
|
||||||
|
behind_count INTEGER NOT NULL,
|
||||||
|
git_error TEXT NOT NULL
|
||||||
|
);
|
||||||
14
senses/git-workspace-status/package.json
Normal file
14
senses/git-workspace-status/package.json
Normal file
@ -0,0 +1,14 @@
|
|||||||
|
{
|
||||||
|
"name": "sense-git-workspace-status",
|
||||||
|
"version": "0.0.1",
|
||||||
|
"private": true,
|
||||||
|
"type": "module",
|
||||||
|
"scripts": {
|
||||||
|
"build": "esbuild src/index.ts --bundle --platform=node --format=esm --outfile=index.js --packages=external"
|
||||||
|
},
|
||||||
|
"devDependencies": {
|
||||||
|
"@types/node": "^22.0.0",
|
||||||
|
"esbuild": "^0.27.0",
|
||||||
|
"typescript": "^5.7.0"
|
||||||
|
}
|
||||||
|
}
|
||||||
116
senses/git-workspace-status/src/index.ts
Normal file
116
senses/git-workspace-status/src/index.ts
Normal file
@ -0,0 +1,116 @@
|
|||||||
|
import { execFileSync } from "node:child_process";
|
||||||
|
import { resolve } from "node:path";
|
||||||
|
import type { LibSQLDatabase } from "drizzle-orm/libsql";
|
||||||
|
import { snapshots } from "./schema.ts";
|
||||||
|
|
||||||
|
const GIT_TIMEOUT_MS = 15_000;
|
||||||
|
|
||||||
|
function workspaceRoot(): string {
|
||||||
|
const raw = process.env.GIT_WORKSPACE_ROOT;
|
||||||
|
return raw ? resolve(raw) : resolve(process.cwd());
|
||||||
|
}
|
||||||
|
|
||||||
|
function gitErrorMessage(err: unknown): string {
|
||||||
|
if (err instanceof Error) {
|
||||||
|
const m = err.message.trim();
|
||||||
|
return m.length > 200 ? `${m.slice(0, 197)}...` : m;
|
||||||
|
}
|
||||||
|
return String(err);
|
||||||
|
}
|
||||||
|
|
||||||
|
function runGit(cwd: string, args: string[]): string {
|
||||||
|
return execFileSync("git", args, {
|
||||||
|
cwd,
|
||||||
|
encoding: "utf8",
|
||||||
|
timeout: GIT_TIMEOUT_MS,
|
||||||
|
maxBuffer: 2 * 1024 * 1024,
|
||||||
|
}).trimEnd();
|
||||||
|
}
|
||||||
|
|
||||||
|
function countPorcelainLines(output: string): number {
|
||||||
|
if (!output) return 0;
|
||||||
|
return output.split("\n").filter((line) => line.length > 0).length;
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function compute(db: LibSQLDatabase, _peers: unknown) {
|
||||||
|
const root = workspaceRoot();
|
||||||
|
const ts = Date.now();
|
||||||
|
|
||||||
|
let branch = "";
|
||||||
|
let headShort = "";
|
||||||
|
let porcelainLines = 0;
|
||||||
|
let hasUpstream = 0;
|
||||||
|
let aheadCount = 0;
|
||||||
|
let behindCount = 0;
|
||||||
|
let gitError = "";
|
||||||
|
|
||||||
|
try {
|
||||||
|
const inside = runGit(root, ["rev-parse", "--is-inside-work-tree"]).trim();
|
||||||
|
if (inside !== "true") {
|
||||||
|
gitError = "not a git work tree";
|
||||||
|
await db.insert(snapshots).values({
|
||||||
|
ts,
|
||||||
|
branch,
|
||||||
|
headShort,
|
||||||
|
porcelainLines,
|
||||||
|
hasUpstream,
|
||||||
|
aheadCount,
|
||||||
|
behindCount,
|
||||||
|
gitError,
|
||||||
|
});
|
||||||
|
return {
|
||||||
|
workspaceRoot: root,
|
||||||
|
branch,
|
||||||
|
headShort,
|
||||||
|
porcelainLines,
|
||||||
|
hasUpstream: false,
|
||||||
|
aheadCount,
|
||||||
|
behindCount,
|
||||||
|
gitError,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
branch = runGit(root, ["rev-parse", "--abbrev-ref", "HEAD"]);
|
||||||
|
headShort = runGit(root, ["rev-parse", "--short", "HEAD"]);
|
||||||
|
porcelainLines = countPorcelainLines(runGit(root, ["status", "--porcelain"]));
|
||||||
|
|
||||||
|
try {
|
||||||
|
runGit(root, ["rev-parse", "--abbrev-ref", "@{upstream}"]);
|
||||||
|
hasUpstream = 1;
|
||||||
|
const lb = runGit(root, ["rev-list", "--left-right", "--count", "HEAD...@{upstream}"]);
|
||||||
|
const parts = lb.split(/[\t\s]+/).filter(Boolean);
|
||||||
|
if (parts.length >= 2) {
|
||||||
|
aheadCount = Number.parseInt(parts[0], 10) || 0;
|
||||||
|
behindCount = Number.parseInt(parts[1], 10) || 0;
|
||||||
|
}
|
||||||
|
} catch {
|
||||||
|
hasUpstream = 0;
|
||||||
|
aheadCount = 0;
|
||||||
|
behindCount = 0;
|
||||||
|
}
|
||||||
|
} catch (e) {
|
||||||
|
gitError = gitErrorMessage(e);
|
||||||
|
}
|
||||||
|
|
||||||
|
await db.insert(snapshots).values({
|
||||||
|
ts,
|
||||||
|
branch,
|
||||||
|
headShort,
|
||||||
|
porcelainLines,
|
||||||
|
hasUpstream,
|
||||||
|
aheadCount,
|
||||||
|
behindCount,
|
||||||
|
gitError,
|
||||||
|
});
|
||||||
|
|
||||||
|
return {
|
||||||
|
workspaceRoot: root,
|
||||||
|
branch,
|
||||||
|
headShort,
|
||||||
|
porcelainLines,
|
||||||
|
hasUpstream: hasUpstream === 1,
|
||||||
|
aheadCount,
|
||||||
|
behindCount,
|
||||||
|
gitError: gitError || undefined,
|
||||||
|
};
|
||||||
|
}
|
||||||
13
senses/git-workspace-status/src/schema.ts
Normal file
13
senses/git-workspace-status/src/schema.ts
Normal file
@ -0,0 +1,13 @@
|
|||||||
|
import { integer, sqliteTable, text } from "drizzle-orm/sqlite-core";
|
||||||
|
|
||||||
|
export const snapshots = sqliteTable("snapshots", {
|
||||||
|
ts: integer("ts").primaryKey(),
|
||||||
|
branch: text("branch").notNull(),
|
||||||
|
headShort: text("head_short").notNull(),
|
||||||
|
porcelainLines: integer("porcelain_lines").notNull(),
|
||||||
|
hasUpstream: integer("has_upstream").notNull(),
|
||||||
|
aheadCount: integer("ahead_count").notNull(),
|
||||||
|
behindCount: integer("behind_count").notNull(),
|
||||||
|
/** Empty string when the snapshot succeeded; otherwise a short error summary. */
|
||||||
|
gitError: text("git_error").notNull(),
|
||||||
|
});
|
||||||
374
senses/hermes-gateway-health/index.js
Normal file
374
senses/hermes-gateway-health/index.js
Normal file
@ -0,0 +1,374 @@
|
|||||||
|
// src/index.ts
|
||||||
|
import { execFile } from "node:child_process";
|
||||||
|
|
||||||
|
// src/schema.ts
|
||||||
|
import { integer, real, sqliteTable, text } from "drizzle-orm/sqlite-core";
|
||||||
|
var hermesGatewayHealth = sqliteTable("hermes_gateway_health", {
|
||||||
|
id: integer("id").primaryKey({ autoIncrement: true }),
|
||||||
|
ts: integer("ts").notNull(),
|
||||||
|
alive: integer("alive").notNull(),
|
||||||
|
mainPid: integer("main_pid").notNull(),
|
||||||
|
rssBytes: integer("rss_bytes").notNull(),
|
||||||
|
cpuPercent: real("cpu_percent").notNull(),
|
||||||
|
uptimeSec: integer("uptime_sec").notNull(),
|
||||||
|
activeSessions: integer("active_sessions").notNull(),
|
||||||
|
childProcessCount: integer("child_process_count").notNull(),
|
||||||
|
httpOk: integer("http_ok").notNull(),
|
||||||
|
httpStatusCode: integer("http_status_code").notNull(),
|
||||||
|
httpLatencyMs: integer("http_latency_ms").notNull(),
|
||||||
|
httpError: text("http_error").notNull()
|
||||||
|
});
|
||||||
|
|
||||||
|
// src/index.ts
|
||||||
|
var EXEC_TIMEOUT_MS = 25e3;
|
||||||
|
var HTTP_TIMEOUT_MS = Math.min(23e3, EXEC_TIMEOUT_MS - 2e3);
|
||||||
|
var HTTP_ERROR_MAX_LEN = 256;
|
||||||
|
function gatewayProbeUrl() {
|
||||||
|
const u = process.env.HERMES_GATEWAY_HEALTH_URL ?? process.env.NERVE_HERMES_GATEWAY_URL ?? "";
|
||||||
|
return String(u).trim();
|
||||||
|
}
|
||||||
|
function truncateHttpError(err) {
|
||||||
|
const raw = err && typeof err === "object" && "code" in err && err.code ? String(err.code) : String(err?.message ?? err ?? "error");
|
||||||
|
const s = raw.trim() || "error";
|
||||||
|
return s.length > HTTP_ERROR_MAX_LEN ? s.slice(0, HTTP_ERROR_MAX_LEN) : s;
|
||||||
|
}
|
||||||
|
async function probeGatewayHttp(url) {
|
||||||
|
if (!url) {
|
||||||
|
return {
|
||||||
|
httpOk: 0,
|
||||||
|
httpStatusCode: 0,
|
||||||
|
httpLatencyMs: 0,
|
||||||
|
httpError: "missing_url"
|
||||||
|
};
|
||||||
|
}
|
||||||
|
const t0 = Date.now();
|
||||||
|
try {
|
||||||
|
const signal = AbortSignal.timeout(HTTP_TIMEOUT_MS);
|
||||||
|
const res = await fetch(url, {
|
||||||
|
method: "GET",
|
||||||
|
signal,
|
||||||
|
redirect: "follow"
|
||||||
|
});
|
||||||
|
const httpLatencyMs = Date.now() - t0;
|
||||||
|
const code = res.status;
|
||||||
|
const ok = code >= 200 && code < 400;
|
||||||
|
return {
|
||||||
|
httpOk: ok ? 1 : 0,
|
||||||
|
httpStatusCode: code,
|
||||||
|
httpLatencyMs,
|
||||||
|
httpError: ok ? "" : truncateHttpError({ message: `HTTP ${code}` })
|
||||||
|
};
|
||||||
|
} catch (err) {
|
||||||
|
const httpLatencyMs = Date.now() - t0;
|
||||||
|
return {
|
||||||
|
httpOk: 0,
|
||||||
|
httpStatusCode: 0,
|
||||||
|
httpLatencyMs,
|
||||||
|
httpError: truncateHttpError(err)
|
||||||
|
};
|
||||||
|
}
|
||||||
|
}
|
||||||
|
function etimeToSeconds(etime) {
|
||||||
|
let s = String(etime).trim();
|
||||||
|
if (!s) return 0;
|
||||||
|
let days = 0;
|
||||||
|
if (s.includes("-")) {
|
||||||
|
const idx = s.indexOf("-");
|
||||||
|
const d = Number.parseInt(s.slice(0, idx), 10);
|
||||||
|
days = Number.isFinite(d) ? d : 0;
|
||||||
|
s = s.slice(idx + 1);
|
||||||
|
}
|
||||||
|
const parts = s.split(":").map((x) => Number.parseInt(String(x).trim(), 10));
|
||||||
|
if (parts.some((n) => !Number.isFinite(n))) return 0;
|
||||||
|
if (parts.length === 3) {
|
||||||
|
return Math.trunc(days * 86400 + parts[0] * 3600 + parts[1] * 60 + parts[2]);
|
||||||
|
}
|
||||||
|
if (parts.length === 2) {
|
||||||
|
return Math.trunc(days * 86400 + parts[0] * 60 + parts[1]);
|
||||||
|
}
|
||||||
|
if (parts.length === 1) {
|
||||||
|
return Math.trunc(days * 86400 + parts[0]);
|
||||||
|
}
|
||||||
|
return 0;
|
||||||
|
}
|
||||||
|
function execFileUtf8(file, args, opts = {}) {
|
||||||
|
return new Promise((resolve) => {
|
||||||
|
execFile(
|
||||||
|
file,
|
||||||
|
args,
|
||||||
|
{
|
||||||
|
encoding: "utf8",
|
||||||
|
maxBuffer: 8 * 1024 * 1024,
|
||||||
|
timeout: EXEC_TIMEOUT_MS,
|
||||||
|
...opts
|
||||||
|
},
|
||||||
|
(err, stdout, stderr) => {
|
||||||
|
const exitCode = err && typeof err.status === "number" ? err.status : err ? -1 : 0;
|
||||||
|
resolve({
|
||||||
|
exitCode,
|
||||||
|
errCode: err?.code,
|
||||||
|
stdout: String(stdout ?? ""),
|
||||||
|
stderr: String(stderr ?? "")
|
||||||
|
});
|
||||||
|
}
|
||||||
|
);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
function parseMainPidFromStatus(text2) {
|
||||||
|
const m = text2.match(/Main PID:\s*(\d+)/i);
|
||||||
|
return m ? Math.trunc(Number.parseInt(m[1], 10)) || 0 : 0;
|
||||||
|
}
|
||||||
|
function parseActiveLineFromStatus(text2) {
|
||||||
|
for (const line of text2.split("\n")) {
|
||||||
|
if (/^\s*Active:/i.test(line)) {
|
||||||
|
const m = line.match(/Active:\s*(\S+)\s*\(([^)]*)\)/i);
|
||||||
|
if (m) {
|
||||||
|
return {
|
||||||
|
active: m[1].toLowerCase() === "active",
|
||||||
|
subRunning: m[2].toLowerCase().includes("running")
|
||||||
|
};
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return { active: false, subRunning: false };
|
||||||
|
}
|
||||||
|
function parseSystemctlShow(text2) {
|
||||||
|
let mainPid = 0;
|
||||||
|
let active = false;
|
||||||
|
let subRunning = false;
|
||||||
|
for (const line of text2.split("\n")) {
|
||||||
|
const t = line.trim();
|
||||||
|
if (t.startsWith("MainPID=")) {
|
||||||
|
mainPid = Math.trunc(Number.parseInt(t.slice("MainPID=".length), 10)) || 0;
|
||||||
|
} else if (t.startsWith("ActiveState=")) {
|
||||||
|
active = t.slice("ActiveState=".length).trim().toLowerCase() === "active";
|
||||||
|
} else if (t.startsWith("SubState=")) {
|
||||||
|
subRunning = t.slice("SubState=".length).trim().toLowerCase() === "running";
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return { mainPid, active, subRunning };
|
||||||
|
}
|
||||||
|
async function readSystemdState() {
|
||||||
|
const status = await execFileUtf8("systemctl", [
|
||||||
|
"--user",
|
||||||
|
"--no-pager",
|
||||||
|
"status",
|
||||||
|
"hermes-gateway"
|
||||||
|
]);
|
||||||
|
const combined = `${status.stdout}
|
||||||
|
${status.stderr}`.trim();
|
||||||
|
let mainPid = parseMainPidFromStatus(combined);
|
||||||
|
let { active, subRunning } = parseActiveLineFromStatus(combined);
|
||||||
|
const needShow = mainPid <= 0 || !active || !subRunning;
|
||||||
|
if (needShow) {
|
||||||
|
const show = await execFileUtf8("systemctl", [
|
||||||
|
"--user",
|
||||||
|
"--no-pager",
|
||||||
|
"show",
|
||||||
|
"hermes-gateway",
|
||||||
|
"-p",
|
||||||
|
"MainPID",
|
||||||
|
"-p",
|
||||||
|
"ActiveState",
|
||||||
|
"-p",
|
||||||
|
"SubState"
|
||||||
|
]);
|
||||||
|
const showText = `${show.stdout}
|
||||||
|
${show.stderr}`;
|
||||||
|
const s = parseSystemctlShow(showText);
|
||||||
|
if (mainPid <= 0 && s.mainPid > 0) mainPid = s.mainPid;
|
||||||
|
if (!active) active = s.active;
|
||||||
|
if (!subRunning) subRunning = s.subRunning;
|
||||||
|
}
|
||||||
|
return { mainPid, systemdActiveRunning: active && subRunning };
|
||||||
|
}
|
||||||
|
async function processExists(mainPid) {
|
||||||
|
if (mainPid <= 0) return false;
|
||||||
|
const r = await execFileUtf8("ps", ["-p", String(mainPid), "-o", "pid="]);
|
||||||
|
if (r.errCode === "ENOENT") return false;
|
||||||
|
return r.stdout.trim().length > 0;
|
||||||
|
}
|
||||||
|
async function readPsMetrics(mainPid) {
|
||||||
|
if (mainPid <= 0) {
|
||||||
|
return { rssBytes: 0, cpuPercent: 0, uptimeSec: 0 };
|
||||||
|
}
|
||||||
|
let r = await execFileUtf8("ps", [
|
||||||
|
"-p",
|
||||||
|
String(mainPid),
|
||||||
|
"-o",
|
||||||
|
"rss=,%cpu=,etimes="
|
||||||
|
]);
|
||||||
|
let line = r.stdout.trim().replace(/\s+/g, " ");
|
||||||
|
if (r.errCode === "ENOENT" || !line) {
|
||||||
|
return { rssBytes: 0, cpuPercent: 0, uptimeSec: 0 };
|
||||||
|
}
|
||||||
|
let parts = line.split(" ").filter(Boolean);
|
||||||
|
if (parts.length < 3) {
|
||||||
|
r = await execFileUtf8("ps", [
|
||||||
|
"-p",
|
||||||
|
String(mainPid),
|
||||||
|
"-o",
|
||||||
|
"rss=,%cpu=,etime="
|
||||||
|
]);
|
||||||
|
line = r.stdout.trim().replace(/\s+/g, " ");
|
||||||
|
parts = line.split(" ").filter(Boolean);
|
||||||
|
if (parts.length < 3) {
|
||||||
|
return { rssBytes: 0, cpuPercent: 0, uptimeSec: 0 };
|
||||||
|
}
|
||||||
|
const rssKiB2 = Number(parts[0]);
|
||||||
|
const cpu2 = Number(parts[1]);
|
||||||
|
const uptimeSec2 = etimeToSeconds(parts.slice(2).join(" "));
|
||||||
|
const rssBytes2 = Number.isFinite(rssKiB2) ? Math.trunc(rssKiB2 * 1024) : 0;
|
||||||
|
const cpuPercent2 = Number.isFinite(cpu2) ? Math.round(cpu2 * 100) / 100 : 0;
|
||||||
|
return { rssBytes: rssBytes2, cpuPercent: cpuPercent2, uptimeSec: uptimeSec2 };
|
||||||
|
}
|
||||||
|
const rssKiB = Number(parts[0]);
|
||||||
|
const cpu = Number(parts[1]);
|
||||||
|
const etimes = Number(parts[2]);
|
||||||
|
const rssBytes = Number.isFinite(rssKiB) ? Math.trunc(rssKiB * 1024) : 0;
|
||||||
|
const cpuPercent = Number.isFinite(cpu) ? Math.round(cpu * 100) / 100 : 0;
|
||||||
|
const uptimeSec = Number.isFinite(etimes) ? Math.trunc(etimes) : 0;
|
||||||
|
return { rssBytes, cpuPercent, uptimeSec };
|
||||||
|
}
|
||||||
|
function parseActiveSessionsFromHermesStats(text2) {
|
||||||
|
const src = String(text2);
|
||||||
|
const patterns = [
|
||||||
|
/^\s*Active\s+sessions?:\s*(\d+)/gim,
|
||||||
|
/^\s*active\s+sessions?:\s*(\d+)/gim,
|
||||||
|
/^\s*Total\s+sessions?:\s*(\d+)/gim
|
||||||
|
];
|
||||||
|
for (const re of patterns) {
|
||||||
|
re.lastIndex = 0;
|
||||||
|
const m = re.exec(src);
|
||||||
|
if (m) {
|
||||||
|
const n = Math.trunc(Number.parseInt(m[1], 10));
|
||||||
|
return Number.isFinite(n) ? n : 0;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return 0;
|
||||||
|
}
|
||||||
|
async function readActiveSessions() {
|
||||||
|
try {
|
||||||
|
const r = await execFileUtf8("hermes", ["sessions", "stats"]);
|
||||||
|
if (r.errCode === "ENOENT") return 0;
|
||||||
|
return parseActiveSessionsFromHermesStats(`${r.stdout}
|
||||||
|
${r.stderr}`);
|
||||||
|
} catch {
|
||||||
|
return 0;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
async function countDirectChildren(mainPid) {
|
||||||
|
if (mainPid <= 0) return 0;
|
||||||
|
try {
|
||||||
|
const r = await execFileUtf8("ps", [
|
||||||
|
"--no-headers",
|
||||||
|
"-o",
|
||||||
|
"pid",
|
||||||
|
"--ppid",
|
||||||
|
String(mainPid)
|
||||||
|
]);
|
||||||
|
if (r.errCode === "ENOENT") return 0;
|
||||||
|
const lines = r.stdout.split("\n").map((l) => l.trim()).filter(Boolean);
|
||||||
|
return lines.length;
|
||||||
|
} catch {
|
||||||
|
return 0;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
/**
 * Sense entry point for hermes-gateway-health.
 *
 * Collects systemd/process/HTTP health signals for the gateway, inserts one
 * snapshot row into `hermes_gateway_health`, and returns the stored values.
 * Every probe is individually wrapped in try/catch so a single failing probe
 * degrades to zeroed defaults instead of aborting the whole run.
 */
async function compute(db, _peers) {
  const ts = Date.now();
  // --- systemd state: main PID + "active (running)" flag ---
  let mainPid = 0;
  let systemdActiveRunning = false;
  try {
    const st = await readSystemdState();
    mainPid = st.mainPid;
    systemdActiveRunning = st.systemdActiveRunning;
  } catch {
    mainPid = 0;
    systemdActiveRunning = false;
  }
  // --- does the reported PID actually exist right now? ---
  let psOk = false;
  try {
    psOk = await processExists(mainPid);
  } catch {
    psOk = false;
  }
  // --- per-process metrics; only attempted when the PID exists ---
  let rssBytes = 0;
  let cpuPercent = 0;
  let uptimeSec = 0;
  if (psOk) {
    try {
      const m = await readPsMetrics(mainPid);
      rssBytes = m.rssBytes;
      cpuPercent = m.cpuPercent;
      uptimeSec = m.uptimeSec;
    } catch {
      rssBytes = 0;
      cpuPercent = 0;
      uptimeSec = 0;
    }
  }
  // Alive only when systemd reports running AND the PID is real.
  const alive = systemdActiveRunning && mainPid > 0 && psOk ? 1 : 0;
  // --- active sessions via the hermes CLI (0 on any failure) ---
  let activeSessions = 0;
  try {
    activeSessions = await readActiveSessions();
  } catch {
    activeSessions = 0;
  }
  // --- direct children of the gateway process, only when alive ---
  let childProcessCount = 0;
  if (alive && mainPid > 0) {
    try {
      childProcessCount = await countDirectChildren(mainPid);
    } catch {
      childProcessCount = 0;
    }
  }
  // --- HTTP reachability probe against the configured gateway URL ---
  let httpOk = 0;
  let httpStatusCode = 0;
  let httpLatencyMs = 0;
  let httpError = "";
  try {
    const h = await probeGatewayHttp(gatewayProbeUrl());
    httpOk = h.httpOk;
    httpStatusCode = h.httpStatusCode;
    httpLatencyMs = h.httpLatencyMs;
    httpError = h.httpError;
  } catch {
    httpOk = 0;
    httpStatusCode = 0;
    httpLatencyMs = 0;
    httpError = "probe_failed";
  }
  const storedMainPid = mainPid > 0 ? mainPid : 0;
  // Zero out process-scoped metrics when not alive so stored rows stay
  // internally consistent (no stale metrics attached to a dead gateway).
  const row = {
    ts,
    alive,
    mainPid: storedMainPid,
    rssBytes: alive ? rssBytes : 0,
    cpuPercent: alive ? cpuPercent : 0,
    uptimeSec: alive ? uptimeSec : 0,
    activeSessions,
    childProcessCount: alive ? childProcessCount : 0,
    httpOk,
    httpStatusCode,
    httpLatencyMs,
    httpError
  };
  await db.insert(hermesGatewayHealth).values(row);
  // Return exactly what was persisted (explicit field list, no spread).
  return {
    ts: row.ts,
    alive: row.alive,
    mainPid: row.mainPid,
    rssBytes: row.rssBytes,
    cpuPercent: row.cpuPercent,
    uptimeSec: row.uptimeSec,
    activeSessions: row.activeSessions,
    childProcessCount: row.childProcessCount,
    httpOk: row.httpOk,
    httpStatusCode: row.httpStatusCode,
    httpLatencyMs: row.httpLatencyMs,
    httpError: row.httpError
  };
}
|
||||||
|
export {
|
||||||
|
compute
|
||||||
|
};
|
||||||
14
senses/hermes-gateway-health/migrations/0001_init.sql
Normal file
14
senses/hermes-gateway-health/migrations/0001_init.sql
Normal file
@ -0,0 +1,14 @@
|
|||||||
|
-- Migration: 0001_init
|
||||||
|
-- Creates the hermes_gateway_health table for hermes-gateway-health sense.
|
||||||
|
|
||||||
|
CREATE TABLE IF NOT EXISTS hermes_gateway_health (
|
||||||
|
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||||
|
ts INTEGER NOT NULL,
|
||||||
|
alive INTEGER NOT NULL,
|
||||||
|
main_pid INTEGER NOT NULL,
|
||||||
|
rss_bytes INTEGER NOT NULL,
|
||||||
|
cpu_percent REAL NOT NULL,
|
||||||
|
uptime_sec INTEGER NOT NULL,
|
||||||
|
active_sessions INTEGER NOT NULL,
|
||||||
|
child_process_count INTEGER NOT NULL
|
||||||
|
);
|
||||||
@ -0,0 +1,7 @@
|
|||||||
|
-- Migration: 0002_add_http_probe
|
||||||
|
-- HTTP reachability columns for hermes-gateway-health sense.
|
||||||
|
|
||||||
|
ALTER TABLE hermes_gateway_health ADD COLUMN http_ok INTEGER NOT NULL DEFAULT 0;
|
||||||
|
ALTER TABLE hermes_gateway_health ADD COLUMN http_status_code INTEGER NOT NULL DEFAULT 0;
|
||||||
|
ALTER TABLE hermes_gateway_health ADD COLUMN http_latency_ms INTEGER NOT NULL DEFAULT 0;
|
||||||
|
ALTER TABLE hermes_gateway_health ADD COLUMN http_error TEXT NOT NULL DEFAULT '';
|
||||||
14
senses/hermes-gateway-health/package.json
Normal file
14
senses/hermes-gateway-health/package.json
Normal file
@ -0,0 +1,14 @@
|
|||||||
|
{
|
||||||
|
"name": "sense-hermes-gateway-health",
|
||||||
|
"version": "0.0.1",
|
||||||
|
"private": true,
|
||||||
|
"type": "module",
|
||||||
|
"scripts": {
|
||||||
|
"build": "esbuild src/index.ts --bundle --platform=node --format=esm --outfile=index.js --packages=external"
|
||||||
|
},
|
||||||
|
"devDependencies": {
|
||||||
|
"@types/node": "^22.0.0",
|
||||||
|
"esbuild": "^0.27.0",
|
||||||
|
"typescript": "^5.7.0"
|
||||||
|
}
|
||||||
|
}
|
||||||
@ -1,4 +1,7 @@
|
|||||||
import { execFile } from "node:child_process";
|
import { execFile } from "node:child_process";
|
||||||
|
import type { LibSQLDatabase } from "drizzle-orm/libsql";
|
||||||
|
import { hermesGatewayHealth } from "./schema.ts";
|
||||||
|
|
||||||
/** Keep subprocess deadlines slightly under typical sense timeout (30s). */
|
/** Keep subprocess deadlines slightly under typical sense timeout (30s). */
|
||||||
const EXEC_TIMEOUT_MS = 25_000;
|
const EXEC_TIMEOUT_MS = 25_000;
|
||||||
|
|
||||||
@ -7,22 +10,6 @@ const HTTP_TIMEOUT_MS = Math.min(23_000, EXEC_TIMEOUT_MS - 2000);
|
|||||||
|
|
||||||
const HTTP_ERROR_MAX_LEN = 256;
|
const HTTP_ERROR_MAX_LEN = 256;
|
||||||
|
|
||||||
/** How many consecutive failures before triggering a restart. */
|
|
||||||
const FAILURE_THRESHOLD = 3;
|
|
||||||
|
|
||||||
type SenseState = {
|
|
||||||
consecutiveFailures: number;
|
|
||||||
lastRestartTs: number;
|
|
||||||
/** Minimum ms between restart attempts to avoid restart loops. */
|
|
||||||
restartCooldownMs: number;
|
|
||||||
};
|
|
||||||
|
|
||||||
export const initialState: SenseState = {
|
|
||||||
consecutiveFailures: 0,
|
|
||||||
lastRestartTs: 0,
|
|
||||||
restartCooldownMs: 300_000, // 5 minutes
|
|
||||||
};
|
|
||||||
|
|
||||||
function gatewayProbeUrl(): string {
|
function gatewayProbeUrl(): string {
|
||||||
const u =
|
const u =
|
||||||
process.env.HERMES_GATEWAY_HEALTH_URL ??
|
process.env.HERMES_GATEWAY_HEALTH_URL ??
|
||||||
@ -40,13 +27,17 @@ function truncateHttpError(err: unknown): string {
|
|||||||
return s.length > HTTP_ERROR_MAX_LEN ? s.slice(0, HTTP_ERROR_MAX_LEN) : s;
|
return s.length > HTTP_ERROR_MAX_LEN ? s.slice(0, HTTP_ERROR_MAX_LEN) : s;
|
||||||
}
|
}
|
||||||
|
|
||||||
type HttpProbeResult = {
|
interface HttpProbeResult {
|
||||||
httpOk: number;
|
httpOk: number;
|
||||||
httpStatusCode: number;
|
httpStatusCode: number;
|
||||||
httpLatencyMs: number;
|
httpLatencyMs: number;
|
||||||
httpError: string;
|
httpError: string;
|
||||||
};
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* GET the gateway URL; success = HTTP 200–399.
|
||||||
|
* URL must be set via HERMES_GATEWAY_HEALTH_URL or NERVE_HERMES_GATEWAY_URL.
|
||||||
|
*/
|
||||||
async function probeGatewayHttp(url: string): Promise<HttpProbeResult> {
|
async function probeGatewayHttp(url: string): Promise<HttpProbeResult> {
|
||||||
if (!url) {
|
if (!url) {
|
||||||
return {
|
return {
|
||||||
@ -84,6 +75,10 @@ async function probeGatewayHttp(url: string): Promise<HttpProbeResult> {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* When `ps` lacks `etimes` (wall-clock seconds since start), parse `etime`
|
||||||
|
* ([[dd-]hh:]mm:ss) into seconds. See ps(1) `etime` field description.
|
||||||
|
*/
|
||||||
function etimeToSeconds(etime: string): number {
|
function etimeToSeconds(etime: string): number {
|
||||||
let s = String(etime).trim();
|
let s = String(etime).trim();
|
||||||
if (!s) return 0;
|
if (!s) return 0;
|
||||||
@ -108,12 +103,12 @@ function etimeToSeconds(etime: string): number {
|
|||||||
return 0;
|
return 0;
|
||||||
}
|
}
|
||||||
|
|
||||||
type ExecResult = {
|
interface ExecResult {
|
||||||
exitCode: number;
|
exitCode: number;
|
||||||
errCode: string | undefined;
|
errCode: string | undefined;
|
||||||
stdout: string;
|
stdout: string;
|
||||||
stderr: string;
|
stderr: string;
|
||||||
};
|
}
|
||||||
|
|
||||||
function execFileUtf8(file: string, args: string[], opts: Record<string, unknown> = {}): Promise<ExecResult> {
|
function execFileUtf8(file: string, args: string[], opts: Record<string, unknown> = {}): Promise<ExecResult> {
|
||||||
return new Promise((resolve) => {
|
return new Promise((resolve) => {
|
||||||
@ -222,11 +217,11 @@ async function processExists(mainPid: number): Promise<boolean> {
|
|||||||
return r.stdout.trim().length > 0;
|
return r.stdout.trim().length > 0;
|
||||||
}
|
}
|
||||||
|
|
||||||
type PsMetrics = {
|
interface PsMetrics {
|
||||||
rssBytes: number;
|
rssBytes: number;
|
||||||
cpuPercent: number;
|
cpuPercent: number;
|
||||||
uptimeSec: number;
|
uptimeSec: number;
|
||||||
};
|
}
|
||||||
|
|
||||||
async function readPsMetrics(mainPid: number): Promise<PsMetrics> {
|
async function readPsMetrics(mainPid: number): Promise<PsMetrics> {
|
||||||
if (mainPid <= 0) {
|
if (mainPid <= 0) {
|
||||||
@ -271,12 +266,61 @@ async function readPsMetrics(mainPid: number): Promise<PsMetrics> {
|
|||||||
return { rssBytes, cpuPercent, uptimeSec };
|
return { rssBytes, cpuPercent, uptimeSec };
|
||||||
}
|
}
|
||||||
|
|
||||||
export async function compute(prevState: SenseState) {
|
function parseActiveSessionsFromHermesStats(text: string): number {
|
||||||
const now = Date.now();
|
const src = String(text);
|
||||||
|
const patterns = [
|
||||||
|
/^\s*Active\s+sessions?:\s*(\d+)/gim,
|
||||||
|
/^\s*active\s+sessions?:\s*(\d+)/gim,
|
||||||
|
/^\s*Total\s+sessions?:\s*(\d+)/gim,
|
||||||
|
];
|
||||||
|
for (const re of patterns) {
|
||||||
|
re.lastIndex = 0;
|
||||||
|
const m = re.exec(src);
|
||||||
|
if (m) {
|
||||||
|
const n = Math.trunc(Number.parseInt(m[1], 10));
|
||||||
|
return Number.isFinite(n) ? n : 0;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
async function readActiveSessions(): Promise<number> {
|
||||||
|
try {
|
||||||
|
const r = await execFileUtf8("hermes", ["sessions", "stats"]);
|
||||||
|
if (r.errCode === "ENOENT") return 0;
|
||||||
|
return parseActiveSessionsFromHermesStats(`${r.stdout}\n${r.stderr}`);
|
||||||
|
} catch {
|
||||||
|
return 0;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async function countDirectChildren(mainPid: number): Promise<number> {
|
||||||
|
if (mainPid <= 0) return 0;
|
||||||
|
try {
|
||||||
|
const r = await execFileUtf8("ps", [
|
||||||
|
"--no-headers",
|
||||||
|
"-o",
|
||||||
|
"pid",
|
||||||
|
"--ppid",
|
||||||
|
String(mainPid),
|
||||||
|
]);
|
||||||
|
if (r.errCode === "ENOENT") return 0;
|
||||||
|
const lines = r.stdout
|
||||||
|
.split("\n")
|
||||||
|
.map((l) => l.trim())
|
||||||
|
.filter(Boolean);
|
||||||
|
return lines.length;
|
||||||
|
} catch {
|
||||||
|
return 0;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function compute(db: LibSQLDatabase, _peers: unknown) {
|
||||||
|
const ts = Date.now();
|
||||||
|
|
||||||
// --- probe gateway ---
|
|
||||||
let mainPid = 0;
|
let mainPid = 0;
|
||||||
let systemdActiveRunning = false;
|
let systemdActiveRunning = false;
|
||||||
|
|
||||||
try {
|
try {
|
||||||
const st = await readSystemdState();
|
const st = await readSystemdState();
|
||||||
mainPid = st.mainPid;
|
mainPid = st.mainPid;
|
||||||
@ -311,6 +355,22 @@ export async function compute(prevState: SenseState) {
|
|||||||
|
|
||||||
const alive = systemdActiveRunning && mainPid > 0 && psOk ? 1 : 0;
|
const alive = systemdActiveRunning && mainPid > 0 && psOk ? 1 : 0;
|
||||||
|
|
||||||
|
let activeSessions = 0;
|
||||||
|
try {
|
||||||
|
activeSessions = await readActiveSessions();
|
||||||
|
} catch {
|
||||||
|
activeSessions = 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
let childProcessCount = 0;
|
||||||
|
if (alive && mainPid > 0) {
|
||||||
|
try {
|
||||||
|
childProcessCount = await countDirectChildren(mainPid);
|
||||||
|
} catch {
|
||||||
|
childProcessCount = 0;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
let httpOk = 0;
|
let httpOk = 0;
|
||||||
let httpStatusCode = 0;
|
let httpStatusCode = 0;
|
||||||
let httpLatencyMs = 0;
|
let httpLatencyMs = 0;
|
||||||
@ -328,42 +388,37 @@ export async function compute(prevState: SenseState) {
|
|||||||
httpError = "probe_failed";
|
httpError = "probe_failed";
|
||||||
}
|
}
|
||||||
|
|
||||||
// --- decide health ---
|
const storedMainPid = mainPid > 0 ? mainPid : 0;
|
||||||
const healthy = alive === 1 && httpOk === 1;
|
|
||||||
|
|
||||||
// --- state machine: track consecutive failures ---
|
const row = {
|
||||||
const consecutiveFailures = healthy ? 0 : prevState.consecutiveFailures + 1;
|
ts,
|
||||||
const lastRestartTs = prevState.lastRestartTs;
|
|
||||||
const cooldown = prevState.restartCooldownMs;
|
|
||||||
const cooldownElapsed = now - lastRestartTs >= cooldown;
|
|
||||||
|
|
||||||
// --- trigger restart? ---
|
|
||||||
const shouldRestart =
|
|
||||||
consecutiveFailures >= FAILURE_THRESHOLD && cooldownElapsed;
|
|
||||||
|
|
||||||
const nextState: SenseState = {
|
|
||||||
consecutiveFailures,
|
|
||||||
lastRestartTs: shouldRestart ? now : lastRestartTs,
|
|
||||||
restartCooldownMs: cooldown,
|
|
||||||
};
|
|
||||||
|
|
||||||
const signal = {
|
|
||||||
ts: now,
|
|
||||||
alive,
|
alive,
|
||||||
mainPid: mainPid > 0 ? mainPid : 0,
|
mainPid: storedMainPid,
|
||||||
rssBytes: alive ? rssBytes : 0,
|
rssBytes: alive ? rssBytes : 0,
|
||||||
cpuPercent: alive ? cpuPercent : 0,
|
cpuPercent: alive ? cpuPercent : 0,
|
||||||
uptimeSec: alive ? uptimeSec : 0,
|
uptimeSec: alive ? uptimeSec : 0,
|
||||||
|
activeSessions,
|
||||||
|
childProcessCount: alive ? childProcessCount : 0,
|
||||||
httpOk,
|
httpOk,
|
||||||
httpStatusCode,
|
httpStatusCode,
|
||||||
httpLatencyMs,
|
httpLatencyMs,
|
||||||
httpError,
|
httpError,
|
||||||
consecutiveFailures,
|
|
||||||
};
|
};
|
||||||
|
|
||||||
const trigger = shouldRestart
|
await db.insert(hermesGatewayHealth).values(row);
|
||||||
? { command: "systemctl --user restart hermes-gateway" }
|
|
||||||
: null;
|
|
||||||
|
|
||||||
return { state: nextState, signal, trigger };
|
return {
|
||||||
|
ts: row.ts,
|
||||||
|
alive: row.alive,
|
||||||
|
mainPid: row.mainPid,
|
||||||
|
rssBytes: row.rssBytes,
|
||||||
|
cpuPercent: row.cpuPercent,
|
||||||
|
uptimeSec: row.uptimeSec,
|
||||||
|
activeSessions: row.activeSessions,
|
||||||
|
childProcessCount: row.childProcessCount,
|
||||||
|
httpOk: row.httpOk,
|
||||||
|
httpStatusCode: row.httpStatusCode,
|
||||||
|
httpLatencyMs: row.httpLatencyMs,
|
||||||
|
httpError: row.httpError,
|
||||||
|
};
|
||||||
}
|
}
|
||||||
|
|||||||
17
senses/hermes-gateway-health/src/schema.ts
Normal file
17
senses/hermes-gateway-health/src/schema.ts
Normal file
@ -0,0 +1,17 @@
|
|||||||
|
import { integer, real, sqliteTable, text } from "drizzle-orm/sqlite-core";

// Drizzle mapping for the hermes_gateway_health table (created by migration
// 0001_init; HTTP probe columns added by 0002_add_http_probe).
// One row is inserted per sense run by compute().
export const hermesGatewayHealth = sqliteTable("hermes_gateway_health", {
  id: integer("id").primaryKey({ autoIncrement: true }),
  // Snapshot timestamp (epoch ms).
  ts: integer("ts").notNull(),
  // 1 when systemd reports running AND the main PID exists, else 0.
  alive: integer("alive").notNull(),
  mainPid: integer("main_pid").notNull(),
  // Process metrics from ps; zeroed when the gateway is not alive.
  rssBytes: integer("rss_bytes").notNull(),
  cpuPercent: real("cpu_percent").notNull(),
  uptimeSec: integer("uptime_sec").notNull(),
  activeSessions: integer("active_sessions").notNull(),
  childProcessCount: integer("child_process_count").notNull(),
  // HTTP probe results (0 / empty string when the probe fails).
  httpOk: integer("http_ok").notNull(),
  httpStatusCode: integer("http_status_code").notNull(),
  httpLatencyMs: integer("http_latency_ms").notNull(),
  httpError: text("http_error").notNull(),
});
|
||||||
118
senses/hermes-session-message-stats/index.js
Normal file
118
senses/hermes-session-message-stats/index.js
Normal file
@ -0,0 +1,118 @@
|
|||||||
|
// src/index.ts
|
||||||
|
import { createReadStream } from "node:fs";
|
||||||
|
import { readdir } from "node:fs/promises";
|
||||||
|
import { homedir } from "node:os";
|
||||||
|
import { join } from "node:path";
|
||||||
|
import { createInterface } from "node:readline";
|
||||||
|
|
||||||
|
// src/schema.ts
|
||||||
|
import { integer, sqliteTable } from "drizzle-orm/sqlite-core";
|
||||||
|
var hermesSessionMessageStats = sqliteTable("hermes_session_message_stats", {
|
||||||
|
id: integer("id").primaryKey({ autoIncrement: true }),
|
||||||
|
ts: integer("ts").notNull(),
|
||||||
|
totalUserMessages: integer("total_user_messages").notNull(),
|
||||||
|
totalAssistantMessages: integer("total_assistant_messages").notNull(),
|
||||||
|
totalToolMessages: integer("total_tool_messages").notNull(),
|
||||||
|
totalMessages: integer("total_messages").notNull(),
|
||||||
|
activeSessions: integer("active_sessions").notNull(),
|
||||||
|
measurementWindowSeconds: integer("measurement_window_seconds").notNull()
|
||||||
|
});
|
||||||
|
|
||||||
|
// src/index.ts
|
||||||
|
var MEASUREMENT_WINDOW_MS = 9e5;
|
||||||
|
var MEASUREMENT_WINDOW_SECONDS = 900;
|
||||||
|
/**
 * Count user/assistant/tool messages in one session JSONL file whose
 * timestamps fall inside the inclusive [cutoffMs, nowMs] window.
 *
 * Malformed JSON lines, records without string `role`/`timestamp` fields,
 * and out-of-window timestamps are skipped silently. `fileHadActivity` is
 * true when at least one in-window user/assistant/tool message was seen.
 *
 * @param filePath path to a `.jsonl` session log
 * @param cutoffMs inclusive lower bound (epoch ms)
 * @param nowMs inclusive upper bound (epoch ms)
 * @returns {{user:number, assistant:number, tool:number, fileHadActivity:boolean}}
 */
async function aggregateJsonlFile(filePath, cutoffMs, nowMs) {
  let user = 0;
  let assistant = 0;
  let tool = 0;
  let fileHadActivity = false;
  const input = createReadStream(filePath, { encoding: "utf8" });
  const rl = createInterface({ input, crlfDelay: Infinity });
  try {
    for await (const line of rl) {
      const trimmed = line.trim();
      if (!trimmed) continue;
      let obj;
      try {
        obj = JSON.parse(trimmed);
      } catch {
        continue; // skip malformed JSON lines
      }
      if (typeof obj !== "object" || obj === null || typeof obj.role !== "string" || typeof obj.timestamp !== "string") {
        continue;
      }
      const record = obj;
      const t = Date.parse(record.timestamp);
      if (!Number.isFinite(t) || t < cutoffMs || t > nowMs) continue;
      const roleNorm = record.role.trim().toLowerCase();
      if (roleNorm === "user") {
        user++;
        fileHadActivity = true;
      } else if (roleNorm === "assistant") {
        assistant++;
        fileHadActivity = true;
      } else if (roleNorm === "tool") {
        tool++;
        fileHadActivity = true;
      }
    }
  } finally {
    rl.close();
    // Fix: rl.close() detaches readline but does not destroy the underlying
    // read stream; destroy it explicitly so the file descriptor is released
    // even when iteration exits early (e.g. an exception inside the loop).
    input.destroy();
  }
  return { user, assistant, tool, fileHadActivity };
}
|
||||||
|
/**
 * Sense entry point for hermes-session-message-stats.
 *
 * Aggregates message counts over the trailing measurement window
 * (MEASUREMENT_WINDOW_MS) across all `~/.hermes/sessions/*.jsonl` files,
 * inserts one row into `hermes_session_message_stats`, and returns the row.
 */
async function compute(db, _peers) {
  const nowMs = Date.now();
  const cutoffMs = nowMs - MEASUREMENT_WINDOW_MS;
  const ts = nowMs;
  let totalUserMessages = 0;
  let totalAssistantMessages = 0;
  let totalToolMessages = 0;
  let activeSessions = 0;
  const sessionsDir = join(homedir(), ".hermes", "sessions");
  let files = [];
  try {
    const entries = await readdir(sessionsDir, { withFileTypes: true });
    files = entries.filter((e) => e.isFile() && e.name.endsWith(".jsonl")).map((e) => join(sessionsDir, e.name));
  } catch (err) {
    // A missing sessions directory just means "no sessions yet";
    // any other readdir failure is propagated.
    if (err && typeof err === "object" && "code" in err && err.code === "ENOENT") {
      files = [];
    } else {
      throw err;
    }
  }
  // Files are aggregated one at a time (sequential awaits).
  for (const filePath of files) {
    const { user, assistant, tool, fileHadActivity } = await aggregateJsonlFile(
      filePath,
      cutoffMs,
      nowMs
    );
    totalUserMessages += user;
    totalAssistantMessages += assistant;
    totalToolMessages += tool;
    // A session counts as active when it had any in-window message.
    if (fileHadActivity) activeSessions++;
  }
  const totalMessages = totalUserMessages + totalAssistantMessages + totalToolMessages;
  const row = {
    ts,
    totalUserMessages,
    totalAssistantMessages,
    totalToolMessages,
    totalMessages,
    activeSessions,
    measurementWindowSeconds: MEASUREMENT_WINDOW_SECONDS
  };
  await db.insert(hermesSessionMessageStats).values(row);
  // Return exactly what was persisted.
  return {
    ts: row.ts,
    totalUserMessages: row.totalUserMessages,
    totalAssistantMessages: row.totalAssistantMessages,
    totalToolMessages: row.totalToolMessages,
    totalMessages: row.totalMessages,
    activeSessions: row.activeSessions,
    measurementWindowSeconds: row.measurementWindowSeconds
  };
}
|
||||||
|
export {
|
||||||
|
compute
|
||||||
|
};
|
||||||
13
senses/hermes-session-message-stats/migrations/0001_init.sql
Normal file
13
senses/hermes-session-message-stats/migrations/0001_init.sql
Normal file
@ -0,0 +1,13 @@
|
|||||||
|
-- Migration: 0001_init
|
||||||
|
-- Creates the hermes_session_message_stats table for hermes-session-message-stats sense.
|
||||||
|
|
||||||
|
CREATE TABLE IF NOT EXISTS hermes_session_message_stats (
|
||||||
|
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||||
|
ts INTEGER NOT NULL,
|
||||||
|
total_user_messages INTEGER NOT NULL,
|
||||||
|
total_assistant_messages INTEGER NOT NULL,
|
||||||
|
total_tool_messages INTEGER NOT NULL,
|
||||||
|
total_messages INTEGER NOT NULL,
|
||||||
|
active_sessions INTEGER NOT NULL,
|
||||||
|
measurement_window_seconds INTEGER NOT NULL
|
||||||
|
);
|
||||||
14
senses/hermes-session-message-stats/package.json
Normal file
14
senses/hermes-session-message-stats/package.json
Normal file
@ -0,0 +1,14 @@
|
|||||||
|
{
|
||||||
|
"name": "sense-hermes-session-message-stats",
|
||||||
|
"version": "0.0.1",
|
||||||
|
"private": true,
|
||||||
|
"type": "module",
|
||||||
|
"scripts": {
|
||||||
|
"build": "esbuild src/index.ts --bundle --platform=node --format=esm --outfile=index.js --packages=external"
|
||||||
|
},
|
||||||
|
"devDependencies": {
|
||||||
|
"@types/node": "^22.0.0",
|
||||||
|
"esbuild": "^0.27.0",
|
||||||
|
"typescript": "^5.7.0"
|
||||||
|
}
|
||||||
|
}
|
||||||
128
senses/hermes-session-message-stats/src/index.ts
Normal file
128
senses/hermes-session-message-stats/src/index.ts
Normal file
@ -0,0 +1,128 @@
|
|||||||
|
import { createReadStream } from "node:fs";
|
||||||
|
import { readdir } from "node:fs/promises";
|
||||||
|
import { homedir } from "node:os";
|
||||||
|
import { join } from "node:path";
|
||||||
|
import { createInterface } from "node:readline";
|
||||||
|
import type { LibSQLDatabase } from "drizzle-orm/libsql";
|
||||||
|
import { hermesSessionMessageStats } from "./schema.ts";
|
||||||
|
|
||||||
|
const MEASUREMENT_WINDOW_MS = 900_000;
|
||||||
|
const MEASUREMENT_WINDOW_SECONDS = 900;
|
||||||
|
|
||||||
|
interface MessageCounts {
|
||||||
|
user: number;
|
||||||
|
assistant: number;
|
||||||
|
tool: number;
|
||||||
|
fileHadActivity: boolean;
|
||||||
|
}
|
||||||
|
|
||||||
|
async function aggregateJsonlFile(filePath: string, cutoffMs: number, nowMs: number): Promise<MessageCounts> {
|
||||||
|
let user = 0;
|
||||||
|
let assistant = 0;
|
||||||
|
let tool = 0;
|
||||||
|
let fileHadActivity = false;
|
||||||
|
|
||||||
|
const input = createReadStream(filePath, { encoding: "utf8" });
|
||||||
|
const rl = createInterface({ input, crlfDelay: Infinity });
|
||||||
|
try {
|
||||||
|
for await (const line of rl) {
|
||||||
|
const trimmed = line.trim();
|
||||||
|
if (!trimmed) continue;
|
||||||
|
let obj: unknown;
|
||||||
|
try {
|
||||||
|
obj = JSON.parse(trimmed);
|
||||||
|
} catch {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
if (
|
||||||
|
typeof obj !== "object" || obj === null ||
|
||||||
|
typeof (obj as Record<string, unknown>).role !== "string" ||
|
||||||
|
typeof (obj as Record<string, unknown>).timestamp !== "string"
|
||||||
|
) {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
const record = obj as { role: string; timestamp: string };
|
||||||
|
const t = Date.parse(record.timestamp);
|
||||||
|
if (!Number.isFinite(t) || t < cutoffMs || t > nowMs) continue;
|
||||||
|
|
||||||
|
const roleNorm = record.role.trim().toLowerCase();
|
||||||
|
if (roleNorm === "user") {
|
||||||
|
user++;
|
||||||
|
fileHadActivity = true;
|
||||||
|
} else if (roleNorm === "assistant") {
|
||||||
|
assistant++;
|
||||||
|
fileHadActivity = true;
|
||||||
|
} else if (roleNorm === "tool") {
|
||||||
|
tool++;
|
||||||
|
fileHadActivity = true;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} finally {
|
||||||
|
rl.close();
|
||||||
|
}
|
||||||
|
|
||||||
|
return { user, assistant, tool, fileHadActivity };
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
 * Sense entry point for hermes-session-message-stats.
 *
 * Aggregates message counts over the trailing measurement window
 * (MEASUREMENT_WINDOW_MS) across all `~/.hermes/sessions/*.jsonl` files,
 * inserts one row into `hermes_session_message_stats`, and returns the row.
 *
 * @param db libSQL drizzle handle the snapshot row is inserted into
 * @param _peers unused peer-sense data
 * @throws any `readdir` error other than ENOENT (missing dir means "no sessions yet")
 */
export async function compute(db: LibSQLDatabase, _peers: unknown) {
  const nowMs = Date.now();
  const cutoffMs = nowMs - MEASUREMENT_WINDOW_MS;
  const ts = nowMs;

  let totalUserMessages = 0;
  let totalAssistantMessages = 0;
  let totalToolMessages = 0;
  let activeSessions = 0;

  const sessionsDir = join(homedir(), ".hermes", "sessions");
  let files: string[] = [];
  try {
    const entries = await readdir(sessionsDir, { withFileTypes: true });
    files = entries
      .filter((e) => e.isFile() && e.name.endsWith(".jsonl"))
      .map((e) => join(sessionsDir, e.name));
  } catch (err) {
    // Missing directory is normal (no sessions yet); anything else propagates.
    if (err && typeof err === "object" && "code" in err && (err as NodeJS.ErrnoException).code === "ENOENT") {
      files = [];
    } else {
      throw err;
    }
  }

  // Files are aggregated one at a time (sequential awaits).
  for (const filePath of files) {
    const { user, assistant, tool, fileHadActivity } = await aggregateJsonlFile(
      filePath,
      cutoffMs,
      nowMs,
    );
    totalUserMessages += user;
    totalAssistantMessages += assistant;
    totalToolMessages += tool;
    // A session counts as active when it had any in-window message.
    if (fileHadActivity) activeSessions++;
  }

  const totalMessages =
    totalUserMessages + totalAssistantMessages + totalToolMessages;

  const row = {
    ts,
    totalUserMessages,
    totalAssistantMessages,
    totalToolMessages,
    totalMessages,
    activeSessions,
    measurementWindowSeconds: MEASUREMENT_WINDOW_SECONDS,
  };

  await db.insert(hermesSessionMessageStats).values(row);

  // Return exactly what was persisted.
  return {
    ts: row.ts,
    totalUserMessages: row.totalUserMessages,
    totalAssistantMessages: row.totalAssistantMessages,
    totalToolMessages: row.totalToolMessages,
    totalMessages: row.totalMessages,
    activeSessions: row.activeSessions,
    measurementWindowSeconds: row.measurementWindowSeconds,
  };
}
|
||||||
12
senses/hermes-session-message-stats/src/schema.ts
Normal file
12
senses/hermes-session-message-stats/src/schema.ts
Normal file
@ -0,0 +1,12 @@
|
|||||||
|
import { integer, sqliteTable } from "drizzle-orm/sqlite-core";

// Drizzle mapping for the hermes_session_message_stats table (migration
// 0001_init). One row is inserted per sense run by compute(); counts cover
// the trailing measurement window.
export const hermesSessionMessageStats = sqliteTable("hermes_session_message_stats", {
  id: integer("id").primaryKey({ autoIncrement: true }),
  // Snapshot timestamp (epoch ms).
  ts: integer("ts").notNull(),
  totalUserMessages: integer("total_user_messages").notNull(),
  totalAssistantMessages: integer("total_assistant_messages").notNull(),
  totalToolMessages: integer("total_tool_messages").notNull(),
  // Sum of the three per-role counts above.
  totalMessages: integer("total_messages").notNull(),
  // Number of session files with at least one in-window message.
  activeSessions: integer("active_sessions").notNull(),
  // Window length in seconds (MEASUREMENT_WINDOW_SECONDS).
  measurementWindowSeconds: integer("measurement_window_seconds").notNull(),
});
|
||||||
112
senses/linux-system-health/index.js
Normal file
112
senses/linux-system-health/index.js
Normal file
@ -0,0 +1,112 @@
|
|||||||
|
// src/index.ts
|
||||||
|
import { loadavg, totalmem, freemem, uptime } from "node:os";
|
||||||
|
import { execSync } from "node:child_process";
|
||||||
|
import { readFile } from "node:fs/promises";
|
||||||
|
|
||||||
|
// src/schema.ts
|
||||||
|
import { integer, real, sqliteTable } from "drizzle-orm/sqlite-core";
|
||||||
|
var snapshots = sqliteTable("snapshots", {
|
||||||
|
ts: integer("ts").primaryKey(),
|
||||||
|
cpuLoad1m: real("cpu_load_1m").notNull(),
|
||||||
|
cpuLoad5m: real("cpu_load_5m").notNull(),
|
||||||
|
cpuLoad15m: real("cpu_load_15m").notNull(),
|
||||||
|
memTotalMB: integer("mem_total_mb").notNull(),
|
||||||
|
memUsedMB: integer("mem_used_mb").notNull(),
|
||||||
|
memUsedPct: real("mem_used_pct").notNull(),
|
||||||
|
diskTotalGB: real("disk_total_gb").notNull(),
|
||||||
|
diskUsedGB: real("disk_used_gb").notNull(),
|
||||||
|
diskUsedPct: real("disk_used_pct").notNull(),
|
||||||
|
uptimeSec: integer("uptime_sec").notNull(),
|
||||||
|
// TCP socket stats (merged from linux-tcp-socket-stats)
|
||||||
|
socketsUsed: integer("sockets_used"),
|
||||||
|
tcpInuse: integer("tcp_inuse"),
|
||||||
|
tcpOrphan: integer("tcp_orphan"),
|
||||||
|
tcpTw: integer("tcp_tw"),
|
||||||
|
tcpAlloc: integer("tcp_alloc"),
|
||||||
|
tcpMemPages: integer("tcp_mem_pages")
|
||||||
|
});
|
||||||
|
|
||||||
|
// src/index.ts
|
||||||
|
var SOCKSTAT_PATH = "/proc/net/sockstat";
|
||||||
|
/**
 * Parse /proc/net/sockstat text into socket/TCP counters.
 * Unrecognized lines are ignored; missing or unparsable fields stay 0.
 */
function parseSockstat(content) {
  const result = {
    socketsUsed: 0,
    tcpInuse: 0,
    tcpOrphan: 0,
    tcpTw: 0,
    tcpAlloc: 0,
    tcpMemPages: 0
  };
  for (const rawLine of content.split("\n")) {
    const line = rawLine.trim();
    if (line.startsWith("sockets:")) {
      // "sockets: used N" — take the token following "used".
      const fields = line.split(/\s+/);
      const usedIdx = fields.indexOf("used");
      if (usedIdx !== -1 && usedIdx + 1 < fields.length) {
        result.socketsUsed = Number.parseInt(fields[usedIdx + 1], 10) || 0;
      }
    } else if (line.startsWith("TCP:")) {
      // "TCP: inuse N orphan N tw N alloc N mem N" — alternating key/value.
      const fields = line.split(/\s+/);
      const kv = new Map();
      for (let i = 1; i + 1 < fields.length; i += 2) {
        kv.set(fields[i], Number.parseInt(fields[i + 1], 10) || 0);
      }
      result.tcpInuse = kv.get("inuse") ?? 0;
      result.tcpOrphan = kv.get("orphan") ?? 0;
      result.tcpTw = kv.get("tw") ?? 0;
      result.tcpAlloc = kv.get("alloc") ?? 0;
      result.tcpMemPages = kv.get("mem") ?? 0;
    }
  }
  return result;
}
|
||||||
|
/**
 * Sense entry point for linux-system-health.
 *
 * Samples CPU load averages, memory, root-filesystem disk usage, TCP socket
 * statistics, and uptime; persists one row into `snapshots`; and returns a
 * nested summary object. Disk and sockstat reads are best-effort: on failure
 * their metrics stay zeroed instead of failing the run.
 */
async function compute(db, _peers) {
  const [load1, load5, load15] = loadavg();
  const memTotal = totalmem();
  const memFree = freemem();
  const memUsed = memTotal - memFree;
  const memTotalMB = Math.round(memTotal / 1024 / 1024);
  const memUsedMB = Math.round(memUsed / 1024 / 1024);
  // Percentage rounded to 2 decimal places (scale by 1e4, then /100).
  const memUsedPct = Math.round(memUsed / memTotal * 1e4) / 100;
  let diskTotalGB = 0, diskUsedGB = 0, diskUsedPct = 0;
  try {
    // Shell pipeline: df of "/" with 1-byte blocks, keep the data line only.
    const df = execSync("df -B1 / | tail -1", { encoding: "utf-8" }).trim();
    const parts = df.split(/\s+/);
    // df columns: filesystem, total, used, ... — indices 1 and 2.
    const total = Number(parts[1]);
    const used = Number(parts[2]);
    diskTotalGB = Math.round(total / 1024 / 1024 / 1024 * 100) / 100;
    diskUsedGB = Math.round(used / 1024 / 1024 / 1024 * 100) / 100;
    diskUsedPct = total > 0 ? Math.round(used / total * 1e4) / 100 : 0;
  } catch {
    // Best-effort: leave disk metrics at 0 when df is unavailable or fails.
  }
  let tcp = { socketsUsed: 0, tcpInuse: 0, tcpOrphan: 0, tcpTw: 0, tcpAlloc: 0, tcpMemPages: 0 };
  try {
    const content = await readFile(SOCKSTAT_PATH, "utf8");
    tcp = parseSockstat(content);
  } catch {
    // Best-effort: /proc/net/sockstat may be unreadable/absent; keep zeros.
  }
  const ts = Date.now();
  const uptimeSec = Math.round(uptime());
  await db.insert(snapshots).values({
    ts,
    cpuLoad1m: load1,
    cpuLoad5m: load5,
    cpuLoad15m: load15,
    memTotalMB,
    memUsedMB,
    memUsedPct,
    diskTotalGB,
    diskUsedGB,
    diskUsedPct,
    uptimeSec,
    socketsUsed: tcp.socketsUsed,
    tcpInuse: tcp.tcpInuse,
    tcpOrphan: tcp.tcpOrphan,
    tcpTw: tcp.tcpTw,
    tcpAlloc: tcp.tcpAlloc,
    tcpMemPages: tcp.tcpMemPages
  });
  // Grouped summary for consumers (same values as the persisted row).
  return {
    cpu: { load1m: load1, load5m: load5, load15m: load15 },
    memory: { totalMB: memTotalMB, usedMB: memUsedMB, usedPct: memUsedPct },
    disk: { totalGB: diskTotalGB, usedGB: diskUsedGB, usedPct: diskUsedPct },
    tcp: { socketsUsed: tcp.socketsUsed, inuse: tcp.tcpInuse, orphan: tcp.tcpOrphan, tw: tcp.tcpTw, alloc: tcp.tcpAlloc, memPages: tcp.tcpMemPages },
    uptimeSec
  };
}
|
||||||
|
export {
|
||||||
|
compute
|
||||||
|
};
|
||||||
16
senses/linux-system-health/migrations/0001_init.sql
Normal file
16
senses/linux-system-health/migrations/0001_init.sql
Normal file
@ -0,0 +1,16 @@
|
|||||||
|
-- Migration: 0001_init
-- Creates the snapshots table for linux-system-health sense.
-- One row per sampling tick; `ts` (epoch ms from Date.now()) is the primary key.

CREATE TABLE IF NOT EXISTS snapshots (
  ts INTEGER PRIMARY KEY,
  cpu_load_1m REAL NOT NULL,
  cpu_load_5m REAL NOT NULL,
  cpu_load_15m REAL NOT NULL,
  mem_total_mb INTEGER NOT NULL,
  mem_used_mb INTEGER NOT NULL,
  mem_used_pct REAL NOT NULL, -- percent, two decimal places
  disk_total_gb REAL NOT NULL,
  disk_used_gb REAL NOT NULL,
  disk_used_pct REAL NOT NULL, -- percent, two decimal places
  uptime_sec INTEGER NOT NULL
);
|
||||||
@ -0,0 +1,6 @@
|
|||||||
|
-- Migration: 0002 — add TCP socket-stat columns (merged from linux-tcp-socket-stats).
-- Columns are nullable so rows written before this migration remain valid.
ALTER TABLE snapshots ADD COLUMN sockets_used INTEGER;
ALTER TABLE snapshots ADD COLUMN tcp_inuse INTEGER;
ALTER TABLE snapshots ADD COLUMN tcp_orphan INTEGER;
ALTER TABLE snapshots ADD COLUMN tcp_tw INTEGER;
ALTER TABLE snapshots ADD COLUMN tcp_alloc INTEGER;
ALTER TABLE snapshots ADD COLUMN tcp_mem_pages INTEGER;
|
||||||
14
senses/linux-system-health/package.json
Normal file
14
senses/linux-system-health/package.json
Normal file
@ -0,0 +1,14 @@
|
|||||||
|
{
|
||||||
|
"name": "sense-linux-system-health",
|
||||||
|
"version": "0.0.1",
|
||||||
|
"private": true,
|
||||||
|
"type": "module",
|
||||||
|
"scripts": {
|
||||||
|
"build": "esbuild src/index.ts --bundle --platform=node --format=esm --outfile=index.js --packages=external"
|
||||||
|
},
|
||||||
|
"devDependencies": {
|
||||||
|
"@types/node": "^22.0.0",
|
||||||
|
"esbuild": "^0.27.0",
|
||||||
|
"typescript": "^5.7.0"
|
||||||
|
}
|
||||||
|
}
|
||||||
96
senses/linux-system-health/src/index.ts
Normal file
96
senses/linux-system-health/src/index.ts
Normal file
@ -0,0 +1,96 @@
|
|||||||
|
import { loadavg, totalmem, freemem, uptime } from "node:os";
|
||||||
|
import { execSync } from "node:child_process";
|
||||||
|
import { readFile } from "node:fs/promises";
|
||||||
|
import type { LibSQLDatabase } from "drizzle-orm/libsql";
|
||||||
|
import { snapshots } from "./schema.ts";
|
||||||
|
|
||||||
|
const SOCKSTAT_PATH = "/proc/net/sockstat";
|
||||||
|
|
||||||
|
interface SockstatResult {
|
||||||
|
socketsUsed: number;
|
||||||
|
tcpInuse: number;
|
||||||
|
tcpOrphan: number;
|
||||||
|
tcpTw: number;
|
||||||
|
tcpAlloc: number;
|
||||||
|
tcpMemPages: number;
|
||||||
|
}
|
||||||
|
|
||||||
|
function parseSockstat(content: string): SockstatResult {
|
||||||
|
let socketsUsed = 0, tcpInuse = 0, tcpOrphan = 0, tcpTw = 0, tcpAlloc = 0, tcpMemPages = 0;
|
||||||
|
|
||||||
|
for (const line of content.split("\n")) {
|
||||||
|
const trimmed = line.trim();
|
||||||
|
if (trimmed.startsWith("sockets:")) {
|
||||||
|
const parts = trimmed.split(/\s+/);
|
||||||
|
const idx = parts.indexOf("used");
|
||||||
|
if (idx !== -1 && idx + 1 < parts.length) {
|
||||||
|
socketsUsed = Number.parseInt(parts[idx + 1], 10) || 0;
|
||||||
|
}
|
||||||
|
} else if (trimmed.startsWith("TCP:")) {
|
||||||
|
const parts = trimmed.split(/\s+/);
|
||||||
|
const map: Record<string, number> = {};
|
||||||
|
for (let i = 1; i + 1 < parts.length; i += 2) {
|
||||||
|
map[parts[i]] = Number.parseInt(parts[i + 1], 10) || 0;
|
||||||
|
}
|
||||||
|
tcpInuse = map.inuse ?? 0;
|
||||||
|
tcpOrphan = map.orphan ?? 0;
|
||||||
|
tcpTw = map.tw ?? 0;
|
||||||
|
tcpAlloc = map.alloc ?? 0;
|
||||||
|
tcpMemPages = map.mem ?? 0;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return { socketsUsed, tcpInuse, tcpOrphan, tcpTw, tcpAlloc, tcpMemPages };
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function compute(db: LibSQLDatabase, _peers: unknown) {
|
||||||
|
const [load1, load5, load15] = loadavg();
|
||||||
|
|
||||||
|
const memTotal = totalmem();
|
||||||
|
const memFree = freemem();
|
||||||
|
const memUsed = memTotal - memFree;
|
||||||
|
const memTotalMB = Math.round(memTotal / 1024 / 1024);
|
||||||
|
const memUsedMB = Math.round(memUsed / 1024 / 1024);
|
||||||
|
const memUsedPct = Math.round((memUsed / memTotal) * 10000) / 100;
|
||||||
|
|
||||||
|
let diskTotalGB = 0, diskUsedGB = 0, diskUsedPct = 0;
|
||||||
|
try {
|
||||||
|
const df = execSync("df -B1 / | tail -1", { encoding: "utf-8" }).trim();
|
||||||
|
const parts = df.split(/\s+/);
|
||||||
|
const total = Number(parts[1]);
|
||||||
|
const used = Number(parts[2]);
|
||||||
|
diskTotalGB = Math.round(total / 1024 / 1024 / 1024 * 100) / 100;
|
||||||
|
diskUsedGB = Math.round(used / 1024 / 1024 / 1024 * 100) / 100;
|
||||||
|
diskUsedPct = total > 0 ? Math.round((used / total) * 10000) / 100 : 0;
|
||||||
|
} catch {}
|
||||||
|
|
||||||
|
let tcp: SockstatResult = { socketsUsed: 0, tcpInuse: 0, tcpOrphan: 0, tcpTw: 0, tcpAlloc: 0, tcpMemPages: 0 };
|
||||||
|
try {
|
||||||
|
const content = await readFile(SOCKSTAT_PATH, "utf8");
|
||||||
|
tcp = parseSockstat(content);
|
||||||
|
} catch {}
|
||||||
|
|
||||||
|
const ts = Date.now();
|
||||||
|
const uptimeSec = Math.round(uptime());
|
||||||
|
|
||||||
|
await db.insert(snapshots).values({
|
||||||
|
ts, cpuLoad1m: load1, cpuLoad5m: load5, cpuLoad15m: load15,
|
||||||
|
memTotalMB, memUsedMB, memUsedPct,
|
||||||
|
diskTotalGB, diskUsedGB, diskUsedPct,
|
||||||
|
uptimeSec,
|
||||||
|
socketsUsed: tcp.socketsUsed,
|
||||||
|
tcpInuse: tcp.tcpInuse,
|
||||||
|
tcpOrphan: tcp.tcpOrphan,
|
||||||
|
tcpTw: tcp.tcpTw,
|
||||||
|
tcpAlloc: tcp.tcpAlloc,
|
||||||
|
tcpMemPages: tcp.tcpMemPages,
|
||||||
|
});
|
||||||
|
|
||||||
|
return {
|
||||||
|
cpu: { load1m: load1, load5m: load5, load15m: load15 },
|
||||||
|
memory: { totalMB: memTotalMB, usedMB: memUsedMB, usedPct: memUsedPct },
|
||||||
|
disk: { totalGB: diskTotalGB, usedGB: diskUsedGB, usedPct: diskUsedPct },
|
||||||
|
tcp: { socketsUsed: tcp.socketsUsed, inuse: tcp.tcpInuse, orphan: tcp.tcpOrphan, tw: tcp.tcpTw, alloc: tcp.tcpAlloc, memPages: tcp.tcpMemPages },
|
||||||
|
uptimeSec,
|
||||||
|
};
|
||||||
|
}
|
||||||
22
senses/linux-system-health/src/schema.ts
Normal file
22
senses/linux-system-health/src/schema.ts
Normal file
@ -0,0 +1,22 @@
|
|||||||
|
import { integer, real, sqliteTable, text } from "drizzle-orm/sqlite-core";
|
||||||
|
|
||||||
|
// Drizzle table: one system-health snapshot per sampling tick.
// Created by migrations/0001_init.sql; TCP columns added by migration 0002.
export const snapshots = sqliteTable("snapshots", {
  ts: integer("ts").primaryKey(), // sample time, epoch ms (Date.now() in compute)
  cpuLoad1m: real("cpu_load_1m").notNull(),
  cpuLoad5m: real("cpu_load_5m").notNull(),
  cpuLoad15m: real("cpu_load_15m").notNull(),
  memTotalMB: integer("mem_total_mb").notNull(),
  memUsedMB: integer("mem_used_mb").notNull(),
  memUsedPct: real("mem_used_pct").notNull(), // percent, two decimal places
  diskTotalGB: real("disk_total_gb").notNull(),
  diskUsedGB: real("disk_used_gb").notNull(),
  diskUsedPct: real("disk_used_pct").notNull(), // percent, two decimal places
  uptimeSec: integer("uptime_sec").notNull(),
  // TCP socket stats (merged from linux-tcp-socket-stats).
  // Nullable: rows written before migration 0002 have no values here.
  socketsUsed: integer("sockets_used"),
  tcpInuse: integer("tcp_inuse"),
  tcpOrphan: integer("tcp_orphan"),
  tcpTw: integer("tcp_tw"),
  tcpAlloc: integer("tcp_alloc"),
  tcpMemPages: integer("tcp_mem_pages"),
});
|
||||||
44
senses/worker-process-metrics/index.js
Normal file
44
senses/worker-process-metrics/index.js
Normal file
@ -0,0 +1,44 @@
|
|||||||
|
// Bundled output of src/ (esbuild) — regenerate with `pnpm build`; do not edit by hand.

// src/schema.ts
import { integer, real, sqliteTable } from "drizzle-orm/sqlite-core";
// One row per sample of this worker process's own metrics.
var workerProcessMetrics = sqliteTable("worker_process_metrics", {
  ts: integer("ts").primaryKey(),
  pid: integer("pid").notNull(),
  uptimeSec: real("uptime_sec").notNull(),
  heapUsedMB: real("heap_used_mb").notNull(),
  rssMB: real("rss_mb").notNull(),
  externalMB: real("external_mb").notNull()
});

// src/index.ts
// Round to two decimal places.
function round2(n) {
  return Math.round(n * 100) / 100;
}
// Sample the current process's pid, uptime, and memory usage (MB, 2 dp),
// persist one row, and return the same values.
async function compute(db, _peers) {
  const ts = Date.now();
  const pid = process.pid;
  const uptimeSec = process.uptime();
  const m = process.memoryUsage();
  const heapUsedMB = round2(m.heapUsed / 1024 / 1024);
  const rssMB = round2(m.rss / 1024 / 1024);
  const externalMB = round2(m.external / 1024 / 1024);
  const row = {
    ts,
    pid,
    uptimeSec,
    heapUsedMB,
    rssMB,
    externalMB
  };
  await db.insert(workerProcessMetrics).values(row);
  return {
    ts: row.ts,
    pid: row.pid,
    uptimeSec: row.uptimeSec,
    heapUsedMB: row.heapUsedMB,
    rssMB: row.rssMB,
    externalMB: row.externalMB
  };
}
export {
  compute
};
|
||||||
11
senses/worker-process-metrics/migrations/0001_init.sql
Normal file
11
senses/worker-process-metrics/migrations/0001_init.sql
Normal file
@ -0,0 +1,11 @@
|
|||||||
|
-- Migration: 0001_init
-- Creates the worker_process_metrics table for worker-process-metrics sense.
-- One row per sample; `ts` (epoch ms from Date.now()) is the primary key.

CREATE TABLE IF NOT EXISTS worker_process_metrics (
  ts INTEGER PRIMARY KEY,
  pid INTEGER NOT NULL,
  uptime_sec REAL NOT NULL,
  heap_used_mb REAL NOT NULL, -- MB, two decimal places
  rss_mb REAL NOT NULL, -- MB, two decimal places
  external_mb REAL NOT NULL -- MB, two decimal places
);
|
||||||
14
senses/worker-process-metrics/package.json
Normal file
14
senses/worker-process-metrics/package.json
Normal file
@ -0,0 +1,14 @@
|
|||||||
|
{
|
||||||
|
"name": "sense-worker-process-metrics",
|
||||||
|
"version": "0.0.1",
|
||||||
|
"private": true,
|
||||||
|
"type": "module",
|
||||||
|
"scripts": {
|
||||||
|
"build": "esbuild src/index.ts --bundle --platform=node --format=esm --outfile=index.js --packages=external"
|
||||||
|
},
|
||||||
|
"devDependencies": {
|
||||||
|
"@types/node": "^22.0.0",
|
||||||
|
"esbuild": "^0.27.0",
|
||||||
|
"typescript": "^5.7.0"
|
||||||
|
}
|
||||||
|
}
|
||||||
36
senses/worker-process-metrics/src/index.ts
Normal file
36
senses/worker-process-metrics/src/index.ts
Normal file
@ -0,0 +1,36 @@
|
|||||||
|
import type { LibSQLDatabase } from "drizzle-orm/libsql";
|
||||||
|
import { workerProcessMetrics } from "./schema.ts";
|
||||||
|
|
||||||
|
function round2(n: number): number {
|
||||||
|
return Math.round(n * 100) / 100;
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function compute(db: LibSQLDatabase, _peers: unknown) {
|
||||||
|
const ts = Date.now();
|
||||||
|
const pid = process.pid;
|
||||||
|
const uptimeSec = process.uptime();
|
||||||
|
const m = process.memoryUsage();
|
||||||
|
const heapUsedMB = round2(m.heapUsed / 1024 / 1024);
|
||||||
|
const rssMB = round2(m.rss / 1024 / 1024);
|
||||||
|
const externalMB = round2(m.external / 1024 / 1024);
|
||||||
|
|
||||||
|
const row = {
|
||||||
|
ts,
|
||||||
|
pid,
|
||||||
|
uptimeSec,
|
||||||
|
heapUsedMB,
|
||||||
|
rssMB,
|
||||||
|
externalMB,
|
||||||
|
};
|
||||||
|
|
||||||
|
await db.insert(workerProcessMetrics).values(row);
|
||||||
|
|
||||||
|
return {
|
||||||
|
ts: row.ts,
|
||||||
|
pid: row.pid,
|
||||||
|
uptimeSec: row.uptimeSec,
|
||||||
|
heapUsedMB: row.heapUsedMB,
|
||||||
|
rssMB: row.rssMB,
|
||||||
|
externalMB: row.externalMB,
|
||||||
|
};
|
||||||
|
}
|
||||||
10
senses/worker-process-metrics/src/schema.ts
Normal file
10
senses/worker-process-metrics/src/schema.ts
Normal file
@ -0,0 +1,10 @@
|
|||||||
|
import { integer, real, sqliteTable } from "drizzle-orm/sqlite-core";
|
||||||
|
|
||||||
|
// Drizzle table: one row per worker-process metrics sample
// (see migrations/0001_init.sql for the matching DDL).
export const workerProcessMetrics = sqliteTable("worker_process_metrics", {
  ts: integer("ts").primaryKey(), // sample time, epoch ms (Date.now() in compute)
  pid: integer("pid").notNull(),
  uptimeSec: real("uptime_sec").notNull(),
  heapUsedMB: real("heap_used_mb").notNull(), // MB, two decimal places
  rssMB: real("rss_mb").notNull(), // MB, two decimal places
  externalMB: real("external_mb").notNull(), // MB, two decimal places
});
|
||||||
1
workflows/develop-sense/.gitignore
vendored
Normal file
1
workflows/develop-sense/.gitignore
vendored
Normal file
@ -0,0 +1 @@
|
|||||||
|
dist/
|
||||||
39
workflows/develop-sense/build.ts
Normal file
39
workflows/develop-sense/build.ts
Normal file
@ -0,0 +1,39 @@
|
|||||||
|
import type { AgentFn, WorkflowDefinition } from "@uncaged/nerve-core";
|
||||||
|
import type { LlmExtractorConfig } from "@uncaged/nerve-workflow-utils";
|
||||||
|
|
||||||
|
import { moderator } from "./moderator.js";
|
||||||
|
import type { SenseMeta } from "./moderator.js";
|
||||||
|
import { createCoderRole } from "./roles/coder.js";
|
||||||
|
import { createWorkspaceCommitterRole } from "./roles/committer.js";
|
||||||
|
import { createPlannerRole } from "./roles/planner.js";
|
||||||
|
import { createReviewerRole } from "./roles/reviewer.js";
|
||||||
|
import { createTesterRole } from "./roles/tester.js";
|
||||||
|
|
||||||
|
export type CreateDevelopSenseDeps = {
|
||||||
|
defaultAdapter: AgentFn;
|
||||||
|
adapters?: Partial<Record<keyof SenseMeta, AgentFn>>;
|
||||||
|
extract: LlmExtractorConfig;
|
||||||
|
cwd: string;
|
||||||
|
};
|
||||||
|
|
||||||
|
export function createDevelopSenseWorkflow({
|
||||||
|
defaultAdapter,
|
||||||
|
adapters,
|
||||||
|
extract,
|
||||||
|
cwd,
|
||||||
|
}: CreateDevelopSenseDeps): WorkflowDefinition<SenseMeta> {
|
||||||
|
const a = (role: keyof SenseMeta) => adapters?.[role] ?? defaultAdapter;
|
||||||
|
const roles = {
|
||||||
|
planner: createPlannerRole(a('planner'), extract),
|
||||||
|
coder: createCoderRole(a('coder'), extract),
|
||||||
|
reviewer: createReviewerRole(a('reviewer'), extract, cwd),
|
||||||
|
tester: createTesterRole(a('tester'), extract, cwd),
|
||||||
|
committer: createWorkspaceCommitterRole(a('committer'), extract),
|
||||||
|
};
|
||||||
|
|
||||||
|
return {
|
||||||
|
name: "develop-sense",
|
||||||
|
roles,
|
||||||
|
moderator,
|
||||||
|
};
|
||||||
|
}
|
||||||
@ -1,7 +1,7 @@
|
|||||||
import { join } from "node:path";
|
import { join } from "node:path";
|
||||||
import { createCursorAdapter, cursorAdapter } from "@uncaged/nerve-adapter-cursor";
|
import { createCursorAdapter, cursorAdapter } from "@uncaged/nerve-adapter-cursor";
|
||||||
import { hermesAdapter } from "@uncaged/nerve-adapter-hermes";
|
import { hermesAdapter } from "@uncaged/nerve-adapter-hermes";
|
||||||
import { createDevelopSenseWorkflow } from "@uncaged/nerve-workflow-meta";
|
import { createDevelopSenseWorkflow } from "./build.js";
|
||||||
|
|
||||||
const HOME = process.env.HOME ?? "/home/azureuser";
|
const HOME = process.env.HOME ?? "/home/azureuser";
|
||||||
const NERVE_ROOT = join(HOME, ".uncaged-nerve");
|
const NERVE_ROOT = join(HOME, ".uncaged-nerve");
|
||||||
|
|||||||
65
workflows/develop-sense/moderator.ts
Normal file
65
workflows/develop-sense/moderator.ts
Normal file
@ -0,0 +1,65 @@
|
|||||||
|
import { END } from "@uncaged/nerve-core";
|
||||||
|
import type { Moderator } from "@uncaged/nerve-core";
|
||||||
|
import type { PlannerMeta } from "./roles/planner.js";
|
||||||
|
import type { CoderMeta } from "./roles/coder.js";
|
||||||
|
import type { ReviewerMeta } from "./roles/reviewer.js";
|
||||||
|
import type { TesterMeta } from "./roles/tester.js";
|
||||||
|
import type { CommitterMeta } from "./roles/committer.js";
|
||||||
|
|
||||||
|
/** Per-role extracted metadata for the develop-sense workflow steps. */
export type SenseMeta = {
  planner: PlannerMeta;
  coder: CoderMeta;
  reviewer: ReviewerMeta;
  tester: TesterMeta;
  committer: CommitterMeta;
};

// Loop guards: cap total coder iterations and total rejections (reviewer /
// tester / committer failures) so a stuck workflow terminates instead of
// cycling forever.
const MAX_CODER_ROUNDS = 20;
const MAX_TOTAL_REJECTIONS = 10;
|
||||||
|
|
||||||
|
function coderRounds(steps: { role: string }[]): number {
|
||||||
|
return steps.filter((s) => s.role === "coder").length;
|
||||||
|
}
|
||||||
|
|
||||||
|
function totalRejections(steps: { role: string; meta: unknown }[]): number {
|
||||||
|
return steps.filter((s) => {
|
||||||
|
if (s.role === "reviewer") return !(s.meta as Record<string, boolean>).approved;
|
||||||
|
if (s.role === "tester") return !(s.meta as Record<string, boolean>).passed;
|
||||||
|
if (s.role === "committer") return !(s.meta as Record<string, boolean>).committed;
|
||||||
|
return false;
|
||||||
|
}).length;
|
||||||
|
}
|
||||||
|
|
||||||
|
function canRetryCoder(steps: { role: string; meta: unknown }[]): boolean {
|
||||||
|
return coderRounds(steps) < MAX_CODER_ROUNDS && totalRejections(steps) < MAX_TOTAL_REJECTIONS;
|
||||||
|
}
|
||||||
|
|
||||||
|
export const moderator: Moderator<SenseMeta> = (context) => {
|
||||||
|
if (context.steps.length === 0) return "planner";
|
||||||
|
|
||||||
|
const last = context.steps[context.steps.length - 1];
|
||||||
|
|
||||||
|
if (last.role === "planner") return "coder";
|
||||||
|
|
||||||
|
if (last.role === "coder") {
|
||||||
|
if (last.meta.filesCreated) return "reviewer";
|
||||||
|
return canRetryCoder(context.steps) ? "coder" : END;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (last.role === "reviewer") {
|
||||||
|
if (last.meta.approved) return "tester";
|
||||||
|
return canRetryCoder(context.steps) ? "coder" : END;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (last.role === "tester") {
|
||||||
|
if (last.meta.passed) return "committer";
|
||||||
|
return canRetryCoder(context.steps) ? "coder" : END;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (last.role === "committer") {
|
||||||
|
if (last.meta.committed) return END;
|
||||||
|
return canRetryCoder(context.steps) ? "coder" : END;
|
||||||
|
}
|
||||||
|
|
||||||
|
return END;
|
||||||
|
};
|
||||||
21
workflows/develop-sense/package.json
Normal file
21
workflows/develop-sense/package.json
Normal file
@ -0,0 +1,21 @@
|
|||||||
|
{
|
||||||
|
"name": "generate-sense-workflow",
|
||||||
|
"version": "0.0.1",
|
||||||
|
"private": true,
|
||||||
|
"type": "module",
|
||||||
|
"scripts": {
|
||||||
|
"build": "esbuild index.ts --bundle --platform=node --format=esm --outdir=dist --packages=external"
|
||||||
|
},
|
||||||
|
"dependencies": {
|
||||||
|
"@uncaged/nerve-adapter-cursor": "latest",
|
||||||
|
"@uncaged/nerve-adapter-hermes": "latest",
|
||||||
|
"@uncaged/nerve-core": "latest",
|
||||||
|
"@uncaged/nerve-workflow-utils": "latest",
|
||||||
|
"zod": "^4.3.6"
|
||||||
|
},
|
||||||
|
"devDependencies": {
|
||||||
|
"@types/node": "^22.0.0",
|
||||||
|
"esbuild": "^0.27.0",
|
||||||
|
"typescript": "^5.7.0"
|
||||||
|
}
|
||||||
|
}
|
||||||
50
workflows/develop-sense/roles/coder.ts
Normal file
50
workflows/develop-sense/roles/coder.ts
Normal file
@ -0,0 +1,50 @@
|
|||||||
|
import type { AgentFn, Role, StartStep } from "@uncaged/nerve-core";
|
||||||
|
import type { LlmExtractorConfig } from "@uncaged/nerve-workflow-utils";
|
||||||
|
import { createRole } from "@uncaged/nerve-workflow-utils";
|
||||||
|
import { z } from "zod";
|
||||||
|
|
||||||
|
// Structured output extracted from the coder's transcript after each step.
export const coderMetaSchema = z.object({
  filesCreated: z.boolean().describe("true if the sense files were created"),
});
export type CoderMeta = z.infer<typeof coderMetaSchema>;
|
||||||
|
|
||||||
|
/**
 * Build the instruction prompt for the coder role.
 *
 * @param threadId - workflow thread the agent reads for the planner's design
 *   and any tester feedback.
 * @returns the full prompt text handed to the coding agent.
 *
 * NOTE(review): the prompt instructs the agent to report `done: true/false`,
 * but coderMetaSchema extracts `filesCreated` — confirm the extractor maps
 * the transcript onto that field as intended.
 */
export function coderPrompt({ threadId }: { threadId: string }): string {
  return `Read the workflow thread for the planner's sense design and any tester feedback: \`nerve thread ${threadId}\`
Read the nerve-dev skill for sense file structure and conventions: \`cat node_modules/@uncaged/nerve-skills/nerve-dev/SKILL.md\`

## Your task

Implement (or fix) the sense the planner designed. If there is tester feedback in the thread, fix the issues it identified.

## Multi-step approach

You do NOT need to finish everything in one pass. You may return \`done: false\` to continue in the next iteration.

## File structure for each sense

- \`senses/<name>/src/index.ts\` — TypeScript compute source; import schema as \`./schema.ts\`
- \`senses/<name>/src/schema.ts\` — Drizzle schema (TypeScript)
- \`senses/<name>/migrations/\` — Drizzle migration files (at sense root, not inside src/)
- \`senses/<name>/package.json\` — with esbuild build script
- \`senses/<name>/index.js\` — bundled output generated by \`pnpm build\` (do NOT edit by hand)

Look at existing senses for the package.json template and patterns.

## When to return done: true

Return \`done: true\` ONLY when ALL of the following are true:
- All required files are created
- \`pnpm install --no-cache && pnpm build\` succeeds (run it!)
- \`nerve.yaml\` is updated with the sense config

Return \`done: false\` if you made progress but there is still work to do.`;
}
|
||||||
|
|
||||||
|
export function createCoderRole(adapter: AgentFn, extract: LlmExtractorConfig): Role<CoderMeta> {
|
||||||
|
return createRole(
|
||||||
|
adapter,
|
||||||
|
async (start: StartStep) => coderPrompt({ threadId: start.meta.threadId }),
|
||||||
|
coderMetaSchema,
|
||||||
|
extract,
|
||||||
|
);
|
||||||
|
}
|
||||||
5
workflows/develop-sense/roles/committer.ts
Normal file
5
workflows/develop-sense/roles/committer.ts
Normal file
@ -0,0 +1,5 @@
|
|||||||
|
export {
|
||||||
|
createCommitterRole as createWorkspaceCommitterRole,
|
||||||
|
committerMetaSchema,
|
||||||
|
type CommitterMeta,
|
||||||
|
} from "@uncaged/nerve-role-committer";
|
||||||
36
workflows/develop-sense/roles/planner.ts
Normal file
36
workflows/develop-sense/roles/planner.ts
Normal file
@ -0,0 +1,36 @@
|
|||||||
|
import type { AgentFn, Role, StartStep } from "@uncaged/nerve-core";
|
||||||
|
import type { LlmExtractorConfig } from "@uncaged/nerve-workflow-utils";
|
||||||
|
import { createRole } from "@uncaged/nerve-workflow-utils";
|
||||||
|
import { z } from "zod";
|
||||||
|
|
||||||
|
export const plannerMetaSchema = z.object({
|
||||||
|
senseName: z.string().describe("kebab-case sense name from the plan"),
|
||||||
|
});
|
||||||
|
export type PlannerMeta = z.infer<typeof plannerMetaSchema>;
|
||||||
|
|
||||||
|
export function plannerPrompt({ threadId }: { threadId: string }): string {
|
||||||
|
return `You are planning a new Nerve sense.
|
||||||
|
|
||||||
|
Read the workflow thread for the user's request: \`nerve thread ${threadId}\`
|
||||||
|
Read the nerve-dev skill for sense conventions: \`cat node_modules/@uncaged/nerve-skills/nerve-dev/SKILL.md\`
|
||||||
|
Also look at existing senses in the \`senses/\` directory for patterns.
|
||||||
|
|
||||||
|
Pick a good kebab-case name for this sense. Produce a PLAN (not code) in markdown:
|
||||||
|
|
||||||
|
## Sense Design
|
||||||
|
### Name — kebab-case
|
||||||
|
### Fields — name, type (integer/real/text), description
|
||||||
|
### Compute Logic — step-by-step, specific Node.js APIs or shell commands
|
||||||
|
### Trigger Config — group, interval, throttle, timeout
|
||||||
|
|
||||||
|
Output ONLY the plan. Be precise and implementation-ready.`;
|
||||||
|
}
|
||||||
|
|
||||||
|
export function createPlannerRole(adapter: AgentFn, extract: LlmExtractorConfig): Role<PlannerMeta> {
|
||||||
|
return createRole(
|
||||||
|
adapter,
|
||||||
|
async (start: StartStep) => plannerPrompt({ threadId: start.meta.threadId }),
|
||||||
|
plannerMetaSchema,
|
||||||
|
extract,
|
||||||
|
);
|
||||||
|
}
|
||||||
3
workflows/develop-sense/roles/reviewer.ts
Normal file
3
workflows/develop-sense/roles/reviewer.ts
Normal file
@ -0,0 +1,3 @@
|
|||||||
|
import { createReviewerRole } from "@uncaged/nerve-role-reviewer";
|
||||||
|
export { createReviewerRole };
|
||||||
|
export type { ReviewerMeta } from "@uncaged/nerve-role-reviewer";
|
||||||
58
workflows/develop-sense/roles/tester.ts
Normal file
58
workflows/develop-sense/roles/tester.ts
Normal file
@ -0,0 +1,58 @@
|
|||||||
|
import type { AgentFn, Role, StartStep } from "@uncaged/nerve-core";
|
||||||
|
import type { LlmExtractorConfig } from "@uncaged/nerve-workflow-utils";
|
||||||
|
import { createRole } from "@uncaged/nerve-workflow-utils";
|
||||||
|
import { z } from "zod";
|
||||||
|
|
||||||
|
export const testerMetaSchema = z.object({
|
||||||
|
passed: z.boolean().describe("true if all e2e checks passed"),
|
||||||
|
});
|
||||||
|
export type TesterMeta = z.infer<typeof testerMetaSchema>;
|
||||||
|
|
||||||
|
export function testerPrompt({ threadId, nerveRoot }: { threadId: string; nerveRoot: string }): string {
|
||||||
|
return `You are testing a newly created Nerve sense end-to-end.
|
||||||
|
|
||||||
|
**IMPORTANT: The Nerve workspace is at \`${nerveRoot}\`. All paths below are relative to this directory. Always \`cd ${nerveRoot}\` first.**
|
||||||
|
|
||||||
|
Read the workflow thread for context: \`nerve thread ${threadId}\`
|
||||||
|
Read the nerve-dev skill for expected file structure: \`cat ${nerveRoot}/node_modules/@uncaged/nerve-skills/nerve-dev/SKILL.md\`
|
||||||
|
|
||||||
|
Verify the full lifecycle in this order:
|
||||||
|
|
||||||
|
1. **File check** — all required sense files exist:
|
||||||
|
- \`senses/<name>/src/index.ts\`
|
||||||
|
- \`senses/<name>/src/schema.ts\`
|
||||||
|
- \`senses/<name>/migrations/\`
|
||||||
|
- \`senses/<name>/package.json\`
|
||||||
|
|
||||||
|
2. **Build** — run inside the sense directory:
|
||||||
|
\`\`\`
|
||||||
|
cd ${nerveRoot}/senses/<name> && pnpm install --no-cache && pnpm build
|
||||||
|
\`\`\`
|
||||||
|
Must produce \`index.js\` at sense root without errors.
|
||||||
|
|
||||||
|
3. **Config check** — \`nerve validate\` passes, confirming nerve.yaml is valid.
|
||||||
|
|
||||||
|
4. **Sense list** — \`nerve sense list\` shows the sense.
|
||||||
|
|
||||||
|
5. **Trigger** — \`nerve sense trigger <name>\` completes without error.
|
||||||
|
|
||||||
|
6. **Query** — \`nerve sense query <name>\` — retry up to 20s until rows appear.
|
||||||
|
|
||||||
|
If any step fails, include the relevant error output.
|
||||||
|
|
||||||
|
Output a clear summary: what you checked, what passed, what failed, and why.`;
|
||||||
|
}
|
||||||
|
|
||||||
|
export function createTesterRole(
|
||||||
|
adapter: AgentFn,
|
||||||
|
extract: LlmExtractorConfig,
|
||||||
|
nerveRoot: string,
|
||||||
|
): Role<TesterMeta> {
|
||||||
|
return createRole(
|
||||||
|
adapter,
|
||||||
|
async (start: StartStep) =>
|
||||||
|
testerPrompt({ threadId: start.meta.threadId, nerveRoot }),
|
||||||
|
testerMetaSchema,
|
||||||
|
extract,
|
||||||
|
);
|
||||||
|
}
|
||||||
14
workflows/develop-sense/tsconfig.json
Normal file
14
workflows/develop-sense/tsconfig.json
Normal file
@ -0,0 +1,14 @@
|
|||||||
|
{
|
||||||
|
"compilerOptions": {
|
||||||
|
"target": "ES2022",
|
||||||
|
"lib": ["ES2022"],
|
||||||
|
"module": "NodeNext",
|
||||||
|
"moduleResolution": "NodeNext",
|
||||||
|
"strict": true,
|
||||||
|
"skipLibCheck": true,
|
||||||
|
"noEmit": false,
|
||||||
|
"declaration": false,
|
||||||
|
"types": ["node"]
|
||||||
|
},
|
||||||
|
"include": ["./**/*.ts", "../_shared/**/*.ts"]
|
||||||
|
}
|
||||||
1
workflows/develop-workflow/.gitignore
vendored
Normal file
1
workflows/develop-workflow/.gitignore
vendored
Normal file
@ -0,0 +1 @@
|
|||||||
|
dist/
|
||||||
39
workflows/develop-workflow/build.ts
Normal file
39
workflows/develop-workflow/build.ts
Normal file
@ -0,0 +1,39 @@
|
|||||||
|
import type { AgentFn, WorkflowDefinition } from "@uncaged/nerve-core";
|
||||||
|
import type { LlmExtractorConfig } from "@uncaged/nerve-workflow-utils";
|
||||||
|
|
||||||
|
import { moderator } from "./moderator.js";
|
||||||
|
import type { WorkflowMeta } from "./moderator.js";
|
||||||
|
import { createCoderRole } from "./roles/coder.js";
|
||||||
|
import { createWorkspaceCommitterRole } from "./roles/committer.js";
|
||||||
|
import { createPlannerRole } from "./roles/planner.js";
|
||||||
|
import { createReviewerRole } from "./roles/reviewer.js";
|
||||||
|
import { createTesterRole } from "./roles/tester.js";
|
||||||
|
|
||||||
|
export type CreateDevelopWorkflowDeps = {
|
||||||
|
defaultAdapter: AgentFn;
|
||||||
|
adapters?: Partial<Record<keyof WorkflowMeta, AgentFn>>;
|
||||||
|
extract: LlmExtractorConfig;
|
||||||
|
nerveRoot: string;
|
||||||
|
};
|
||||||
|
|
||||||
|
export function createDevelopWorkflowWorkflow({
|
||||||
|
defaultAdapter,
|
||||||
|
adapters,
|
||||||
|
extract,
|
||||||
|
nerveRoot,
|
||||||
|
}: CreateDevelopWorkflowDeps): WorkflowDefinition<WorkflowMeta> {
|
||||||
|
const a = (role: keyof WorkflowMeta) => adapters?.[role] ?? defaultAdapter;
|
||||||
|
const roles = {
|
||||||
|
planner: createPlannerRole(a('planner'), extract),
|
||||||
|
coder: createCoderRole(a('coder'), extract),
|
||||||
|
reviewer: createReviewerRole(a('reviewer'), extract, nerveRoot),
|
||||||
|
tester: createTesterRole(a('tester'), extract, nerveRoot),
|
||||||
|
committer: createWorkspaceCommitterRole(a('committer'), extract),
|
||||||
|
};
|
||||||
|
|
||||||
|
return {
|
||||||
|
name: "develop-workflow",
|
||||||
|
roles,
|
||||||
|
moderator,
|
||||||
|
};
|
||||||
|
}
|
||||||
@ -1,7 +1,7 @@
|
|||||||
import { join } from "node:path";
|
import { join } from "node:path";
|
||||||
import { createCursorAdapter, cursorAdapter } from "@uncaged/nerve-adapter-cursor";
|
import { createCursorAdapter, cursorAdapter } from "@uncaged/nerve-adapter-cursor";
|
||||||
import { hermesAdapter } from "@uncaged/nerve-adapter-hermes";
|
import { hermesAdapter } from "@uncaged/nerve-adapter-hermes";
|
||||||
import { createDevelopWorkflowWorkflow } from "@uncaged/nerve-workflow-meta";
|
import { createDevelopWorkflowWorkflow } from "./build.js";
|
||||||
|
|
||||||
const HOME = process.env.HOME ?? "/home/azureuser";
|
const HOME = process.env.HOME ?? "/home/azureuser";
|
||||||
const NERVE_ROOT = join(HOME, ".uncaged-nerve");
|
const NERVE_ROOT = join(HOME, ".uncaged-nerve");
|
||||||
|
|||||||
67
workflows/develop-workflow/moderator.ts
Normal file
67
workflows/develop-workflow/moderator.ts
Normal file
@ -0,0 +1,67 @@
|
|||||||
|
import { END } from "@uncaged/nerve-core";
|
||||||
|
import type { Moderator } from "@uncaged/nerve-core";
|
||||||
|
import type { PlannerMeta } from "./roles/planner.js";
|
||||||
|
import type { CoderMeta } from "./roles/coder.js";
|
||||||
|
import type { ReviewerMeta } from "./roles/reviewer.js";
|
||||||
|
import type { TesterMeta } from "./roles/tester.js";
|
||||||
|
import type { CommitterMeta } from "./roles/committer.js";
|
||||||
|
|
||||||
|
export type WorkflowMeta = {
|
||||||
|
planner: PlannerMeta;
|
||||||
|
coder: CoderMeta;
|
||||||
|
reviewer: ReviewerMeta;
|
||||||
|
tester: TesterMeta;
|
||||||
|
committer: CommitterMeta;
|
||||||
|
};
|
||||||
|
|
||||||
|
const MAX_CODER_ROUNDS = 20;
|
||||||
|
const MAX_TOTAL_REJECTIONS = 10;
|
||||||
|
|
||||||
|
function coderRounds(steps: { role: string }[]): number {
|
||||||
|
return steps.filter((s) => s.role === "coder").length;
|
||||||
|
}
|
||||||
|
|
||||||
|
function totalRejections(steps: { role: string; meta: unknown }[]): number {
|
||||||
|
return steps.filter((s) => {
|
||||||
|
if (s.role === "reviewer") return !(s.meta as Record<string, boolean>).approved;
|
||||||
|
if (s.role === "tester") return !(s.meta as Record<string, boolean>).passed;
|
||||||
|
if (s.role === "committer") return !(s.meta as Record<string, boolean>).committed;
|
||||||
|
return false;
|
||||||
|
}).length;
|
||||||
|
}
|
||||||
|
|
||||||
|
function canRetryCoder(steps: { role: string; meta: unknown }[]): boolean {
|
||||||
|
return coderRounds(steps) < MAX_CODER_ROUNDS && totalRejections(steps) < MAX_TOTAL_REJECTIONS;
|
||||||
|
}
|
||||||
|
|
||||||
|
export const moderator: Moderator<WorkflowMeta> = (context) => {
|
||||||
|
if (context.steps.length === 0) return "planner";
|
||||||
|
|
||||||
|
const last = context.steps[context.steps.length - 1];
|
||||||
|
|
||||||
|
if (last.role === "planner") {
|
||||||
|
return last.meta.ready ? "coder" : END;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (last.role === "coder") {
|
||||||
|
if (last.meta.done) return "reviewer";
|
||||||
|
return canRetryCoder(context.steps) ? "coder" : END;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (last.role === "reviewer") {
|
||||||
|
if (last.meta.approved) return "tester";
|
||||||
|
return canRetryCoder(context.steps) ? "coder" : END;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (last.role === "tester") {
|
||||||
|
if (last.meta.passed) return "committer";
|
||||||
|
return canRetryCoder(context.steps) ? "coder" : END;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (last.role === "committer") {
|
||||||
|
if (last.meta.committed) return END;
|
||||||
|
return canRetryCoder(context.steps) ? "coder" : END;
|
||||||
|
}
|
||||||
|
|
||||||
|
return END;
|
||||||
|
};
|
||||||
21
workflows/develop-workflow/package.json
Normal file
21
workflows/develop-workflow/package.json
Normal file
@ -0,0 +1,21 @@
|
|||||||
|
{
|
||||||
|
"name": "generate-workflow-workflow",
|
||||||
|
"version": "0.0.1",
|
||||||
|
"private": true,
|
||||||
|
"type": "module",
|
||||||
|
"scripts": {
|
||||||
|
"build": "esbuild index.ts --bundle --platform=node --format=esm --outdir=dist --packages=external"
|
||||||
|
},
|
||||||
|
"dependencies": {
|
||||||
|
"@uncaged/nerve-adapter-cursor": "latest",
|
||||||
|
"@uncaged/nerve-adapter-hermes": "latest",
|
||||||
|
"@uncaged/nerve-core": "latest",
|
||||||
|
"@uncaged/nerve-workflow-utils": "latest",
|
||||||
|
"zod": "^4.3.6"
|
||||||
|
},
|
||||||
|
"devDependencies": {
|
||||||
|
"@types/node": "^22.0.0",
|
||||||
|
"esbuild": "^0.27.0",
|
||||||
|
"typescript": "^5.7.0"
|
||||||
|
}
|
||||||
|
}
|
||||||
69
workflows/develop-workflow/roles/coder.ts
Normal file
69
workflows/develop-workflow/roles/coder.ts
Normal file
@ -0,0 +1,69 @@
|
|||||||
|
import type { AgentFn, Role, StartStep } from "@uncaged/nerve-core";
|
||||||
|
import type { LlmExtractorConfig } from "@uncaged/nerve-workflow-utils";
|
||||||
|
import { createRole } from "@uncaged/nerve-workflow-utils";
|
||||||
|
import { z } from "zod";
|
||||||
|
|
||||||
|
export const coderMetaSchema = z.object({
|
||||||
|
done: z.boolean().describe("true if the workflow files were created and build passes"),
|
||||||
|
});
|
||||||
|
export type CoderMeta = z.infer<typeof coderMetaSchema>;
|
||||||
|
|
||||||
|
export function coderPrompt({ threadId }: { threadId: string }): string {
|
||||||
|
return `Read the workflow thread to get the planner's design and any reviewer/tester/committer feedback: \`nerve thread ${threadId}\`
|
||||||
|
Read the nerve-dev skill for workflow file structure and conventions: \`cat node_modules/@uncaged/nerve-skills/nerve-dev/SKILL.md\`
|
||||||
|
Also look at existing workflows in the \`workflows/\` directory for patterns.
|
||||||
|
|
||||||
|
## Your task
|
||||||
|
|
||||||
|
Implement the planner's design. This may be **creating a new workflow** or **modifying an existing one**. If there is reviewer, tester, or committer feedback in the thread, fix the issues they identified.
|
||||||
|
|
||||||
|
**IMPORTANT:** The thread contains both the **initial user prompt** (the first message) and the **planner's design**. Read both carefully:
|
||||||
|
- The **initial prompt** contains the user's specific requirements for role behavior, tools to use, and acceptance criteria
|
||||||
|
- The **planner's design** contains the architecture, file structure, and routing logic
|
||||||
|
- When writing role prompts, follow the user's behavioral requirements from the initial prompt — do not invent your own interpretation
|
||||||
|
|
||||||
|
## Multi-step approach
|
||||||
|
|
||||||
|
You do NOT need to finish everything in one pass. You may return \`done: false\` to continue in the next iteration. For example:
|
||||||
|
1. First pass: scaffold files / make structural changes
|
||||||
|
2. Second pass: implement role logic
|
||||||
|
3. Third pass: fix build/lint errors
|
||||||
|
|
||||||
|
## Workflow file structure
|
||||||
|
|
||||||
|
Each workflow must have:
|
||||||
|
- \`workflows/<name>/index.ts\` — WorkflowDefinition default export
|
||||||
|
- \`workflows/<name>/build.ts\` — factory function
|
||||||
|
- \`workflows/<name>/moderator.ts\` — moderator + meta types
|
||||||
|
- \`workflows/<name>/roles/<role>.ts\` — meta schema and prompt function per role
|
||||||
|
- \`workflows/<name>/package.json\` — with esbuild build script
|
||||||
|
- \`workflows/<name>/tsconfig.json\` — TypeScript config
|
||||||
|
|
||||||
|
For **new workflows**, also update \`nerve.yaml\` with \`workflows.<name>\`.
|
||||||
|
|
||||||
|
## Rules
|
||||||
|
|
||||||
|
- Keep the WorkflowDefinition<WorkflowMeta> pattern
|
||||||
|
- No dynamic import()
|
||||||
|
- Use types (not interfaces)
|
||||||
|
- Meta should be simple routing signals (single boolean per role)
|
||||||
|
- Write compile-ready TypeScript
|
||||||
|
|
||||||
|
## When to return done: true
|
||||||
|
|
||||||
|
Return \`done: true\` ONLY when ALL of the following are true:
|
||||||
|
- All changes from the plan are implemented
|
||||||
|
- \`cd workflows/<name> && pnpm install --no-cache && pnpm build\` succeeds (run it!)
|
||||||
|
- No lint or type errors remain
|
||||||
|
|
||||||
|
Return \`done: false\` if you made progress but there is still work to do, or if build/lint has errors you plan to fix in the next iteration.`;
|
||||||
|
}
|
||||||
|
|
||||||
|
export function createCoderRole(adapter: AgentFn, extract: LlmExtractorConfig): Role<CoderMeta> {
|
||||||
|
return createRole(
|
||||||
|
adapter,
|
||||||
|
async (start: StartStep) => coderPrompt({ threadId: start.meta.threadId }),
|
||||||
|
coderMetaSchema,
|
||||||
|
extract,
|
||||||
|
);
|
||||||
|
}
|
||||||
5
workflows/develop-workflow/roles/committer.ts
Normal file
5
workflows/develop-workflow/roles/committer.ts
Normal file
@ -0,0 +1,5 @@
|
|||||||
|
export {
|
||||||
|
createCommitterRole as createWorkspaceCommitterRole,
|
||||||
|
committerMetaSchema,
|
||||||
|
type CommitterMeta,
|
||||||
|
} from "@uncaged/nerve-role-committer";
|
||||||
65
workflows/develop-workflow/roles/planner.ts
Normal file
65
workflows/develop-workflow/roles/planner.ts
Normal file
@ -0,0 +1,65 @@
|
|||||||
|
import type { AgentFn, Role, StartStep } from "@uncaged/nerve-core";
|
||||||
|
import type { LlmExtractorConfig } from "@uncaged/nerve-workflow-utils";
|
||||||
|
import { createRole } from "@uncaged/nerve-workflow-utils";
|
||||||
|
import { z } from "zod";
|
||||||
|
|
||||||
|
export const plannerMetaSchema = z.object({
|
||||||
|
ready: z.boolean().describe("true if requirements are clear and a workflow can be implemented"),
|
||||||
|
});
|
||||||
|
export type PlannerMeta = z.infer<typeof plannerMetaSchema>;
|
||||||
|
|
||||||
|
export function plannerPrompt({ threadId }: { threadId: string }): string {
|
||||||
|
return `You are a Nerve workflow planner. You can **create new workflows** or **modify existing ones**.
|
||||||
|
|
||||||
|
Read the workflow thread for the user's request: \`nerve thread ${threadId}\`
|
||||||
|
Read the nerve-dev skill for workflow conventions: \`cat node_modules/@uncaged/nerve-skills/nerve-dev/SKILL.md\`
|
||||||
|
List existing workflows: \`ls workflows/\`
|
||||||
|
|
||||||
|
## Determine the task type
|
||||||
|
|
||||||
|
1. If the user wants to **modify an existing workflow** — read its current code (\`cat workflows/<name>/moderator.ts\`, \`cat workflows/<name>/build.ts\`, \`ls workflows/<name>/roles/\`, etc.) and understand its current structure before planning changes.
|
||||||
|
2. If the user wants to **create a new workflow** — look at existing workflows in \`workflows/\` for patterns to follow.
|
||||||
|
|
||||||
|
## Produce a PLAN (not code) in markdown
|
||||||
|
|
||||||
|
For **new workflows**:
|
||||||
|
- Workflow name (kebab-case)
|
||||||
|
- Roles list (name, purpose, tool)
|
||||||
|
- Flow transitions / moderator routing logic
|
||||||
|
- Validation loops design
|
||||||
|
- External dependencies
|
||||||
|
- Data flow between roles
|
||||||
|
|
||||||
|
For **modifications to existing workflows**:
|
||||||
|
- Workflow name (existing)
|
||||||
|
- What changes are needed and why
|
||||||
|
- Files to add/modify/delete
|
||||||
|
- Impact on moderator routing logic (this workflow's typical order is planner → coder → reviewer → tester → committer)
|
||||||
|
- Backward compatibility considerations (if any)
|
||||||
|
|
||||||
|
**For every role (new or modified)**, include a **Role Behavior** section that describes:
|
||||||
|
- What the role should do, check, or produce
|
||||||
|
- What tools or commands it should use
|
||||||
|
- What criteria determine its meta output (e.g. approved/passed/done)
|
||||||
|
- Preserve the user's specific requirements verbatim — do NOT summarize away details
|
||||||
|
|
||||||
|
If requirements are NOT clear, describe what is missing or ambiguous.
|
||||||
|
|
||||||
|
End your response with a JSON block:
|
||||||
|
\`\`\`json
|
||||||
|
{ "ready": true }
|
||||||
|
\`\`\`
|
||||||
|
or
|
||||||
|
\`\`\`json
|
||||||
|
{ "ready": false }
|
||||||
|
\`\`\``;
|
||||||
|
}
|
||||||
|
|
||||||
|
export function createPlannerRole(adapter: AgentFn, extract: LlmExtractorConfig): Role<PlannerMeta> {
|
||||||
|
return createRole(
|
||||||
|
adapter,
|
||||||
|
async (start: StartStep) => plannerPrompt({ threadId: start.meta.threadId }),
|
||||||
|
plannerMetaSchema,
|
||||||
|
extract,
|
||||||
|
);
|
||||||
|
}
|
||||||
3
workflows/develop-workflow/roles/reviewer.ts
Normal file
3
workflows/develop-workflow/roles/reviewer.ts
Normal file
@ -0,0 +1,3 @@
|
|||||||
|
import { createReviewerRole } from "@uncaged/nerve-role-reviewer";
|
||||||
|
export { createReviewerRole };
|
||||||
|
export type { ReviewerMeta } from "@uncaged/nerve-role-reviewer";
|
||||||
59
workflows/develop-workflow/roles/tester.ts
Normal file
59
workflows/develop-workflow/roles/tester.ts
Normal file
@ -0,0 +1,59 @@
|
|||||||
|
import type { AgentFn, Role, StartStep } from "@uncaged/nerve-core";
|
||||||
|
import type { LlmExtractorConfig } from "@uncaged/nerve-workflow-utils";
|
||||||
|
import { createRole } from "@uncaged/nerve-workflow-utils";
|
||||||
|
import { z } from "zod";
|
||||||
|
|
||||||
|
export const testerMetaSchema = z.object({
|
||||||
|
passed: z.boolean().describe("true if all validation checks passed"),
|
||||||
|
});
|
||||||
|
export type TesterMeta = z.infer<typeof testerMetaSchema>;
|
||||||
|
|
||||||
|
export function testerPrompt({ threadId, nerveRoot }: { threadId: string; nerveRoot: string }): string {
|
||||||
|
return `You are testing a Nerve workflow — either newly created or recently modified.
|
||||||
|
|
||||||
|
**IMPORTANT: The Nerve workspace is at \`${nerveRoot}\`. All paths below are relative to this directory. Always \`cd ${nerveRoot}\` first.**
|
||||||
|
|
||||||
|
Read the workflow thread for context: \`nerve thread ${threadId}\`
|
||||||
|
Read the nerve-dev skill for expected file structure: \`cat ${nerveRoot}/node_modules/@uncaged/nerve-skills/nerve-dev/SKILL.md\`
|
||||||
|
|
||||||
|
Get the workflow name from the thread (the planner's output).
|
||||||
|
|
||||||
|
Verify the full lifecycle in this order:
|
||||||
|
|
||||||
|
1. **File check** — all required workflow files exist (under \`${nerveRoot}/\`):
|
||||||
|
- \`workflows/<name>/index.ts\`
|
||||||
|
- \`workflows/<name>/build.ts\`
|
||||||
|
- \`workflows/<name>/moderator.ts\`
|
||||||
|
- \`workflows/<name>/roles/\` with one \`.ts\` file per role
|
||||||
|
- \`workflows/<name>/package.json\`
|
||||||
|
|
||||||
|
2. **Build** — run inside the workflow directory:
|
||||||
|
\`\`\`
|
||||||
|
cd ${nerveRoot}/workflows/<name> && pnpm install --no-cache && pnpm build
|
||||||
|
\`\`\`
|
||||||
|
Must produce \`dist/index.js\` without errors.
|
||||||
|
|
||||||
|
3. **Config check** — \`cd ${nerveRoot} && nerve validate\` passes, confirming nerve.yaml is valid.
|
||||||
|
|
||||||
|
4. **Workflow list** — \`nerve workflow list\` shows the workflow.
|
||||||
|
|
||||||
|
5. **Trigger test** — \`nerve workflow trigger <name> --dry-run\` if available, otherwise just confirm the workflow appears in \`nerve workflow status\`.
|
||||||
|
|
||||||
|
If any step fails, include the relevant error output.
|
||||||
|
|
||||||
|
Output a clear summary: what you checked, what passed, what failed, and why.`;
|
||||||
|
}
|
||||||
|
|
||||||
|
export function createTesterRole(
|
||||||
|
adapter: AgentFn,
|
||||||
|
extract: LlmExtractorConfig,
|
||||||
|
nerveRoot: string,
|
||||||
|
): Role<TesterMeta> {
|
||||||
|
return createRole(
|
||||||
|
adapter,
|
||||||
|
async (start: StartStep) =>
|
||||||
|
testerPrompt({ threadId: start.meta.threadId, nerveRoot }),
|
||||||
|
testerMetaSchema,
|
||||||
|
extract,
|
||||||
|
);
|
||||||
|
}
|
||||||
@ -7,13 +7,7 @@
|
|||||||
"strict": true,
|
"strict": true,
|
||||||
"skipLibCheck": true,
|
"skipLibCheck": true,
|
||||||
"noEmit": true,
|
"noEmit": true,
|
||||||
"allowImportingTsExtensions": true,
|
|
||||||
"types": ["node"]
|
"types": ["node"]
|
||||||
},
|
},
|
||||||
"include": [
|
"include": ["./**/*.ts", "../_shared/**/*.ts"]
|
||||||
"senses/**/*.ts",
|
|
||||||
"workflows/**/*.ts",
|
|
||||||
"scripts/**/*.ts",
|
|
||||||
"workflows/_shared/**/*.ts"
|
|
||||||
]
|
|
||||||
}
|
}
|
||||||
@ -1,33 +0,0 @@
|
|||||||
import type { AgentFn, WorkflowDefinition } from "@uncaged/nerve-core";
|
|
||||||
import type { LlmExtractorConfig } from "@uncaged/nerve-workflow-utils";
|
|
||||||
import { createLlmAdapter } from "@uncaged/nerve-workflow-utils";
|
|
||||||
|
|
||||||
import { moderator } from "./moderator.js";
|
|
||||||
import type { WorkflowMeta } from "./moderator.js";
|
|
||||||
import { createAnswererRole } from "./roles/answerer.js";
|
|
||||||
import { createExplorerRole } from "./roles/explorer.js";
|
|
||||||
import { createQuestionerRole } from "./roles/questioner.js";
|
|
||||||
|
|
||||||
export type CreateKnowledgeExtractionDeps = {
|
|
||||||
defaultAdapter: AgentFn;
|
|
||||||
adapters?: Partial<Record<keyof WorkflowMeta, AgentFn>>;
|
|
||||||
extract: LlmExtractorConfig;
|
|
||||||
};
|
|
||||||
|
|
||||||
export function createKnowledgeExtractionWorkflow({
|
|
||||||
defaultAdapter,
|
|
||||||
adapters,
|
|
||||||
extract,
|
|
||||||
}: CreateKnowledgeExtractionDeps): WorkflowDefinition<WorkflowMeta> {
|
|
||||||
const a = (role: keyof WorkflowMeta) => adapters?.[role] ?? defaultAdapter;
|
|
||||||
const llmAdapter = createLlmAdapter(extract.provider);
|
|
||||||
return {
|
|
||||||
name: "extract-knowledge",
|
|
||||||
roles: {
|
|
||||||
questioner: createQuestionerRole(adapters?.questioner ?? llmAdapter, { extract }),
|
|
||||||
answerer: createAnswererRole(adapters?.answerer ?? llmAdapter, { extract }),
|
|
||||||
explorer: createExplorerRole(a("explorer"), { extract }),
|
|
||||||
},
|
|
||||||
moderator,
|
|
||||||
};
|
|
||||||
}
|
|
||||||
@ -1,30 +0,0 @@
|
|||||||
import { join } from "node:path";
|
|
||||||
import { createCursorAdapter } from "@uncaged/nerve-adapter-cursor";
|
|
||||||
import { hermesAdapter } from "@uncaged/nerve-adapter-hermes";
|
|
||||||
import { createKnowledgeExtractionWorkflow } from "./build.js";
|
|
||||||
import { resolveDashScopeProvider } from "../solve-issue/lib/provider.js";
|
|
||||||
|
|
||||||
const HOME = process.env.HOME ?? "/home/azureuser";
|
|
||||||
const NERVE_ROOT = join(HOME, ".uncaged-nerve");
|
|
||||||
|
|
||||||
const provider = await resolveDashScopeProvider(NERVE_ROOT);
|
|
||||||
|
|
||||||
if (provider === null) {
|
|
||||||
throw new Error("Set DASHSCOPE_API_KEY and DASHSCOPE_BASE_URL (or cfg get equivalents)");
|
|
||||||
}
|
|
||||||
|
|
||||||
const CURSOR_TIMEOUT_MS = 300_000;
|
|
||||||
|
|
||||||
const workflow = createKnowledgeExtractionWorkflow({
|
|
||||||
defaultAdapter: hermesAdapter,
|
|
||||||
adapters: {
|
|
||||||
explorer: createCursorAdapter({
|
|
||||||
type: "cursor",
|
|
||||||
model: "claude-sonnet-4",
|
|
||||||
timeout: CURSOR_TIMEOUT_MS,
|
|
||||||
}),
|
|
||||||
},
|
|
||||||
extract: { provider },
|
|
||||||
});
|
|
||||||
|
|
||||||
export default workflow;
|
|
||||||
@ -1,74 +0,0 @@
|
|||||||
import type { Dirent } from "node:fs";
|
|
||||||
import { readdir } from "node:fs/promises";
|
|
||||||
import { join } from "node:path";
|
|
||||||
|
|
||||||
import type { StartStep, WorkflowMessage } from "@uncaged/nerve-core";
|
|
||||||
|
|
||||||
import type { ExplorerMeta } from "../roles/explorer.js";
|
|
||||||
import type { QuestionerMeta } from "../roles/questioner.js";
|
|
||||||
|
|
||||||
async function walkMarkdownFiles(rootDir: string, base: string): Promise<string[]> {
|
|
||||||
const out: string[] = [];
|
|
||||||
let entries: Dirent[];
|
|
||||||
try {
|
|
||||||
entries = (await readdir(rootDir, { withFileTypes: true })) as Dirent[];
|
|
||||||
} catch {
|
|
||||||
return out;
|
|
||||||
}
|
|
||||||
for (const e of entries) {
|
|
||||||
const name = e.name;
|
|
||||||
const rel = base ? `${base}/${name}` : name;
|
|
||||||
const full = join(rootDir, name);
|
|
||||||
if (e.isDirectory()) {
|
|
||||||
out.push(...(await walkMarkdownFiles(full, rel)));
|
|
||||||
} else if (e.isFile() && name.endsWith(".md")) {
|
|
||||||
out.push(rel.replace(/\\/g, "/"));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return out;
|
|
||||||
}
|
|
||||||
|
|
||||||
/** Enumerate all markdown files under `.knowledge/` as repo-relative paths; seed line first if present. */
|
|
||||||
export async function bootstrapKnowledgeQueue(cwd: string, startContent: string): Promise<string[]> {
|
|
||||||
const knowledgeDir = join(cwd, ".knowledge");
|
|
||||||
const relFiles = await walkMarkdownFiles(knowledgeDir, "");
|
|
||||||
const paths = relFiles.map((f) => `.knowledge/${f}`);
|
|
||||||
const seed = startContent.trim().split(/\r?\n/u)[0]?.trim() ?? "";
|
|
||||||
if (paths.length === 0 && seed.length > 0) {
|
|
||||||
return [seed];
|
|
||||||
}
|
|
||||||
if (seed.length > 0 && paths.includes(seed)) {
|
|
||||||
return [seed, ...paths.filter((p) => p !== seed)];
|
|
||||||
}
|
|
||||||
if (seed.length > 0 && !paths.includes(seed)) {
|
|
||||||
return [seed, ...paths];
|
|
||||||
}
|
|
||||||
return [...paths].sort();
|
|
||||||
}
|
|
||||||
|
|
||||||
function lastIndexOfRole(messages: WorkflowMessage[], role: string): number {
|
|
||||||
for (let i = messages.length - 1; i >= 0; i--) {
|
|
||||||
if (messages[i].role === role) return i;
|
|
||||||
}
|
|
||||||
return -1;
|
|
||||||
}
|
|
||||||
|
|
||||||
/** Next queue for questioner: bootstrap, or continue after answerer / explorer. */
|
|
||||||
export async function resolveQueueForQuestioner(
|
|
||||||
start: StartStep,
|
|
||||||
messages: WorkflowMessage[],
|
|
||||||
cwd: string,
|
|
||||||
): Promise<string[]> {
|
|
||||||
const lastQi = lastIndexOfRole(messages, "questioner");
|
|
||||||
if (lastQi === -1) {
|
|
||||||
return bootstrapKnowledgeQueue(cwd, start.content);
|
|
||||||
}
|
|
||||||
const qMeta = messages[lastQi].meta as QuestionerMeta;
|
|
||||||
const tail = messages.slice(lastQi + 1);
|
|
||||||
const explorerMsg = tail.find((m) => m.role === "explorer");
|
|
||||||
if (explorerMsg) {
|
|
||||||
const eMeta = explorerMsg.meta as ExplorerMeta;
|
|
||||||
return [...qMeta.remaining_queue, ...eMeta.new_cards];
|
|
||||||
}
|
|
||||||
return qMeta.remaining_queue;
|
|
||||||
}
|
|
||||||
@ -1,21 +0,0 @@
|
|||||||
import type { StartStep } from "@uncaged/nerve-core";
|
|
||||||
|
|
||||||
type StartMetaWithWorkdir = StartStep["meta"] & { workdir?: string | null };
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Resolve the target repo working directory.
|
|
||||||
* Priority: start.meta.workdir → prompt second line (if absolute path) → cwd.
|
|
||||||
*/
|
|
||||||
export function resolveWorkdir(start: StartStep): string {
|
|
||||||
const m = start.meta as StartMetaWithWorkdir;
|
|
||||||
if (m.workdir) return m.workdir;
|
|
||||||
|
|
||||||
// Allow prompt to carry workdir on the second line: "seed\n/abs/path"
|
|
||||||
const lines = start.content.split(/\r?\n/);
|
|
||||||
if (lines.length >= 2) {
|
|
||||||
const candidate = lines[1]!.trim();
|
|
||||||
if (candidate.startsWith("/")) return candidate;
|
|
||||||
}
|
|
||||||
|
|
||||||
return process.cwd();
|
|
||||||
}
|
|
||||||
@ -1,84 +0,0 @@
|
|||||||
import { END } from "@uncaged/nerve-core";
|
|
||||||
import type { Moderator, ThreadContext } from "@uncaged/nerve-core";
|
|
||||||
|
|
||||||
import type { AnswererMeta } from "./roles/answerer.js";
|
|
||||||
import type { ExplorerMeta } from "./roles/explorer.js";
|
|
||||||
import type { QuestionerMeta } from "./roles/questioner.js";
|
|
||||||
|
|
||||||
export type WorkflowMeta = {
|
|
||||||
questioner: QuestionerMeta;
|
|
||||||
answerer: AnswererMeta;
|
|
||||||
explorer: ExplorerMeta;
|
|
||||||
};
|
|
||||||
|
|
||||||
type Steps = ThreadContext<WorkflowMeta>["steps"];
|
|
||||||
|
|
||||||
function lastQuestionerRemaining(steps: Steps): QuestionerMeta | undefined {
|
|
||||||
for (let i = steps.length - 1; i >= 0; i--) {
|
|
||||||
const s = steps[i];
|
|
||||||
if (s.role === "questioner") return s.meta;
|
|
||||||
}
|
|
||||||
return undefined;
|
|
||||||
}
|
|
||||||
|
|
||||||
/** End when the last two explorer invocations both added no new cards (issue #266 stagnation rule). */
|
|
||||||
function lastTwoExplorerRunsBothEmpty(steps: Steps): boolean {
|
|
||||||
const explorerSteps = steps.filter((s) => s.role === "explorer");
|
|
||||||
if (explorerSteps.length < 2) return false;
|
|
||||||
const e1 = explorerSteps[explorerSteps.length - 1].meta as ExplorerMeta;
|
|
||||||
const e2 = explorerSteps[explorerSteps.length - 2].meta as ExplorerMeta;
|
|
||||||
return e1.new_cards.length === 0 && e2.new_cards.length === 0;
|
|
||||||
}
|
|
||||||
|
|
||||||
function queueAfterSkippedExplorer(steps: Steps): string[] {
|
|
||||||
const q = lastQuestionerRemaining(steps);
|
|
||||||
return q?.remaining_queue ?? [];
|
|
||||||
}
|
|
||||||
|
|
||||||
function queueAfterExplorerStep(steps: Steps): string[] {
|
|
||||||
const last = steps[steps.length - 1];
|
|
||||||
if (!last || last.role !== "explorer") return [];
|
|
||||||
const q = lastQuestionerRemaining(steps);
|
|
||||||
if (!q) return [];
|
|
||||||
const e = last.meta as ExplorerMeta;
|
|
||||||
return [...q.remaining_queue, ...e.new_cards];
|
|
||||||
}
|
|
||||||
|
|
||||||
export const moderator: Moderator<WorkflowMeta> = (context) => {
|
|
||||||
const { steps } = context;
|
|
||||||
|
|
||||||
if (steps.length === 0) {
|
|
||||||
return "questioner";
|
|
||||||
}
|
|
||||||
|
|
||||||
const last = steps[steps.length - 1];
|
|
||||||
|
|
||||||
if (last.role === "questioner") {
|
|
||||||
return "answerer";
|
|
||||||
}
|
|
||||||
|
|
||||||
if (last.role === "answerer") {
|
|
||||||
const am = last.meta as AnswererMeta;
|
|
||||||
if (am.has_unanswered) {
|
|
||||||
return "explorer";
|
|
||||||
}
|
|
||||||
const q = queueAfterSkippedExplorer(steps);
|
|
||||||
if (q.length === 0) {
|
|
||||||
return END;
|
|
||||||
}
|
|
||||||
return "questioner";
|
|
||||||
}
|
|
||||||
|
|
||||||
if (last.role === "explorer") {
|
|
||||||
if (lastTwoExplorerRunsBothEmpty(steps)) {
|
|
||||||
return END;
|
|
||||||
}
|
|
||||||
const q = queueAfterExplorerStep(steps);
|
|
||||||
if (q.length === 0) {
|
|
||||||
return END;
|
|
||||||
}
|
|
||||||
return "questioner";
|
|
||||||
}
|
|
||||||
|
|
||||||
return END;
|
|
||||||
};
|
|
||||||
@ -1,102 +0,0 @@
|
|||||||
import type { AgentFn, Role, ThreadContext, WorkflowMessage } from "@uncaged/nerve-core";
|
|
||||||
import type { LlmExtractorConfig } from "@uncaged/nerve-workflow-utils";
|
|
||||||
import { createRole, nerveCommandEnv, spawnSafe } from "@uncaged/nerve-workflow-utils";
|
|
||||||
import { z } from "zod";
|
|
||||||
|
|
||||||
import { resolveWorkdir } from "../lib/workdir.js";
|
|
||||||
|
|
||||||
import type { QuestionerMeta } from "./questioner.js";
|
|
||||||
|
|
||||||
export const answererMetaSchema = z.object({
|
|
||||||
results: z.array(
|
|
||||||
z.object({
|
|
||||||
id: z.string(),
|
|
||||||
found: z.boolean(),
|
|
||||||
source: z.string(),
|
|
||||||
note: z.string(),
|
|
||||||
}),
|
|
||||||
),
|
|
||||||
has_unanswered: z.boolean(),
|
|
||||||
});
|
|
||||||
|
|
||||||
export type AnswererMeta = z.infer<typeof answererMetaSchema>;
|
|
||||||
|
|
||||||
export type CreateAnswererRoleDeps = {
|
|
||||||
extract: LlmExtractorConfig;
|
|
||||||
};
|
|
||||||
|
|
||||||
function lastQuestionerMeta(messages: WorkflowMessage[]): QuestionerMeta | undefined {
|
|
||||||
for (let i = messages.length - 1; i >= 0; i--) {
|
|
||||||
if (messages[i].role === "questioner") {
|
|
||||||
return messages[i].meta as QuestionerMeta;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return undefined;
|
|
||||||
}
|
|
||||||
|
|
||||||
export async function answererPrompt(ctx: ThreadContext): Promise<string> {
|
|
||||||
const messages = ctx.steps as unknown as WorkflowMessage[];
|
|
||||||
const cwd = resolveWorkdir(ctx.start);
|
|
||||||
const qm = lastQuestionerMeta(messages);
|
|
||||||
if (!qm || qm.questions.length === 0) {
|
|
||||||
throw new Error("answerer: prompt invoked without questioner questions — wrapped role should short-circuit");
|
|
||||||
}
|
|
||||||
|
|
||||||
const blocks: string[] = [];
|
|
||||||
for (const q of qm.questions) {
|
|
||||||
if ((ctx.start.meta as Record<string, unknown>).dryRun) {
|
|
||||||
blocks.push(`### ${q.id}\n[dryRun] skipped nerve knowledge query\n`);
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
const res = await spawnSafe(
|
|
||||||
"nerve",
|
|
||||||
["knowledge", "query", q.question],
|
|
||||||
{
|
|
||||||
cwd,
|
|
||||||
env: nerveCommandEnv(),
|
|
||||||
timeoutMs: 120_000,
|
|
||||||
dryRun: false,
|
|
||||||
abortSignal: null,
|
|
||||||
},
|
|
||||||
);
|
|
||||||
if (res.ok) {
|
|
||||||
blocks.push(`### ${q.id} (${q.domain})\nQuestion: ${q.question}\n---\n${res.value.stdout}\n`);
|
|
||||||
} else {
|
|
||||||
const err = res.error;
|
|
||||||
const detail =
|
|
||||||
err.kind === "non_zero_exit"
|
|
||||||
? `exit ${err.exitCode}\n${err.stderr}`
|
|
||||||
: err.kind === "timeout"
|
|
||||||
? `timeout\n${err.stderr}`
|
|
||||||
: err.kind === "spawn_failed"
|
|
||||||
? err.message
|
|
||||||
: "aborted";
|
|
||||||
blocks.push(`### ${q.id}\nnerve knowledge query failed: ${detail}\n`);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return [
|
|
||||||
"You are the **answerer**. You MUST NOT read repository source code — only the CLI retrieval excerpts below.",
|
|
||||||
"For each question id, decide whether the knowledge base already answers it.",
|
|
||||||
"Set found=true only when the excerpt supports a confident answer; otherwise found=false.",
|
|
||||||
"Set has_unanswered=true if any question remains unanswered by the knowledge base.",
|
|
||||||
"",
|
|
||||||
...blocks,
|
|
||||||
].join("\n");
|
|
||||||
}
|
|
||||||
|
|
||||||
export function createAnswererRole(adapter: AgentFn, { extract }: CreateAnswererRoleDeps): Role<AnswererMeta> {
|
|
||||||
const inner = createRole(adapter, answererPrompt, answererMetaSchema, extract);
|
|
||||||
|
|
||||||
return async (ctx: ThreadContext) => {
|
|
||||||
const messages = ctx.steps as unknown as WorkflowMessage[];
|
|
||||||
const qm = lastQuestionerMeta(messages);
|
|
||||||
if (!qm || qm.questions.length === 0) {
|
|
||||||
return {
|
|
||||||
content: "answerer: no questions from questioner; skipping CLI lookup.",
|
|
||||||
meta: { results: [], has_unanswered: false },
|
|
||||||
};
|
|
||||||
}
|
|
||||||
return inner(ctx);
|
|
||||||
};
|
|
||||||
}
|
|
||||||
@ -1,93 +0,0 @@
|
|||||||
import type { AgentFn, Role, ThreadContext, WorkflowMessage } from "@uncaged/nerve-core";
|
|
||||||
import type { LlmExtractorConfig } from "@uncaged/nerve-workflow-utils";
|
|
||||||
import { createRole } from "@uncaged/nerve-workflow-utils";
|
|
||||||
import { z } from "zod";
|
|
||||||
|
|
||||||
import { resolveWorkdir } from "../lib/workdir.js";
|
|
||||||
|
|
||||||
import type { AnswererMeta } from "./answerer.js";
|
|
||||||
import type { QuestionerMeta } from "./questioner.js";
|
|
||||||
|
|
||||||
export const explorerMetaSchema = z.object({
|
|
||||||
patches: z.array(
|
|
||||||
z.object({
|
|
||||||
card: z.string(),
|
|
||||||
section: z.string(),
|
|
||||||
}),
|
|
||||||
),
|
|
||||||
new_cards: z.array(z.string()),
|
|
||||||
});
|
|
||||||
|
|
||||||
export type ExplorerMeta = z.infer<typeof explorerMetaSchema>;
|
|
||||||
|
|
||||||
export type CreateExplorerRoleDeps = {
|
|
||||||
extract: LlmExtractorConfig;
|
|
||||||
};
|
|
||||||
|
|
||||||
function lastMeta<M>(messages: WorkflowMessage[], role: string): M | undefined {
|
|
||||||
for (let i = messages.length - 1; i >= 0; i--) {
|
|
||||||
if (messages[i].role === role) {
|
|
||||||
return messages[i].meta as M;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return undefined;
|
|
||||||
}
|
|
||||||
|
|
||||||
export function explorerPrompt(ctx: ThreadContext): string {
|
|
||||||
const messages = ctx.steps as unknown as WorkflowMessage[];
|
|
||||||
const threadId = ctx.start.meta.threadId;
|
|
||||||
const qm = lastMeta<QuestionerMeta>(messages, "questioner");
|
|
||||||
const am = lastMeta<AnswererMeta>(messages, "answerer");
|
|
||||||
const cwd = resolveWorkdir(ctx.start);
|
|
||||||
|
|
||||||
const unanswered =
|
|
||||||
am?.results.filter((r) => !r.found).map((r) => r.id) ?? [];
|
|
||||||
|
|
||||||
return `You are the **explorer** in an extract-knowledge workflow.
|
|
||||||
|
|
||||||
## Context
|
|
||||||
|
|
||||||
- Thread: \`nerve thread ${threadId}\`
|
|
||||||
- Working directory (repo root for paths): ${cwd}
|
|
||||||
- Current knowledge card (questioner): ${qm?.card ?? "(unknown)"}
|
|
||||||
|
|
||||||
## Unanswered question ids
|
|
||||||
|
|
||||||
${JSON.stringify(unanswered)}
|
|
||||||
|
|
||||||
Use the prior answerer results in the thread to map ids to full question text when you read messages above.
|
|
||||||
|
|
||||||
## Task
|
|
||||||
|
|
||||||
For each unanswered question, **read the codebase** as needed, then either:
|
|
||||||
|
|
||||||
- Add a new markdown file under \`.knowledge/\`, or
|
|
||||||
- Patch an existing card (prefer updating the card listed above when appropriate).
|
|
||||||
|
|
||||||
After any write or patch to \`.knowledge\`, run:
|
|
||||||
|
|
||||||
\`\`\`bash
|
|
||||||
nerve knowledge sync
|
|
||||||
\`\`\`
|
|
||||||
|
|
||||||
from this repo root (${cwd}), and fix failures until sync succeeds.
|
|
||||||
|
|
||||||
## Output meta
|
|
||||||
|
|
||||||
Report \`patches\` as { card, section } entries for cards you edited (section is a short heading or path hint).
|
|
||||||
Report \`new_cards\` as repo-relative paths for brand-new files you created (e.g. \`.knowledge/new-topic.md\`).
|
|
||||||
|
|
||||||
Do not claim work you did not perform.`;
|
|
||||||
}
|
|
||||||
|
|
||||||
export function createExplorerRole(
|
|
||||||
adapter: AgentFn,
|
|
||||||
{ extract }: CreateExplorerRoleDeps,
|
|
||||||
): Role<ExplorerMeta> {
|
|
||||||
return createRole(
|
|
||||||
adapter,
|
|
||||||
async (ctx: ThreadContext) => explorerPrompt(ctx),
|
|
||||||
explorerMetaSchema,
|
|
||||||
extract,
|
|
||||||
);
|
|
||||||
}
|
|
||||||
@ -1,108 +0,0 @@
|
|||||||
import { readFile } from "node:fs/promises";
|
|
||||||
import { join } from "node:path";
|
|
||||||
|
|
||||||
import type { AgentFn, Role, ThreadContext, WorkflowMessage } from "@uncaged/nerve-core";
|
|
||||||
import type { LlmExtractorConfig } from "@uncaged/nerve-workflow-utils";
|
|
||||||
import { createRole } from "@uncaged/nerve-workflow-utils";
|
|
||||||
import { z } from "zod";
|
|
||||||
|
|
||||||
import { resolveQueueForQuestioner } from "../lib/knowledge-queue.js";
|
|
||||||
import { resolveWorkdir } from "../lib/workdir.js";
|
|
||||||
|
|
||||||
const questionerExtractSchema = z.object({
|
|
||||||
questions: z
|
|
||||||
.array(
|
|
||||||
z.object({
|
|
||||||
id: z.string(),
|
|
||||||
question: z.string(),
|
|
||||||
domain: z.string(),
|
|
||||||
}),
|
|
||||||
)
|
|
||||||
.length(5),
|
|
||||||
});
|
|
||||||
|
|
||||||
export type QuestionerMeta = {
|
|
||||||
/** Empty when no .knowledge cards and no work to do. */
|
|
||||||
card: string;
|
|
||||||
questions: { id: string; question: string; domain: string }[];
|
|
||||||
remaining_queue: string[];
|
|
||||||
};
|
|
||||||
|
|
||||||
export type CreateQuestionerRoleDeps = {
|
|
||||||
extract: LlmExtractorConfig;
|
|
||||||
};
|
|
||||||
|
|
||||||
function questionerSystem(): string {
|
|
||||||
return `You are the **questioner** in an extract-knowledge workflow.
|
|
||||||
|
|
||||||
Read the given markdown knowledge card. Propose exactly **five** technical questions that are **not** already answered or covered by that card.
|
|
||||||
|
|
||||||
Rules:
|
|
||||||
- Questions must be concrete and technical.
|
|
||||||
- Each question needs a stable string id (e.g. q1, q2, q3, q4, q5), a short domain label (e.g. routing, storage), and the question text.
|
|
||||||
- Do not assume access to other files or tools — reason only from the card content shown.`;
|
|
||||||
}
|
|
||||||
|
|
||||||
function questionerUser(card: string, cardBody: string, remainingHint: string[]): string {
|
|
||||||
return `Current card path: ${card}
|
|
||||||
|
|
||||||
Remaining queue after this card (paths, may be empty): ${JSON.stringify(remainingHint)}
|
|
||||||
|
|
||||||
--- Card content ---
|
|
||||||
|
|
||||||
${cardBody}`;
|
|
||||||
}
|
|
||||||
|
|
||||||
export async function questionerPrompt(ctx: ThreadContext): Promise<string> {
|
|
||||||
const messages = ctx.steps as unknown as WorkflowMessage[];
|
|
||||||
const cwd = resolveWorkdir(ctx.start);
|
|
||||||
const queue = await resolveQueueForQuestioner(ctx.start, messages, cwd);
|
|
||||||
if (queue.length === 0) {
|
|
||||||
throw new Error(
|
|
||||||
"questioner: prompt invoked with empty queue — wrapped role should short-circuit before LLM",
|
|
||||||
);
|
|
||||||
}
|
|
||||||
const card = queue[0]!;
|
|
||||||
const remaining_queue = queue.slice(1);
|
|
||||||
let cardBody: string;
|
|
||||||
try {
|
|
||||||
cardBody = await readFile(join(cwd, card), "utf8");
|
|
||||||
} catch (e) {
|
|
||||||
const msg = e instanceof Error ? e.message : String(e);
|
|
||||||
throw new Error(`questioner: failed to read ${card}: ${msg}`);
|
|
||||||
}
|
|
||||||
return `${questionerSystem()}\n\n${questionerUser(card, cardBody, remaining_queue)}`;
|
|
||||||
}
|
|
||||||
|
|
||||||
export function createQuestionerRole(adapter: AgentFn, { extract }: CreateQuestionerRoleDeps): Role<QuestionerMeta> {
|
|
||||||
const inner = createRole(adapter, questionerPrompt, questionerExtractSchema, extract);
|
|
||||||
|
|
||||||
return async (ctx: ThreadContext) => {
|
|
||||||
const messages = ctx.steps as unknown as WorkflowMessage[];
|
|
||||||
const cwd = resolveWorkdir(ctx.start);
|
|
||||||
const queue = await resolveQueueForQuestioner(ctx.start, messages, cwd);
|
|
||||||
if (queue.length === 0) {
|
|
||||||
return {
|
|
||||||
content:
|
|
||||||
"questioner: no `.knowledge` markdown files found and no seed path in the trigger prompt; queue is empty.",
|
|
||||||
meta: {
|
|
||||||
card: "",
|
|
||||||
questions: [],
|
|
||||||
remaining_queue: [],
|
|
||||||
},
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
const card = queue[0]!;
|
|
||||||
const remaining_queue = queue.slice(1);
|
|
||||||
const r = await inner(ctx);
|
|
||||||
return {
|
|
||||||
content: r.content,
|
|
||||||
meta: {
|
|
||||||
card,
|
|
||||||
questions: r.meta.questions,
|
|
||||||
remaining_queue,
|
|
||||||
},
|
|
||||||
};
|
|
||||||
};
|
|
||||||
}
|
|
||||||
1
workflows/solve-issue/.gitignore
vendored
Normal file
1
workflows/solve-issue/.gitignore
vendored
Normal file
@ -0,0 +1 @@
|
|||||||
|
dist/
|
||||||
@ -3,14 +3,14 @@ import type { LlmExtractorConfig } from "@uncaged/nerve-workflow-utils";
|
|||||||
|
|
||||||
import { moderator } from "./moderator.js";
|
import { moderator } from "./moderator.js";
|
||||||
import type { WorkflowMeta } from "./moderator.js";
|
import type { WorkflowMeta } from "./moderator.js";
|
||||||
import { createCommitterRole } from "./roles/committer.js";
|
import { createCommitterRole } from "./roles/committer/index.js";
|
||||||
import { createImplementRole } from "./roles/implement.js";
|
import { createImplementRole } from "./roles/implement/index.js";
|
||||||
import { createPlanRole } from "./roles/plan.js";
|
import { createPlanRole } from "./roles/plan/index.js";
|
||||||
import { createPrepareRole } from "./roles/prepare.js";
|
import { createPrepareRole } from "./roles/prepare/index.js";
|
||||||
import { createPublishRole } from "./roles/publish.js";
|
import { createPublishRole } from "./roles/publish/index.js";
|
||||||
import { createReadIssueRole } from "./roles/read-issue.js";
|
import { createReadIssueRole } from "./roles/read-issue/index.js";
|
||||||
import { createReviewRole } from "./roles/review.js";
|
import { createReviewRole } from "./roles/review/index.js";
|
||||||
import { createTestRole } from "./roles/test.js";
|
import { createTestRole } from "./roles/test/index.js";
|
||||||
|
|
||||||
export type CreateSolveIssueDeps = {
|
export type CreateSolveIssueDeps = {
|
||||||
defaultAdapter: AgentFn;
|
defaultAdapter: AgentFn;
|
||||||
|
|||||||
@ -1,5 +1,5 @@
|
|||||||
import { join } from "node:path";
|
import { join } from "node:path";
|
||||||
import type { RoleStep, WorkflowMessage } from "@uncaged/nerve-core";
|
import type { WorkflowMessage } from "@uncaged/nerve-core";
|
||||||
|
|
||||||
type SolveIssueParse = {
|
type SolveIssueParse = {
|
||||||
host: string;
|
host: string;
|
||||||
|
|||||||
@ -1,13 +1,13 @@
|
|||||||
import { END } from "@uncaged/nerve-core";
|
import { END } from "@uncaged/nerve-core";
|
||||||
import type { Moderator } from "@uncaged/nerve-core";
|
import type { Moderator } from "@uncaged/nerve-core";
|
||||||
import type { ReadIssueMeta } from "./roles/read-issue.js";
|
import type { ReadIssueMeta } from "./roles/read-issue/index.js";
|
||||||
import type { PrepareMeta } from "./roles/prepare.js";
|
import type { PrepareMeta } from "./roles/prepare/index.js";
|
||||||
import type { PlanMeta } from "./roles/plan.js";
|
import type { PlanMeta } from "./roles/plan/index.js";
|
||||||
import type { ImplementMeta } from "./roles/implement.js";
|
import type { ImplementMeta } from "./roles/implement/index.js";
|
||||||
import type { CommitterMeta } from "./roles/committer.js";
|
import type { CommitterMeta } from "./roles/committer/index.js";
|
||||||
import type { ReviewMeta } from "./roles/review.js";
|
import type { ReviewMeta } from "./roles/review/index.js";
|
||||||
import type { TestMeta } from "./roles/test.js";
|
import type { TestMeta } from "./roles/test/index.js";
|
||||||
import type { PublishMeta } from "./roles/publish.js";
|
import type { PublishMeta } from "./roles/publish/index.js";
|
||||||
|
|
||||||
export type WorkflowMeta = {
|
export type WorkflowMeta = {
|
||||||
"read-issue": ReadIssueMeta;
|
"read-issue": ReadIssueMeta;
|
||||||
|
|||||||
21
workflows/solve-issue/package.json
Normal file
21
workflows/solve-issue/package.json
Normal file
@ -0,0 +1,21 @@
|
|||||||
|
{
|
||||||
|
"name": "solve-issue-workflow",
|
||||||
|
"version": "0.0.1",
|
||||||
|
"private": true,
|
||||||
|
"type": "module",
|
||||||
|
"scripts": {
|
||||||
|
"build": "esbuild index.ts --bundle --platform=node --format=esm --outdir=dist --packages=external"
|
||||||
|
},
|
||||||
|
"dependencies": {
|
||||||
|
"@uncaged/nerve-adapter-cursor": "latest",
|
||||||
|
"@uncaged/nerve-adapter-hermes": "latest",
|
||||||
|
"@uncaged/nerve-core": "latest",
|
||||||
|
"@uncaged/nerve-workflow-utils": "latest",
|
||||||
|
"zod": "^4.3.6"
|
||||||
|
},
|
||||||
|
"devDependencies": {
|
||||||
|
"@types/node": "^22.0.0",
|
||||||
|
"esbuild": "^0.27.0",
|
||||||
|
"typescript": "^5.7.0"
|
||||||
|
}
|
||||||
|
}
|
||||||
30
workflows/solve-issue/roles/committer/index.ts
Normal file
30
workflows/solve-issue/roles/committer/index.ts
Normal file
@ -0,0 +1,30 @@
|
|||||||
|
import type { AgentFn, Role, StartStep } from "@uncaged/nerve-core";
|
||||||
|
import type { LlmExtractorConfig } from "@uncaged/nerve-workflow-utils";
|
||||||
|
import { createRole, decorateRole, withDryRun, onFail } from "@uncaged/nerve-workflow-utils";
|
||||||
|
import { z } from "zod";
|
||||||
|
|
||||||
|
import { committerPrompt } from "./prompt.js";
|
||||||
|
|
||||||
|
export const committerMetaSchema = z.object({
|
||||||
|
committed: z
|
||||||
|
.boolean()
|
||||||
|
.describe("true if branch created, changes committed, and pushed successfully"),
|
||||||
|
});
|
||||||
|
export type CommitterMeta = z.infer<typeof committerMetaSchema>;
|
||||||
|
|
||||||
|
export function createCommitterRole(
|
||||||
|
adapter: AgentFn,
|
||||||
|
extract: LlmExtractorConfig,
|
||||||
|
): Role<CommitterMeta> {
|
||||||
|
const inner = createRole(
|
||||||
|
adapter,
|
||||||
|
async (start: StartStep) => committerPrompt({ threadId: start.meta.threadId }),
|
||||||
|
committerMetaSchema,
|
||||||
|
extract,
|
||||||
|
);
|
||||||
|
|
||||||
|
return decorateRole(inner, [
|
||||||
|
withDryRun({ label: "committer", meta: { committed: true } as CommitterMeta }),
|
||||||
|
onFail({ label: "committer", meta: { committed: false } as CommitterMeta }),
|
||||||
|
]) as Role<CommitterMeta>;
|
||||||
|
}
|
||||||
@ -1,9 +1,4 @@
|
|||||||
import type { AgentFn, Role, ThreadContext } from "@uncaged/nerve-core";
|
export function committerPrompt({ threadId }: { threadId: string }): string {
|
||||||
import type { LlmExtractorConfig } from "@uncaged/nerve-workflow-utils";
|
|
||||||
import { createRole, decorateRole, withDryRun, onFail } from "@uncaged/nerve-workflow-utils";
|
|
||||||
import { z } from "zod";
|
|
||||||
|
|
||||||
function committerPrompt({ threadId }: { threadId: string }): string {
|
|
||||||
return `You are the committer agent. The **implement** step finished with a passing build; your job is to branch, commit, and push.
|
return `You are the committer agent. The **implement** step finished with a passing build; your job is to branch, commit, and push.
|
||||||
|
|
||||||
1. Read the workflow thread: \`nerve thread show ${threadId}\` — understand what was planned, implemented, and reviewed.
|
1. Read the workflow thread: \`nerve thread show ${threadId}\` — understand what was planned, implemented, and reviewed.
|
||||||
@ -31,27 +26,3 @@ or
|
|||||||
{ "committed": false }
|
{ "committed": false }
|
||||||
\`\`\``;
|
\`\`\``;
|
||||||
}
|
}
|
||||||
|
|
||||||
export const committerMetaSchema = z.object({
|
|
||||||
committed: z
|
|
||||||
.boolean()
|
|
||||||
.describe("true if branch created, changes committed, and pushed successfully"),
|
|
||||||
});
|
|
||||||
export type CommitterMeta = z.infer<typeof committerMetaSchema>;
|
|
||||||
|
|
||||||
export function createCommitterRole(
|
|
||||||
adapter: AgentFn,
|
|
||||||
extract: LlmExtractorConfig,
|
|
||||||
): Role<CommitterMeta> {
|
|
||||||
const inner = createRole(
|
|
||||||
adapter,
|
|
||||||
async (ctx: ThreadContext) => committerPrompt({ threadId: ctx.start.meta.threadId }),
|
|
||||||
committerMetaSchema,
|
|
||||||
extract,
|
|
||||||
);
|
|
||||||
|
|
||||||
return decorateRole(inner, [
|
|
||||||
withDryRun({ label: "committer", meta: { committed: true } as CommitterMeta }),
|
|
||||||
onFail({ label: "committer", meta: { committed: false } as CommitterMeta }),
|
|
||||||
]) as Role<CommitterMeta>;
|
|
||||||
}
|
|
||||||
@ -1,86 +0,0 @@
|
|||||||
import type { AgentFn, Role, RoleResult, ThreadContext, WorkflowMessage } from "@uncaged/nerve-core";
|
|
||||||
import type { LlmExtractorConfig } from "@uncaged/nerve-workflow-utils";
|
|
||||||
import { createRole } from "@uncaged/nerve-workflow-utils";
|
|
||||||
import { z } from "zod";
|
|
||||||
|
|
||||||
import { resolveRepoCwd } from "../lib/repo-context.js";
|
|
||||||
|
|
||||||
function buildImplementPrompt({ threadId, nerveRoot }: { threadId: string; nerveRoot: string }): string {
|
|
||||||
return `You are the **implement** agent. You apply code changes for the issue.
|
|
||||||
|
|
||||||
Read workflow context (plan, reviewer/test feedback): \`nerve thread show ${threadId}\`
|
|
||||||
|
|
||||||
Read Nerve workspace conventions: \`cat ${nerveRoot}/CONVENTIONS.md\`
|
|
||||||
|
|
||||||
Your cwd is the target repository.
|
|
||||||
|
|
||||||
## Requirements
|
|
||||||
|
|
||||||
1. Implement the planned changes; address reviewer/tester feedback from the thread if any.
|
|
||||||
2. Run the project **build** (\`pnpm build\`, \`npm run build\`, etc.) and fix issues until build passes.
|
|
||||||
3. Multi-step: if you cannot finish this round, explain why and set **done** to false.
|
|
||||||
|
|
||||||
Do **not** run \`git checkout -b\`, \`git add\`, \`git commit\`, or \`git push\`. **Never** create commits on any branch — branching and commits are handled by the **committer** step after you finish.
|
|
||||||
|
|
||||||
Then close with JSON:
|
|
||||||
\`\`\`json
|
|
||||||
{ "done": true }
|
|
||||||
\`\`\`
|
|
||||||
or \`{ "done": false }\` matching whether implementation is complete.
|
|
||||||
|
|
||||||
**done=true** only when changes are complete **and** build passes in this round.`;
|
|
||||||
}
|
|
||||||
|
|
||||||
export const implementMetaSchema = z.object({
|
|
||||||
done: z.boolean().describe("true when changes are complete and build passes this round"),
|
|
||||||
});
|
|
||||||
export type ImplementMeta = z.infer<typeof implementMetaSchema>;
|
|
||||||
|
|
||||||
export type CreateImplementRoleDeps = {
|
|
||||||
extract: LlmExtractorConfig;
|
|
||||||
nerveRoot: string;
|
|
||||||
};
|
|
||||||
|
|
||||||
export function createImplementRole(
|
|
||||||
adapter: AgentFn,
|
|
||||||
{ extract, nerveRoot }: CreateImplementRoleDeps,
|
|
||||||
): Role<ImplementMeta> {
|
|
||||||
return async (ctx: ThreadContext): Promise<RoleResult<ImplementMeta>> => {
|
|
||||||
const messages = ctx.steps as unknown as WorkflowMessage[];
|
|
||||||
const cwd = resolveRepoCwd(messages);
|
|
||||||
if (cwd === null) {
|
|
||||||
return {
|
|
||||||
content: "implement cannot run: missing repo path in thread markers",
|
|
||||||
meta: { done: false },
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
const innerRole = createRole(
|
|
||||||
adapter,
|
|
||||||
async (innerCtx: ThreadContext) =>
|
|
||||||
buildImplementPrompt({
|
|
||||||
threadId: innerCtx.start.meta.threadId,
|
|
||||||
nerveRoot,
|
|
||||||
}),
|
|
||||||
implementMetaSchema,
|
|
||||||
extract,
|
|
||||||
);
|
|
||||||
|
|
||||||
const innerCtx: ThreadContext = {
|
|
||||||
...ctx,
|
|
||||||
start: {
|
|
||||||
...ctx.start,
|
|
||||||
meta: { ...ctx.start.meta, workdir: cwd },
|
|
||||||
},
|
|
||||||
};
|
|
||||||
try {
|
|
||||||
return await innerRole(innerCtx);
|
|
||||||
} catch (e) {
|
|
||||||
const msg = e instanceof Error ? e.message : String(e);
|
|
||||||
return {
|
|
||||||
content: `implement failed: ${msg}`,
|
|
||||||
meta: { done: false },
|
|
||||||
};
|
|
||||||
}
|
|
||||||
};
|
|
||||||
}
|
|
||||||
58
workflows/solve-issue/roles/implement/index.ts
Normal file
58
workflows/solve-issue/roles/implement/index.ts
Normal file
@ -0,0 +1,58 @@
|
|||||||
|
import type { AgentFn, Role, RoleResult, StartStep, WorkflowMessage } from "@uncaged/nerve-core";
|
||||||
|
import type { LlmExtractorConfig } from "@uncaged/nerve-workflow-utils";
|
||||||
|
import { createRole } from "@uncaged/nerve-workflow-utils";
|
||||||
|
import { z } from "zod";
|
||||||
|
|
||||||
|
import { resolveRepoCwd } from "../../lib/repo-context.js";
|
||||||
|
import { buildImplementPrompt } from "./prompt.js";
|
||||||
|
|
||||||
|
export const implementMetaSchema = z.object({
|
||||||
|
done: z.boolean().describe("true when changes are complete and build passes this round"),
|
||||||
|
});
|
||||||
|
export type ImplementMeta = z.infer<typeof implementMetaSchema>;
|
||||||
|
|
||||||
|
export type CreateImplementRoleDeps = {
|
||||||
|
extract: LlmExtractorConfig;
|
||||||
|
nerveRoot: string;
|
||||||
|
};
|
||||||
|
|
||||||
|
export function createImplementRole(
|
||||||
|
adapter: AgentFn,
|
||||||
|
{ extract, nerveRoot }: CreateImplementRoleDeps,
|
||||||
|
): Role<ImplementMeta> {
|
||||||
|
return async (start: StartStep, messages: WorkflowMessage[]): Promise<RoleResult<ImplementMeta>> => {
|
||||||
|
const cwd = resolveRepoCwd(messages);
|
||||||
|
if (cwd === null) {
|
||||||
|
return {
|
||||||
|
content: "implement cannot run: missing repo path in thread markers",
|
||||||
|
meta: { done: false },
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
const innerRole = createRole(
|
||||||
|
adapter,
|
||||||
|
async (innerStart: StartStep) =>
|
||||||
|
buildImplementPrompt({
|
||||||
|
threadId: innerStart.meta.threadId,
|
||||||
|
nerveRoot,
|
||||||
|
}),
|
||||||
|
implementMetaSchema,
|
||||||
|
extract,
|
||||||
|
);
|
||||||
|
|
||||||
|
const innerStart = {
|
||||||
|
...start,
|
||||||
|
meta: { ...start.meta, workdir: cwd },
|
||||||
|
} as StartStep;
|
||||||
|
|
||||||
|
try {
|
||||||
|
return await innerRole(innerStart, messages);
|
||||||
|
} catch (e) {
|
||||||
|
const msg = e instanceof Error ? e.message : String(e);
|
||||||
|
return {
|
||||||
|
content: `implement failed: ${msg}`,
|
||||||
|
meta: { done: false },
|
||||||
|
};
|
||||||
|
}
|
||||||
|
};
|
||||||
|
}
|
||||||
25
workflows/solve-issue/roles/implement/prompt.ts
Normal file
25
workflows/solve-issue/roles/implement/prompt.ts
Normal file
@ -0,0 +1,25 @@
|
|||||||
|
export function buildImplementPrompt({ threadId, nerveRoot }: { threadId: string; nerveRoot: string }): string {
|
||||||
|
return `You are the **implement** agent. You apply code changes for the issue.
|
||||||
|
|
||||||
|
Read workflow context (plan, reviewer/test feedback): \`nerve thread show ${threadId}\`
|
||||||
|
|
||||||
|
Read Nerve workspace conventions: \`cat ${nerveRoot}/CONVENTIONS.md\`
|
||||||
|
|
||||||
|
Your cwd is the target repository.
|
||||||
|
|
||||||
|
## Requirements
|
||||||
|
|
||||||
|
1. Implement the planned changes; address reviewer/tester feedback from the thread if any.
|
||||||
|
2. Run the project **build** (\`pnpm build\`, \`npm run build\`, etc.) and fix issues until build passes.
|
||||||
|
3. Multi-step: if you cannot finish this round, explain why and set **done** to false.
|
||||||
|
|
||||||
|
Do **not** run \`git checkout -b\`, \`git add\`, \`git commit\`, or \`git push\`. **Never** create commits on any branch — branching and commits are handled by the **committer** step after you finish.
|
||||||
|
|
||||||
|
Then close with JSON:
|
||||||
|
\`\`\`json
|
||||||
|
{ "done": true }
|
||||||
|
\`\`\`
|
||||||
|
or \`{ "done": false }\` matching whether implementation is complete.
|
||||||
|
|
||||||
|
**done=true** only when changes are complete **and** build passes in this round.`;
|
||||||
|
}
|
||||||
@ -1,88 +0,0 @@
|
|||||||
import type { AgentFn, Role, RoleResult, ThreadContext, WorkflowMessage } from "@uncaged/nerve-core";
|
|
||||||
import type { LlmExtractorConfig } from "@uncaged/nerve-workflow-utils";
|
|
||||||
import { createRole } from "@uncaged/nerve-workflow-utils";
|
|
||||||
import { z } from "zod";
|
|
||||||
|
|
||||||
import { resolveRepoCwd } from "../lib/repo-context.js";
|
|
||||||
|
|
||||||
function buildPlanPrompt({ threadId, nerveRoot }: { threadId: string; nerveRoot: string }): string {
|
|
||||||
return `You are the **plan** agent (analysis only — ask mode). You produce an implementation plan for fixing the issue.
|
|
||||||
|
|
||||||
Read workflow context: \`nerve thread show ${threadId}\`
|
|
||||||
|
|
||||||
Read Nerve workspace conventions (coding rules for agents): \`cat ${nerveRoot}/CONVENTIONS.md\`
|
|
||||||
|
|
||||||
In the **target repository** (your cwd), skim relevant files and read \`CONVENTIONS.md\` **if it exists** there.
|
|
||||||
|
|
||||||
## Output
|
|
||||||
|
|
||||||
Write an implementation plan in **markdown** with:
|
|
||||||
|
|
||||||
1. Problem understanding
|
|
||||||
2. Change strategy
|
|
||||||
3. Target files (paths)
|
|
||||||
4. **Test commands** to run (explicit shell commands, e.g. \`pnpm test\`, \`pnpm vitest run\`)
|
|
||||||
5. Risks
|
|
||||||
|
|
||||||
End your reply with a JSON code block (meta signal):
|
|
||||||
\`\`\`json
|
|
||||||
{ "ready": true }
|
|
||||||
\`\`\`
|
|
||||||
Use \`{ "ready": false }\` if the plan cannot be made actionable.
|
|
||||||
|
|
||||||
**ready=true** only when the plan is clear and actionable.`;
|
|
||||||
}
|
|
||||||
|
|
||||||
export const planMetaSchema = z.object({
|
|
||||||
ready: z.boolean().describe("true if plan is clear and actionable"),
|
|
||||||
});
|
|
||||||
export type PlanMeta = z.infer<typeof planMetaSchema>;
|
|
||||||
|
|
||||||
export type CreatePlanRoleDeps = {
|
|
||||||
extract: LlmExtractorConfig;
|
|
||||||
nerveRoot: string;
|
|
||||||
};
|
|
||||||
|
|
||||||
export function createPlanRole(
|
|
||||||
adapter: AgentFn,
|
|
||||||
{ extract, nerveRoot }: CreatePlanRoleDeps,
|
|
||||||
): Role<PlanMeta> {
|
|
||||||
return async (ctx: ThreadContext): Promise<RoleResult<PlanMeta>> => {
|
|
||||||
const messages = ctx.steps as unknown as WorkflowMessage[];
|
|
||||||
const cwd = resolveRepoCwd(messages);
|
|
||||||
if (cwd === null) {
|
|
||||||
return {
|
|
||||||
content: "plan cannot run: missing ---SOLVE_ISSUE_REPO--- or ---SOLVE_ISSUE_PARSE--- in thread",
|
|
||||||
meta: { ready: false },
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
const innerRole = createRole(
|
|
||||||
adapter,
|
|
||||||
async (innerCtx: ThreadContext) =>
|
|
||||||
buildPlanPrompt({
|
|
||||||
threadId: innerCtx.start.meta.threadId,
|
|
||||||
nerveRoot,
|
|
||||||
}),
|
|
||||||
planMetaSchema,
|
|
||||||
extract,
|
|
||||||
);
|
|
||||||
|
|
||||||
const innerCtx: ThreadContext = {
|
|
||||||
...ctx,
|
|
||||||
start: {
|
|
||||||
...ctx.start,
|
|
||||||
meta: { ...ctx.start.meta, workdir: cwd },
|
|
||||||
},
|
|
||||||
};
|
|
||||||
try {
|
|
||||||
return await innerRole(innerCtx);
|
|
||||||
} catch (e) {
|
|
||||||
const msg = e instanceof Error ? e.message : String(e);
|
|
||||||
return {
|
|
||||||
content: `plan failed: ${msg}`,
|
|
||||||
meta: { ready: false },
|
|
||||||
};
|
|
||||||
}
|
|
||||||
};
|
|
||||||
}
|
|
||||||
58
workflows/solve-issue/roles/plan/index.ts
Normal file
58
workflows/solve-issue/roles/plan/index.ts
Normal file
@ -0,0 +1,58 @@
|
|||||||
|
import type { AgentFn, Role, RoleResult, StartStep, WorkflowMessage } from "@uncaged/nerve-core";
|
||||||
|
import type { LlmExtractorConfig } from "@uncaged/nerve-workflow-utils";
|
||||||
|
import { createRole } from "@uncaged/nerve-workflow-utils";
|
||||||
|
import { z } from "zod";
|
||||||
|
|
||||||
|
import { resolveRepoCwd } from "../../lib/repo-context.js";
|
||||||
|
import { buildPlanPrompt } from "./prompt.js";
|
||||||
|
|
||||||
|
export const planMetaSchema = z.object({
|
||||||
|
ready: z.boolean().describe("true if plan is clear and actionable"),
|
||||||
|
});
|
||||||
|
export type PlanMeta = z.infer<typeof planMetaSchema>;
|
||||||
|
|
||||||
|
export type CreatePlanRoleDeps = {
|
||||||
|
extract: LlmExtractorConfig;
|
||||||
|
nerveRoot: string;
|
||||||
|
};
|
||||||
|
|
||||||
|
export function createPlanRole(
|
||||||
|
adapter: AgentFn,
|
||||||
|
{ extract, nerveRoot }: CreatePlanRoleDeps,
|
||||||
|
): Role<PlanMeta> {
|
||||||
|
return async (start: StartStep, messages: WorkflowMessage[]): Promise<RoleResult<PlanMeta>> => {
|
||||||
|
const cwd = resolveRepoCwd(messages);
|
||||||
|
if (cwd === null) {
|
||||||
|
return {
|
||||||
|
content: "plan cannot run: missing ---SOLVE_ISSUE_REPO--- or ---SOLVE_ISSUE_PARSE--- in thread",
|
||||||
|
meta: { ready: false },
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
const innerRole = createRole(
|
||||||
|
adapter,
|
||||||
|
async (innerStart: StartStep) =>
|
||||||
|
buildPlanPrompt({
|
||||||
|
threadId: innerStart.meta.threadId,
|
||||||
|
nerveRoot,
|
||||||
|
}),
|
||||||
|
planMetaSchema,
|
||||||
|
extract,
|
||||||
|
);
|
||||||
|
|
||||||
|
const innerStart = {
|
||||||
|
...start,
|
||||||
|
meta: { ...start.meta, workdir: cwd },
|
||||||
|
} as StartStep;
|
||||||
|
|
||||||
|
try {
|
||||||
|
return await innerRole(innerStart, messages);
|
||||||
|
} catch (e) {
|
||||||
|
const msg = e instanceof Error ? e.message : String(e);
|
||||||
|
return {
|
||||||
|
content: `plan failed: ${msg}`,
|
||||||
|
meta: { ready: false },
|
||||||
|
};
|
||||||
|
}
|
||||||
|
};
|
||||||
|
}
|
||||||
27
workflows/solve-issue/roles/plan/prompt.ts
Normal file
27
workflows/solve-issue/roles/plan/prompt.ts
Normal file
@ -0,0 +1,27 @@
|
|||||||
|
export function buildPlanPrompt({ threadId, nerveRoot }: { threadId: string; nerveRoot: string }): string {
|
||||||
|
return `You are the **plan** agent (analysis only — ask mode). You produce an implementation plan for fixing the issue.
|
||||||
|
|
||||||
|
Read workflow context: \`nerve thread show ${threadId}\`
|
||||||
|
|
||||||
|
Read Nerve workspace conventions (coding rules for agents): \`cat ${nerveRoot}/CONVENTIONS.md\`
|
||||||
|
|
||||||
|
In the **target repository** (your cwd), skim relevant files and read \`CONVENTIONS.md\` **if it exists** there.
|
||||||
|
|
||||||
|
## Output
|
||||||
|
|
||||||
|
Write an implementation plan in **markdown** with:
|
||||||
|
|
||||||
|
1. Problem understanding
|
||||||
|
2. Change strategy
|
||||||
|
3. Target files (paths)
|
||||||
|
4. **Test commands** to run (explicit shell commands, e.g. \`pnpm test\`, \`pnpm vitest run\`)
|
||||||
|
5. Risks
|
||||||
|
|
||||||
|
End your reply with a JSON code block (meta signal):
|
||||||
|
\`\`\`json
|
||||||
|
{ "ready": true }
|
||||||
|
\`\`\`
|
||||||
|
Use \`{ "ready": false }\` if the plan cannot be made actionable.
|
||||||
|
|
||||||
|
**ready=true** only when the plan is clear and actionable.`;
|
||||||
|
}
|
||||||
20
workflows/solve-issue/roles/prepare/index.ts
Normal file
20
workflows/solve-issue/roles/prepare/index.ts
Normal file
@ -0,0 +1,20 @@
|
|||||||
|
import type { AgentFn, Role, StartStep } from "@uncaged/nerve-core";
|
||||||
|
import type { LlmExtractorConfig } from "@uncaged/nerve-workflow-utils";
|
||||||
|
import { createRole } from "@uncaged/nerve-workflow-utils";
|
||||||
|
import { z } from "zod";
|
||||||
|
|
||||||
|
import { preparePrompt } from "./prompt.js";
|
||||||
|
|
||||||
|
export const prepareMetaSchema = z.object({
|
||||||
|
ready: z.boolean().describe("true if repo is ready and baseline build ok"),
|
||||||
|
});
|
||||||
|
export type PrepareMeta = z.infer<typeof prepareMetaSchema>;
|
||||||
|
|
||||||
|
export function createPrepareRole(adapter: AgentFn, extract: LlmExtractorConfig): Role<PrepareMeta> {
|
||||||
|
return createRole(
|
||||||
|
adapter,
|
||||||
|
async (start: StartStep) => preparePrompt({ threadId: start.meta.threadId }),
|
||||||
|
prepareMetaSchema,
|
||||||
|
extract,
|
||||||
|
);
|
||||||
|
}
|
||||||
@ -1,9 +1,4 @@
|
|||||||
import type { AgentFn, Role, ThreadContext } from "@uncaged/nerve-core";
|
export function preparePrompt({ threadId }: { threadId: string }): string {
|
||||||
import type { LlmExtractorConfig } from "@uncaged/nerve-workflow-utils";
|
|
||||||
import { createRole } from "@uncaged/nerve-workflow-utils";
|
|
||||||
import { z } from "zod";
|
|
||||||
|
|
||||||
function preparePrompt({ threadId }: { threadId: string }): string {
|
|
||||||
return `You are the **prepare** agent. You ensure the target repository is ready for work.
|
return `You are the **prepare** agent. You ensure the target repository is ready for work.
|
||||||
|
|
||||||
Read prior messages / thread for issue markers: \`nerve thread show ${threadId}\`
|
Read prior messages / thread for issue markers: \`nerve thread show ${threadId}\`
|
||||||
@ -57,17 +52,3 @@ or \`{ "ready": false }\` if the repo is invalid, or install/build baseline fail
|
|||||||
|
|
||||||
**ready=true** only when the repo exists at \`path\`, is clean, dependencies installed, and baseline build succeeded (or no build script).`;
|
**ready=true** only when the repo exists at \`path\`, is clean, dependencies installed, and baseline build succeeded (or no build script).`;
|
||||||
}
|
}
|
||||||
|
|
||||||
export const prepareMetaSchema = z.object({
|
|
||||||
ready: z.boolean().describe("true if repo is ready and baseline build ok"),
|
|
||||||
});
|
|
||||||
export type PrepareMeta = z.infer<typeof prepareMetaSchema>;
|
|
||||||
|
|
||||||
export function createPrepareRole(adapter: AgentFn, extract: LlmExtractorConfig): Role<PrepareMeta> {
|
|
||||||
return createRole(
|
|
||||||
adapter,
|
|
||||||
async (ctx: ThreadContext) => preparePrompt({ threadId: ctx.start.meta.threadId }),
|
|
||||||
prepareMetaSchema,
|
|
||||||
extract,
|
|
||||||
);
|
|
||||||
}
|
|
||||||
66
workflows/solve-issue/roles/publish/index.ts
Normal file
66
workflows/solve-issue/roles/publish/index.ts
Normal file
@ -0,0 +1,66 @@
|
|||||||
|
import { mkdirSync, writeFileSync } from "node:fs";
|
||||||
|
import { join } from "node:path";
|
||||||
|
import type { AgentFn, Role, RoleResult, StartStep, WorkflowMessage } from "@uncaged/nerve-core";
|
||||||
|
import type { LlmExtractorConfig } from "@uncaged/nerve-workflow-utils";
|
||||||
|
import { createRole, isDryRun } from "@uncaged/nerve-workflow-utils";
|
||||||
|
import { z } from "zod";
|
||||||
|
|
||||||
|
import { buildPublishPrompt } from "./prompt.js";
|
||||||
|
|
||||||
|
export const publishMetaSchema = z.object({
|
||||||
|
success: z.boolean().describe("true if git push and tea pr create both succeeded"),
|
||||||
|
});
|
||||||
|
export type PublishMeta = z.infer<typeof publishMetaSchema>;
|
||||||
|
|
||||||
|
export type CreatePublishRoleDeps = {
|
||||||
|
extract: LlmExtractorConfig;
|
||||||
|
nerveRoot: string;
|
||||||
|
};
|
||||||
|
|
||||||
|
function logPath(nerveRoot: string): string {
|
||||||
|
return join(nerveRoot, "logs", `solve-issue-publish-${Date.now()}.log`);
|
||||||
|
}
|
||||||
|
|
||||||
|
export function createPublishRole(
|
||||||
|
adapter: AgentFn,
|
||||||
|
{ extract, nerveRoot }: CreatePublishRoleDeps,
|
||||||
|
): Role<PublishMeta> {
|
||||||
|
const innerRole = createRole(
|
||||||
|
adapter,
|
||||||
|
async (start: StartStep) =>
|
||||||
|
buildPublishPrompt({ threadId: start.meta.threadId, nerveRoot }),
|
||||||
|
publishMetaSchema,
|
||||||
|
extract,
|
||||||
|
);
|
||||||
|
|
||||||
|
return async (start: StartStep, messages: WorkflowMessage[]): Promise<RoleResult<PublishMeta>> => {
|
||||||
|
const file = logPath(nerveRoot);
|
||||||
|
mkdirSync(join(file, ".."), { recursive: true });
|
||||||
|
|
||||||
|
if (isDryRun(start)) {
|
||||||
|
const msg = "[dry-run] publish skipped (no git push / PR)";
|
||||||
|
writeFileSync(file, `${msg}\n`, "utf-8");
|
||||||
|
return {
|
||||||
|
content: `[dry-run] publish skipped — log: ${file}`,
|
||||||
|
meta: { success: true },
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
const innerStart = {
|
||||||
|
...start,
|
||||||
|
meta: { ...start.meta, workdir: nerveRoot },
|
||||||
|
} as StartStep;
|
||||||
|
|
||||||
|
try {
|
||||||
|
return await innerRole(innerStart, messages);
|
||||||
|
} catch (e) {
|
||||||
|
const msg = e instanceof Error ? e.message : String(e);
|
||||||
|
const body = `publish failed: ${msg}\n`;
|
||||||
|
writeFileSync(file, body, "utf-8");
|
||||||
|
return {
|
||||||
|
content: `publish failed: ${msg}\nLog: ${file}`,
|
||||||
|
meta: { success: false },
|
||||||
|
};
|
||||||
|
}
|
||||||
|
};
|
||||||
|
}
|
||||||
@ -1,11 +1,4 @@
|
|||||||
import { mkdirSync, writeFileSync } from "node:fs";
|
export function buildPublishPrompt({ threadId, nerveRoot }: { threadId: string; nerveRoot: string }): string {
|
||||||
import { join } from "node:path";
|
|
||||||
import type { AgentFn, Role, RoleResult, ThreadContext } from "@uncaged/nerve-core";
|
|
||||||
import type { LlmExtractorConfig } from "@uncaged/nerve-workflow-utils";
|
|
||||||
import { createRole, isDryRun } from "@uncaged/nerve-workflow-utils";
|
|
||||||
import { z } from "zod";
|
|
||||||
|
|
||||||
function buildPublishPrompt({ threadId, nerveRoot }: { threadId: string; nerveRoot: string }): string {
|
|
||||||
return `You are the **publish** agent (Hermes). Test has passed. Open a pull request for the current branch using the **tea** CLI.
|
return `You are the **publish** agent (Hermes). Test has passed. Open a pull request for the current branch using the **tea** CLI.
|
||||||
|
|
||||||
## Context
|
## Context
|
||||||
@ -47,64 +40,3 @@ or
|
|||||||
{ "success": false }
|
{ "success": false }
|
||||||
\`\`\``;
|
\`\`\``;
|
||||||
}
|
}
|
||||||
|
|
||||||
export const publishMetaSchema = z.object({
|
|
||||||
success: z.boolean().describe("true if git push and tea pr create both succeeded"),
|
|
||||||
});
|
|
||||||
export type PublishMeta = z.infer<typeof publishMetaSchema>;
|
|
||||||
|
|
||||||
export type CreatePublishRoleDeps = {
|
|
||||||
extract: LlmExtractorConfig;
|
|
||||||
nerveRoot: string;
|
|
||||||
};
|
|
||||||
|
|
||||||
function logPath(nerveRoot: string): string {
|
|
||||||
return join(nerveRoot, "logs", `solve-issue-publish-${Date.now()}.log`);
|
|
||||||
}
|
|
||||||
|
|
||||||
export function createPublishRole(
|
|
||||||
adapter: AgentFn,
|
|
||||||
{ extract, nerveRoot }: CreatePublishRoleDeps,
|
|
||||||
): Role<PublishMeta> {
|
|
||||||
const innerRole = createRole(
|
|
||||||
adapter,
|
|
||||||
async (ctx: ThreadContext) =>
|
|
||||||
buildPublishPrompt({ threadId: ctx.start.meta.threadId, nerveRoot }),
|
|
||||||
publishMetaSchema,
|
|
||||||
extract,
|
|
||||||
);
|
|
||||||
|
|
||||||
return async (ctx: ThreadContext): Promise<RoleResult<PublishMeta>> => {
|
|
||||||
const file = logPath(nerveRoot);
|
|
||||||
mkdirSync(join(file, ".."), { recursive: true });
|
|
||||||
|
|
||||||
if (isDryRun(ctx.start)) {
|
|
||||||
const msg = "[dry-run] publish skipped (no git push / PR)";
|
|
||||||
writeFileSync(file, `${msg}\n`, "utf-8");
|
|
||||||
return {
|
|
||||||
content: `[dry-run] publish skipped — log: ${file}`,
|
|
||||||
meta: { success: true },
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
const innerCtx: ThreadContext = {
|
|
||||||
...ctx,
|
|
||||||
start: {
|
|
||||||
...ctx.start,
|
|
||||||
meta: { ...ctx.start.meta, workdir: nerveRoot },
|
|
||||||
},
|
|
||||||
};
|
|
||||||
|
|
||||||
try {
|
|
||||||
return await innerRole(innerCtx);
|
|
||||||
} catch (e) {
|
|
||||||
const msg = e instanceof Error ? e.message : String(e);
|
|
||||||
const body = `publish failed: ${msg}\n`;
|
|
||||||
writeFileSync(file, body, "utf-8");
|
|
||||||
return {
|
|
||||||
content: `publish failed: ${msg}\nLog: ${file}`,
|
|
||||||
meta: { success: false },
|
|
||||||
};
|
|
||||||
}
|
|
||||||
};
|
|
||||||
}
|
|
||||||
20
workflows/solve-issue/roles/read-issue/index.ts
Normal file
20
workflows/solve-issue/roles/read-issue/index.ts
Normal file
@ -0,0 +1,20 @@
|
|||||||
|
import type { AgentFn, Role, StartStep } from "@uncaged/nerve-core";
|
||||||
|
import type { LlmExtractorConfig } from "@uncaged/nerve-workflow-utils";
|
||||||
|
import { createRole } from "@uncaged/nerve-workflow-utils";
|
||||||
|
import { z } from "zod";
|
||||||
|
|
||||||
|
import { readIssuePrompt } from "./prompt.js";
|
||||||
|
|
||||||
|
export const readIssueMetaSchema = z.object({
|
||||||
|
ready: z.boolean().describe("true if issue content was fetched and markers are present"),
|
||||||
|
});
|
||||||
|
export type ReadIssueMeta = z.infer<typeof readIssueMetaSchema>;
|
||||||
|
|
||||||
|
export function createReadIssueRole(adapter: AgentFn, extract: LlmExtractorConfig): Role<ReadIssueMeta> {
|
||||||
|
return createRole(
|
||||||
|
adapter,
|
||||||
|
async (start: StartStep) => readIssuePrompt({ threadId: start.meta.threadId }),
|
||||||
|
readIssueMetaSchema,
|
||||||
|
extract,
|
||||||
|
);
|
||||||
|
}
|
||||||
@ -1,9 +1,4 @@
|
|||||||
import type { AgentFn, Role, ThreadContext } from "@uncaged/nerve-core";
|
export function readIssuePrompt({ threadId }: { threadId: string }): string {
|
||||||
import type { LlmExtractorConfig } from "@uncaged/nerve-workflow-utils";
|
|
||||||
import { createRole } from "@uncaged/nerve-workflow-utils";
|
|
||||||
import { z } from "zod";
|
|
||||||
|
|
||||||
function readIssuePrompt({ threadId }: { threadId: string }): string {
|
|
||||||
return `You are the **read-issue** agent. You fetch Gitea issue content via the \`tea\` CLI.
|
return `You are the **read-issue** agent. You fetch Gitea issue content via the \`tea\` CLI.
|
||||||
|
|
||||||
Read the workflow thread start prompt for the issue URL (same run): \`nerve thread show ${threadId}\`
|
Read the workflow thread start prompt for the issue URL (same run): \`nerve thread show ${threadId}\`
|
||||||
@ -37,17 +32,3 @@ Use \`{ "ready": false }\` if you could not fetch or parse the issue.
|
|||||||
|
|
||||||
**ready=true** only if the issue was fetched successfully and the marker block is correct.`;
|
**ready=true** only if the issue was fetched successfully and the marker block is correct.`;
|
||||||
}
|
}
|
||||||
|
|
||||||
export const readIssueMetaSchema = z.object({
|
|
||||||
ready: z.boolean().describe("true if issue content was fetched and markers are present"),
|
|
||||||
});
|
|
||||||
export type ReadIssueMeta = z.infer<typeof readIssueMetaSchema>;
|
|
||||||
|
|
||||||
export function createReadIssueRole(adapter: AgentFn, extract: LlmExtractorConfig): Role<ReadIssueMeta> {
|
|
||||||
return createRole(
|
|
||||||
adapter,
|
|
||||||
async (ctx: ThreadContext) => readIssuePrompt({ threadId: ctx.start.meta.threadId }),
|
|
||||||
readIssueMetaSchema,
|
|
||||||
extract,
|
|
||||||
);
|
|
||||||
}
|
|
||||||
25
workflows/solve-issue/roles/review/index.ts
Normal file
25
workflows/solve-issue/roles/review/index.ts
Normal file
@ -0,0 +1,25 @@
|
|||||||
|
import type { AgentFn, Role, StartStep } from "@uncaged/nerve-core";
|
||||||
|
import type { LlmExtractorConfig } from "@uncaged/nerve-workflow-utils";
|
||||||
|
import { createRole } from "@uncaged/nerve-workflow-utils";
|
||||||
|
import { z } from "zod";
|
||||||
|
|
||||||
|
import { reviewPrompt } from "./prompt.js";
|
||||||
|
|
||||||
|
export const reviewMetaSchema = z.object({
|
||||||
|
approved: z.boolean().describe("true if diff is clean and ready for tests"),
|
||||||
|
});
|
||||||
|
export type ReviewMeta = z.infer<typeof reviewMetaSchema>;
|
||||||
|
|
||||||
|
export function createReviewRole(
|
||||||
|
adapter: AgentFn,
|
||||||
|
extract: LlmExtractorConfig,
|
||||||
|
nerveRoot: string,
|
||||||
|
): Role<ReviewMeta> {
|
||||||
|
return createRole(
|
||||||
|
adapter,
|
||||||
|
async (start: StartStep) =>
|
||||||
|
reviewPrompt({ threadId: start.meta.threadId, nerveRoot }),
|
||||||
|
reviewMetaSchema,
|
||||||
|
extract,
|
||||||
|
);
|
||||||
|
}
|
||||||
@ -1,9 +1,4 @@
|
|||||||
import type { AgentFn, Role, ThreadContext } from "@uncaged/nerve-core";
|
export function reviewPrompt({ threadId, nerveRoot }: { threadId: string; nerveRoot: string }): string {
|
||||||
import type { LlmExtractorConfig } from "@uncaged/nerve-workflow-utils";
|
|
||||||
import { createRole } from "@uncaged/nerve-workflow-utils";
|
|
||||||
import { z } from "zod";
|
|
||||||
|
|
||||||
function reviewPrompt({ threadId, nerveRoot }: { threadId: string; nerveRoot: string }): string {
|
|
||||||
return `You are a **code reviewer** (Hermes). You run after implement and before test.
|
return `You are a **code reviewer** (Hermes). You run after implement and before test.
|
||||||
|
|
||||||
Read Nerve workspace conventions: \`cat ${nerveRoot}/CONVENTIONS.md\`
|
Read Nerve workspace conventions: \`cat ${nerveRoot}/CONVENTIONS.md\`
|
||||||
@ -38,22 +33,3 @@ or
|
|||||||
{ "approved": false }
|
{ "approved": false }
|
||||||
\`\`\``;
|
\`\`\``;
|
||||||
}
|
}
|
||||||
|
|
||||||
export const reviewMetaSchema = z.object({
|
|
||||||
approved: z.boolean().describe("true if diff is clean and ready for tests"),
|
|
||||||
});
|
|
||||||
export type ReviewMeta = z.infer<typeof reviewMetaSchema>;
|
|
||||||
|
|
||||||
export function createReviewRole(
|
|
||||||
adapter: AgentFn,
|
|
||||||
extract: LlmExtractorConfig,
|
|
||||||
nerveRoot: string,
|
|
||||||
): Role<ReviewMeta> {
|
|
||||||
return createRole(
|
|
||||||
adapter,
|
|
||||||
async (ctx: ThreadContext) =>
|
|
||||||
reviewPrompt({ threadId: ctx.start.meta.threadId, nerveRoot }),
|
|
||||||
reviewMetaSchema,
|
|
||||||
extract,
|
|
||||||
);
|
|
||||||
}
|
|
||||||
20
workflows/solve-issue/roles/test/index.ts
Normal file
20
workflows/solve-issue/roles/test/index.ts
Normal file
@ -0,0 +1,20 @@
|
|||||||
|
import type { AgentFn, Role, StartStep } from "@uncaged/nerve-core";
|
||||||
|
import type { LlmExtractorConfig } from "@uncaged/nerve-workflow-utils";
|
||||||
|
import { createRole } from "@uncaged/nerve-workflow-utils";
|
||||||
|
import { z } from "zod";
|
||||||
|
|
||||||
|
import { testPrompt } from "./prompt.js";
|
||||||
|
|
||||||
|
export const testMetaSchema = z.object({
|
||||||
|
passed: z.boolean().describe("true if all test commands passed"),
|
||||||
|
});
|
||||||
|
export type TestMeta = z.infer<typeof testMetaSchema>;
|
||||||
|
|
||||||
|
export function createTestRole(adapter: AgentFn, extract: LlmExtractorConfig): Role<TestMeta> {
|
||||||
|
return createRole(
|
||||||
|
adapter,
|
||||||
|
async (start: StartStep) => testPrompt({ threadId: start.meta.threadId }),
|
||||||
|
testMetaSchema,
|
||||||
|
extract,
|
||||||
|
);
|
||||||
|
}
|
||||||
@ -1,9 +1,4 @@
|
|||||||
import type { AgentFn, Role, ThreadContext } from "@uncaged/nerve-core";
|
export function testPrompt({ threadId }: { threadId: string }): string {
|
||||||
import type { LlmExtractorConfig } from "@uncaged/nerve-workflow-utils";
|
|
||||||
import { createRole } from "@uncaged/nerve-workflow-utils";
|
|
||||||
import { z } from "zod";
|
|
||||||
|
|
||||||
function testPrompt({ threadId }: { threadId: string }): string {
|
|
||||||
return `You are the **test** agent (Hermes). You execute automated tests for the change.
|
return `You are the **test** agent (Hermes). You execute automated tests for the change.
|
||||||
|
|
||||||
Read workflow context: \`nerve thread show ${threadId}\`
|
Read workflow context: \`nerve thread show ${threadId}\`
|
||||||
@ -24,17 +19,3 @@ or \`{ "passed": false }\`
|
|||||||
|
|
||||||
**passed=true** only if every executed command exited 0 (or skip was justified with no failing command).`;
|
**passed=true** only if every executed command exited 0 (or skip was justified with no failing command).`;
|
||||||
}
|
}
|
||||||
|
|
||||||
export const testMetaSchema = z.object({
|
|
||||||
passed: z.boolean().describe("true if all test commands passed"),
|
|
||||||
});
|
|
||||||
export type TestMeta = z.infer<typeof testMetaSchema>;
|
|
||||||
|
|
||||||
export function createTestRole(adapter: AgentFn, extract: LlmExtractorConfig): Role<TestMeta> {
|
|
||||||
return createRole(
|
|
||||||
adapter,
|
|
||||||
async (ctx: ThreadContext) => testPrompt({ threadId: ctx.start.meta.threadId }),
|
|
||||||
testMetaSchema,
|
|
||||||
extract,
|
|
||||||
);
|
|
||||||
}
|
|
||||||
13
workflows/solve-issue/tsconfig.json
Normal file
13
workflows/solve-issue/tsconfig.json
Normal file
@ -0,0 +1,13 @@
|
|||||||
|
{
|
||||||
|
"compilerOptions": {
|
||||||
|
"target": "ES2022",
|
||||||
|
"lib": ["ES2022"],
|
||||||
|
"module": "NodeNext",
|
||||||
|
"moduleResolution": "NodeNext",
|
||||||
|
"strict": true,
|
||||||
|
"skipLibCheck": true,
|
||||||
|
"noEmit": true,
|
||||||
|
"types": ["node"]
|
||||||
|
},
|
||||||
|
"include": ["./**/*.ts"]
|
||||||
|
}
|
||||||
Loading…
x
Reference in New Issue
Block a user