Mirror of https://github.com/garrytan/gstack.git (synced 2026-05-08 13:39:45 +08:00)
feat: lib/gstack-memory-helpers shared module for V1 memory ingest pipeline
Lane 0 foundation per plan §"Eng review additions". Five public functions, imported by the V1 helpers (Lanes A/B/C):

- canonicalizeRemote(url) — normalize a git remote → host/org/repo
- secretScanFile(path) — gitleaks wrapper with a discriminated return
- detectEngineTier() — cached 60s in ~/.gstack/.gbrain-engine-cache.json
- parseSkillManifest(path) — extract gbrain.context_queries: from frontmatter
- withErrorContext(op, fn, caller) — async-aware error logging

22 unit tests, all passing. State files use schema_version: 1 plus a last_writer field per the Section 2A standardization. The manifest parser handles all three kinds (vector/list/filesystem) and ignores incomplete items.

Co-Authored-By: Claude Opus 4.7 (1M context) <noreply@anthropic.com>
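
For reviewers: a minimal sketch of how a Lane script might compose these helpers. Illustrative only, not part of the commit: the transcript path, op name, and ingest logic below are placeholders, and the real Lane A entry point is bin/gstack-memory-ingest.ts.

import { execSync } from "child_process";
import {
  canonicalizeRemote,
  detectEngineTier,
  secretScanFile,
  withErrorContext,
} from "./lib/gstack-memory-helpers";

await withErrorContext("example-ingest", async () => {
  const remote = execSync("git remote get-url origin", { encoding: "utf-8" });
  const repoSlug = canonicalizeRemote(remote);   // e.g. "github.com/garrytan/gstack"
  const engine = detectEngineTier();             // cached for 60s
  const scan = secretScanFile("transcript.md");  // placeholder input file
  if (scan.findings.length > 0) {
    console.error(`refusing to ingest: ${scan.findings.length} secret finding(s)`);
    return;
  }
  console.log(`would ingest for ${repoSlug} into the ${engine.engine} engine`);
}, "example-sketch");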
lib/gstack-memory-helpers.ts (new file, 411 lines)
@@ -0,0 +1,411 @@
/**
 * gstack-memory-helpers — shared helpers for the V1 memory ingest + retrieval pipeline.
 *
 * Imported by:
 *   - bin/gstack-memory-ingest.ts       (Lane A)
 *   - bin/gstack-gbrain-sync.ts         (Lane B)
 *   - bin/gstack-brain-context-load.ts  (Lane C)
 *   - scripts/gen-skill-docs.ts         (manifest validation)
 *
 * Design refs in the plan:
 *   §"Eng review additions"         — DRY refactor (Section 1A)
 *   §"V1 final scope clarification" — schema_version: 1 standardization (Section 2A)
 *   ED1 — engine-tier cache lives in ~/.gstack/.gbrain-engine-cache.json (60s TTL)
 *
 * NOTE: secretScanFile() currently shells out to `gitleaks` from PATH; the vendored
 * binary install is part of Lane E (setup-gbrain). When gitleaks is missing, the
 * helper warns once and returns an empty findings list — fail-safe defaults.
 */

import { existsSync, readFileSync, writeFileSync, mkdirSync, statSync, appendFileSync } from "fs";
import { dirname, join } from "path";
import { execSync, execFileSync } from "child_process";
import { homedir } from "os";

// ── Types ──────────────────────────────────────────────────────────────────

export interface SecretFinding {
  rule_id: string;
  description: string;
  line: number;
  redacted_match: string;
}

export interface SecretScanResult {
  scanned: boolean;
  findings: SecretFinding[];
  scanner: "gitleaks" | "missing" | "error";
}

export type EngineTier = "pglite" | "supabase" | "unknown";

export interface EngineDetect {
  engine: EngineTier;
  supabase_url?: string;
  detected_at: number;
  schema_version: 1;
}

export interface GbrainManifestQuery {
  id: string;
  kind: "vector" | "list" | "filesystem";
  render_as: string;
  // kind=vector
  query?: string;
  // kind=list
  filter?: Record<string, unknown>;
  sort?: string;
  // kind=filesystem
  glob?: string;
  tail?: number;
  // common
  limit?: number;
}

export interface GbrainManifest {
  schema: number; // gbrain.schema in frontmatter; V1 = 1
  context_queries: GbrainManifestQuery[];
}

export interface ErrorContextEntry {
  ts: string;
  op: string;
  duration_ms: number;
  outcome: "ok" | "error";
  error?: string;
  schema_version: 1;
  last_writer: string;
}

// ── Public: canonicalizeRemote ────────────────────────────────────────────

/**
 * Normalize a git remote URL to a canonical form: `host/org/repo` (no scheme,
 * no trailing `.git`). Used as the dedup key for cross-Mac transcript routing
 * (per ED1 — gbrain-side session_id dedup uses repo as a tag).
 *
 * Examples:
 *   https://github.com/garrytan/gstack.git → github.com/garrytan/gstack
 *   git@github.com:garrytan/gstack.git     → github.com/garrytan/gstack
 *   ssh://git@gitlab.com/foo/bar           → gitlab.com/foo/bar
 *   (empty / null)                         → ""
 */
export function canonicalizeRemote(url: string | null | undefined): string {
  if (!url) return "";
  let s = url.trim();
  if (!s) return "";
  // strip surrounding quotes that some configs add
  s = s.replace(/^['"]|['"]$/g, "");
  // git@host:path/repo → host/path/repo
  const scpMatch = s.match(/^[^@\s]+@([^:]+):(.+)$/);
  if (scpMatch) {
    s = `${scpMatch[1]}/${scpMatch[2]}`;
  } else {
    // strip scheme (https://, ssh://, git://, http://)
    s = s.replace(/^[a-z][a-z0-9+.-]*:\/\//i, "");
    // strip user@ prefix on URL-style remotes
    s = s.replace(/^[^@\/]+@/, "");
  }
  // strip trailing .git
  s = s.replace(/\.git$/i, "");
  // strip trailing slash
  s = s.replace(/\/+$/, "");
  // collapse multiple slashes (after path normalization)
  s = s.replace(/\/{2,}/g, "/");
  return s.toLowerCase();
}

// ── Public: secretScanFile (gitleaks wrapper) ─────────────────────────────

let _gitleaksAvailability: boolean | null = null;

function gitleaksAvailable(): boolean {
  if (_gitleaksAvailability !== null) return _gitleaksAvailability;
  try {
    execSync("command -v gitleaks", { stdio: "ignore" });
    _gitleaksAvailability = true;
  } catch {
    _gitleaksAvailability = false;
    // Only warn once per process — Lane E will vendor the binary.
    process.stderr.write(
      "[gstack-memory-helpers] gitleaks not in PATH; secret scanning disabled. " +
        "Run /setup-gbrain to install (or `brew install gitleaks`).\n"
    );
  }
  return _gitleaksAvailability;
}

/**
 * Scan a file for embedded secrets using gitleaks. Returns findings list
 * (empty if clean). When gitleaks is not in PATH, returns scanned=false with
 * scanner="missing" — caller decides whether to skip the file or proceed.
 *
 * Per D19: gitleaks runs at ingest time before any put_page / put_file write.
 * Replaces the inadequate regex scanner in bin/gstack-brain-sync (which only
 * applies to staged git diffs).
 */
export function secretScanFile(path: string): SecretScanResult {
  if (!existsSync(path)) {
    return { scanned: false, findings: [], scanner: "error" };
  }
  if (!gitleaksAvailable()) {
    return { scanned: false, findings: [], scanner: "missing" };
  }
  try {
    // gitleaks detect --no-git --source <path> --report-format json --report-path /dev/stdout --exit-code 0
    // --exit-code 0 keeps the exit status at 0 even when findings exist, so findings are read from the
    // JSON report rather than inferred from the exit code; a non-zero status means the invocation failed.
    const out = execFileSync(
      "gitleaks",
      ["detect", "--no-git", "--source", path, "--report-format", "json", "--report-path", "/dev/stdout", "--exit-code", "0"],
      { encoding: "utf-8", maxBuffer: 16 * 1024 * 1024 }
    );
    const trimmed = out.trim();
    if (!trimmed) return { scanned: true, findings: [], scanner: "gitleaks" };
    const parsed = JSON.parse(trimmed) as Array<{
      RuleID: string;
      Description: string;
      StartLine: number;
      Match?: string;
      Secret?: string;
    }>;
    const findings: SecretFinding[] = (parsed || []).map((f) => ({
      rule_id: f.RuleID || "unknown",
      description: f.Description || "",
      line: f.StartLine || 0,
      redacted_match: redactMatch(f.Secret || f.Match || ""),
    }));
    return { scanned: true, findings, scanner: "gitleaks" };
  } catch (err) {
    return {
      scanned: false,
      findings: [],
      scanner: "error",
    };
  }
}
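
// Editor's note (illustrative, not part of this commit): a caller-side gating sketch
// for the D19 flow described above. `putPage` is a hypothetical gbrain write.
//
//   const scan = secretScanFile(transcriptPath);
//   if (scan.scanner === "missing") console.warn("gitleaks unavailable; ingesting unscanned");
//   if (scan.findings.length > 0) {
//     console.error(`blocked: ${scan.findings.map((f) => f.rule_id).join(", ")}`);
//   } else {
//     await putPage(transcriptPath);
//   }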

function redactMatch(s: string): string {
  if (!s) return "";
  if (s.length <= 8) return "[REDACTED]";
  return `${s.slice(0, 4)}...${s.slice(-4)}`;
}

// ── Public: detectEngineTier (cached) ─────────────────────────────────────

const ENGINE_CACHE_TTL_MS = 60 * 1000;

function gstackHome(): string {
  return process.env.GSTACK_HOME || join(homedir(), ".gstack");
}

function engineCachePath(): string {
  return join(gstackHome(), ".gbrain-engine-cache.json");
}

function errorLogPath(): string {
  return join(gstackHome(), ".gbrain-errors.jsonl");
}

/**
 * Detect which gbrain engine is active (PGLite vs Supabase) and cache the
 * answer for 60s in ~/.gstack/.gbrain-engine-cache.json. Caching avoids
 * fork+exec'ing `gbrain doctor --json` on every skill start.
 *
 * Per ED1 (state files local-only): this cache is gitignored from the brain
 * repo. Per Section 2A: schema_version: 1 + last_writer field for forensic
 * tracing.
 */
export function detectEngineTier(): EngineDetect {
  // Try cache first
  if (existsSync(engineCachePath())) {
    try {
      const stat = statSync(engineCachePath());
      const ageMs = Date.now() - stat.mtimeMs;
      if (ageMs < ENGINE_CACHE_TTL_MS) {
        const cached = JSON.parse(readFileSync(engineCachePath(), "utf-8")) as EngineDetect;
        if (cached.schema_version === 1) return cached;
      }
    } catch {
      // Cache corrupt; fall through to fresh detect.
    }
  }

  const fresh = freshDetectEngineTier();
  try {
    mkdirSync(dirname(engineCachePath()), { recursive: true });
    writeFileSync(
      engineCachePath(),
      JSON.stringify({ ...fresh, last_writer: "gstack-memory-helpers.detectEngineTier" }, null, 2),
      "utf-8"
    );
  } catch {
    // Cache write failure is non-fatal.
  }
  return fresh;
}
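
// Editor's note (illustrative, not part of this commit): the cache file written above
// has the following shape; the values shown are placeholders.
//
//   {
//     "engine": "pglite",
//     "detected_at": 1730000000000,
//     "schema_version": 1,
//     "last_writer": "gstack-memory-helpers.detectEngineTier"
//   }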

function freshDetectEngineTier(): EngineDetect {
  const now = Date.now();
  try {
    const out = execSync("gbrain doctor --json --fast 2>/dev/null", { encoding: "utf-8", timeout: 5000 });
    const parsed = JSON.parse(out);
    const engine: EngineTier = parsed?.engine === "supabase" ? "supabase" : parsed?.engine === "pglite" ? "pglite" : "unknown";
    return {
      engine,
      supabase_url: parsed?.supabase_url || undefined,
      detected_at: now,
      schema_version: 1,
    };
  } catch {
    return { engine: "unknown", detected_at: now, schema_version: 1 };
  }
}

// ── Public: parseSkillManifest ────────────────────────────────────────────

/**
 * Parse the `gbrain:` section out of a SKILL.md.tmpl frontmatter block.
 * Returns null if no manifest is declared OR if the file has no frontmatter.
 *
 * Schema validation (full kind/required-fields check) lives in
 * scripts/gen-skill-docs.ts and runs at generation time. This parser is the
 * runtime read path used by gstack-brain-context-load; it tolerates extra
 * fields and relies on validation having already happened upstream.
 */
export function parseSkillManifest(skillFilePath: string): GbrainManifest | null {
  if (!existsSync(skillFilePath)) return null;
  const content = readFileSync(skillFilePath, "utf-8");
  const frontmatter = extractFrontmatter(content);
  if (!frontmatter) return null;
  const gbrain = extractGbrainBlock(frontmatter);
  if (!gbrain) return null;
  return gbrain;
}
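
// Editor's note (illustrative, not part of this commit): given frontmatter such as
//
//   gbrain:
//     schema: 1
//     context_queries:
//       - id: prior-sessions
//         kind: vector
//         query: "office-hours sessions for {repo_slug}"
//         limit: 5
//         render_as: "## Prior office-hours sessions in this repo"
//
// parseSkillManifest returns { schema: 1, context_queries: [{ id: "prior-sessions",
// kind: "vector", query: "...", limit: 5, render_as: "..." }] }. See the multi-kind
// fixture in test/gstack-memory-helpers.test.ts for a fuller example.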

function extractFrontmatter(content: string): string | null {
  // Handles YAML frontmatter (`---\n...\n---`) only; TOML-style `+++` fences are not parsed.
  const yamlMatch = content.match(/^---\s*\n([\s\S]*?)\n---\s*\n/);
  if (yamlMatch) return yamlMatch[1];
  return null;
}

function extractGbrainBlock(frontmatter: string): GbrainManifest | null {
  // Naive YAML extraction — finds the `gbrain:` key and parses its sub-tree.
  // Real YAML parsing avoided to keep zero-deps; gen-skill-docs validates the
  // shape strictly at build time.
  const lines = frontmatter.split("\n");
  const start = lines.findIndex((l) => /^gbrain\s*:/.test(l));
  if (start === -1) return null;

  // Collect indented lines under `gbrain:` until next top-level key or EOF
  const block: string[] = [];
  for (let i = start + 1; i < lines.length; i++) {
    const line = lines[i];
    if (/^[A-Za-z_][A-Za-z0-9_-]*\s*:/.test(line)) break; // next top-level key
    block.push(line);
  }

  const text = block.join("\n");
  // Extract schema number
  const schemaMatch = text.match(/\n\s*schema\s*:\s*(\d+)/);
  const schema = schemaMatch ? parseInt(schemaMatch[1], 10) : 1;

  // Extract context_queries items
  const queries: GbrainManifestQuery[] = [];
  const cqMatch = text.match(/\n\s*context_queries\s*:\s*\n([\s\S]+)/);
  if (cqMatch) {
    const cqText = cqMatch[1];
    // Split using a positive lookahead so each chunk begins with the list-item dash.
    // Pattern: line starting with 4-6 spaces + "-" + whitespace.
    const rawItems = cqText.split(/(?=^[ ]{4,6}-\s)/m);
    const items = rawItems.filter((s) => /^[ ]{4,6}-\s/.test(s));
    for (const item of items) {
      const q: Partial<GbrainManifestQuery> = {};
      // Strip the leading list-item marker so id/kind/etc. regexes can use line-start.
      const body = item.replace(/^[ ]{4,6}-\s+/, " ");
      const idM = body.match(/(?:^|\n)\s*id\s*:\s*([^\n]+)/);
      const kindM = body.match(/(?:^|\n)\s*kind\s*:\s*([^\n]+)/);
      const renderM = body.match(/(?:^|\n)\s*render_as\s*:\s*"?([^"\n]+?)"?\s*$/m);
      const queryM = body.match(/(?:^|\n)\s*query\s*:\s*"?([^"\n]+?)"?\s*$/m);
      const limitM = body.match(/(?:^|\n)\s*limit\s*:\s*(\d+)/);
      const globM = body.match(/(?:^|\n)\s*glob\s*:\s*"?([^"\n]+?)"?\s*$/m);
      const sortM = body.match(/(?:^|\n)\s*sort\s*:\s*([^\n]+)/);
      const tailM = body.match(/(?:^|\n)\s*tail\s*:\s*(\d+)/);

      if (idM) q.id = idM[1].trim();
      if (kindM) {
        const k = kindM[1].trim();
        if (k === "vector" || k === "list" || k === "filesystem") q.kind = k;
      }
      if (renderM) q.render_as = renderM[1].trim();
      if (queryM) q.query = queryM[1].trim();
      if (limitM) q.limit = parseInt(limitM[1], 10);
      if (globM) q.glob = globM[1].trim();
      if (sortM) q.sort = sortM[1].trim();
      if (tailM) q.tail = parseInt(tailM[1], 10);

      if (q.id && q.kind && q.render_as) {
        queries.push(q as GbrainManifestQuery);
      }
    }
  }

  return { schema, context_queries: queries };
}

// ── Public: withErrorContext ──────────────────────────────────────────────

const ERROR_LOG_PATH = join(gstackHome(), ".gbrain-errors.jsonl");

/**
 * Wrap an op with structured error logging. Logs success/failure + duration
 * to ~/.gstack/.gbrain-errors.jsonl for forensic debugging. Replaces ad-hoc
 * try/catch sites across the three Bun helpers (Section 2B).
 *
 * On error: the error is RE-THROWN after logging — caller still owns flow.
 */
export async function withErrorContext<T>(
  op: string,
  fn: () => T | Promise<T>,
  caller: string = "unknown"
): Promise<T> {
  const t0 = Date.now();
  try {
    const result = await fn();
    logErrorContext({
      ts: new Date().toISOString(),
      op,
      duration_ms: Date.now() - t0,
      outcome: "ok",
      schema_version: 1,
      last_writer: caller,
    });
    return result;
  } catch (err) {
    logErrorContext({
      ts: new Date().toISOString(),
      op,
      duration_ms: Date.now() - t0,
      outcome: "error",
      error: err instanceof Error ? err.message : String(err),
      schema_version: 1,
      last_writer: caller,
    });
    throw err;
  }
}
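
// Editor's note (illustrative, not part of this commit); mirrors the unit tests.
// ingestOneTranscript is a hypothetical Lane A op.
//
//   const result = await withErrorContext("ingest-transcript", async () => {
//     return await ingestOneTranscript(path);
//   }, "gstack-memory-ingest");
//
// On failure the error is rethrown after an "error" entry is appended to
// ~/.gstack/.gbrain-errors.jsonl.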

function logErrorContext(entry: ErrorContextEntry): void {
  try {
    const path = errorLogPath();
    mkdirSync(dirname(path), { recursive: true });
    appendFileSync(path, JSON.stringify(entry) + "\n", "utf-8");
  } catch {
    // Logging failure is non-fatal — never block the op.
  }
}

// Test-only export for resetting the gitleaks availability cache between tests.
export function _resetGitleaksAvailabilityCache(): void {
  _gitleaksAvailability = null;
}
test/gstack-memory-helpers.test.ts (new file, 310 lines)
@@ -0,0 +1,310 @@
/**
 * Unit tests for lib/gstack-memory-helpers.ts (Lane 0 foundation).
 *
 * Covers the public surface used by Lanes A, B, C:
 *   - canonicalizeRemote: 8 cases across https/ssh/git@/.git/empty
 *   - secretScanFile: missing-file handling + gitleaks-missing fallback (env-dependent)
 *   - parseSkillManifest: valid manifest + missing manifest + multi-kind
 *   - withErrorContext: success path + error path + log writing
 *   - detectEngineTier: cache TTL + fresh-detect fallback
 *
 * Free-tier (~50ms total). Runs in `bun test`.
 */

import { describe, it, expect, beforeEach, afterAll } from "bun:test";
import { mkdtempSync, writeFileSync, readFileSync, existsSync, rmSync, mkdirSync } from "fs";
import { tmpdir } from "os";
import { join } from "path";

import {
  canonicalizeRemote,
  secretScanFile,
  parseSkillManifest,
  withErrorContext,
  detectEngineTier,
  _resetGitleaksAvailabilityCache,
} from "../lib/gstack-memory-helpers";

// ── canonicalizeRemote ─────────────────────────────────────────────────────

describe("canonicalizeRemote", () => {
  it("strips https scheme and .git suffix", () => {
    expect(canonicalizeRemote("https://github.com/garrytan/gstack.git")).toBe("github.com/garrytan/gstack");
  });

  it("normalizes git@host:path scp-style remotes", () => {
    expect(canonicalizeRemote("git@github.com:garrytan/gstack.git")).toBe("github.com/garrytan/gstack");
  });

  it("strips ssh:// scheme", () => {
    expect(canonicalizeRemote("ssh://git@gitlab.com/foo/bar")).toBe("gitlab.com/foo/bar");
  });

  it("returns empty string for null/undefined/empty input", () => {
    expect(canonicalizeRemote("")).toBe("");
    expect(canonicalizeRemote(null)).toBe("");
    expect(canonicalizeRemote(undefined)).toBe("");
  });

  it("strips surrounding quotes", () => {
    expect(canonicalizeRemote(`"https://github.com/foo/bar.git"`)).toBe("github.com/foo/bar");
  });

  it("strips trailing slashes", () => {
    expect(canonicalizeRemote("https://github.com/foo/bar/")).toBe("github.com/foo/bar");
  });

  it("lowercases the result", () => {
    expect(canonicalizeRemote("https://GitHub.com/Foo/Bar.git")).toBe("github.com/foo/bar");
  });

  it("handles paths with multiple segments", () => {
    expect(canonicalizeRemote("https://gitlab.example.com/group/subgroup/project.git")).toBe(
      "gitlab.example.com/group/subgroup/project"
    );
  });

  it("collapses redundant slashes", () => {
    expect(canonicalizeRemote("https://github.com//foo//bar")).toBe("github.com/foo/bar");
  });
});

// ── secretScanFile ─────────────────────────────────────────────────────────

describe("secretScanFile", () => {
  beforeEach(() => {
    _resetGitleaksAvailabilityCache();
  });

  it("returns scanner=error for non-existent file", () => {
    const result = secretScanFile("/nonexistent/path/that/does/not/exist");
    expect(result.scanned).toBe(false);
    expect(result.scanner).toBe("error");
    expect(result.findings).toEqual([]);
  });

  it("returns scanner=missing or runs gitleaks (env-dependent)", () => {
    // We can't assume gitleaks is installed in CI; we just verify the shape.
    const dir = mkdtempSync(join(tmpdir(), "gstack-test-"));
    const file = join(dir, "clean.txt");
    writeFileSync(file, "no secrets here\n");
    const result = secretScanFile(file);
    expect(["gitleaks", "missing", "error"]).toContain(result.scanner);
    if (result.scanner === "gitleaks") {
      // Clean file should produce no findings
      expect(result.findings).toEqual([]);
    }
    rmSync(dir, { recursive: true, force: true });
  });
});

// ── parseSkillManifest ─────────────────────────────────────────────────────

describe("parseSkillManifest", () => {
  it("returns null for non-existent file", () => {
    expect(parseSkillManifest("/nonexistent/skill.md")).toBeNull();
  });

  it("returns null for file without frontmatter", () => {
    const dir = mkdtempSync(join(tmpdir(), "gstack-test-"));
    const file = join(dir, "no-fm.md");
    writeFileSync(file, "# Just a heading\n\nbody text\n");
    expect(parseSkillManifest(file)).toBeNull();
    rmSync(dir, { recursive: true, force: true });
  });

  it("returns null when frontmatter has no gbrain: key", () => {
    const dir = mkdtempSync(join(tmpdir(), "gstack-test-"));
    const file = join(dir, "no-gbrain.md");
    writeFileSync(file, `---\nname: foo\ndescription: bar\n---\n\nbody\n`);
    expect(parseSkillManifest(file)).toBeNull();
    rmSync(dir, { recursive: true, force: true });
  });

  it("parses a multi-kind manifest correctly", () => {
    const dir = mkdtempSync(join(tmpdir(), "gstack-test-"));
    const file = join(dir, "multi.md");
    writeFileSync(
      file,
      `---
name: office-hours
description: YC Office Hours
gbrain:
  schema: 1
  context_queries:
    - id: prior-sessions
      kind: vector
      query: "office-hours sessions for {repo_slug}"
      limit: 5
      render_as: "## Prior office-hours sessions in this repo"
    - id: builder-profile
      kind: filesystem
      glob: "~/.gstack/builder-profile.jsonl"
      tail: 1
      render_as: "## Your builder profile snapshot"
    - id: prior-assignments
      kind: list
      sort: created_at_desc
      limit: 5
      render_as: "## Open assignments from past sessions"
triggers:
  - office-hours
---

body
`
    );

    const m = parseSkillManifest(file);
    expect(m).not.toBeNull();
    expect(m!.schema).toBe(1);
    expect(m!.context_queries).toHaveLength(3);

    const ids = m!.context_queries.map((q) => q.id);
    expect(ids).toEqual(["prior-sessions", "builder-profile", "prior-assignments"]);

    const kinds = m!.context_queries.map((q) => q.kind);
    expect(kinds).toEqual(["vector", "filesystem", "list"]);

    expect(m!.context_queries[0].query).toBe("office-hours sessions for {repo_slug}");
    expect(m!.context_queries[0].limit).toBe(5);
    expect(m!.context_queries[1].glob).toBe("~/.gstack/builder-profile.jsonl");
    expect(m!.context_queries[1].tail).toBe(1);
    expect(m!.context_queries[2].sort).toBe("created_at_desc");

    rmSync(dir, { recursive: true, force: true });
  });

  it("ignores incomplete query items (missing kind)", () => {
    const dir = mkdtempSync(join(tmpdir(), "gstack-test-"));
    const file = join(dir, "incomplete.md");
    writeFileSync(
      file,
      `---
name: bad
gbrain:
  schema: 1
  context_queries:
    - id: missing-kind
      render_as: "## Should be skipped"
    - id: complete
      kind: vector
      query: "x"
      render_as: "## OK"
---

body
`
    );

    const m = parseSkillManifest(file);
    expect(m).not.toBeNull();
    expect(m!.context_queries).toHaveLength(1);
    expect(m!.context_queries[0].id).toBe("complete");
    rmSync(dir, { recursive: true, force: true });
  });
});

// ── withErrorContext ───────────────────────────────────────────────────────

describe("withErrorContext", () => {
  let savedHome: string | undefined;
  let testHome: string;

  beforeEach(() => {
    savedHome = process.env.GSTACK_HOME;
    testHome = mkdtempSync(join(tmpdir(), "gstack-test-home-"));
    process.env.GSTACK_HOME = testHome;
  });

  afterAll(() => {
    if (savedHome === undefined) delete process.env.GSTACK_HOME;
    else process.env.GSTACK_HOME = savedHome;
  });

  it("returns the value on success and writes an ok entry", async () => {
    const result = await withErrorContext("test-op-success", () => 42, "test-caller");
    expect(result).toBe(42);

    const log = readFileSync(join(testHome, ".gbrain-errors.jsonl"), "utf-8");
    const entry = JSON.parse(log.trim().split("\n").pop()!);
    expect(entry.op).toBe("test-op-success");
    expect(entry.outcome).toBe("ok");
    expect(entry.schema_version).toBe(1);
    expect(entry.last_writer).toBe("test-caller");
    expect(typeof entry.duration_ms).toBe("number");
    expect(entry.duration_ms).toBeGreaterThanOrEqual(0);
  });

  it("rethrows the error on failure and writes an error entry", async () => {
    let caught: unknown = null;
    try {
      await withErrorContext("test-op-fail", () => {
        throw new Error("boom");
      }, "test-caller");
    } catch (e) {
      caught = e;
    }
    expect(caught).toBeInstanceOf(Error);
    expect((caught as Error).message).toBe("boom");

    const log = readFileSync(join(testHome, ".gbrain-errors.jsonl"), "utf-8");
    const entry = JSON.parse(log.trim().split("\n").pop()!);
    expect(entry.op).toBe("test-op-fail");
    expect(entry.outcome).toBe("error");
    expect(entry.error).toBe("boom");
  });

  it("supports async functions", async () => {
    const result = await withErrorContext(
      "async-op",
      async () => {
        await new Promise((r) => setTimeout(r, 5));
        return "done";
      },
      "test-caller"
    );
    expect(result).toBe("done");
  });
});

// ── detectEngineTier ───────────────────────────────────────────────────────

describe("detectEngineTier", () => {
  let savedHome: string | undefined;
  let testHome: string;

  beforeEach(() => {
    savedHome = process.env.GSTACK_HOME;
    testHome = mkdtempSync(join(tmpdir(), "gstack-test-engine-"));
    process.env.GSTACK_HOME = testHome;
  });

  afterAll(() => {
    if (savedHome === undefined) delete process.env.GSTACK_HOME;
    else process.env.GSTACK_HOME = savedHome;
  });

  it("returns a valid EngineDetect shape (engine, detected_at, schema_version)", () => {
    const result = detectEngineTier();
    expect(["pglite", "supabase", "unknown"]).toContain(result.engine);
    expect(result.schema_version).toBe(1);
    expect(typeof result.detected_at).toBe("number");
    expect(result.detected_at).toBeGreaterThan(0);
  });

  it("writes a cache file at ~/.gstack/.gbrain-engine-cache.json", () => {
    detectEngineTier();
    const cachePath = join(testHome, ".gbrain-engine-cache.json");
    expect(existsSync(cachePath)).toBe(true);
    const cached = JSON.parse(readFileSync(cachePath, "utf-8"));
    expect(cached.schema_version).toBe(1);
    expect(cached.last_writer).toBe("gstack-memory-helpers.detectEngineTier");
  });

  it("returns the cached value on second call within TTL", () => {
    const first = detectEngineTier();
    const second = detectEngineTier();
    expect(second.detected_at).toBe(first.detected_at);
  });
});