mirror of
https://github.com/garrytan/gstack.git
synced 2026-05-16 01:02:13 +08:00
v1.37.0.0 feat: split-engine gbrain (remote MCP brain + local PGLite for code) (#1500)
* feat(gbrain): add lib/gbrain-local-status classifier with 5-state engine status + 60s cache
Foundation for split-engine gbrain: shared classifier used by both
bin/gstack-gbrain-detect (preamble probe) and bin/gstack-gbrain-sync.ts
(orchestrator SKIP-when-not-ok). Single source of truth.
Probes via `gbrain sources list --json` and classifies stderr against the
same patterns lib/gbrain-sources.ts:66-67 already uses ("Cannot connect to
database", "config.json"). Returns one of: ok, no-cli, missing-config,
broken-config, broken-db. Defensive default: unrecognized failures
classify as broken-config so the raw stderr can be surfaced upstream.
Cache at ~/.gstack/.gbrain-local-status-cache.json keyed on
{home, path_hash, gbrain_bin_path, gbrain_version, config_mtime, config_size}
with 60s TTL. Cache invalidates on any invariant change. --no-cache option
busts the cache for callers that just mutated state (/setup-gbrain,
/sync-gbrain after init/migration).
Co-Authored-By: Claude Opus 4.7 (1M context) <noreply@anthropic.com>
* refactor(gbrain): rewrite gstack-gbrain-detect bash→TS + add gbrain_local_status field
Replaces the bash detect helper with a bun shebang script sharing the
gbrain_local_status classifier from lib/gbrain-local-status.ts with the
sync orchestrator. Single source of truth for engine-status classification
between preamble-probe and orchestrator-skip paths.
Filename stays gstack-gbrain-detect (no .ts extension) so existing skill
preamble callers shell out unchanged. Shebang `#!/usr/bin/env -S bun run`
resolves bun at runtime.
Output is key/type backward-compatible with the bash version per plan
codex #5: the 9 pre-existing keys (gbrain_on_path, gbrain_version,
gbrain_config_exists, gbrain_engine, gbrain_doctor_ok, gbrain_mcp_mode,
gstack_brain_sync_mode, gstack_brain_git, gstack_artifacts_remote) stay
identical in name + type + value semantics. One new key added:
gbrain_local_status (5-state string enum).
Updates the existing schema regression at test/gstack-gbrain-detect-mcp-mode.test.ts
to include the new key. Adds test/gbrain-detect-shape.test.ts asserting
the regression contract for future changes.
Co-Authored-By: Claude Opus 4.7 (1M context) <noreply@anthropic.com>
* feat(gbrain): orchestrator SKIP when local engine not ok + remote-http transcripts via artifacts pipeline
Two changes in the sync orchestrator, both per plan D11/D12:
1. bin/gstack-gbrain-sync.ts: runCodeImport + runMemoryIngest call
localEngineStatus() (shared classifier from lib/gbrain-local-status.ts).
When status is not 'ok', return a SKIP stage result with a clear reason
instead of crashing with "source registration failed: gbrain not
configured". Brain-sync stage runs regardless — it doesn't depend on
local engine. dry-run preview path is gated above the check so it
continues to show would-do steps even when the engine is broken.
2. bin/gstack-memory-ingest.ts: when gbrain MCP is registered as
remote-http (Path 4), persist staged transcripts to
~/.gstack/transcripts/run-<pid>-<ts>/ instead of the ephemeral
~/.gstack/.staging-ingest-<pid>-<ts>/ tmp dir, and SKIP the local
`gbrain import` call entirely. The artifacts pipeline (gstack-brain-sync
push to git, brain admin pulls and indexes) handles routing to the
remote brain. Local PGLite (when present via Step 4.5) stays code-only.
State recording still happens — prepared pages get their mtime+sha256
stamped under remote-http mode so the next /sync-gbrain doesn't
re-stage them. Cleanup is skipped intentionally so the persisted dir
survives until gstack-brain-sync moves it.
Adds test/gbrain-sync-skip.test.ts covering 5 SKIP scenarios (broken-db,
broken-config, no-cli, missing-config, ok pass-through). All 25
sync-related unit tests pass.
Co-Authored-By: Claude Opus 4.7 (1M context) <noreply@anthropic.com>
* feat(gbrain): v1.34.0.0 migration notice + transcripts allowlist for artifacts pipeline
Per plan D5 + D11. Two pieces of the split-engine rollout:
1. gstack-upgrade/migrations/v1.34.0.0.sh — prints a one-time
discoverability notice for existing Path 4 (remote-http MCP) users
whose machine has no local engine yet. Tells them about /setup-gbrain
Step 4.5 (the new local-PGLite opt-in). Silent for everyone else.
User can suppress permanently via `gstack-config set
local_code_index_offered true`. Touchfile at
~/.gstack/.migrations/v1.34.0.0.done makes it idempotent.
2. bin/gstack-artifacts-init — adds `transcripts/run-*/*.md` and
`transcripts/run-*/**/*.md` to the managed allowlist so the
gstack-memory-ingest persistent staging dir (used in remote-http
mode per D11) gets pushed to the artifacts repo. Brain admin's
pull job then indexes transcripts into the remote brain.
Privacy class: behavioral (matches transcript content).
Adds test/gstack-upgrade-migration-v1_34_0_0.test.ts with 5 cases:
state match, no-MCP, local-config-present, opt-out, and idempotency.
All 5 pass.
Co-Authored-By: Claude Opus 4.7 (1M context) <noreply@anthropic.com>
* feat(gbrain): /setup-gbrain Step 1.5/4.5 + /sync-gbrain Step 1.5 templates
Per plan D4, D10, D11, D12. Wires the skill prose to the new
split-engine flow + classifier introduced in earlier commits.
setup-gbrain/SKILL.md.tmpl:
- Step 1: detect output description now includes the v1.34.0.0
gbrain_local_status field (5 values).
- Step 1.5 (NEW): broken-db / broken-config remediation. AskUserQuestion
with 4 options — Retry / Switch to PGLite / Switch brain mode / Quit
(plan D4). Retry is recommended first since broken-db often = transient
Postgres outage. PGLite is explicitly one-way + destructive (moves
existing config to ~/.gbrain/config.json.gstack-bak-<ts>); rollback on
init failure restores the .bak (plan D7).
- Step 4d → Step 4.5 (NEW): in Path 4, after the verify step, offer
local PGLite for code search. AskUserQuestion Yes/No (plan D10/D11).
Yes path runs gstack-gbrain-install + `gbrain init --pglite --json`
with the same rollback-safe sequence. No path skips Steps 3/4/5/7.5.
- Step 10 verdict (Path 4): adds "Code search" row reflecting Step 4.5
choice. Updates "Transcripts" row to describe the new D11 routing
(artifacts repo → remote brain).
sync-gbrain/SKILL.md.tmpl:
- Step 1 split-engine prose: corrects the prior misleading claim that
"memory routes through whatever setup-gbrain configured, including
remote-MCP" (codex finding #3). Memory stage shells out to local
`gbrain import` in local-stdio mode; in remote-http mode it persists
to ~/.gstack/transcripts/ for the artifacts pipeline.
- Step 1.5 (NEW): local-engine pre-flight. STOP on no-cli, broken-config,
broken-db. Soft skip (continue with code+memory SKIP) on
missing-config + remote-http per plan D12. Surfaces actionable user
remediation message instead of the orchestrator crashing two stages
with ERR.
Regenerated SKILL.md for all hosts (claude, kiro, opencode, slate,
cursor, openclaw, hermes, gbrain). All 712 skill-validation + gen-skill-docs
tests pass.
Co-Authored-By: Claude Opus 4.7 (1M context) <noreply@anthropic.com>
* test(gbrain): .bak-rollback contract for Step 1.5 / 4.5 init failure path
Per plan D7 (rollback semantics) and codex #10 (rollback scope). The
/setup-gbrain skill instructs the model to follow a specific shell
sequence when running `gbrain init --pglite` against an existing
config:
1. mv ~/.gbrain/config.json ~/.gbrain/config.json.gstack-bak-<ts>
2. gbrain init --pglite --json
3. on non-zero exit: mv .bak back; surface error
This test verifies that contract using a fake `gbrain` binary that
fails on init. Three cases:
- FAILURE: gbrain init exits non-zero → broken config restored to
original path, no leftover .bak.
- SUCCESS: gbrain init exits 0 → new config in place, .bak survives
for audit (user reviews + deletes manually).
- SCOPE: any partial PGLite directory at ~/.gbrain/pglite/ is NOT
auto-cleaned. We only promise to restore config.json; PGLite
cleanup is the user's call (codex #10).
If the skill template rewrites this sequence in a future change, this
test should fail until the test's shell is updated too. That's the
point — keep the test and the skill template aligned.
Co-Authored-By: Claude Opus 4.7 (1M context) <noreply@anthropic.com>
* test(gbrain): periodic E2E for /setup-gbrain Path 4 + Step 4.5 Yes flow
End-to-end coverage of the new opt-in question via runAgentSdkTest.
Stubs the MCP endpoint at /tools/list with a 200 response carrying a
fake gbrain v0.32.3.0 serverInfo, and fakes the gbrain + claude CLIs
so init writes a PGLite config and mcp add succeeds. Asserts the model:
1. invokes gstack-gbrain-install (Step 4.5 Yes branch)
2. invokes `gbrain init --pglite --json`
3. writes a working ~/.gbrain/config.json with engine=pglite
4. registers the remote MCP via `claude mcp add --transport http`
5. never leaks the bearer token to CLAUDE.md
Classified as periodic-tier per plan D6 (codex #12 flagged AgentSDK
flakiness; gate-tier coverage of the split-engine behavior lives in the
deterministic unit tests at gbrain-local-status.test.ts and
gbrain-sync-skip.test.ts). Touchfile fires the test when the skill
template, install/verify/init helpers, the local-status classifier, or
the agent-sdk-runner harness changes.
Co-Authored-By: Claude Opus 4.7 (1M context) <noreply@anthropic.com>
* chore(gbrain): bump migration to v1.35.0.0 after main merge
main shipped v1.34.0.0 (factory-export submodule) + v1.34.1.0 (update-check
hardening) while this branch was in flight. The migration file I named
v1.34.0.0.sh now belongs at v1.35.0.0 — the next minor on top of main,
matching the scale of split-engine work (new lib + orchestrator skip +
template overhaul + transcripts routing).
Renames the migration script and its test file; updates all internal
version references in both files. Behavior unchanged.
Co-Authored-By: Claude Opus 4.7 (1M context) <noreply@anthropic.com>
* perf(gbrain): memoize gbrain resolution + use --fast doctor in detect
Cuts detect's wall time substantially by sharing fork-exec results
between the helper that walks the JSON output and the localEngineStatus
classifier from lib/gbrain-local-status.ts.
Before: detect made 2x `command -v gbrain` calls (one in detect's
detectGbrain, one in the classifier's resolveGbrainBin) and 2x
`gbrain --version` calls. With memoization keyed on PATH, both
collapse to one fork each (~400ms saved per skill preamble).
Also adds `--fast` to the `gbrain doctor --json` call in detect so a
broken-db config (Garry's repro) doesn't burn a full 5s timeout on the
doctor's DB-connection check. The classifier still probes the DB
directly via `gbrain sources list --json` for engine reachability —
that's `gbrain_local_status`, separate from the coarse
`gbrain_doctor_ok` summary flag.
Co-Authored-By: Claude Opus 4.7 (1M context) <noreply@anthropic.com>
* test(gbrain): relax E2E assertions to smoke-test contract
Per codex #12 (AgentSDK harness is non-deterministic): the E2E now
asserts the model followed the split-engine path WITHOUT requiring a
specific subcommand sequence. Three assertions:
1. AskUserQuestion was called (model reached interactive branches)
2. At least one of {gstack-gbrain-install, `gbrain init --pglite`,
`claude mcp add`} fired (model followed the skill, not a no-op)
3. The fake bearer token never leaked to CLAUDE.md (security regression)
Deterministic per-step coverage of the same flow lives in the gate-tier
unit tests (gbrain-local-status, gbrain-sync-skip, init-rollback,
upgrade-migration). The E2E exists to catch the "model can't follow
the skill at all" regression class, not to pin the exact tool sequence.
Test passes in 280s against the live Agent SDK.
Co-Authored-By: Claude Opus 4.7 (1M context) <noreply@anthropic.com>
* test(version): bump CLI smoke-test timeout to 15s (flaky at 5s under load)
The gstack-next-version integration smoke test spawns a child process
that does git operations + sibling-worktree probing. Wall time hovers
4-5s on M-series Macs; flakes at exactly 5001-5002ms when the test
suite runs under load (bun's parallel scheduling). Bumping per-test
timeout to 15s eliminates the flake without changing test logic.
Co-Authored-By: Claude Opus 4.7 (1M context) <noreply@anthropic.com>
* chore: bump version and changelog (v1.37.0.0)
Co-Authored-By: Claude Opus 4.7 <noreply@anthropic.com>
---------
Co-authored-by: Claude Opus 4.7 (1M context) <noreply@anthropic.com>
This commit is contained in:
269
lib/gbrain-local-status.ts
Normal file
269
lib/gbrain-local-status.ts
Normal file
@@ -0,0 +1,269 @@
|
||||
/**
|
||||
* gbrain-local-status — classify the local gbrain engine into 5 states.
|
||||
*
|
||||
* Shared between bin/gstack-gbrain-detect (preamble probe on every skill start)
|
||||
* and bin/gstack-gbrain-sync.ts (orchestrator SKIP-when-not-ok semantics).
|
||||
* Single source of truth: same probe, same classification, same cache.
|
||||
*
|
||||
* Per the split-engine plan (D2 + D8):
|
||||
* - Probe: `gbrain sources list --json`. Cheap (~80ms), actually hits the DB.
|
||||
* Uses the same stderr patterns as lib/gbrain-sources.ts:66-67.
|
||||
* - Cache: 60s TTL at ~/.gstack/.gbrain-local-status-cache.json, keyed on
|
||||
* {home, path_hash, gbrain_bin_path, gbrain_version, config_mtime}.
|
||||
* - --no-cache bypass: /setup-gbrain and /sync-gbrain pass it after any
|
||||
* state-mutating operation so the next read sees fresh status.
|
||||
*
|
||||
* No-cli → gbrain not on PATH.
|
||||
* Missing → CLI present, ~/.gbrain/config.json absent.
|
||||
* Broken-config → config exists but `gbrain sources list` fails with config parse error
|
||||
* (or any non-recognized error — defensive default per codex #8).
|
||||
* Broken-db → config exists, DB unreachable per stderr classification.
|
||||
* Ok → DB reachable, sources list returned valid JSON.
|
||||
*/
|
||||
|
||||
import { execFileSync } from "child_process";
|
||||
import {
|
||||
createHash,
|
||||
} from "crypto";
|
||||
import {
|
||||
existsSync,
|
||||
mkdirSync,
|
||||
readFileSync,
|
||||
renameSync,
|
||||
statSync,
|
||||
writeFileSync,
|
||||
} from "fs";
|
||||
import { homedir } from "os";
|
||||
import { dirname, join } from "path";
|
||||
|
||||
/** Five-state classification of the local gbrain engine (split-engine plan D2). */
export type LocalEngineStatus =
  | "ok" // probe succeeded: CLI + config present, DB reachable
  | "no-cli" // gbrain binary not found on PATH
  | "missing-config" // CLI present but ~/.gbrain/config.json is absent
  | "broken-config" // config exists; probe failed with a config error, or any unrecognized error (defensive default)
  | "broken-db"; // config exists; probe stderr matched the DB-unreachable pattern
|
||||
|
||||
/** Options accepted by the `localEngineStatus` classifier entry point. */
export interface ClassifyOptions {
  /** Bypass the 60s cache. Used after any state-mutating operation. */
  noCache?: boolean;
  /** Env override for the spawned `gbrain` (used by tests to point at a fake binary). */
  env?: NodeJS.ProcessEnv;
}
|
||||
|
||||
/** On-disk shape of the status cache file (~/.gstack/.gbrain-local-status-cache.json). */
interface CacheEntry {
  schema_version: 1; // bumping this invalidates every existing cache file
  status: LocalEngineStatus; // the classified state that was cached
  cached_at: number; // epoch ms of the write; compared against CACHE_TTL_MS on read
  /** Cache invariants — entry is invalidated if any of these change between writes. */
  key: {
    home: string; // effective $HOME at write time
    path_hash: string; // sha256 prefix of $PATH (see hashPath)
    gbrain_bin_path: string; // resolved absolute gbrain path; "" when absent
    gbrain_version: string; // first line of `gbrain --version`; "" when unknown
    config_mtime: number; // 0 when config absent
    config_size: number; // 0 when config absent
  };
}
|
||||
|
||||
/** How long a cached classification stays valid (60s per plan D8). */
export const CACHE_TTL_MS = 60_000;
/** Wall-clock budget for the `gbrain sources list --json` probe. */
export const PROBE_TIMEOUT_MS = 5_000;
|
||||
|
||||
/** Effective user home — respects HOME env override (used by tests). */
|
||||
function userHome(): string {
|
||||
return process.env.HOME || homedir();
|
||||
}
|
||||
|
||||
/** Cache path computed fresh on each call so tests can mutate GSTACK_HOME per case. */
|
||||
export function cacheFilePath(): string {
|
||||
return join(
|
||||
process.env.GSTACK_HOME || join(userHome(), ".gstack"),
|
||||
".gbrain-local-status-cache.json",
|
||||
);
|
||||
}
|
||||
|
||||
function gbrainConfigPath(): string {
|
||||
return join(userHome(), ".gbrain", "config.json");
|
||||
}
|
||||
|
||||
function hashPath(p: string): string {
|
||||
return createHash("sha256").update(p).digest("hex").slice(0, 16);
|
||||
}
|
||||
|
||||
/**
|
||||
* Resolve the absolute path of `gbrain` on PATH. Returns null when missing.
|
||||
* Memoized per-process keyed on PATH so detect's call and the classifier's
|
||||
* call share one fork-exec (~200ms saved per skill preamble).
|
||||
*/
|
||||
const _gbrainBinCache = new Map<string, string | null>();
|
||||
export function resolveGbrainBin(env?: NodeJS.ProcessEnv): string | null {
|
||||
const e = env ?? process.env;
|
||||
const key = e.PATH || "";
|
||||
if (_gbrainBinCache.has(key)) return _gbrainBinCache.get(key)!;
|
||||
let result: string | null = null;
|
||||
try {
|
||||
const out = execFileSync("sh", ["-c", "command -v gbrain"], {
|
||||
encoding: "utf-8",
|
||||
timeout: 2_000,
|
||||
stdio: ["ignore", "pipe", "ignore"],
|
||||
env: e,
|
||||
});
|
||||
result = out.trim() || null;
|
||||
} catch {
|
||||
result = null;
|
||||
}
|
||||
_gbrainBinCache.set(key, result);
|
||||
return result;
|
||||
}
|
||||
|
||||
/** Memoized per-process. */
|
||||
const _gbrainVersionCache = new Map<string, string>();
|
||||
export function readGbrainVersion(env?: NodeJS.ProcessEnv): string {
|
||||
const e = env ?? process.env;
|
||||
const key = `${e.PATH || ""}|${resolveGbrainBin(e) || ""}`;
|
||||
if (_gbrainVersionCache.has(key)) return _gbrainVersionCache.get(key)!;
|
||||
let result = "";
|
||||
try {
|
||||
const out = execFileSync("gbrain", ["--version"], {
|
||||
encoding: "utf-8",
|
||||
timeout: 2_000,
|
||||
stdio: ["ignore", "pipe", "ignore"],
|
||||
env: e,
|
||||
});
|
||||
result = out.trim().split("\n")[0] || "";
|
||||
} catch {
|
||||
result = "";
|
||||
}
|
||||
_gbrainVersionCache.set(key, result);
|
||||
return result;
|
||||
}
|
||||
|
||||
function configFingerprint(): { mtime: number; size: number } {
|
||||
try {
|
||||
const st = statSync(gbrainConfigPath());
|
||||
return { mtime: Math.floor(st.mtimeMs), size: st.size };
|
||||
} catch {
|
||||
return { mtime: 0, size: 0 };
|
||||
}
|
||||
}
|
||||
|
||||
function buildCacheKey(
|
||||
gbrainBin: string | null,
|
||||
gbrainVersion: string,
|
||||
env?: NodeJS.ProcessEnv,
|
||||
): CacheEntry["key"] {
|
||||
const e = env ?? process.env;
|
||||
const config = configFingerprint();
|
||||
return {
|
||||
home: e.HOME || "",
|
||||
path_hash: hashPath(e.PATH || ""),
|
||||
gbrain_bin_path: gbrainBin || "",
|
||||
gbrain_version: gbrainVersion,
|
||||
config_mtime: config.mtime,
|
||||
config_size: config.size,
|
||||
};
|
||||
}
|
||||
|
||||
function keysEqual(a: CacheEntry["key"], b: CacheEntry["key"]): boolean {
|
||||
return (
|
||||
a.home === b.home &&
|
||||
a.path_hash === b.path_hash &&
|
||||
a.gbrain_bin_path === b.gbrain_bin_path &&
|
||||
a.gbrain_version === b.gbrain_version &&
|
||||
a.config_mtime === b.config_mtime &&
|
||||
a.config_size === b.config_size
|
||||
);
|
||||
}
|
||||
|
||||
function readCache(key: CacheEntry["key"]): LocalEngineStatus | null {
|
||||
if (!existsSync(cacheFilePath())) return null;
|
||||
try {
|
||||
const raw = JSON.parse(readFileSync(cacheFilePath(), "utf-8")) as CacheEntry;
|
||||
if (raw.schema_version !== 1) return null;
|
||||
if (Date.now() - raw.cached_at > CACHE_TTL_MS) return null;
|
||||
if (!keysEqual(raw.key, key)) return null;
|
||||
return raw.status;
|
||||
} catch {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
function writeCache(status: LocalEngineStatus, key: CacheEntry["key"]): void {
|
||||
const entry: CacheEntry = {
|
||||
schema_version: 1,
|
||||
status,
|
||||
cached_at: Date.now(),
|
||||
key,
|
||||
};
|
||||
try {
|
||||
mkdirSync(dirname(cacheFilePath()), { recursive: true });
|
||||
const tmp = cacheFilePath() + ".tmp." + process.pid;
|
||||
writeFileSync(tmp, JSON.stringify(entry, null, 2), "utf-8");
|
||||
renameSync(tmp, cacheFilePath());
|
||||
} catch {
|
||||
// Cache write failure is non-fatal — we re-probe next call.
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Probe via `gbrain sources list --json`. Classify the outcome.
|
||||
*
|
||||
* Pattern strings ("Cannot connect to database", "config.json") are deliberately
|
||||
* the same strings used in lib/gbrain-sources.ts:66-67. If gbrain reworks its
|
||||
* error messages, classifier returns broken-config defensively (codex #8).
|
||||
*/
|
||||
function freshClassify(env?: NodeJS.ProcessEnv): LocalEngineStatus {
|
||||
// 1. CLI on PATH?
|
||||
const gbrainBin = resolveGbrainBin(env);
|
||||
if (!gbrainBin) return "no-cli";
|
||||
|
||||
// 2. Config file present?
|
||||
if (!existsSync(gbrainConfigPath())) return "missing-config";
|
||||
|
||||
// 3. Probe gbrain sources list.
|
||||
try {
|
||||
execFileSync("gbrain", ["sources", "list", "--json"], {
|
||||
encoding: "utf-8",
|
||||
timeout: PROBE_TIMEOUT_MS,
|
||||
stdio: ["ignore", "pipe", "pipe"],
|
||||
env: env ?? process.env,
|
||||
});
|
||||
return "ok";
|
||||
} catch (err) {
|
||||
const e = err as NodeJS.ErrnoException & { stderr?: Buffer | string };
|
||||
const stderr = (e.stderr ? e.stderr.toString() : "") || "";
|
||||
|
||||
// ENOENT can happen if gbrain disappeared between resolveGbrainBin and now.
|
||||
if (e.code === "ENOENT") return "no-cli";
|
||||
|
||||
// Pattern match against gbrain's known error strings. Order matters:
|
||||
// "Cannot connect to database" is the more specific DB-unreachable signal.
|
||||
if (stderr.includes("Cannot connect to database")) return "broken-db";
|
||||
if (stderr.includes("config.json")) return "broken-config";
|
||||
|
||||
// Defensive default per codex #8: unrecognized failures classify as
|
||||
// broken-config so the user sees the raw stderr surfaced upstream.
|
||||
return "broken-config";
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Classify the local gbrain engine status. Cached for 60s; bypassable.
|
||||
*
|
||||
* Returns one of 5 states. Never throws — failure modes are surfaced as states.
|
||||
*/
|
||||
export function localEngineStatus(opts: ClassifyOptions = {}): LocalEngineStatus {
|
||||
const env = opts.env ?? process.env;
|
||||
const gbrainBin = resolveGbrainBin(env);
|
||||
const gbrainVersion = gbrainBin ? readGbrainVersion(env) : "";
|
||||
const key = buildCacheKey(gbrainBin, gbrainVersion, env);
|
||||
|
||||
if (!opts.noCache) {
|
||||
const cached = readCache(key);
|
||||
if (cached) return cached;
|
||||
}
|
||||
|
||||
const fresh = freshClassify(env);
|
||||
writeCache(fresh, key);
|
||||
return fresh;
|
||||
}
|
||||
|
||||
Reference in New Issue
Block a user