Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -6,3 +6,4 @@ dist/
.DS_Store
*.log
package-lock.json
.sisyphus/
7 changes: 7 additions & 0 deletions index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -137,6 +137,13 @@ const graphMemoryPlugin = {
? cfg.llm.apiKey // If apiKey set but no baseURL, assume Anthropic direct
: undefined;
const llm = createCompleteFn(provider, model, cfg.llm, anthropicApiKey);
// Surface OAuth misconfiguration at plugin startup rather than letting the
// first LLM call fail later: oauthPath is mandatory in OAuth mode (the
// completion path throws without it — see createCompleteFn in src/engine/llm.ts).
if (cfg.llm?.auth === "oauth") {
  if (!cfg.llm.oauthPath) {
    // Deliberately log-and-continue instead of throwing: the rest of the
    // plugin (db, recaller) can still initialize; only LLM calls will fail.
    api.logger.error("[graph-memory] OAuth mode enabled but llm.oauthPath is missing — LLM calls will fail");
  } else {
    api.logger.info("[graph-memory] OAuth mode enabled");
  }
}
const recaller = new Recaller(db, cfg);
const extractor = new Extractor(cfg, llm);

Expand Down
10 changes: 7 additions & 3 deletions openclaw.plugin.json
Original file line number Diff line number Diff line change
Expand Up @@ -60,9 +60,13 @@
"type": "object",
"description": "可选:LLM 配置。不配则用 OpenClaw 全局 provider",
"properties": {
"apiKey": { "type": "string" },
"baseURL": { "type": "string" },
"model": { "type": "string" }
"apiKey": { "type": "string", "description": "API Key(传统认证)" },
"baseURL": { "type": "string", "description": "API 地址" },
"model": { "type": "string", "description": "模型名称" },
"auth": { "type": "string", "enum": ["api-key", "oauth"], "default": "api-key", "description": "认证模式:api-key(默认)或 oauth" },
"oauthPath": { "type": "string", "description": "OAuth 会话文件路径(auth=oauth 时必填)" },
"oauthProvider": { "type": "string", "default": "openai-codex", "description": "OAuth 提供商标识" },
"timeoutMs": { "type": "integer", "default": 30000, "description": "请求超时(毫秒)" }
}
}
}
Expand Down
13 changes: 6 additions & 7 deletions package.json
Original file line number Diff line number Diff line change
Expand Up @@ -10,14 +10,13 @@
"test:watch": "vitest"
},
"dependencies": {
"@photostructure/sqlite": "^1.0.0",
"@sinclair/typebox": "^0.34.48",
"openai": "^4.47.0"
"@photostructure/sqlite": "^1.2.0",
"@sinclair/typebox": "^0.34.49"
},
"devDependencies": {
"@types/node": "^20.0.0",
"typescript": "^5.4.0",
"vitest": "^1.4.0"
"@types/node": "^22.19.17",
"typescript": "^5.9.0",
"vitest": "^4.1.4"
},
"peerDependencies": {
"openclaw": "*"
Expand All @@ -28,4 +27,4 @@
],
"hooks": {}
}
}
}
120 changes: 120 additions & 0 deletions src/engine/llm.ts
Original file line number Diff line number Diff line change
Expand Up @@ -10,14 +10,30 @@
*
* 路径 A:pluginConfig.llm 配置直接调 OpenAI 兼容 API
* 路径 B:直接调 Anthropic REST API(需 ANTHROPIC_API_KEY)
* 路径 C:OAuth Codex Responses API(需 llm.auth="oauth")
*
* 内置:429/5xx 重试 3 次 + 30s 超时
*/

import {
loadOAuthSession,
needsRefresh,
refreshOAuthSession,
saveOAuthSession,
normalizeOauthModel,
buildOauthEndpoint,
extractOutputTextFromSse,
} from "./oauth.js";
import type { OAuthSession } from "./oauth.js";

/**
 * Optional per-plugin LLM configuration (`pluginConfig.llm`).
 * Mirrors the `llm` object in openclaw.plugin.json's schema.
 */
export interface LlmConfig {
  /** API key for classic api-key auth (OpenAI-compatible or Anthropic paths). */
  apiKey?: string;
  /** API base URL; in OAuth mode it is also fed to buildOauthEndpoint. */
  baseURL?: string;
  /** Model name override; in OAuth mode it is normalized via normalizeOauthModel. */
  model?: string;
  /** Auth mode: "api-key" (schema default) or "oauth" (Codex Responses API path). */
  auth?: "api-key" | "oauth";
  /** Filesystem path of the OAuth session file — required when auth === "oauth". */
  oauthPath?: string;
  /** OAuth provider identifier (schema default "openai-codex"); passed to buildOauthEndpoint. */
  oauthProvider?: string;
  /** Per-request timeout in milliseconds; OAuth requests fall back to 30_000 when unset. */
  timeoutMs?: number;
}

/** Minimal completion contract: (system prompt, user prompt) → completion text. */
export type CompleteFn = (system: string, user: string) => Promise<string>;
Expand Down Expand Up @@ -52,7 +68,111 @@ export function createCompleteFn(
llmConfig?: LlmConfig,
anthropicApiKey?: string,
): CompleteFn {
// ── Pre-resolve OAuth config to avoid non-null assertions in hot path ──
const oauthPath = llmConfig?.auth === "oauth" ? llmConfig.oauthPath : undefined;
const oauthTimeout = llmConfig?.timeoutMs;

// ── OAuth session cache ───────────────────────────────────
let cachedSessionPromise: Promise<OAuthSession> | null = null;
let refreshPromise: Promise<OAuthSession> | null = null;

/**
 * Resolve a usable OAuth session, loading it from disk on first use and
 * transparently refreshing it when `needsRefresh` reports it stale.
 *
 * Concurrency notes:
 * - `cachedSessionPromise` memoizes the disk load so concurrent callers share
 *   one read; on load failure the slot is reset so a later call can retry.
 * - `refreshPromise` makes the refresh single-flight: while one refresh is in
 *   progress, every caller awaits the same promise instead of re-refreshing.
 *
 * @returns the current (possibly freshly refreshed) OAuth session.
 * @throws if OAuth mode is active without `llm.oauthPath`, or if the
 *         load/refresh/save steps reject.
 */
async function getOAuthSession(): Promise<OAuthSession> {
  // oauthPath was pre-resolved only when llm.auth === "oauth"; anything else
  // reaching this function is a configuration error.
  if (!oauthPath) {
    throw new Error("[graph-memory] OAuth mode requires llm.oauthPath");
  }
  if (!cachedSessionPromise) {
    cachedSessionPromise = loadOAuthSession(oauthPath).catch((error) => {
      // Clear the failed load so the next call retries instead of caching the rejection.
      cachedSessionPromise = null;
      throw error;
    });
  }
  let session = await cachedSessionPromise;
  if (needsRefresh(session)) {
    if (!refreshPromise) {
      refreshPromise = refreshOAuthSession(session, oauthTimeout)
        .then(async (s) => {
          // Persist the refreshed session before publishing it as the new cache,
          // then release the single-flight slot.
          await saveOAuthSession(oauthPath, s);
          cachedSessionPromise = Promise.resolve(s);
          refreshPromise = null;
          return s;
        })
        .catch((err) => {
          // Release the slot on failure so a subsequent call can attempt a new refresh.
          refreshPromise = null;
          throw err;
        });
    }
    session = await refreshPromise;
  }
  return session;
}

return async (system, user) => {
// ── 路径 C(OAuth):Codex Responses API ────────────────
if (llmConfig?.auth === "oauth") {
if (!llmConfig.oauthPath) {
throw new Error("[graph-memory] OAuth mode requires llm.oauthPath");
}
const session = await getOAuthSession();
const endpoint = buildOauthEndpoint(llmConfig.baseURL, llmConfig.oauthProvider);
const oauthModel = normalizeOauthModel(llmConfig.model ?? model);

const res = await fetchRetry(endpoint, {
method: "POST",
headers: {
"Authorization": `Bearer ${session.accessToken}`,
"Content-Type": "application/json",
"Accept": "text/event-stream",
"OpenAI-Beta": "responses=experimental",
"chatgpt-account-id": session.accountId,
"originator": "codex_cli_rs",
},
body: JSON.stringify({
model: oauthModel,
instructions: system.trim(),
input: [
{
role: "user",
content: [{ type: "input_text", text: user }],
},
],
store: false,
stream: false,
text: { format: { type: "text" } },
}),
}, 3, llmConfig.timeoutMs ?? 30_000);

if (!res.ok) {
const errText = await res.text().catch(() => "");
throw new Error(`[graph-memory] OAuth LLM API ${res.status}: ${errText.slice(0, 500)}`);
}

const bodyText = await res.text();

// Non-streaming: parse as JSON and extract output text
let text: string | null = null;
try {
const parsed = JSON.parse(bodyText) as Record<string, unknown>;
const output = Array.isArray(parsed.output) ? parsed.output : [];
for (const item of output) {
if (!item || typeof item !== "object") continue;
const content = Array.isArray((item as Record<string, unknown>).content)
? (item as Record<string, unknown>).content as Array<Record<string, unknown>>
: [];
for (const part of content) {
if (part?.type === "output_text" && typeof part.text === "string") {
text = (text ?? "") + part.text;
}
}
}
} catch {
// fallback: try SSE parsing in case server ignored stream:false
text = extractOutputTextFromSse(bodyText);
}

if (text) return text;
throw new Error("[graph-memory] OAuth LLM returned empty content");
}

// ── 路径 A(优先):pluginConfig.llm 直接调 OpenAI 兼容 API ──
if (llmConfig?.apiKey && llmConfig?.baseURL) {
const baseURL = llmConfig.baseURL.replace(/\/+$/, "");
Expand Down
Loading