- Migrate src/ → plugin/ (plugin/core/, plugin/web/, plugin/commands/)
and src/mcp/ → services/ per OpenClaw plugin dev spec
- Add Gemini CLI backend (plugin/core/gemini/sdk-adapter.ts) with GEMINI.md
system-prompt injection
- Inject bootstrap as stateless system prompt on every turn instead of
first turn only: Claude via --system-prompt, Gemini via workspace/GEMINI.md;
eliminates isFirstTurn branch, keeps skills in sync with OpenClaw snapshots
- Fix session-map-store defensive parsing (sessions ?? []) to handle bare {}
reset files without crashing on .find()
- Add docs/TEST_FLOW.md with E2E test scenarios and expected outcomes
- Add docs/claude/BRIDGE_MODEL_FINDINGS.md with contractor-probe results
Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
167 lines · 4.6 KiB · JavaScript
/**
 * contractor-probe sidecar server
 *
 * Acts as an OpenAI-compatible model provider for "contractor-probe-bridge".
 * Every request is logged in full to /tmp/contractor-probe-requests.jsonl so
 * we can inspect exactly what OpenClaw sends to a custom model provider.
 * The response echoes back a JSON summary of what was received.
 */
import http from "node:http";
import fs from "node:fs";

// TCP port the sidecar listens on; override with PROBE_PORT.
const PORT = Number(process.env.PROBE_PORT || 8799);
// JSONL sink receiving one full request record per line; override with PROBE_LOG.
const LOG_FILE = process.env.PROBE_LOG || "/tmp/contractor-probe-requests.jsonl";
// Model id advertised via GET /v1/models and used as the response-model fallback.
const MODEL_ID = "contractor-probe-bridge";
/**
 * Serialize `payload` as pretty-printed JSON and finish the response with it.
 *
 * @param {http.ServerResponse} res - response object to write to
 * @param {number} status - HTTP status code
 * @param {object} payload - JSON-serializable response body
 */
function sendJson(res, status, payload) {
  const serialized = JSON.stringify(payload, null, 2);
  const headers = {
    "Content-Type": "application/json; charset=utf-8",
    "Content-Length": Buffer.byteLength(serialized),
  };
  res.writeHead(status, headers);
  res.end(serialized);
}
/**
 * Append one request record to the JSONL log file.
 *
 * Logging failures are reported to stderr but never thrown: losing a log
 * line must not take down the probe.
 *
 * @param {object} entry - JSON-serializable record of a single request
 */
function logRequest(entry) {
  try {
    fs.appendFileSync(LOG_FILE, `${JSON.stringify(entry)}\n`);
  } catch (err) {
    console.error("[contractor-probe] failed to write log:", err);
  }
}
/**
 * Build a minimal OpenAI chat-completion payload whose single assistant
 * message echoes `summary` back to the caller.
 *
 * @param {object} reqBody - parsed request body; its `model` is reused when present
 * @param {object} summary - digest of the inbound request to echo
 * @returns {object} object shaped like an OpenAI `chat.completion` response
 */
function buildChatCompletionResponse(reqBody, summary) {
  const echoText = `[contractor-probe] Received request:\n${JSON.stringify(summary, null, 2)}`;
  const assistantChoice = {
    index: 0,
    message: {
      role: "assistant",
      content: echoText,
    },
    finish_reason: "stop",
  };
  return {
    id: `chatcmpl_probe_${Date.now()}`,
    object: "chat.completion",
    created: Math.floor(Date.now() / 1000),
    model: reqBody?.model || MODEL_ID,
    choices: [assistantChoice],
    usage: { prompt_tokens: 0, completion_tokens: 10, total_tokens: 10 },
  };
}
/**
 * Build a minimal OpenAI Responses-API payload whose single output message
 * echoes `summary` back to the caller.
 *
 * @param {object} reqBody - parsed request body; its `model` is reused when present
 * @param {object} summary - digest of the inbound request to echo
 * @returns {object} object shaped like an OpenAI `response` object
 */
function buildResponsesResponse(reqBody, summary) {
  const echoText = `[contractor-probe] Received request:\n${JSON.stringify(summary, null, 2)}`;
  const outputMessage = {
    type: "message",
    role: "assistant",
    content: [
      {
        type: "output_text",
        text: echoText,
      },
    ],
  };
  return {
    id: `resp_probe_${Date.now()}`,
    object: "response",
    created_at: Math.floor(Date.now() / 1000),
    model: reqBody?.model || MODEL_ID,
    output: [outputMessage],
    usage: { input_tokens: 0, output_tokens: 10, total_tokens: 10 },
  };
}
/**
 * Build the OpenAI-style model catalogue advertising the single probe model.
 *
 * @returns {object} `{ object: "list", data: [...] }` with one model entry
 */
function listModels() {
  const probeModel = {
    id: MODEL_ID,
    object: "model",
    created: Math.floor(Date.now() / 1000),
    owned_by: "contractor-probe",
  };
  return { object: "list", data: [probeModel] };
}
/**
 * HTTP request handler.
 *
 * Routes:
 *   GET  /health              – liveness probe
 *   GET  /v1/models           – advertise the probe model
 *   POST /v1/chat/completions – log + echo (chat-completions shape)
 *   POST /v1/responses        – log + echo (Responses-API shape)
 * Everything else gets a 404.
 */
const server = http.createServer((req, res) => {
  const url = req.url ?? "/";
  const method = req.method ?? "GET";

  if (method === "GET" && url === "/health") {
    return sendJson(res, 200, { ok: true, service: "contractor-probe", port: PORT });
  }

  if (method === "GET" && url === "/v1/models") {
    return sendJson(res, 200, listModels());
  }

  if (method !== "POST") {
    return sendJson(res, 404, { error: "not_found" });
  }

  let body = "";
  let tooLarge = false;

  // A destroyed/aborted request stream can emit 'error'; without a listener
  // that would be an uncaught exception and crash the whole probe.
  req.on("error", (err) => {
    console.error("[contractor-probe] request stream error:", err.message);
  });

  req.on("data", (chunk) => {
    if (tooLarge) return;
    body += chunk;
    if (body.length > 2_000_000) {
      // Previously this bare-destroyed the socket, leaving the client with
      // no response at all. Answer with 413 first, then stop the stream.
      tooLarge = true;
      sendJson(res, 413, { error: "payload_too_large" });
      req.destroy();
    }
  });

  req.on("end", () => {
    if (tooLarge) return; // already answered with 413

    let parsed = {};
    try {
      parsed = body ? JSON.parse(body) : {};
    } catch {
      return sendJson(res, 400, { error: "invalid_json" });
    }

    // Persist the full request (headers + body) for later inspection.
    const entry = {
      ts: new Date().toISOString(),
      method,
      url,
      headers: req.headers,
      body: parsed,
    };
    logRequest(entry);
    console.log(`[contractor-probe] ${method} ${url} — ${new Date().toISOString()}`);
    console.log(`[contractor-probe] messages count: ${parsed?.messages?.length ?? 0}`);
    if (parsed?.messages?.length) {
      const last = parsed.messages[parsed.messages.length - 1];
      console.log(`[contractor-probe] last message role=${last.role} content=${JSON.stringify(last.content)?.substring(0, 200)}`);
    }

    // Build a summary to echo back in the response
    const summary = {
      model: parsed.model,
      messagesCount: parsed.messages?.length ?? 0,
      lastMessage: parsed.messages?.[parsed.messages.length - 1] ?? null,
      firstSystemContent: parsed.messages?.find(m => m.role === "system")?.content?.toString()?.substring(0, 300) ?? null,
      stream: parsed.stream ?? false,
      temperature: parsed.temperature,
      max_tokens: parsed.max_tokens,
    };

    if (url === "/v1/chat/completions") {
      return sendJson(res, 200, buildChatCompletionResponse(parsed, summary));
    }

    if (url === "/v1/responses") {
      return sendJson(res, 200, buildResponsesResponse(parsed, summary));
    }

    return sendJson(res, 404, { error: "not_found" });
  });
});
// Bind to loopback only: the probe is a local diagnostic tool and must not
// be reachable from other hosts.
const announceStartup = () => {
  console.log(`[contractor-probe] listening on 127.0.0.1:${PORT}`);
  console.log(`[contractor-probe] logging requests to ${LOG_FILE}`);
};
server.listen(PORT, "127.0.0.1", announceStartup);
// Graceful shutdown: stop accepting connections, then exit once the server
// has fully closed.
process.on("SIGTERM", function handleSigterm() {
  console.log("[contractor-probe] shutting down");
  server.close(() => {
    process.exit(0);
  });
});