pi extension fixes

This commit is contained in:
Jonas H
2026-03-09 11:53:09 +01:00
parent 25f6ce81f3
commit bea8fa19b3
2 changed files with 166 additions and 29 deletions

View File

@@ -6,7 +6,7 @@
* have complex union-type parameters represented as `{"description": "..."}` with
* no `type`, which causes llama-server to return a 400 error.
*
* This extension starts a tiny local HTTP proxy on port 8081 that:
* This extension provides an optional tiny local HTTP proxy on port 8081 that:
* 1. Intercepts outgoing OpenAI-compatible API calls
* 2. Walks tool schemas and adds `"type": "string"` to any schema node
* that is missing a type declaration
@@ -15,10 +15,13 @@
*
* It also overrides the `llama-cpp` provider's baseUrl to point at the proxy,
* so no changes to models.json are needed (beyond what's already there).
*
* Use `/llama-proxy` command to toggle the proxy on/off. Off by default.
*/
import type { ExtensionAPI } from "@mariozechner/pi-coding-agent";
import * as http from "http";
import { execSync } from "child_process";
// Local port the sanitizing proxy listens on; the llama-cpp provider
// override points its baseUrl here.
const PROXY_PORT = 8081;
// Presumably the host of the upstream llama-server the proxy forwards to —
// TODO confirm against the forwarding code (not visible in this hunk).
const TARGET_HOST = "127.0.0.1";
@@ -97,6 +100,33 @@ function sanitizeRequestBody(body: Record<string, unknown>): Record<string, unkn
};
}
// ---------------------------------------------------------------------------
// Process management
// ---------------------------------------------------------------------------
/**
* Kill any existing processes using the proxy port.
*/
/**
 * Kill any existing processes holding the proxy port.
 *
 * Best-effort cleanup: silently does nothing when `lsof` is unavailable or
 * no process owns the port. Skips non-numeric lines and — importantly — the
 * current process's own pid, so invoking this from the EADDRINUSE retry
 * path can never take down the running pi session itself.
 */
function killExistingProxy(): void {
  try {
    // `2>/dev/null || true` keeps execSync from throwing when lsof finds
    // nothing (lsof exits non-zero in that case) or is not installed.
    const output = execSync(`lsof -ti:${PROXY_PORT} 2>/dev/null || true`, {
      encoding: "utf-8",
    });
    for (const raw of output.trim().split("\n").filter(Boolean)) {
      const pid = Number(raw);
      // Guard against garbage output and against signalling ourselves:
      // process.kill(NaN) would throw, and SIGTERM-ing our own pid would
      // kill the session we are trying to fix.
      if (!Number.isInteger(pid) || pid === process.pid) continue;
      try {
        process.kill(pid, "SIGTERM");
        console.log(`[llama-proxy] Terminated old instance (PID: ${pid})`);
      } catch {
        // Process may have already exited
      }
    }
  } catch {
    // lsof not available or other error — continue anyway
  }
}
// ---------------------------------------------------------------------------
// Proxy server
// ---------------------------------------------------------------------------
@@ -165,15 +195,16 @@ function startProxy(): http.Server {
});
server.listen(PROXY_PORT, "127.0.0.1", () => {
// Server is up
console.log(`[llama-proxy] Proxy started on port ${PROXY_PORT}`);
});
server.on("error", (err: NodeJS.ErrnoException) => {
if (err.code === "EADDRINUSE") {
console.warn(
`[llama-proxy] Port ${PROXY_PORT} already in use — proxy not started. ` +
`If a previous pi session left it running, kill it and reload.`,
console.error(
`[llama-proxy] Port ${PROXY_PORT} already in use. ` +
`Killing old instances and retrying...`,
);
killExistingProxy();
} else {
console.error("[llama-proxy] Server error:", err);
}
@@ -187,15 +218,76 @@ function startProxy(): http.Server {
// ---------------------------------------------------------------------------
export default function (pi: ExtensionAPI) {
const server = startProxy();
let server: http.Server | null = null;
let proxyEnabled = false;
// Override the llama-cpp provider's baseUrl to route through our proxy.
// models.json model definitions are preserved; only the endpoint changes.
pi.registerProvider("llama-cpp", {
baseUrl: `http://127.0.0.1:${PROXY_PORT}/v1`,
/**
* Start the proxy and register the provider override.
*/
function enableProxy(): void {
if (proxyEnabled) {
console.log("[llama-proxy] Proxy already enabled");
return;
}
killExistingProxy();
server = startProxy();
// Override the llama-cpp provider's baseUrl to route through our proxy.
// models.json model definitions are preserved; only the endpoint changes.
pi.registerProvider("llama-cpp", {
baseUrl: `http://127.0.0.1:${PROXY_PORT}/v1`,
});
proxyEnabled = true;
console.log("[llama-proxy] Proxy enabled");
}
/**
* Disable the proxy and restore default provider.
*/
function disableProxy(): void {
if (!proxyEnabled) {
console.log("[llama-proxy] Proxy already disabled");
return;
}
if (server) {
server.close();
server = null;
}
// Reset provider to default (no baseUrl override)
pi.registerProvider("llama-cpp", {});
proxyEnabled = false;
console.log("[llama-proxy] Proxy disabled");
}
// Register the /llama-proxy command to toggle the proxy
pi.registerCommand("llama-proxy", async (args) => {
const action = args[0]?.toLowerCase() || "";
if (action === "on") {
enableProxy();
} else if (action === "off") {
disableProxy();
} else if (action === "status") {
console.log(`[llama-proxy] Status: ${proxyEnabled ? "enabled" : "disabled"}`);
} else {
// Toggle if no argument
if (proxyEnabled) {
disableProxy();
} else {
enableProxy();
}
}
});
// Clean up on session end
pi.on("session_end", async () => {
server.close();
if (server) {
server.close();
}
});
}