// screens-2.jsx — Test Request, Logs, Setup screens
const { useState: useState2, useEffect: useEffect2, useRef: useRef2, useMemo: useMemo2 } = React;
// ─────────────────────────── TEST REQUEST ───────────────────────────
// ─────────────────────────── TEST REQUEST ───────────────────────────
// Canned assistant reply (a small Go snippet) used by the Test Request screen
// as fake streaming output. The backticks inside the Go struct tags are
// escaped (\`) because this whole constant is a JS template literal.
const SAMPLE_RESPONSE = `Here's a small Go function that reads ~/.codex/auth.json and returns the access token. It tolerates the file being missing (returns ErrAuthMissing) and validates that the mode is "chatgpt" before yielding a token.
func loadCodexAuth(path string) (string, error) {
f, err := os.Open(path)
if err != nil {
if errors.Is(err, os.ErrNotExist) {
return "", ErrAuthMissing
}
return "", err
}
defer f.Close()
var a struct {
Mode string \`json:"mode"\`
Token string \`json:"access_token"\`
Exp int64 \`json:"expires_at"\`
}
if err := json.NewDecoder(f).Decode(&a); err != nil {
return "", fmt.Errorf("parse auth: %w", err)
}
if a.Mode != "chatgpt" {
return "", fmt.Errorf("unsupported auth mode %q", a.Mode)
}
return a.Token, nil
}
Want me to add a refresh-on-expiry path next?`;
// TestRequest — screen that dispatches a single test completion through the
// local proxy (POST /ui/api/test) and shows request payload, live output,
// and the raw response envelope.
// NOTE(review): the JSX markup in the return(...) section below appears
// stripped/garbled in this copy of the file — element tags are missing.
// Comments here cover only the logic; confirm markup against the original.
const TestRequest = () => {
const [model, setModel] = useState2("gpt-5.3-codex");
const [stream, setStream] = useState2(true);
const [prompt, setPrompt] = useState2("Read ~/.codex/auth.json in Go and return the access token. Handle missing file and wrong auth mode.");
// Request lifecycle state machine.
const [phase, setPhase] = useState2("idle"); // idle | sending | streaming | done | error
const [output, setOutput] = useState2("");
const [duration, setDuration] = useState2(0);
const [showRaw, setShowRaw] = useState2(false);
const [rawResponse, setRawResponse] = useState2(null);
// NOTE(review): cancelRef.current is never assigned anywhere in this file,
// so the clearTimeout calls below are effectively no-ops — confirm whether
// send() was meant to store a timer/abort handle here.
const cancelRef = useRef2(null);
// Anthropic-style request body rendered in the "Request payload" panel.
const reqJson = {
model,
max_tokens: 1024,
stream,
messages: [{ role: "user", content: prompt }],
};
// Raw-response panel content: the real response once one arrives, otherwise
// a representative sample envelope with proxy metadata.
const respJson = rawResponse || {
id: "msg_01HQPX9F3YK7N2TJ8B4ZRW3DCM",
type: "message",
role: "assistant",
model,
stop_reason: "end_turn",
usage: { input_tokens: 142, output_tokens: 318, total_tokens: 460 },
proxy: {
forwarded_to: "gpt-5.3-codex",
upstream: "codex",
duration_ms: duration || 1842,
},
content: [{ type: "text", text: phase === "done" ? SAMPLE_RESPONSE.slice(0, 80) + "…" : "" }],
};
// Fire the test request; guards against double-submission while in flight.
const send = async () => {
if (phase === "sending" || phase === "streaming") return;
setOutput("");
setRawResponse(null);
setPhase("sending");
const start = Date.now();
try {
// Optimistically flip to "streaming" for SSE requests; the await below
// resolves only when the response is complete.
if (stream) setPhase("streaming");
const res = await api.post("/ui/api/test", { model, prompt, stream });
// Prefer the server-measured duration; fall back to wall clock.
setDuration(res.duration_ms || (Date.now() - start));
setOutput(res.text || res.raw || "");
setRawResponse(res);
setPhase(res.status >= 200 && res.status < 300 ? "done" : "error");
} catch (e) {
setOutput(e.message || String(e));
setDuration(Date.now() - start);
setPhase("error");
}
};
// Reset the screen to idle (see cancelRef note above re: clearTimeout).
const cancel = () => {
clearTimeout(cancelRef.current);
setPhase("idle");
setOutput("");
};
// Cleanup on unmount.
useEffect2(() => () => clearTimeout(cancelRef.current), []);
return (
Model
setModel(e.target.value)}>
{SAMPLE_MODELS.filter(m => m.status === "ok").map(m => (
{m.alias} → {m.real}
))}
Stream
setStream(s => !s)} onKeyDown={(e) => e.key === " " && setStream(s => !s)}/>
{stream ? "on · SSE" : "off"}
{phase === "streaming" ? "Streaming…" : phase === "sending" ? "Sending…" : "Send"}
{(phase === "sending" || phase === "streaming") && (
Cancel
)}
POST /anthropic/v1/messages
Request payload
{prettyJson(reqJson)}
{phase === "idle" && "Idle"}
{phase === "sending" && "Connecting"}
{phase === "streaming" && "Streaming"}
{phase === "done" && "200 OK"}
{phase === "error" && "Error"}
{phase === "done" && (
{(duration / 1000).toFixed(2)}s · 460 tok
)}
>
}>
{phase === "idle" && Press Send to dispatch a request through the proxy. }
{phase === "sending" && Connecting to 127.0.0.1:4000 · negotiating… }
{(phase === "streaming" || phase === "done") && (
<>
{output}
{phase === "streaming" && }
>
)}
{phase === "done" && (
duration {(duration/1000).toFixed(2)}s
in 142
out 318
total 460
)}
{/* Raw response panel */}
setShowRaw(s => !s)}>
{showRaw ? "Collapse" : "Expand"}
>
}
>
{showRaw && phase === "done" ? (
{prettyJson(respJson)}
) : (
{phase === "done" ? "Click Expand to view the parsed JSON envelope and proxy metadata." : "No response yet — send a test request first."}
)}
);
};
// prettyJson — stringify an object with 2-space indentation and wrap key /
// string / number / boolean tokens in highlight markup, one element per line.
// NOTE(review): the body below is garbled in this copy of the file — the JSX
// wrapper around the replace chain and the HTML-entity escape map have been
// stripped (e.g. line 5 starts mid-regex and the map values lost their
// entities). Restore from the original source before relying on this.
function prettyJson(obj) {
const json = JSON.stringify(obj, null, 2);
// light syntax highlight
return json.split("\n").map((line, i) => (
)/g, m => ({"&":"&","<":"<",">":">"}[m]))
.replace(/("[^"]+")(\s*:)/g, '
$1 $2')
.replace(/:\s*("[^"]*")/g, ':
$1 ')
.replace(/:\s*(-?\d+\.?\d*)/g, ':
$1 ')
.replace(/:\s*(true|false|null)/g, ':
$1 ')
}}/>
));
}
// ─────────────────────────── LOGS ───────────────────────────
// Logs — live log viewer. Polls /ui/api/logs every 1.2s (unless paused),
// falling back to canned sample rows keyed off the proxy state. Supports a
// structured table view and a raw-text view per stream (stdout/stderr/trace),
// with level filters, autoscroll, pause, and clear.
// NOTE(review): the JSX markup in the return(...) section below appears
// stripped/garbled in this copy of the file; logic comments only.
const Logs = ({ proxyState }) => {
// Sample rows shown until the API responds (or when it errors).
const seed = proxyState === "running" ? SAMPLE_LOGS_RUNNING
: proxyState === "warning" ? SAMPLE_LOGS_WARN
: SAMPLE_LOGS_STOPPED;
const [rows, setRows] = useState2(seed);
const [filter, setFilter] = useState2({ info: true, warn: true, error: true, debug: true });
const [view, setView] = useState2("raw"); // structured | raw
const [autoscroll, setAutoscroll] = useState2(true);
const [paused, setPaused] = useState2(false);
const [stream, setStream] = useState2("trace"); // stdout | stderr | trace
const [rawStreams, setRawStreams] = useState2({ stdout: "", stderr: "", trace: "" });
const scrollRef = useRef2(null);
// Poll loop: immediate load, then every 1200ms. Restarts when pause is
// toggled or proxyState changes (which swaps the fallback seed).
useEffect2(() => {
if (paused) return;
const load = () => api.get("/ui/api/logs").then(res => {
setRows(res.rows && res.rows.length ? res.rows : seed);
setRawStreams({ stdout: res.stdout || "", stderr: res.stderr || "", trace: res.trace || "" });
}).catch(() => setRows(seed));
load();
const id = setInterval(() => {
load();
}, 1200);
return () => clearInterval(id);
}, [paused, proxyState]);
// Pin the viewport to the bottom whenever new rows arrive (if enabled).
useEffect2(() => {
if (autoscroll && scrollRef.current) {
scrollRef.current.scrollTop = scrollRef.current.scrollHeight;
}
}, [rows, autoscroll]);
const filtered = rows.filter(r => filter[r.lvl]);
// Per-level counts for the filter-chip badges.
const counts = rows.reduce((acc, r) => { acc[r.lvl] = (acc[r.lvl] || 0) + 1; return acc; }, {});
const clear = () => setRows([]);
// Raw view text: the selected stream's raw capture if non-empty, otherwise
// the filtered structured rows formatted into aligned plain-text lines.
const rawText = (stream === "stdout" || stream === "stderr" || stream === "trace") && rawStreams[stream]
? rawStreams[stream]
: filtered.map(r => {
const status = r.status ? ` ${r.status}` : "";
const dur = r.dur ? ` ${r.dur}ms` : "";
return `${r.ts} ${r.lvl.toUpperCase().padEnd(5)} ${r.meth ? r.meth.padEnd(4) : " "} ${r.path}${status}${dur}${r.note ? " " + r.note : ""}`;
}).join("\n");
return (
{/* Toolbar */}
setStream("stdout")}>
stdout {rows.length}
setStream("stderr")}>
stderr {counts.error || 0}
setStream("trace")}>
trace {rawStreams.trace ? rawStreams.trace.split("\n").filter(Boolean).length : 0}
{[
{ k: "info", label: "info", tone: "info" },
{ k: "warn", label: "warn", tone: "warn" },
{ k: "error", label: "error", tone: "err" },
{ k: "debug", label: "debug", tone: "muted" },
].map(f => (
setFilter(s => ({ ...s, [f.k]: !s[f.k] }))}>
{f.label} {counts[f.k] || 0}
))}
setView("structured")}>Structured
setView("raw")}>Raw
setAutoscroll(s => !s)}/>
Auto-scroll
setPaused(p => !p)}>
{paused ? "Resume" : "Pause"}
Clear
{view === "structured" ? (
filtered.length === 0
?
No log lines match the current filters.
: filtered.map((r, i) => (
{r.ts}
{r.lvl}
{r.meth && <>{r.meth} >}
{r.path}
{r.status ? <> = 400 ? "var(--err)" : r.status >= 300 ? "var(--warn)" : "var(--ok)"}}>{r.status} > : null}
{r.dur ? <> {r.dur}ms > : null}
{r.note && <> · {r.note} >}
))
) : (
{rawText || "(empty)"}
)}
{rows.length} lines · showing {filtered.length}
{paused ? "paused" : "live"} · pid 14324 · {stream}
);
};
const pad = (n) => String(n).padStart(2, "0");
// ─────────────────────────── ANTIGRAVITY BROWSER ───────────────────────────
// BrowserBridge — Antigravity browser-bridge status screen. Polls
// /ui/api/antigravity every 3s and renders a checklist of extension /
// Chrome / MCP readiness rows plus the last extension probe and live
// bridge-connection details.
// NOTE(review): the JSX markup in the return(...) section below appears
// stripped/garbled in this copy of the file; logic comments only.
const BrowserBridge = () => {
const { data: status, refresh } = usePolling("/ui/api/antigravity", 3000);
// Defensive defaults: every section of the payload may be absent while the
// first poll is in flight.
const ext = status?.extension || {};
const manifest = ext.manifest || {};
const profile = status?.profile || {};
const mcp = status?.mcp || {};
const desktopMcp = mcp.desktop || {};
const probe = status?.last_probe || null;
const bridgeState = status?.bridge_state || {};
const ready = !!status?.ready;
// Open the bridge probe page in a new tab (no-op until the URL is known).
const openBridge = () => {
if (status?.bridge_url) window.open(status.bridge_url, "_blank", "noopener,noreferrer");
};
const launcher = status?.launcher?.command || status?.launcher?.path || "connect-ai-proxy browser-mcp";
// Checklist rows: label, ok flag for the status dot, and a metadata string.
const rows = [
{ label: "Extension", ok: ext.exists, meta: ext.exists ? `${manifest.name || "Antigravity"} ${manifest.version || ""}` : "not found" },
{ label: "Manifest bridge", ok: !!manifest.externally_connectable, meta: manifest.service_worker || "service worker not detected" },
{ label: "Browser mode", ok: true, meta: `${status?.chrome?.mode || "default"} · ${status?.chrome?.browser_url || "debug port pending"}` },
{ label: "Chrome", ok: status?.chrome?.exists, meta: status?.chrome?.path || "not found" },
{ label: "DevTools port", ok: !!status?.chrome?.debug_running, meta: status?.chrome?.debug_running ? "connected" : (status?.chrome?.default_cdp_forced ? (status?.chrome?.can_relaunch_default ? "forced Default relaunch available" : "forced Default mode waiting") : "Default profile blocked by Chrome; controlled profile will be used") },
{ label: "Chrome windows", ok: true, meta: `${status?.chrome?.process_count || 0} processes · ${status?.chrome?.visible_count || 0} visible` },
{ label: "Claude Code MCP", ok: mcp.present, meta: mcp.present ? `${mcp.command || "connect-ai-proxy"} ${Array.isArray(mcp.args) ? mcp.args.join(" ") : ""}` : "antigravity-browser not injected yet" },
{ label: "Claude Desktop MCP", ok: desktopMcp.present, meta: desktopMcp.present ? `${desktopMcp.command || "connect-ai-proxy"} ${Array.isArray(desktopMcp.args) ? desktopMcp.args.join(" ") : ""}` : (desktopMcp.exists ? `${desktopMcp.server_count || 0} local servers · antigravity-browser not injected` : "desktop config not found") },
{ label: "Visible control", ok: !!status?.visible_overlay, meta: bridgeState?.connected_now ? `connected · ${bridgeState.last_action || "ready"}` : "cursor overlay tools ready when Claude starts MCP" },
];
return (
{ready ? "Ready" : "Needs attention"}
Refresh
Open bridge probe
>
}
/>
{rows.map((row, i) => (
{row.label}
{row.meta}
))}
{probe ? (
Received
{probe.received_at || "—"}
Runtime
{probe.runtime_available ?
available :
unavailable }
Wake
{prettyProbe(probe.wake)}
Connection
{prettyProbe(probe.connection)}
) : (
No extension probe has reported yet. Open the bridge probe from Chrome to check the Antigravity extension messaging wake path.
)}
Connection
{bridgeState?.connected_now ?
Chrome control connected :
waiting for controlled Chrome }
Current page
{bridgeState?.current_url || "—"}
Title
{bridgeState?.current_title || "—"}
Last action
{bridgeState?.last_action || "—"}
Screenshot
{bridgeState?.last_screenshot || "—"}
Last error
{bridgeState?.last_error || "—"}
Mode
{status?.mode || "visible_overlay_cdp"}
Extension ID
{status?.extension_id || "—"}
Extension path
{ext.path || "—"}
Profile path
{profile.path || "—"}
Bridge URL
{status?.bridge_url || "—"}
);
};
// prettyProbe — format a probe result for display: pretty-printed JSON when
// possible, an em-dash placeholder when no data was reported.
//
// value: anything the extension probe reported (object, string, boolean, …).
// Returns a display string; never throws.
function prettyProbe(value) {
  // Only null/undefined mean "no data". The previous `!value` check also
  // swallowed real falsy results, rendering `false`/`0` probes as "—".
  if (value == null) return "—";
  try {
    const json = JSON.stringify(value, null, 2);
    // JSON.stringify yields undefined (not a string) for functions/symbols.
    return json === undefined ? String(value) : json;
  } catch (_) {
    // Circular structures or BigInt values cannot be serialized.
    return String(value);
  }
}
// ─────────────────────────── SETUP ───────────────────────────
const Setup = () => {
const { data: status } = usePolling("/ui/api/status", 3000);
const launcher = status?.launcher_commands?.launch_claude || "connect-ai-proxy launch-claude";
const rootUrl = status?.display_url || status?.public_url || status?.local_url || "http://127.0.0.1:4000";
const baseUrl = status?.anthropic_url || `${rootUrl}/anthropic`;
const openAIBaseUrl = status?.openai_url || `${rootUrl}/openai/v1`;
const apiKey = status?.proxy_key || "YOUR_LOCAL_PROXY_TOKEN";
const env = `ANTHROPIC_BASE_URL=${baseUrl}
ANTHROPIC_AUTH_TOKEN=${apiKey}
CLAUDE_CODE_ENABLE_GATEWAY_MODEL_DISCOVERY=1
CLAUDE_CODE_DISABLE_EXPERIMENTAL_BETAS=1
CLAUDE_CODE_DISABLE_NONESSENTIAL_TRAFFIC=1
API_TIMEOUT_MS=3000000`;
const psh = `# PowerShell
$env:ANTHROPIC_BASE_URL = "${baseUrl}"
Remove-Item Env:ANTHROPIC_API_KEY -ErrorAction SilentlyContinue
$env:ANTHROPIC_AUTH_TOKEN = "${apiKey}"
$env:CLAUDE_CODE_ENABLE_GATEWAY_MODEL_DISCOVERY = "1"
$env:CLAUDE_CODE_DISABLE_EXPERIMENTAL_BETAS = "1"
$env:CLAUDE_CODE_DISABLE_NONESSENTIAL_TRAFFIC = "1"
$env:API_TIMEOUT_MS = "3000000"
claude --model gpt-5.3-codex`;
const bash = `# bash / zsh
export ANTHROPIC_BASE_URL="${baseUrl}"
unset ANTHROPIC_API_KEY
export ANTHROPIC_AUTH_TOKEN="${apiKey}"
export CLAUDE_CODE_ENABLE_GATEWAY_MODEL_DISCOVERY=1
export CLAUDE_CODE_DISABLE_EXPERIMENTAL_BETAS=1
export CLAUDE_CODE_DISABLE_NONESSENTIAL_TRAFFIC=1
export API_TIMEOUT_MS=3000000
claude --model gpt-5.3-codex`;
const [shell, setShell] = useState2("auto");
const activeShell = shell === "auto" ? (status?.goos === "windows" ? "powershell" : "bash") : shell;
const desktopSupported = status?.desktop_supported !== false;
const desktopOk = desktopSupported ? !!status?.antigravity?.mcp?.desktop?.present : true;
const checks = [
{ ok: "ok", label: `Proxy is running and bound to ${baseUrl.replace("http://", "")}`, meta: `PID ${status?.pid || "—"}` },
{ ok: status?.codex_auth?.exists ? "ok" : "err", label: "Codex auth file found", meta: `${status?.codex_auth?.path || "~/.codex/auth.json"} · mode ${status?.codex_auth?.mode || "unknown"}` },
{ ok: "ok", label: "GET /anthropic/v1/models returns 4 entries", meta: "200 · 4 ms" },
{ ok: "ok", label: "POST /anthropic/v1/messages succeeds against gpt-5.3-codex", meta: "200 · 1.28 s" },
{ ok: status?.claude_settings?.mode === "anthropic_auth_token" && !status?.claude_settings?.api_key_present ? "ok" : "warn", label: "Claude settings use ANTHROPIC_AUTH_TOKEN", meta: `api key ${status?.claude_settings?.api_key_present ? "present" : "absent"} · cache ${status?.claude_settings?.gateway_cache_present ? "present" : "absent"}` },
{ ok: status?.antigravity?.mcp?.present && desktopOk ? "ok" : "warn", label: "Antigravity browser MCP configured", meta: `Code ${status?.antigravity?.mcp?.present ? "ready" : "missing"} · Desktop ${desktopSupported ? (status?.antigravity?.mcp?.desktop?.present ? "ready" : "missing") : "not supported on this platform"}` },
{ ok: status?.antigravity?.ready ? "ok" : "warn", label: "Antigravity browser bridge ready", meta: status?.antigravity?.extension?.exists ? `extension ${status?.antigravity?.extension?.manifest?.version || "installed"}` : "extension not found" },
{ ok: status?.claude_version ? "ok" : "warn", label: "Claude Code CLI detected on PATH", meta: status?.claude_version || "not detected" },
];
return (
1
Run the launcher
The launcher exports the right environment variables and starts Claude Code with the recommended model.
2
Verify the connection
From inside Claude Code, ask “What model are you?” The response should reference the recommended Codex model.
Recommended
gpt-5.3-codex
→ codex direct passthrough
setShell("powershell")}>PowerShell
setShell("bash")}>bash · zsh
setShell("env")}>.env
}>
{activeShell === "env" && }
{activeShell === "powershell" && }
{activeShell === "bash" && }
ANTHROPIC_AUTH_TOKEN is the local proxy token, not your Anthropic billing key. It never leaves your machine.
OpenAI-compatible clients use {openAIBaseUrl} .