Skip to content

Commit 49a968e

Browse files
committed
OpenClaw and other config
1 parent e93b6c6 commit 49a968e

1 file changed

Lines changed: 120 additions & 50 deletions

File tree

dashboard/src/routes/integrations/+page.svelte

Lines changed: 120 additions & 50 deletions
Original file line numberDiff line numberDiff line change
@@ -6,7 +6,7 @@
66
import { instances, refreshState } from "$lib/stores/app.svelte";
77
import { onMount } from "svelte";
88
9-
const apiUrl = browser ? window.location.origin : "http://localhost:52415";
9+
const apiUrl = browser ? window.location.origin.replace("localhost", "127.0.0.1") : "http://127.0.0.1:52415";
1010
1111
const instancesData = $derived(instances());
1212
@@ -86,7 +86,6 @@
8686
let codexModel = $state("");
8787
let codexMcpPath = $state("/Users/username");
8888
let openClawModel = $state("");
89-
let openClawToolsProfile = $state("coding");
9089
$effect(() => {
9190
const def = modelsBySize.length > 0 ? modelsBySize[0] : "your-model-id";
9291
codexModel = def;
@@ -185,13 +184,30 @@
185184
const openClawConfig = $derived(
186185
JSON.stringify(
187186
{
188-
model: openClawModel,
189-
modelProvider: {
190-
name: "exo",
191-
baseURL: `${apiUrl}/v1`,
192-
apiKey: "x",
187+
gateway: { mode: "local" },
188+
models: {
189+
providers: {
190+
exo: {
191+
baseUrl: `${apiUrl}/v1`,
192+
apiKey: "x",
193+
api: "openai-completions",
194+
models: [
195+
{
196+
id: openClawModel,
197+
name: "exo local",
198+
input: (modelCapabilities[openClawModel] || []).includes("vision")
199+
? ["text", "image"]
200+
: ["text"],
201+
},
202+
],
203+
},
204+
},
205+
},
206+
agents: {
207+
defaults: {
208+
model: `exo/${openClawModel}`,
209+
},
193210
},
194-
toolsProfile: openClawToolsProfile,
195211
},
196212
null,
197213
2,
@@ -202,24 +218,53 @@
202218
`OLLAMA_HOST=${apiUrl}/ollama ollama run ${modelsBySize.length > 0 ? modelsBySize[0] : "your-model-id"}`,
203219
);
204220
205-
const n8nConfig = $derived.by(() => {
206-
const steps = [
207-
"1. In n8n, go to Credentials → New Credential → OpenAI API",
221+
const openWebUiCommand = $derived(
222+
[
223+
`docker run -d -p 3000:8080 \\`,
224+
` -e OLLAMA_BASE_URL=${apiUrl.replace("localhost", "host.docker.internal")}/ollama \\`,
225+
` -v open-webui:/app/backend/data \\`,
226+
` --name open-webui \\`,
227+
` ghcr.io/open-webui/open-webui:main`,
228+
].join("\n"),
229+
);
230+
231+
const n8nDockerCommand = $derived(
232+
[
233+
`docker run -d -p 5678:5678 \\`,
234+
` -v n8n_data:/home/node/.n8n \\`,
235+
` --name n8n \\`,
236+
` docker.n8n.io/n8nio/n8n`,
237+
].join("\n"),
238+
);
239+
240+
const n8nCredentialSteps = $derived(
241+
[
242+
`1. Go to Credentials → Add Credential → search "OpenAI API"`,
208243
`2. Set API Key to: x`,
209-
`3. Set Base URL to: ${apiUrl}/v1`,
210-
"4. Save the credential",
211-
`5. In your AI Agent or LLM Chain node, use the OpenAI Chat Model sub-node`,
212-
`6. Enter model name: ${modelsBySize.length > 0 ? modelsBySize[0] : "your-model-id"}`,
213-
];
214-
return steps.join("\n");
215-
});
244+
`3. Set Base URL to: ${apiUrl.replace("127.0.0.1", "host.docker.internal").replace("localhost", "host.docker.internal")}/v1`,
245+
`4. Save the credential`,
246+
].join("\n"),
247+
);
248+
249+
const n8nWorkflowSteps = $derived(
250+
[
251+
`1. Create a new workflow → "Start from Scratch"`,
252+
`2. Add an "AI Agent" or "Basic LLM Chain" node`,
253+
`3. Inside it, add an "OpenAI Chat Model" sub-node`,
254+
`4. Select the OpenAI credential you just created`,
255+
`5. Set Model to "From list" and pick your model (e.g. ${modelsBySize.length > 0 ? modelsBySize[0] : "your-model-id"})`,
256+
`6. Optionally toggle "Use Responses API", add Built-in Tools, or click "Add Option" for sampling settings`,
257+
`7. Connect a "Chat Trigger" node for interactive chat`,
258+
`8. On the Chat Trigger, enable "Allow File Uploads" for vision`,
259+
].join("\n"),
260+
);
216261
217262
const tabs = [
218263
"Claude Code",
219264
"OpenCode",
220265
"Codex",
221266
"OpenClaw",
222-
"Ollama",
267+
"Open WebUI",
223268
"n8n",
224269
] as const;
225270
type Tab = (typeof tabs)[number];
@@ -430,52 +475,77 @@
430475
language="bash"
431476
/>
432477
{:else if activeTab === "OpenClaw"}
433-
<div class="flex gap-3 text-xs">
434-
{#if runningModels.length > 1}
435-
<div>
436-
<span
437-
class="text-exo-light-gray/50 text-[10px] uppercase tracking-wider block mb-1"
438-
>Model</span
439-
>
440-
<select bind:value={openClawModel} class={selectClass}>
441-
{#each runningModels as model}
442-
<option value={model}>{model.split("/").pop()}</option>
443-
{/each}
444-
</select>
445-
</div>
446-
{/if}
447-
<div>
448-
<span
449-
class="text-exo-light-gray/50 text-[10px] uppercase tracking-wider block mb-1"
450-
>Tools Profile</span
451-
>
452-
<select bind:value={openClawToolsProfile} class={selectClass}>
453-
{#each ["minimal", "coding", "messaging", "full"] as profile}
454-
<option value={profile}>{profile}</option>
478+
{#if runningModels.length > 1}
479+
<div class="text-xs">
480+
<span class="text-exo-light-gray/50 text-[10px] uppercase tracking-wider block mb-1">Model</span>
481+
<select bind:value={openClawModel} class={selectClass}>
482+
{#each runningModels as model}
483+
<option value={model}>{model.split("/").pop()}</option>
455484
{/each}
456485
</select>
457486
</div>
458-
</div>
487+
{/if}
459488
<IntegrationCard
460489
title="Config File"
461490
subtitle="~/.openclaw/openclaw.json"
462-
description="Add this to your OpenClaw config."
491+
description="Add this to your OpenClaw config. If you haven't installed OpenClaw yet, run: npm install -g openclaw@latest"
463492
config={openClawConfig}
464493
/>
465-
{:else if activeTab === "Ollama"}
466494
<IntegrationCard
467-
title="Shell Command"
495+
title="Setup Commands"
496+
subtitle="Run in terminal"
497+
description="After saving the config, run these commands to fix metadata and start the gateway."
498+
config={`openclaw doctor --fix${(modelCapabilities[openClawModel] || []).includes("vision") ? `\nopenclaw models set-image exo/${openClawModel}` : ""}\nopenclaw gateway &\nopenclaw dashboard`}
499+
language="bash"
500+
/>
501+
{:else if activeTab === "Open WebUI"}
502+
<IntegrationCard
503+
title="1. Start Open WebUI"
504+
subtitle="Run in terminal"
505+
description="Run this to start Open WebUI."
506+
config={openWebUiCommand}
507+
language="bash"
508+
/>
509+
<IntegrationCard
510+
title="2. Open & Select Model"
511+
subtitle="http://localhost:3000"
512+
description={`Open http://localhost:3000 in your browser. Select the running model from the dropdown at the top: ${runningModels.length > 0 ? runningModels.join(", ") : "no models running"}`}
513+
config={"open http://localhost:3000"}
514+
language="bash"
515+
/>
516+
<IntegrationCard
517+
title="Ollama CLI"
468518
subtitle="Run in terminal"
469-
description="Set OLLAMA_HOST to point the Ollama CLI at your exo cluster."
519+
description="Or use the Ollama CLI directly."
470520
config={ollamaCommand}
471521
language="bash"
472522
/>
473523
{:else if activeTab === "n8n"}
474524
<IntegrationCard
475-
title="Credential Setup"
476-
subtitle="n8n UI"
477-
description="Configure an OpenAI credential in n8n to use your exo cluster."
478-
config={n8nConfig}
525+
title="1. Start n8n"
526+
subtitle="Run in terminal"
527+
description="Start n8n with Docker. If you already have n8n running, skip this step."
528+
config={n8nDockerCommand}
529+
language="bash"
530+
/>
531+
<IntegrationCard
532+
title="2. Open n8n"
533+
subtitle="http://localhost:5678"
534+
description="Open n8n in your browser. If this is your first time, complete the setup and select 'Start from Scratch' when prompted."
535+
config={"open http://localhost:5678"}
536+
language="bash"
537+
/>
538+
<IntegrationCard
539+
title="3. Add OpenAI Credential"
540+
subtitle="n8n UI → Credentials"
541+
description="Create an OpenAI credential pointing at your exo cluster."
542+
config={n8nCredentialSteps}
543+
/>
544+
<IntegrationCard
545+
title="4. Build a Workflow"
546+
subtitle="n8n UI → Workflows"
547+
description="Create a workflow that uses your exo-powered model."
548+
config={n8nWorkflowSteps}
479549
/>
480550
{/if}
481551
</div>

0 commit comments

Comments (0)