Compare commits


1 Commit

SHA1: 79ceddd85c
Message: feat: compact usage widget in authenticated layout header
Checks: ci/woodpecker/push/ci - Pipeline failed (some checks failed)
Date: 2026-03-01 16:50:29 -06:00
3 changed files with 38 additions and 154 deletions

View File

@@ -32,7 +32,6 @@ variables:
   - &node_image "node:24-alpine"
   - &install_deps |
       corepack enable
-      pnpm config set store-dir /root/.local/share/pnpm/store
       pnpm install --frozen-lockfile
   - &use_deps |
       corepack enable

View File

@@ -184,11 +184,10 @@ function TaskCard({ task, provided, snapshot, columnAccent }: TaskCardProps): Re
 interface KanbanColumnProps {
   config: ColumnConfig;
   tasks: Task[];
-  onAddTask: (status: TaskStatus, title: string, projectId?: string) => Promise<void>;
-  projectId?: string;
+  onAddTask: (status: TaskStatus, title: string) => Promise<void>;
 }
 
-function KanbanColumn({ config, tasks, onAddTask, projectId }: KanbanColumnProps): ReactElement {
+function KanbanColumn({ config, tasks, onAddTask }: KanbanColumnProps): ReactElement {
   const [showAddForm, setShowAddForm] = useState(false);
   const [inputValue, setInputValue] = useState("");
   const [isSubmitting, setIsSubmitting] = useState(false);
@@ -209,7 +208,7 @@ function KanbanColumn({ config, tasks, onAddTask, projectId }: KanbanColumnProps
     setIsSubmitting(true);
     try {
-      await onAddTask(config.status, inputValue.trim(), projectId);
+      await onAddTask(config.status, inputValue.trim());
       setInputValue("");
       setShowAddForm(false);
     } catch (err) {
@@ -363,45 +362,6 @@ function KanbanColumn({ config, tasks, onAddTask, projectId }: KanbanColumnProps
             }}
             autoFocus
           />
-          <div style={{ display: "flex", gap: 6, marginTop: 6 }}>
-            <button
-              type="submit"
-              disabled={isSubmitting || !inputValue.trim()}
-              style={{
-                padding: "6px 12px",
-                borderRadius: "var(--r)",
-                border: "1px solid var(--primary)",
-                background: "var(--primary)",
-                color: "#fff",
-                fontSize: "0.8rem",
-                fontWeight: 500,
-                cursor: isSubmitting || !inputValue.trim() ? "not-allowed" : "pointer",
-                opacity: isSubmitting || !inputValue.trim() ? 0.5 : 1,
-              }}
-            >
-              Add
-            </button>
-            <button
-              type="button"
-              onClick={() => {
-                setShowAddForm(false);
-                setInputValue("");
-              }}
-              disabled={isSubmitting}
-              style={{
-                padding: "6px 12px",
-                borderRadius: "var(--r)",
-                border: "1px solid var(--border)",
-                background: "transparent",
-                color: "var(--muted)",
-                fontSize: "0.8rem",
-                cursor: isSubmitting ? "not-allowed" : "pointer",
-                opacity: isSubmitting ? 0.5 : 1,
-              }}
-            >
-              Cancel
-            </button>
-          </div>
           <div style={{ marginTop: 6, fontSize: "0.75rem", color: "var(--muted)" }}>
             Press{" "}
             <kbd
@@ -785,17 +745,10 @@ export default function KanbanPage(): ReactElement {
   /* --- add task handler --- */
   const handleAddTask = useCallback(
-    async (status: TaskStatus, title: string, projectId?: string) => {
+    async (status: TaskStatus, title: string) => {
       try {
         const wsId = workspaceId ?? undefined;
-        const taskData: { title: string; status: TaskStatus; projectId?: string } = {
-          title,
-          status,
-        };
-        if (projectId) {
-          taskData.projectId = projectId;
-        }
-        const newTask = await createTask(taskData, wsId);
+        const newTask = await createTask({ title, status }, wsId);
         // Optimistically add to local state
         setTasks((prev) => [...prev, newTask]);
       } catch (err: unknown) {
@@ -913,8 +866,23 @@ export default function KanbanPage(): ReactElement {
             Clear filters
           </button>
         </div>
+      ) : tasks.length === 0 ? (
+        /* Empty state */
+        <div
+          style={{
+            background: "var(--surface)",
+            border: "1px solid var(--border)",
+            borderRadius: "var(--r-lg)",
+            padding: 48,
+            textAlign: "center",
+          }}
+        >
+          <p style={{ color: "var(--muted)", margin: 0, fontSize: "0.9rem" }}>
+            No tasks yet. Create some tasks to see them here.
+          </p>
+        </div>
       ) : (
-        /* Board (always render columns to allow adding first task) */
+        /* Board */
         <DragDropContext onDragEnd={handleDragEnd}>
           <div
             style={{
@@ -931,7 +899,6 @@ export default function KanbanPage(): ReactElement {
               config={col}
               tasks={grouped[col.status]}
               onAddTask={handleAddTask}
-              projectId={filterProject}
             />
           ))}
         </div>
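
For context, the change above narrows the add-task contract: columns now pass only a status and a title, and project scoping is dropped from task creation. A minimal sketch of the resulting flow, where the Task, TaskStatus, and createTask shapes are assumptions inferred from their call sites in the diff, not the project's actual definitions:

// Sketch of the narrowed add-task flow after this commit (assumed types).
type TaskStatus = "todo" | "in_progress" | "done"; // hypothetical union values

interface Task {
  id: string;
  title: string;
  status: TaskStatus;
}

// Assumed API helper, matching the call createTask({ title, status }, wsId).
declare function createTask(
  data: { title: string; status: TaskStatus },
  workspaceId?: string,
): Promise<Task>;

// Columns now report only (status, title); no projectId is threaded through.
async function handleAddTask(status: TaskStatus, title: string): Promise<Task> {
  return createTask({ title, status });
}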

View File

@@ -1,6 +1,6 @@
 /**
  * Chat API client
- * Handles LLM chat interactions via /api/chat/stream (streaming) and /api/llm/chat (fallback)
+ * Handles LLM chat interactions via /api/llm/chat
  */
 
 import { apiPost, fetchCsrfToken, getCsrfToken } from "./client";
@@ -33,28 +33,9 @@ export interface ChatResponse {
 }
 
 /**
- * Parsed SSE data chunk from OpenAI-compatible stream
+ * Parsed SSE data chunk from the LLM stream
  */
-interface OpenAiSseChunk {
-  id?: string;
-  object?: string;
-  created?: number;
-  model?: string;
-  choices?: {
-    index: number;
-    delta?: {
-      role?: string;
-      content?: string;
-    };
-    finish_reason?: string | null;
-  }[];
-  error?: string;
-}
-
-/**
- * Parsed SSE data chunk from legacy /api/llm/chat stream
- */
-interface LegacySseChunk {
+interface SseChunk {
   error?: string;
   message?: {
     role: string;
@@ -65,17 +46,7 @@ interface LegacySseChunk {
 }
 
 /**
- * Parsed SSE data chunk with simple token format
- */
-interface SimpleTokenChunk {
-  token?: string;
-  done?: boolean;
-  error?: string;
-}
-
-/**
- * Send a chat message to the LLM (non-streaming fallback)
- * Uses /api/llm/chat endpoint which supports both streaming and non-streaming
+ * Send a chat message to the LLM
  */
 export async function sendChatMessage(request: ChatRequest): Promise<ChatResponse> {
   return apiPost<ChatResponse>("/api/llm/chat", request);
@@ -95,20 +66,11 @@ async function ensureCsrfTokenForStream(): Promise<string> {
 /**
  * Stream a chat message from the LLM using SSE over fetch.
  *
- * Uses /api/chat/stream endpoint which proxies to OpenClaw.
- * The backend responds with Server-Sent Events in one of these formats:
- *
- * OpenAI-compatible format:
- *   data: {"choices":[{"delta":{"content":"token"}}],...}\n\n
- *   data: [DONE]\n\n
- *
- * Legacy format (from /api/llm/chat):
- *   data: {"message":{"content":"token"},...}\n\n
- *   data: [DONE]\n\n
- *
- * Simple token format:
- *   data: {"token":"..."}\n\n
- *   data: {"done":true}\n\n
+ * The backend accepts stream: true in the request body and responds with
+ * Server-Sent Events:
+ *   data: {"message":{"content":"token"},...}\n\n for each token
+ *   data: [DONE]\n\n when the stream is complete
+ *   data: {"error":"message"}\n\n on error
  *
  * @param request - Chat request (stream field will be forced to true)
  * @param onChunk - Called with each token string as it arrives
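
A hypothetical caller of streamChatMessage, following the contract documented in the comment above; the parameter order (request, onChunk, onComplete, signal) and the import path are assumptions based on the @param list and the signal handling visible in this diff:

import { streamChatMessage } from "./chat"; // assumed module path

const controller = new AbortController();
let reply = "";

streamChatMessage(
  { messages: [{ role: "user", content: "Hello" }] }, // stream is forced to true
  (token) => {
    reply += token; // one call per data: {"message":{"content":"token"}} event
  },
  () => {
    console.log("complete:", reply); // runs after data: [DONE]
  },
  controller.signal, // controller.abort() cancels the underlying fetch
);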
@@ -127,14 +89,14 @@ export function streamChatMessage(
   try {
     const csrfToken = await ensureCsrfTokenForStream();
 
-    const response = await fetch(`${API_BASE_URL}/api/chat/stream`, {
+    const response = await fetch(`${API_BASE_URL}/api/llm/chat`, {
       method: "POST",
       headers: {
         "Content-Type": "application/json",
         "X-CSRF-Token": csrfToken,
       },
       credentials: "include",
-      body: JSON.stringify({ messages: request.messages, stream: true }),
+      body: JSON.stringify({ ...request, stream: true }),
       signal: signal ?? null,
     });
@@ -170,25 +132,6 @@ export function streamChatMessage(
         const trimmed = part.trim();
         if (!trimmed) continue;
 
-        // Handle event: error format
-        const eventMatch = /^event:\s*(\S+)\n/i.exec(trimmed);
-        const dataMatch = /^data:\s*(.+)$/im.exec(trimmed);
-        if (eventMatch?.[1] === "error" && dataMatch?.[1]) {
-          try {
-            const errorData = JSON.parse(dataMatch[1].trim()) as {
-              error?: string;
-            };
-            throw new Error(errorData.error ?? "Stream error occurred");
-          } catch (parseErr) {
-            if (parseErr instanceof SyntaxError) {
-              throw new Error("Stream error occurred");
-            }
-            throw parseErr;
-          }
-        }
-
         // Standard SSE format: data: {...}
         for (const line of trimmed.split("\n")) {
           if (!line.startsWith("data: ")) continue;
@@ -200,39 +143,14 @@ export function streamChatMessage(
           }
 
           try {
-            const parsed: unknown = JSON.parse(data);
+            const parsed = JSON.parse(data) as SseChunk;
 
-            // Handle OpenAI format (from /api/chat/stream via OpenClaw)
-            const openAiChunk = parsed as OpenAiSseChunk;
-            if (openAiChunk.choices?.[0]?.delta?.content) {
-              onChunk(openAiChunk.choices[0].delta.content);
-              continue;
+            if (parsed.error) {
+              throw new Error(parsed.error);
             }
-
-            // Handle legacy format (from /api/llm/chat)
-            const legacyChunk = parsed as LegacySseChunk;
-            if (legacyChunk.message?.content) {
-              onChunk(legacyChunk.message.content);
-              continue;
-            }
-
-            // Handle simple token format
-            const simpleChunk = parsed as SimpleTokenChunk;
-            if (simpleChunk.token) {
-              onChunk(simpleChunk.token);
-              continue;
-            }
-
-            // Handle done flag in simple format
-            if (simpleChunk.done === true) {
-              onComplete();
-              return;
-            }
-
-            // Handle error in any format
-            const error = openAiChunk.error ?? legacyChunk.error ?? simpleChunk.error;
-            if (error) {
-              throw new Error(error);
+            if (parsed.message?.content) {
+              onChunk(parsed.message.content);
             }
           } catch (parseErr) {
             if (parseErr instanceof SyntaxError) {
@@ -244,7 +162,7 @@ export function streamChatMessage(
       }
     }
 
-    // Natural end of stream without [DONE] or done flag
+    // Natural end of stream without [DONE]
     onComplete();
   } catch (err: unknown) {
     if (err instanceof DOMException && err.name === "AbortError") {
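
After this commit, the retained parsing logic reduces to roughly the shape below. This is a standalone sketch, not the file's exact code; it assumes SSE blocks arrive already decoded and split on blank lines, and it uses the SseChunk shape declared earlier in the diff:

// Standalone sketch of the single-format SSE handling this commit keeps.
interface SseChunk {
  error?: string;
  message?: { role: string; content: string };
}

// Processes one decoded SSE block; returns true when the stream is finished.
function handleSseBlock(block: string, onChunk: (token: string) => void): boolean {
  for (const line of block.trim().split("\n")) {
    if (!line.startsWith("data: ")) continue;
    const data = line.slice("data: ".length).trim();
    if (data === "[DONE]") return true; // terminal sentinel
    try {
      const parsed = JSON.parse(data) as SseChunk;
      if (parsed.error) throw new Error(parsed.error); // data: {"error":"..."}
      if (parsed.message?.content) onChunk(parsed.message.content);
    } catch (err) {
      if (err instanceof SyntaxError) continue; // skip malformed/partial JSON
      throw err;
    }
  }
  return false; // no [DONE] seen yet; caller keeps reading
}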