diff --git a/CHANGELOG.md b/CHANGELOG.md index b68a9e1d0..eaa778741 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -13,6 +13,7 @@ Only write entries that are worth mentioning to users. - Core: Fix HTTP header values containing trailing whitespace/newlines on certain Linux systems (e.g. kernel 6.8.0-101) causing connection errors — strip whitespace from ASCII header values before sending - Core: Fix OpenAI Responses provider sending implicit `reasoning.effort=null` which breaks Responses-compatible endpoints that require reasoning — reasoning parameters are now omitted unless explicitly set +- Vis: Add session download, import, export and delete — one-click ZIP download from session explorer and detail page, ZIP import into a dedicated `~/.kimi/imported_sessions/` directory with "Imported" filter toggle, `kimi export ` CLI command, and delete support for imported sessions with AlertDialog confirmation - Core: Fix context compaction failing when conversation contains media parts (images, audio, video) — switch from blacklist filtering (exclude `ThinkPart`) to whitelist filtering (only keep `TextPart`) to prevent unsupported content types from being sent to the compaction API - Web: Fix `@` file mention index not refreshing after switching sessions or when workspace files change — reset index on session switch, auto-refresh after 30s staleness, and support path-prefix search beyond the 500-file limit diff --git a/docs/en/release-notes/changelog.md b/docs/en/release-notes/changelog.md index 0fb7aa999..68aa007b9 100644 --- a/docs/en/release-notes/changelog.md +++ b/docs/en/release-notes/changelog.md @@ -6,6 +6,7 @@ This page documents the changes in each Kimi Code CLI release. - Core: Fix HTTP header values containing trailing whitespace/newlines on certain Linux systems (e.g. 
kernel 6.8.0-101) causing connection errors — strip whitespace from ASCII header values before sending - Core: Fix OpenAI Responses provider sending implicit `reasoning.effort=null` which breaks Responses-compatible endpoints that require reasoning — reasoning parameters are now omitted unless explicitly set +- Vis: Add session download, import, export and delete — one-click ZIP download from session explorer and detail page, ZIP import into a dedicated `~/.kimi/imported_sessions/` directory with "Imported" filter toggle, `kimi export ` CLI command, and delete support for imported sessions with AlertDialog confirmation - Core: Fix context compaction failing when conversation contains media parts (images, audio, video) — switch from blacklist filtering (exclude `ThinkPart`) to whitelist filtering (only keep `TextPart`) to prevent unsupported content types from being sent to the compaction API - Web: Fix `@` file mention index not refreshing after switching sessions or when workspace files change — reset index on session switch, auto-refresh after 30s staleness, and support path-prefix search beyond the 500-file limit diff --git a/docs/zh/release-notes/changelog.md b/docs/zh/release-notes/changelog.md index 586b01995..a210939ec 100644 --- a/docs/zh/release-notes/changelog.md +++ b/docs/zh/release-notes/changelog.md @@ -6,6 +6,7 @@ - Core:修复部分 Linux 系统(如内核版本 6.8.0-101)上 HTTP 请求头包含尾部空白/换行符导致连接错误的问题——发送前对 ASCII 请求头值执行空白裁剪 - Core:修复 OpenAI Responses provider 隐式发送 `reasoning.effort=null` 导致需要推理的 Responses 兼容端点报错的问题——现在仅在显式设置时才发送推理参数 +- Vis:新增会话下载、导入、导出与删除功能——在会话浏览器和详情页支持一键 ZIP 下载,支持将 ZIP 文件导入到独立的 `~/.kimi/imported_sessions/` 目录并通过"Imported"筛选器切换查看,新增 `kimi export ` CLI 命令,支持删除导入的会话并提供 AlertDialog 二次确认 - Core:修复对话包含媒体内容(图片、音频、视频)时上下文压缩失败的问题——将过滤策略从黑名单(排除 `ThinkPart`)改为白名单(仅保留 `TextPart`),防止不支持的内容类型被发送到压缩 API - Web:修复 `@` 文件提及索引在切换会话或工作区文件变更后不刷新的问题——切换会话时重置索引,30 秒过期自动刷新,输入路径前缀可查找超出 500 文件上限的文件 diff --git a/src/kimi_cli/cli/__init__.py b/src/kimi_cli/cli/__init__.py index 
5bb820eb2..568c0e1f6 100644 --- a/src/kimi_cli/cli/__init__.py +++ b/src/kimi_cli/cli/__init__.py @@ -10,6 +10,7 @@ from kimi_cli.constant import VERSION +from .export import cli as export_cli from .info import cli as info_cli from .mcp import cli as mcp_cli from .vis import cli as vis_cli @@ -802,6 +803,7 @@ def web_worker(session_id: str) -> None: asyncio.run(run_worker(parsed_session_id)) +cli.add_typer(export_cli, name="export") cli.add_typer(mcp_cli, name="mcp") cli.add_typer(vis_cli, name="vis") cli.add_typer(web_cli, name="web") diff --git a/src/kimi_cli/cli/export.py b/src/kimi_cli/cli/export.py new file mode 100644 index 000000000..2b7b37e99 --- /dev/null +++ b/src/kimi_cli/cli/export.py @@ -0,0 +1,74 @@ +"""Export command for packaging session data.""" + +from __future__ import annotations + +import io +import zipfile +from pathlib import Path +from typing import Annotated + +import typer + +cli = typer.Typer(help="Export session data.") + + +def _find_session_by_id(session_id: str) -> Path | None: + """Find a session directory by session ID across all work directories.""" + from kimi_cli.share import get_share_dir + + sessions_root = get_share_dir() / "sessions" + if not sessions_root.exists(): + return None + + for work_dir_hash_dir in sessions_root.iterdir(): + if not work_dir_hash_dir.is_dir(): + continue + candidate = work_dir_hash_dir / session_id + if candidate.is_dir(): + return candidate + + return None + + +@cli.callback(invoke_without_command=True) +def export( + session_id: Annotated[ + str, + typer.Argument(help="Session ID to export."), + ], + output: Annotated[ + Path | None, + typer.Option( + "--output", + "-o", + help="Output ZIP file path. 
Default: session-{id}.zip in current directory.", + ), + ] = None, +) -> None: + """Export a session as a ZIP archive.""" + session_dir = _find_session_by_id(session_id) + if session_dir is None: + typer.echo(f"Error: session '{session_id}' not found.", err=True) + raise typer.Exit(code=1) + + # Collect files + files = sorted(f for f in session_dir.iterdir() if f.is_file()) + if not files: + typer.echo(f"Error: session '{session_id}' has no files.", err=True) + raise typer.Exit(code=1) + + # Determine output path + if output is None: + output = Path.cwd() / f"session-{session_id}.zip" + + # Create ZIP + buf = io.BytesIO() + with zipfile.ZipFile(buf, "w", zipfile.ZIP_DEFLATED) as zf: + for file_path in files: + zf.write(file_path, arcname=file_path.name) + buf.seek(0) + + output.parent.mkdir(parents=True, exist_ok=True) + output.write_bytes(buf.getvalue()) + + typer.echo(str(output)) diff --git a/src/kimi_cli/vis/api/sessions.py b/src/kimi_cli/vis/api/sessions.py index 1957cc55d..11c84792f 100644 --- a/src/kimi_cli/vis/api/sessions.py +++ b/src/kimi_cli/vis/api/sessions.py @@ -3,14 +3,19 @@ from __future__ import annotations import contextlib +import io import json import logging import re +import shutil +import zipfile from pathlib import Path from typing import Any +from uuid import uuid4 import aiofiles -from fastapi import APIRouter, HTTPException +from fastapi import APIRouter, HTTPException, UploadFile +from fastapi.responses import StreamingResponse from kimi_cli.metadata import load_metadata from kimi_cli.share import get_share_dir @@ -38,11 +43,24 @@ def collect_events( _SESSION_ID_RE = re.compile(r"^[a-zA-Z0-9_-]+$") +_IMPORTED_HASH = "__imported__" + + +def _get_imported_root() -> Path: + """Return the root directory for imported sessions.""" + return get_share_dir() / "imported_sessions" def _find_session_dir(work_dir_hash: str, session_id: str) -> Path | None: """Find session directory by work_dir_hash and session_id.""" - if not 
_SESSION_ID_RE.match(session_id) or not _SESSION_ID_RE.match(work_dir_hash): + if not _SESSION_ID_RE.match(session_id): + return None + if work_dir_hash == _IMPORTED_HASH: + session_dir = _get_imported_root() / session_id + if session_dir.is_dir(): + return session_dir + return None + if not _SESSION_ID_RE.match(work_dir_hash): return None sessions_root = get_share_dir() / "sessions" session_dir = sessions_root / work_dir_hash / session_id @@ -69,91 +87,117 @@ def get_work_dir_for_hash(hash_dir_name: str) -> str | None: return None +def _scan_session_dir( + session_dir: Path, + work_dir_hash: str, + work_dir: str | None, + *, + imported: bool = False, +) -> dict[str, Any] | None: + """Extract session info from a session directory.""" + if not session_dir.is_dir(): + return None + + wire_path = session_dir / "wire.jsonl" + context_path = session_dir / "context.jsonl" + state_path = session_dir / "state.json" + + # Get last updated time from most recent file + mtimes: list[float] = [] + for p in [wire_path, context_path, state_path]: + if p.exists(): + mtimes.append(p.stat().st_mtime) + + # Extract title and count turns from wire.jsonl + title = "" + turn_count = 0 + if wire_path.exists(): + try: + with wire_path.open(encoding="utf-8") as f: + for line in f: + line = line.strip() + if not line: + continue + try: + parsed = parse_wire_file_line(line) + except Exception: + logger.debug("Skipped malformed line in %s", wire_path) + continue + if isinstance(parsed, WireFileMetadata): + continue + if parsed.message.type == "TurnBegin": + turn_count += 1 + if turn_count == 1: + user_input = parsed.message.payload.get("user_input", "") + if isinstance(user_input, str): + title = user_input[:100] + elif isinstance(user_input, list) and user_input: + first = user_input[0] + if isinstance(first, dict): + title = str(first.get("text", ""))[:100] + except Exception: + pass + + # File sizes (cheap stat calls) + wire_size = wire_path.stat().st_size if wire_path.exists() else 0 + 
context_size = context_path.stat().st_size if context_path.exists() else 0 + state_size = state_path.stat().st_size if state_path.exists() else 0 + + # Read metadata.json if it exists + metadata_info: dict[str, Any] | None = None + metadata_path = session_dir / "metadata.json" + if metadata_path.exists(): + with contextlib.suppress(Exception): + metadata_info = json.loads(metadata_path.read_text(encoding="utf-8")) + + return { + "session_id": session_dir.name, + "work_dir": work_dir, + "work_dir_hash": work_dir_hash, + "title": title, + "last_updated": max(mtimes) if mtimes else 0, + "has_wire": wire_path.exists(), + "has_context": context_path.exists(), + "has_state": state_path.exists(), + "metadata": metadata_info, + "wire_size": wire_size, + "context_size": context_size, + "state_size": state_size, + "total_size": wire_size + context_size + state_size, + "turns": turn_count, + "imported": imported, + } + + @router.get("/sessions") def list_sessions() -> list[dict[str, Any]]: """List all available sessions across all work directories.""" - sessions_root = get_share_dir() / "sessions" - if not sessions_root.exists(): - return [] - results: list[dict[str, Any]] = [] - for work_dir_hash_dir in sessions_root.iterdir(): - if not work_dir_hash_dir.is_dir(): - continue - work_dir = get_work_dir_for_hash(work_dir_hash_dir.name) - for session_dir in work_dir_hash_dir.iterdir(): - if not session_dir.is_dir(): + + # Scan normal sessions + sessions_root = get_share_dir() / "sessions" + if sessions_root.exists(): + for work_dir_hash_dir in sessions_root.iterdir(): + if not work_dir_hash_dir.is_dir(): continue - wire_path = session_dir / "wire.jsonl" - context_path = session_dir / "context.jsonl" - state_path = session_dir / "state.json" - - # Get last updated time from most recent file - mtimes: list[float] = [] - for p in [wire_path, context_path, state_path]: - if p.exists(): - mtimes.append(p.stat().st_mtime) - - # Extract title and count turns from wire.jsonl - title = 
"" - turn_count = 0 - if wire_path.exists(): - try: - with wire_path.open(encoding="utf-8") as f: - for line in f: - line = line.strip() - if not line: - continue - try: - parsed = parse_wire_file_line(line) - except Exception: - logger.debug("Skipped malformed line in %s", wire_path) - continue - if isinstance(parsed, WireFileMetadata): - continue - if parsed.message.type == "TurnBegin": - turn_count += 1 - if turn_count == 1: - user_input = parsed.message.payload.get("user_input", "") - if isinstance(user_input, str): - title = user_input[:100] - elif isinstance(user_input, list) and user_input: - first = user_input[0] - if isinstance(first, dict): - title = str(first.get("text", ""))[:100] - except Exception: - pass - - # File sizes (cheap stat calls) - wire_size = wire_path.stat().st_size if wire_path.exists() else 0 - context_size = context_path.stat().st_size if context_path.exists() else 0 - state_size = state_path.stat().st_size if state_path.exists() else 0 - - # Read metadata.json if it exists - metadata_info: dict[str, Any] | None = None - metadata_path = session_dir / "metadata.json" - if metadata_path.exists(): - with contextlib.suppress(Exception): - metadata_info = json.loads(metadata_path.read_text(encoding="utf-8")) - - results.append( - { - "session_id": session_dir.name, - "work_dir": work_dir, - "work_dir_hash": work_dir_hash_dir.name, - "title": title, - "last_updated": max(mtimes) if mtimes else 0, - "has_wire": wire_path.exists(), - "has_context": context_path.exists(), - "has_state": state_path.exists(), - "metadata": metadata_info, - "wire_size": wire_size, - "context_size": context_size, - "state_size": state_size, - "total_size": wire_size + context_size + state_size, - "turns": turn_count, - } + work_dir = get_work_dir_for_hash(work_dir_hash_dir.name) + for session_dir in work_dir_hash_dir.iterdir(): + info = _scan_session_dir(session_dir, work_dir_hash_dir.name, work_dir) + if info: + results.append(info) + + # Scan imported sessions + 
imported_root = _get_imported_root() + if imported_root.exists(): + for session_dir in imported_root.iterdir(): + info = _scan_session_dir( + session_dir, + _IMPORTED_HASH, + None, + imported=True, ) + if info: + results.append(info) results.sort(key=lambda s: s["last_updated"], reverse=True) return results @@ -351,3 +395,109 @@ async def get_session_summary(work_dir_hash: str, session_id: str) -> dict[str, "state_size": state_size, "total_size": wire_size + context_size + state_size, } + + +@router.get("/sessions/{work_dir_hash}/{session_id}/download") +def download_session(work_dir_hash: str, session_id: str) -> StreamingResponse: + """Download all files in a session directory as a ZIP archive.""" + session_dir = _find_session_dir(work_dir_hash, session_id) + if session_dir is None: + raise HTTPException(status_code=404, detail="Session not found") + + buf = io.BytesIO() + with zipfile.ZipFile(buf, "w", zipfile.ZIP_DEFLATED) as zf: + for file_path in sorted(session_dir.iterdir()): + if file_path.is_file(): + zf.write(file_path, arcname=file_path.name) + buf.seek(0) + + filename = f"session-{session_id}.zip" + return StreamingResponse( + buf, + media_type="application/zip", + headers={"Content-Disposition": f'attachment; filename="{filename}"'}, + ) + + +@router.post("/sessions/import") +async def import_session(file: UploadFile) -> dict[str, Any]: + """Import a session from an uploaded ZIP archive.""" + if not file.filename or not file.filename.endswith(".zip"): + raise HTTPException(status_code=400, detail="Only .zip files are accepted") + + content = await file.read() + if not content: + raise HTTPException(status_code=400, detail="Empty file") + + # Reject uploads larger than 200 MB + _MAX_UPLOAD_BYTES = 200 * 1024 * 1024 + if len(content) > _MAX_UPLOAD_BYTES: + raise HTTPException(status_code=413, detail="File too large (max 200 MB)") + + # Validate ZIP + buf = io.BytesIO(content) + try: + zf = zipfile.ZipFile(buf, "r") + except zipfile.BadZipFile as err: + 
raise HTTPException(status_code=400, detail="Invalid ZIP file") from err + + with zf: + names = zf.namelist() + # Must contain wire.jsonl or context.jsonl at root or under exactly one directory + _VALID_FILES = ("wire.jsonl", "context.jsonl") + has_valid = any( + n in _VALID_FILES or (n.count("/") == 1 and n.endswith(_VALID_FILES)) for n in names + ) + if not has_valid: + raise HTTPException( + status_code=400, + detail="ZIP must contain wire.jsonl or context.jsonl at the top level " + "(or inside a single directory)", + ) + + session_id = uuid4().hex[:16] + imported_root = _get_imported_root() + session_dir = imported_root / session_id + session_dir.mkdir(parents=True, exist_ok=True) + + # Zip Slip protection: reject entries with path traversal or absolute paths + for info in zf.infolist(): + if info.filename.startswith("/") or ".." in info.filename.split("/"): + shutil.rmtree(session_dir, ignore_errors=True) + raise HTTPException( + status_code=400, + detail="ZIP contains unsafe path entries", + ) + + # Extract - handle both flat ZIPs and ZIPs with a single top-level directory + zf.extractall(session_dir) + + # If all files are under a single subdirectory, flatten them + entries = list(session_dir.iterdir()) + if len(entries) == 1 and entries[0].is_dir(): + nested_dir = entries[0] + for item in nested_dir.iterdir(): + shutil.move(str(item), str(session_dir / item.name)) + nested_dir.rmdir() + + return { + "session_id": session_id, + "work_dir_hash": _IMPORTED_HASH, + } + + +@router.delete("/sessions/{work_dir_hash}/{session_id}") +def delete_session(work_dir_hash: str, session_id: str) -> dict[str, str]: + """Delete an imported session.""" + if work_dir_hash != _IMPORTED_HASH: + raise HTTPException(status_code=403, detail="Only imported sessions can be deleted") + + if not _SESSION_ID_RE.match(session_id): + raise HTTPException(status_code=400, detail="Invalid session ID") + + session_dir = _get_imported_root() / session_id + if not session_dir.is_dir(): + raise 
HTTPException(status_code=404, detail="Session not found") + + shutil.rmtree(session_dir) + return {"status": "deleted"} diff --git a/vis/src/App.tsx b/vis/src/App.tsx index cb0e81381..bf9c84fc2 100644 --- a/vis/src/App.tsx +++ b/vis/src/App.tsx @@ -5,10 +5,11 @@ import { WireViewer } from "@/features/wire-viewer/wire-viewer"; import { ContextViewer } from "@/features/context-viewer/context-viewer"; import { StateViewer } from "@/features/state-viewer/state-viewer"; import { useTheme } from "@/hooks/use-theme"; -import { type WireEvent, getWireEvents, listSessions } from "@/lib/api"; +import { type WireEvent, getSessionDownloadUrl, getWireEvents, listSessions } from "@/lib/api"; import { isErrorEvent } from "@/features/wire-viewer/wire-event-card"; -import { ArrowLeft, BarChart3, Columns, List, Moon, RefreshCw, Sun, X } from "lucide-react"; +import { ArrowLeft, BarChart3, Columns, Download, List, Moon, RefreshCw, Sun, X } from "lucide-react"; import { DualView } from "@/features/dual-view/dual-view"; +import { Tooltip, TooltipContent, TooltipTrigger } from "@/components/ui/tooltip"; type Tab = "wire" | "context" | "state" | "dual"; @@ -68,6 +69,7 @@ function formatTokens(n: number): string { } function SessionStats({ sessionId, refreshKey }: { sessionId: string; refreshKey: number }) { + const [copied, setCopied] = useState(false); const [events, setEvents] = useState([]); const [loaded, setLoaded] = useState(false); @@ -93,7 +95,24 @@ function SessionStats({ sessionId, refreshKey }: { sessionId: string; refreshKey return (
- {(sessionId.split("/").pop() ?? sessionId).slice(0, 8)}... + + + { + const fullId = sessionId.split("/").pop() ?? sessionId; + navigator.clipboard.writeText(fullId).catch(() => {}); + setCopied(true); + setTimeout(() => setCopied(false), 2000); + }} + > + {sessionId.split("/").pop() ?? sessionId} + + + + {copied ? "Copied!" : "Click to copy"} + + | {parts.join(" · ")} | @@ -235,6 +254,14 @@ export function App() { {sessionId && (
+ + + + {/* Group toggle */} +
+ + {/* Import button */} + + { + const file = e.target.files?.[0]; + if (file) onImport(file); + e.target.value = ""; + }} + /> + {/* Count */} {filteredCount === totalCount diff --git a/vis/src/features/sessions-explorer/project-group.tsx b/vis/src/features/sessions-explorer/project-group.tsx index 1457e01d5..08d63db31 100644 --- a/vis/src/features/sessions-explorer/project-group.tsx +++ b/vis/src/features/sessions-explorer/project-group.tsx @@ -15,6 +15,7 @@ interface ProjectGroupProps { onSelectSession: (sessionId: string) => void; compact?: boolean; searchQuery?: string; + onSessionDeleted?: (sessionId: string) => void; } export function ProjectGroup({ @@ -23,6 +24,7 @@ export function ProjectGroup({ onSelectSession, compact, searchQuery, + onSessionDeleted, }: ProjectGroupProps) { const [collapsed, setCollapsed] = useState(false); @@ -64,6 +66,7 @@ export function ProjectGroup({ onSelect={() => onSelectSession(`${s.work_dir_hash}/${s.session_id}`)} compact={compact} searchQuery={searchQuery} + onDeleted={onSessionDeleted} /> ))}
diff --git a/vis/src/features/sessions-explorer/session-card.tsx b/vis/src/features/sessions-explorer/session-card.tsx index f99780303..c9d3995c6 100644 --- a/vis/src/features/sessions-explorer/session-card.tsx +++ b/vis/src/features/sessions-explorer/session-card.tsx @@ -2,9 +2,21 @@ import { useEffect, useRef, useState } from "react"; import { type SessionInfo, type SessionSummary, + deleteSession, + getSessionDownloadUrl, getSessionSummary, } from "@/lib/api"; -import { AlertCircle, Clock, RefreshCw, Zap } from "lucide-react"; +import { + AlertDialog, + AlertDialogAction, + AlertDialogCancel, + AlertDialogContent, + AlertDialogDescription, + AlertDialogFooter, + AlertDialogHeader, + AlertDialogTitle, +} from "@/components/ui/alert-dialog"; +import { AlertCircle, Clock, Download, RefreshCw, Trash2, Zap } from "lucide-react"; function formatRelativeTime(epochSec: number): string { if (!epochSec) return ""; @@ -55,80 +67,192 @@ interface SessionCardProps { onSelect: () => void; compact?: boolean; searchQuery?: string; + onDeleted?: (sessionId: string) => void; } -export function SessionCard({ session, onSelect, compact, searchQuery }: SessionCardProps) { +export function SessionCard({ session, onSelect, compact, searchQuery, onDeleted }: SessionCardProps) { + const [deleteDialogOpen, setDeleteDialogOpen] = useState(false); + const [deleting, setDeleting] = useState(false); + const displayTitle = session.metadata?.title && session.metadata.title !== "Untitled Session" ? 
session.metadata.title : session.title || "Untitled Session"; + const sessionPath = `${session.work_dir_hash}/${session.session_id}`; + const downloadUrl = getSessionDownloadUrl(sessionPath); + + const handleDownload = (e: React.MouseEvent) => { + e.stopPropagation(); + window.open(downloadUrl, "_blank", "noopener,noreferrer"); + }; + + const handleDeleteClick = (e: React.MouseEvent) => { + e.stopPropagation(); + setDeleteDialogOpen(true); + }; + + const handleDeleteConfirm = () => { + setDeleting(true); + deleteSession(sessionPath) + .then(() => { + setDeleteDialogOpen(false); + onDeleted?.(session.session_id); + }) + .catch((err) => alert(err instanceof Error ? err.message : "Delete failed")) + .finally(() => setDeleting(false)); + }; + + const deleteDialog = session.imported ? ( + + e.stopPropagation()}> + + Delete imported session? + + This will permanently delete the imported session + "{displayTitle}". This action cannot be undone. + + + + Cancel + + {deleting ? "Deleting..." : "Delete"} + + + + + ) : null; + if (compact) { return ( - + <> + + {deleteDialog} + ); } return ( - + {/* Row 4+: Lazy-loaded stats */} + + + {deleteDialog} + ); } diff --git a/vis/src/features/sessions-explorer/sessions-explorer.tsx b/vis/src/features/sessions-explorer/sessions-explorer.tsx index 388d2bb10..85fa701be 100644 --- a/vis/src/features/sessions-explorer/sessions-explorer.tsx +++ b/vis/src/features/sessions-explorer/sessions-explorer.tsx @@ -1,7 +1,8 @@ import { useEffect, useMemo, useState } from "react"; -import { type SessionInfo, listSessions } from "@/lib/api"; +import { type SessionInfo, importSession, listSessions } from "@/lib/api"; import { ExplorerToolbar, + type FilterMode, type SortMode, type ViewMode, } from "./explorer-toolbar"; @@ -31,6 +32,17 @@ export function SessionsExplorer({ onSelectSession }: SessionsExplorerProps) { const [sortMode, setSortMode] = useState("time"); const [grouped, setGrouped] = useState(true); const [viewMode, setViewMode] = 
useState("cards"); + const [filterMode, setFilterMode] = useState("all"); + const [importing, setImporting] = useState(false); + + const refreshSessions = async () => { + try { + const updated = await listSessions(true); + setSessions(updated); + } catch (err) { + console.error(err); + } + }; useEffect(() => { listSessions() @@ -54,16 +66,47 @@ export function SessionsExplorer({ onSelectSession }: SessionsExplorerProps) { return () => window.removeEventListener("keydown", handler); }, []); + const handleImport = async (file: File) => { + setImporting(true); + try { + await importSession(file); + await refreshSessions(); + } catch (err) { + console.error("Import failed:", err); + alert(err instanceof Error ? err.message : "Import failed"); + } finally { + setImporting(false); + } + }; + + const handleSessionDeleted = (deletedSessionId: string) => { + // Optimistic removal from local state + setSessions((prev) => prev.filter((s) => s.session_id !== deletedSessionId)); + // Then refresh from server to ensure consistency + refreshSessions(); + }; + const filtered = useMemo(() => { - if (!search) return sessions; - const q = search.toLowerCase(); - return sessions.filter( - (s) => - s.session_id.toLowerCase().includes(q) || - s.title.toLowerCase().includes(q) || - (s.work_dir && s.work_dir.toLowerCase().includes(q)), - ); - }, [sessions, search]); + let result = sessions; + + // Apply imported filter + if (filterMode === "imported") { + result = result.filter((s) => s.imported); + } + + // Apply search filter + if (search) { + const q = search.toLowerCase(); + result = result.filter( + (s) => + s.session_id.toLowerCase().includes(q) || + s.title.toLowerCase().includes(q) || + (s.work_dir && s.work_dir.toLowerCase().includes(q)), + ); + } + + return result; + }, [sessions, search, filterMode]); const sorted = useMemo(() => { const arr = [...filtered]; @@ -81,7 +124,7 @@ export function SessionsExplorer({ onSelectSession }: SessionsExplorerProps) { if (!grouped) return 
[]; const map = new Map(); for (const s of sorted) { - const key = s.work_dir ?? "Unknown"; + const key = s.imported ? "Imported" : (s.work_dir ?? "Unknown"); const list = map.get(key); if (list) { list.push(s); @@ -136,17 +179,6 @@ export function SessionsExplorer({ onSelectSession }: SessionsExplorerProps) { ); } - if (sessions.length === 0) { - return ( -
- No sessions found - - Run kimi to create your first session. - -
- ); - } - return (
setGrouped((v) => !v)} viewMode={viewMode} onViewModeChange={setViewMode} + filterMode={filterMode} + onFilterModeChange={setFilterMode} totalCount={sessions.length} filteredCount={filtered.length} + onImport={handleImport} + importing={importing} />
@@ -172,6 +208,7 @@ export function SessionsExplorer({ onSelectSession }: SessionsExplorerProps) { onSelectSession={onSelectSession} compact={viewMode === "compact"} searchQuery={search} + onSessionDeleted={handleSessionDeleted} /> )) ) : viewMode === "compact" ? ( @@ -183,6 +220,7 @@ export function SessionsExplorer({ onSelectSession }: SessionsExplorerProps) { onSelect={() => onSelectSession(`${s.work_dir_hash}/${s.session_id}`)} compact searchQuery={search} + onDeleted={handleSessionDeleted} /> ))}
@@ -194,6 +232,7 @@ export function SessionsExplorer({ onSelectSession }: SessionsExplorerProps) { session={s} onSelect={() => onSelectSession(`${s.work_dir_hash}/${s.session_id}`)} searchQuery={search} + onDeleted={handleSessionDeleted} /> ))}
@@ -204,6 +243,22 @@ export function SessionsExplorer({ onSelectSession }: SessionsExplorerProps) { No sessions matching "{search}"
)} + + {filtered.length === 0 && !search && filterMode === "imported" && ( +
+ No imported sessions + Import a session ZIP to get started. +
+ )} + + {sessions.length === 0 && !search && filterMode === "all" && ( +
+ No sessions found + + Run kimi to create your first session, or import a session ZIP. + +
+ )}
{/* Footer */} diff --git a/vis/src/lib/api.ts b/vis/src/lib/api.ts index fb9146872..2d483c4e2 100644 --- a/vis/src/lib/api.ts +++ b/vis/src/lib/api.ts @@ -46,6 +46,7 @@ export interface SessionInfo { state_size: number; total_size: number; turns: number; + imported?: boolean; } export interface SessionSummary { @@ -213,6 +214,10 @@ export function getAggregateStats(forceRefresh = false): Promise return apiCache.get(key, () => fetchJSON("/statistics"), 60_000); } +export function getSessionDownloadUrl(sessionId: string): string { + return `${BASE}/sessions/${sessionId}/download`; +} + export function getSessionSummary( sessionId: string, forceRefresh = false, @@ -221,3 +226,46 @@ export function getSessionSummary( if (forceRefresh) apiCache.invalidate(key); return apiCache.get(key, () => fetchJSON(`/sessions/${sessionId}/summary`)); } + +export async function importSession(file: File): Promise<{ session_id: string; work_dir_hash: string }> { + const controller = new AbortController(); + const timeout = setTimeout(() => controller.abort(), 120_000); + try { + const formData = new FormData(); + formData.append("file", file); + const res = await fetch(`${BASE}/sessions/import`, { method: "POST", body: formData, signal: controller.signal }); + if (!res.ok) { + const detail = await res.json().catch(() => ({})); + throw new Error(detail.detail || `Import failed: ${res.status}`); + } + apiCache.invalidate("sessions"); + return res.json(); + } catch (e) { + if (e instanceof DOMException && e.name === "AbortError") { + throw new Error("Import request timed out"); + } + throw e; + } finally { + clearTimeout(timeout); + } +} + +export async function deleteSession(sessionId: string): Promise { + const controller = new AbortController(); + const timeout = setTimeout(() => controller.abort(), 30_000); + try { + const res = await fetch(`${BASE}/sessions/${sessionId}`, { method: "DELETE", signal: controller.signal }); + if (!res.ok) { + const detail = await res.json().catch(() => 
({})); + throw new Error(detail.detail || `Delete failed: ${res.status}`); + } + apiCache.invalidate("sessions"); + } catch (e) { + if (e instanceof DOMException && e.name === "AbortError") { + throw new Error("Delete request timed out"); + } + throw e; + } finally { + clearTimeout(timeout); + } +} diff --git a/vis/src/main.tsx b/vis/src/main.tsx index 7c7e28ece..25602a432 100644 --- a/vis/src/main.tsx +++ b/vis/src/main.tsx @@ -1,6 +1,7 @@ import { Component, StrictMode, type ErrorInfo, type ReactNode } from "react"; import { createRoot } from "react-dom/client"; import { App } from "./App.tsx"; +import { TooltipProvider } from "@/components/ui/tooltip"; import "./index.css"; class ErrorBoundary extends Component< @@ -42,8 +43,10 @@ class ErrorBoundary extends Component< createRoot(document.getElementById("root")!).render( - - - + + + + + , );