diff --git a/.claude-plugin/plugin.json b/.claude-plugin/plugin.json index 05cdf37..6b27980 100644 --- a/.claude-plugin/plugin.json +++ b/.claude-plugin/plugin.json @@ -1,6 +1,6 @@ { "name": "context-memory", - "version": "1.1.0", + "version": "1.2.0", "description": "Persistent, searchable context storage across Claude Code sessions using SQLite + FTS5", "author": { "name": "ErebusEnigma" diff --git a/.gitignore b/.gitignore index f65dcf4..1630972 100644 --- a/.gitignore +++ b/.gitignore @@ -3,14 +3,20 @@ __pycache__/ *.py[cod] *$py.class *.egg-info/ +.eggs/ dist/ build/ *.egg +*.whl +*.tar.gz +pip-log.txt +MANIFEST # SQLite *.db *.db-wal *.db-shm +*.db-journal # IDE .vscode/ @@ -23,11 +29,17 @@ build/ .DS_Store Thumbs.db Desktop.ini +ehthumbs.db # Testing & Coverage .pytest_cache/ .coverage +.coverage.* +coverage.xml htmlcov/ +.tox/ +.nox/ +.hypothesis/ # Type checking & Linting .mypy_cache/ @@ -35,8 +47,13 @@ htmlcov/ # Environment .env +.env.* .venv/ venv/ +.python-version + +# Logs +*.log # Claude Code local settings .claude/ diff --git a/CHANGELOG.md b/CHANGELOG.md index fbce483..a4e219f 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,19 @@ # Changelog +## [1.2.0] - 2026-02-17 + +### Added +- **Web dashboard** (`dashboard.py`) — Flask-based single-page app for browsing, searching, managing, and analyzing stored sessions + - REST API with 17 endpoints: sessions CRUD, full-text search, analytics (timeline, topics, projects, outcomes, technologies), database management (init, prune, export), and search hint chips + - Frontend SPA with hash-based routing, dark/light theme toggle, session detail views, code syntax highlighting (Highlight.js), and interactive charts (Chart.js) + - Session editing (summary, topics, user notes) and deletion from the UI + - Database export as JSON, prune with dry-run preview +- `context_dashboard` MCP tool to launch the web dashboard from any MCP-compatible client +- 73 new tests covering high and medium priority gaps across 
install, uninstall, search, save, init, utils, and auto-save modules + +### Changed +- `.gitignore` expanded with additional patterns: build artifacts (`*.whl`, `*.tar.gz`, `MANIFEST`), SQLite journal files, coverage variants, test framework caches (`.tox/`, `.nox/`, `.hypothesis/`), log files + ## [1.1.0] - 2026-02-16 ### Added diff --git a/CLAUDE.md b/CLAUDE.md index 9b83d49..42c7ca0 100644 --- a/CLAUDE.md +++ b/CLAUDE.md @@ -17,6 +17,8 @@ skills/context-memory/ # Skill definition (SKILL.md) db_prune.py # Database pruning (by age/count) db_utils.py # Connection management, helpers, VALID_TABLES mcp_server.py # MCP server (FastMCP, stdio transport) + dashboard.py # Web dashboard (Flask REST API + SPA) + static/ # Dashboard frontend (vanilla JS, CSS) commands/ # /remember and /recall command definitions hooks/ # Auto-save stop hook (with dedup) ``` @@ -27,6 +29,7 @@ hooks/ # Auto-save stop hook (with dedup) python skills/context-memory/scripts/db_init.py --verify # Verify schema python skills/context-memory/scripts/db_init.py --stats # DB statistics python skills/context-memory/scripts/db_prune.py --max-sessions 100 --dry-run # Preview prune +python skills/context-memory/scripts/dashboard.py # Launch web dashboard (localhost:5111) python -m pytest tests/ -v # Run tests ruff check . # Lint ``` diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 62eb28c..13dcd89 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -17,6 +17,7 @@ cd context-memory ```bash pip install ruff pytest +pip install flask flask-cors # optional, for dashboard development ``` ## Verify Schema @@ -43,6 +44,14 @@ ruff check . python -m pytest tests/ -v ``` +## Running the Dashboard + +```bash +python skills/context-memory/scripts/dashboard.py +``` + +Opens at [http://127.0.0.1:5111](http://127.0.0.1:5111). The frontend is vanilla JS in `skills/context-memory/scripts/static/` — no build step required. 
+ ## Project Structure Notes - `.claude-plugin/plugin.json` is **not** part of the Anthropic skill spec. It exists for potential future plugin registry use. The canonical metadata lives in `skills/context-memory/SKILL.md` frontmatter. diff --git a/README.md b/README.md index c099bb2..bf79157 100644 --- a/README.md +++ b/README.md @@ -17,6 +17,7 @@ Persistent, searchable context storage across Claude Code sessions using SQLite - [How It Works](#how-it-works) - [Usage Examples](#usage-examples) - [Trigger Phrases](#trigger-phrases) +- [Web Dashboard](#web-dashboard) - [Database Management](#database-management) - [Contributing](#contributing) - [License](#license) @@ -84,6 +85,7 @@ This removes the skill, commands, hooks, and MCP server registration. Your saved - Python >= 3.8 - SQLite with FTS5 support (included in Python's standard library) - MCP server (optional): Python >= 3.10 and `pip install mcp` +- Web dashboard (optional): `pip install flask flask-cors` ## Commands @@ -152,6 +154,7 @@ An optional MCP (Model Context Protocol) server exposes context-memory operation - `context_save` — Save a session with messages, summary, topics, snippets - `context_stats` — Database statistics - `context_init` — Initialize/verify database +- `context_dashboard` — Launch the web dashboard (see [Web Dashboard](#web-dashboard)) **Setup:** @@ -208,6 +211,34 @@ The context-memory skill also activates on natural language: - "what did we discuss about..." - "find previous work on..." +## Web Dashboard + +A browser-based UI for browsing, searching, and managing your stored sessions. + +```bash +pip install flask flask-cors +python skills/context-memory/scripts/dashboard.py +``` + +Then open [http://127.0.0.1:5111](http://127.0.0.1:5111). 
+ +**Features:** +- **Search** — Full-text search with topic/technology hint chips +- **Sessions** — Browse all sessions with pagination, project filtering, and sorting +- **Session detail** — View full summaries, messages, and code snippets; edit topics and notes inline +- **Analytics** — Timeline charts, topic frequency, project distribution, outcome breakdown, technology usage +- **Settings** — Initialize or reinitialize the database, prune old sessions (with dry-run preview), export all data as JSON + +Use `--port` to change the default port: + +```bash +python skills/context-memory/scripts/dashboard.py --port 8080 +``` + +The dashboard can also be launched via the MCP `context_dashboard` tool, which starts it in the background. + +> **Note**: The dashboard requires `flask` and `flask-cors` (`pip install flask flask-cors`). These are not needed for the core plugin. + ## Database Management Initialize or verify the database manually: diff --git a/pyproject.toml b/pyproject.toml index c06fea8..707137e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "context-memory" -version = "1.1.0" +version = "1.2.0" description = "Persistent, searchable context storage across Claude Code sessions using SQLite + FTS5" readme = "README.md" license = "MIT" diff --git a/skills/context-memory/scripts/__init__.py b/skills/context-memory/scripts/__init__.py index a50b49a..ba5c949 100644 --- a/skills/context-memory/scripts/__init__.py +++ b/skills/context-memory/scripts/__init__.py @@ -4,7 +4,7 @@ Database utilities for persistent, searchable context storage. 
""" -__version__ = "1.1.0" +__version__ = "1.2.0" from .db_init import get_stats, init_database, verify_schema from .db_prune import prune_sessions diff --git a/skills/context-memory/scripts/dashboard.py b/skills/context-memory/scripts/dashboard.py new file mode 100644 index 0000000..0e24cc8 --- /dev/null +++ b/skills/context-memory/scripts/dashboard.py @@ -0,0 +1,506 @@ +#!/usr/bin/env python3 +""" +Web dashboard for context-memory plugin. + +Serves a REST API and single-page app for browsing, searching, +managing, and analyzing stored Claude Code sessions. + +Requires: pip install flask flask-cors +""" +from __future__ import annotations + +import argparse +import json +import sys +from pathlib import Path + +# Ensure sibling modules are importable (same pattern as mcp_server.py) +_scripts_dir = str(Path(__file__).resolve().parent) +if _scripts_dir not in sys.path: + sys.path.insert(0, _scripts_dir) + +try: + from flask import Flask, jsonify, request, send_from_directory + from flask_cors import CORS +except ImportError: + if __name__ == "__main__": + print( + "Error: flask and flask-cors are required for the dashboard.\n" + "Install with: pip install flask flask-cors", + file=sys.stderr, + ) + sys.exit(1) + raise + +from db_init import get_stats, init_database # noqa: E402 +from db_prune import CHILD_TABLES, prune_sessions # noqa: E402 +from db_save import save_summary, save_topics # noqa: E402 +from db_search import full_search, search_tier2 # noqa: E402 +from db_utils import DB_PATH, VALID_TABLES, db_exists, get_connection # noqa: E402 + +STATIC_DIR = Path(__file__).resolve().parent / "static" + +app = Flask(__name__, static_folder=str(STATIC_DIR), static_url_path="/static") +CORS(app) + + +# --------------------------------------------------------------------------- +# Static file serving +# --------------------------------------------------------------------------- + +@app.route("/") +def index(): + return send_from_directory(str(STATIC_DIR), "index.html") + 
+ +# --------------------------------------------------------------------------- +# API: Sessions +# --------------------------------------------------------------------------- + +@app.route("/api/sessions") +def api_list_sessions(): + """List sessions with pagination and optional project filter.""" + page = request.args.get("page", 1, type=int) + per_page = request.args.get("per_page", 20, type=int) + project = request.args.get("project", None) + sort = request.args.get("sort", "created_at") + order = request.args.get("order", "desc") + + if not db_exists(): + return jsonify({"sessions": [], "total": 0, "page": page, "per_page": per_page}) + + allowed_sorts = {"created_at", "updated_at", "message_count"} + if sort not in allowed_sorts: + sort = "created_at" + order_dir = "ASC" if order == "asc" else "DESC" + + with get_connection(readonly=True) as conn: + # Count total + count_sql = "SELECT COUNT(*) FROM sessions" + count_params = [] + if project: + count_sql += " WHERE project_path LIKE ?" + count_params.append(f"%{project}%") + + total = conn.execute(count_sql, count_params).fetchone()[0] + + # Fetch page + sql = """ + SELECT s.id, s.session_id, s.project_path, s.created_at, s.updated_at, + s.message_count, s.metadata, + sum.brief, sum.outcome, sum.technologies, sum.user_note + FROM sessions s + LEFT JOIN summaries sum ON sum.session_id = s.id + """ + params = [] + if project: + sql += " WHERE s.project_path LIKE ?" + params.append(f"%{project}%") + + sql += f" ORDER BY s.{sort} {order_dir} LIMIT ? OFFSET ?" 
+ params.extend([per_page, (page - 1) * per_page]) + + cursor = conn.execute(sql, params) + sessions = [] + session_ids = [] + for row in cursor.fetchall(): + s = dict(row) + # Parse JSON fields + for field in ["metadata", "technologies"]: + if s.get(field) and isinstance(s[field], str): + try: + s[field] = json.loads(s[field]) + except (json.JSONDecodeError, ValueError): + pass + sessions.append(s) + session_ids.append(s["id"]) + + # Batch-fetch topics + if session_ids: + placeholders = ",".join("?" * len(session_ids)) + cursor = conn.execute( + f"SELECT session_id, topic FROM topics WHERE session_id IN ({placeholders})", + session_ids, + ) + topics_map = {} + for row in cursor.fetchall(): + topics_map.setdefault(row["session_id"], []).append(row["topic"]) + for s in sessions: + s["topics"] = topics_map.get(s["id"], []) + + return jsonify({"sessions": sessions, "total": total, "page": page, "per_page": per_page}) + + +@app.route("/api/sessions/<int:session_db_id>") +def api_get_session(session_db_id): + """Get full session detail.""" + if not db_exists(): + return jsonify({"error": "Database does not exist"}), 404 + + results = search_tier2([session_db_id], include_messages=True, include_snippets=True) + if not results: + return jsonify({"error": "Session not found"}), 404 + + return jsonify(results[0]) + + +@app.route("/api/sessions/<int:session_db_id>", methods=["PUT"]) +def api_update_session(session_db_id): + """Update session summary, topics, or user_note.""" + if not db_exists(): + return jsonify({"error": "Database does not exist"}), 404 + + data = request.get_json() + if not data: + return jsonify({"error": "No data provided"}), 400 + + with get_connection() as conn: + # Verify session exists + row = conn.execute("SELECT id FROM sessions WHERE id = ?", (session_db_id,)).fetchone() + if not row: + return jsonify({"error": "Session not found"}), 404 + + updated = {} + + # Update summary fields + summary_fields = {"brief", "detailed", "key_decisions", "problems_solved", "technologies", "outcome", 
"user_note"} + summary_data = {k: v for k, v in data.items() if k in summary_fields} + if summary_data: + save_summary(session_db_id, **summary_data) + updated["summary"] = list(summary_data.keys()) + + # Update topics + if "topics" in data: + save_topics(session_db_id, data["topics"], replace=True) + updated["topics"] = data["topics"] + + return jsonify({"updated": updated}) + + +@app.route("/api/sessions/<int:session_db_id>", methods=["DELETE"]) +def api_delete_session(session_db_id): + """Delete a single session (FTS-safe: delete children first).""" + if not db_exists(): + return jsonify({"error": "Database does not exist"}), 404 + + with get_connection() as conn: + row = conn.execute("SELECT id, session_id FROM sessions WHERE id = ?", (session_db_id,)).fetchone() + if not row: + return jsonify({"error": "Session not found"}), 404 + + # Delete child rows explicitly so FTS triggers fire + for table in CHILD_TABLES: + if table not in VALID_TABLES: + continue + conn.execute(f"DELETE FROM {table} WHERE session_id = ?", (session_db_id,)) + + conn.execute("DELETE FROM sessions WHERE id = ?", (session_db_id,)) + conn.commit() + + return jsonify({"deleted": session_db_id}) + + +# --------------------------------------------------------------------------- +# API: Search +# --------------------------------------------------------------------------- + +@app.route("/api/search") +def api_search(): + """Full-text search across sessions.""" + query = request.args.get("q", "").strip() + if not query: + return jsonify({"error": "Query parameter 'q' is required"}), 400 + + project_path = request.args.get("project", None) + detailed = request.args.get("detailed", "false").lower() == "true" + limit = request.args.get("limit", 10, type=int) + + results = full_search(query=query, project_path=project_path, detailed=detailed, limit=limit) + return jsonify(results) + + +# --------------------------------------------------------------------------- +# API: Stats +# 
--------------------------------------------------------------------------- + +@app.route("/api/stats") +def api_stats(): + """Get database statistics.""" + if not db_exists(): + return jsonify({"error": "Database does not exist", "exists": False}) + stats = get_stats() + return jsonify(stats) + + +# --------------------------------------------------------------------------- +# API: Analytics +# --------------------------------------------------------------------------- + +@app.route("/api/analytics/timeline") +def api_analytics_timeline(): + """Sessions per day/week/month.""" + granularity = request.args.get("granularity", "week") + if not db_exists(): + return jsonify({"data": []}) + + if granularity == "day": + date_expr = "date(created_at)" + elif granularity == "month": + date_expr = "strftime('%Y-%m', created_at)" + else: + date_expr = "strftime('%Y-W%W', created_at)" + + with get_connection(readonly=True) as conn: + cursor = conn.execute(f""" + SELECT {date_expr} as period, + COUNT(*) as count, + SUM(CASE WHEN json_extract(metadata, '$.auto_save') = 1 THEN 1 ELSE 0 END) as auto_count, + SUM(CASE WHEN json_extract(metadata, '$.auto_save') = 1 THEN 0 ELSE 1 END) as manual_count + FROM sessions + GROUP BY period + ORDER BY period + """) + data = [dict(row) for row in cursor.fetchall()] + + return jsonify({"data": data, "granularity": granularity}) + + +@app.route("/api/analytics/topics") +def api_analytics_topics(): + """Topic frequency distribution.""" + limit = request.args.get("limit", 20, type=int) + if not db_exists(): + return jsonify({"data": []}) + + with get_connection(readonly=True) as conn: + cursor = conn.execute(""" + SELECT topic, COUNT(*) as count + FROM topics + WHERE topic != 'auto-save' + GROUP BY topic + ORDER BY count DESC + LIMIT ? 
+ """, (limit,)) + data = [dict(row) for row in cursor.fetchall()] + + return jsonify({"data": data}) + + +@app.route("/api/analytics/projects") +def api_analytics_projects(): + """Sessions per project.""" + if not db_exists(): + return jsonify({"data": []}) + + with get_connection(readonly=True) as conn: + cursor = conn.execute(""" + SELECT project_path, COUNT(*) as count + FROM sessions + WHERE project_path IS NOT NULL AND project_path != '' + GROUP BY project_path + ORDER BY count DESC + LIMIT 20 + """) + data = [dict(row) for row in cursor.fetchall()] + + return jsonify({"data": data}) + + +@app.route("/api/analytics/outcomes") +def api_analytics_outcomes(): + """Outcome distribution.""" + if not db_exists(): + return jsonify({"data": []}) + + with get_connection(readonly=True) as conn: + cursor = conn.execute(""" + SELECT COALESCE(outcome, 'unknown') as outcome, COUNT(*) as count + FROM summaries + GROUP BY outcome + ORDER BY count DESC + """) + data = [dict(row) for row in cursor.fetchall()] + + return jsonify({"data": data}) + + +@app.route("/api/analytics/technologies") +def api_analytics_technologies(): + """Technology usage frequency.""" + limit = request.args.get("limit", 15, type=int) + if not db_exists(): + return jsonify({"data": []}) + + with get_connection(readonly=True) as conn: + cursor = conn.execute("SELECT technologies FROM summaries WHERE technologies IS NOT NULL") + tech_counts = {} + for row in cursor.fetchall(): + try: + techs = json.loads(row["technologies"]) if isinstance(row["technologies"], str) else row["technologies"] + if isinstance(techs, list): + for t in techs: + t_lower = t.strip().lower() + if t_lower: + tech_counts[t_lower] = tech_counts.get(t_lower, 0) + 1 + except (json.JSONDecodeError, ValueError, TypeError): + pass + + sorted_techs = sorted(tech_counts.items(), key=lambda x: x[1], reverse=True)[:limit] + data = [{"technology": t, "count": c} for t, c in sorted_techs] + return jsonify({"data": data}) + + +# 
--------------------------------------------------------------------------- +# API: Management +# --------------------------------------------------------------------------- + +@app.route("/api/prune", methods=["POST"]) +def api_prune(): + """Prune sessions by age and/or count.""" + data = request.get_json() + if not data: + return jsonify({"error": "No data provided"}), 400 + + result = prune_sessions( + max_age_days=data.get("max_age_days"), + max_sessions=data.get("max_sessions"), + dry_run=data.get("dry_run", True), + ) + return jsonify(result) + + +@app.route("/api/init", methods=["POST"]) +def api_init(): + """Initialize or reinitialize the database.""" + data = request.get_json() or {} + force = data.get("force", False) + created = init_database(force=force) + if created: + return jsonify({"created": True, "message": "Database initialized."}) + return jsonify({"created": False, "message": "Database already exists."}) + + +@app.route("/api/export") +def api_export(): + """Export all sessions as JSON.""" + if not db_exists(): + return jsonify({"error": "Database does not exist"}), 404 + + with get_connection(readonly=True) as conn: + # Get all session IDs + cursor = conn.execute("SELECT id FROM sessions ORDER BY created_at DESC") + session_ids = [row["id"] for row in cursor.fetchall()] + + if not session_ids: + return jsonify({"sessions": []}) + + sessions = search_tier2(session_ids, include_messages=True, include_snippets=True) + return jsonify({"sessions": sessions, "count": len(sessions), "db_path": str(DB_PATH)}) + + +# --------------------------------------------------------------------------- +# API: Projects list (for filter dropdowns) +# --------------------------------------------------------------------------- + +@app.route("/api/projects") +def api_projects(): + """List all distinct project paths with session counts.""" + if not db_exists(): + return jsonify({"projects": []}) + + with get_connection(readonly=True) as conn: + cursor = 
conn.execute(""" + SELECT project_path, COUNT(*) as session_count + FROM sessions + WHERE project_path IS NOT NULL AND project_path != '' + GROUP BY project_path + ORDER BY session_count DESC + """) + projects = [dict(row) for row in cursor.fetchall()] + + return jsonify({"projects": projects}) + + +# --------------------------------------------------------------------------- +# API: Search hints (topics + technologies scoped to project) +# --------------------------------------------------------------------------- + +@app.route("/api/hints") +def api_hints(): + """Get search hint chips (topics + technologies) for a project or globally.""" + project = request.args.get("project", None) + if not db_exists(): + return jsonify({"topics": [], "technologies": []}) + + with get_connection(readonly=True) as conn: + # Topics scoped to project + if project: + cursor = conn.execute(""" + SELECT t.topic, COUNT(*) as count + FROM topics t + JOIN sessions s ON s.id = t.session_id + WHERE t.topic != 'auto-save' AND s.project_path LIKE ? + GROUP BY t.topic + ORDER BY count DESC + LIMIT 15 + """, (f"%{project}%",)) + else: + cursor = conn.execute(""" + SELECT topic, COUNT(*) as count + FROM topics + WHERE topic != 'auto-save' + GROUP BY topic + ORDER BY count DESC + LIMIT 15 + """) + topics = [dict(row) for row in cursor.fetchall()] + + # Technologies scoped to project + if project: + cursor = conn.execute(""" + SELECT sum.technologies + FROM summaries sum + JOIN sessions s ON s.id = sum.session_id + WHERE sum.technologies IS NOT NULL AND s.project_path LIKE ? 
+ """, (f"%{project}%",)) + else: + cursor = conn.execute( + "SELECT technologies FROM summaries WHERE technologies IS NOT NULL" + ) + + tech_counts = {} + for row in cursor.fetchall(): + try: + techs = json.loads(row["technologies"]) if isinstance(row["technologies"], str) else row["technologies"] + if isinstance(techs, list): + for t in techs: + t_lower = t.strip().lower() + if t_lower: + tech_counts[t_lower] = tech_counts.get(t_lower, 0) + 1 + except (json.JSONDecodeError, ValueError, TypeError): + pass + + sorted_techs = sorted(tech_counts.items(), key=lambda x: x[1], reverse=True)[:15] + technologies = [{"technology": t, "count": c} for t, c in sorted_techs] + + return jsonify({"topics": topics, "technologies": technologies}) + + +# --------------------------------------------------------------------------- +# Entry point +# --------------------------------------------------------------------------- + +def start_server(host: str = "127.0.0.1", port: int = 5111, debug: bool = False): + """Start the Flask dashboard server.""" + print(f"Context Memory Dashboard: http://{host}:{port}") + app.run(host=host, port=port, debug=debug) + + +if __name__ == "__main__": + parser = argparse.ArgumentParser(description="Context Memory Web Dashboard") + parser.add_argument("--host", default="127.0.0.1", help="Host to bind to (default: 127.0.0.1)") + parser.add_argument("--port", type=int, default=5111, help="Port to listen on (default: 5111)") + parser.add_argument("--debug", action="store_true", help="Enable Flask debug mode") + + args = parser.parse_args() + start_server(host=args.host, port=args.port, debug=args.debug) diff --git a/skills/context-memory/scripts/mcp_server.py b/skills/context-memory/scripts/mcp_server.py index 71f34e5..0699113 100644 --- a/skills/context-memory/scripts/mcp_server.py +++ b/skills/context-memory/scripts/mcp_server.py @@ -11,7 +11,9 @@ import contextlib import io +import socket import sys +import threading from pathlib import Path # Ensure 
sibling modules (db_init, db_save, db_search) are importable regardless @@ -162,5 +164,53 @@ def context_init(force: bool = False) -> dict: return {"created": False, "message": "Database already exists."} +_dashboard_thread = None +_dashboard_port = None + + +def _port_in_use(port: int) -> bool: + """Check if a port is already in use.""" + with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s: + return s.connect_ex(("127.0.0.1", port)) == 0 + + +@mcp.tool() +def context_dashboard(port: int = 5111) -> dict: + """Launch the context memory web dashboard in the background. + + Starts a local Flask web server serving the dashboard UI. + If already running, returns the existing URL. + + Requires: pip install flask flask-cors + + Args: + port: Port to listen on (default 5111). + + Returns: + Dict with 'url' and 'status'. + """ + global _dashboard_thread, _dashboard_port + + if _dashboard_thread is not None and _dashboard_thread.is_alive(): + return {"url": f"http://127.0.0.1:{_dashboard_port}", "status": "already_running"} + + if _port_in_use(port): + return {"url": f"http://127.0.0.1:{port}", "status": "port_in_use"} + + try: + from dashboard import app as flask_app # noqa: E402 + except ImportError as e: + return {"error": str(e), "status": "import_error"} + + def _run(): + flask_app.run(host="127.0.0.1", port=port, debug=False, use_reloader=False) + + _dashboard_port = port + _dashboard_thread = threading.Thread(target=_run, daemon=True) + _dashboard_thread.start() + + return {"url": f"http://127.0.0.1:{port}", "status": "started"} + + if __name__ == "__main__": mcp.run() diff --git a/skills/context-memory/scripts/static/css/dashboard.css b/skills/context-memory/scripts/static/css/dashboard.css new file mode 100644 index 0000000..731fc8a --- /dev/null +++ b/skills/context-memory/scripts/static/css/dashboard.css @@ -0,0 +1,796 @@ +/* ========================================================================== + Context Memory Dashboard — Styles + 
========================================================================== */ + +/* ---------- CSS Custom Properties (Dark theme default) ---------- */ +:root, +[data-theme="dark"] { + --bg-primary: #0d1117; + --bg-secondary: #161b22; + --bg-tertiary: #21262d; + --bg-hover: #30363d; + --border: #30363d; + --border-light: #21262d; + --text-primary: #e6edf3; + --text-secondary: #8b949e; + --text-muted: #6e7681; + --accent: #58a6ff; + --accent-hover: #79c0ff; + --accent-bg: rgba(56, 139, 253, 0.1); + --success: #3fb950; + --success-bg: rgba(63, 185, 80, 0.1); + --warning: #d29922; + --warning-bg: rgba(210, 153, 34, 0.1); + --danger: #f85149; + --danger-bg: rgba(248, 81, 73, 0.1); + --info: #58a6ff; + --shadow: 0 1px 3px rgba(0, 0, 0, 0.3); + --shadow-lg: 0 4px 12px rgba(0, 0, 0, 0.4); + --radius: 6px; + --radius-lg: 10px; + --font-sans: -apple-system, BlinkMacSystemFont, "Segoe UI", Helvetica, Arial, sans-serif; + --font-mono: "SF Mono", "Fira Code", "Fira Mono", Menlo, Consolas, monospace; + --transition: 150ms ease; +} + +[data-theme="light"] { + --bg-primary: #ffffff; + --bg-secondary: #f6f8fa; + --bg-tertiary: #eaeef2; + --bg-hover: #d0d7de; + --border: #d0d7de; + --border-light: #eaeef2; + --text-primary: #1f2328; + --text-secondary: #656d76; + --text-muted: #8b949e; + --accent: #0969da; + --accent-hover: #0550ae; + --accent-bg: rgba(9, 105, 218, 0.08); + --success: #1a7f37; + --success-bg: rgba(26, 127, 55, 0.08); + --warning: #9a6700; + --warning-bg: rgba(154, 103, 0, 0.08); + --danger: #cf222e; + --danger-bg: rgba(207, 34, 46, 0.08); + --info: #0969da; + --shadow: 0 1px 3px rgba(31, 35, 40, 0.12); + --shadow-lg: 0 4px 12px rgba(31, 35, 40, 0.15); +} + +/* ---------- Reset & Base ---------- */ +*, *::before, *::after { box-sizing: border-box; margin: 0; padding: 0; } + +html { font-size: 14px; } + +body { + font-family: var(--font-sans); + background: var(--bg-primary); + color: var(--text-primary); + line-height: 1.5; + min-height: 100vh; +} + +a { color: 
var(--accent); text-decoration: none; } +a:hover { color: var(--accent-hover); } + +/* ---------- Navigation ---------- */ +.nav { + display: flex; + align-items: center; + gap: 1.5rem; + padding: 0 1.5rem; + height: 48px; + background: var(--bg-secondary); + border-bottom: 1px solid var(--border); + position: sticky; + top: 0; + z-index: 100; +} + +.nav-brand { + display: flex; + align-items: center; + gap: 0.5rem; + font-weight: 600; + font-size: 1rem; + color: var(--text-primary); +} + +.nav-logo { color: var(--accent); font-size: 1.2rem; } + +.nav-links { display: flex; gap: 0.25rem; } + +.nav-link { + padding: 0.375rem 0.75rem; + border-radius: var(--radius); + color: var(--text-secondary); + font-size: 0.875rem; + font-weight: 500; + transition: color var(--transition), background var(--transition); +} + +.nav-link:hover { color: var(--text-primary); background: var(--bg-hover); } +.nav-link.active { color: var(--text-primary); background: var(--bg-tertiary); } + +.nav-actions { margin-left: auto; } + +/* ---------- Main ---------- */ +.main { + max-width: 1280px; + margin: 0 auto; + padding: 1.5rem; +} + +/* ---------- Buttons ---------- */ +.btn { + display: inline-flex; + align-items: center; + gap: 0.375rem; + padding: 0.375rem 0.75rem; + border: 1px solid var(--border); + border-radius: var(--radius); + background: var(--bg-secondary); + color: var(--text-primary); + font-size: 0.8125rem; + font-weight: 500; + cursor: pointer; + transition: background var(--transition), border-color var(--transition); + font-family: var(--font-sans); +} + +.btn:hover { background: var(--bg-hover); } + +.btn-primary { + background: var(--accent); + border-color: var(--accent); + color: #fff; +} + +.btn-primary:hover { background: var(--accent-hover); border-color: var(--accent-hover); } + +.btn-danger { + background: var(--danger-bg); + border-color: var(--danger); + color: var(--danger); +} + +.btn-danger:hover { background: var(--danger); color: #fff; } + +.btn-icon { + 
padding: 0.375rem; + border: none; + background: transparent; + color: var(--text-secondary); + cursor: pointer; + border-radius: var(--radius); + font-size: 1.1rem; + line-height: 1; +} + +.btn-icon:hover { background: var(--bg-hover); color: var(--text-primary); } + +.btn-sm { padding: 0.25rem 0.5rem; font-size: 0.75rem; } + +/* ---------- Theme toggle icons ---------- */ +[data-theme="dark"] .icon-sun { display: inline; } +[data-theme="dark"] .icon-moon { display: none; } +[data-theme="light"] .icon-sun { display: none; } +[data-theme="light"] .icon-moon { display: inline; } + +/* ---------- Cards ---------- */ +.card { + background: var(--bg-secondary); + border: 1px solid var(--border); + border-radius: var(--radius-lg); + padding: 1rem 1.25rem; + transition: border-color var(--transition), box-shadow var(--transition); +} + +.card:hover { + border-color: var(--accent); + box-shadow: var(--shadow); +} + +.card-clickable { cursor: pointer; } + +/* ---------- Badges ---------- */ +.badge { + display: inline-flex; + align-items: center; + padding: 0.125rem 0.5rem; + border-radius: 9999px; + font-size: 0.6875rem; + font-weight: 600; + text-transform: uppercase; + letter-spacing: 0.02em; +} + +.badge-success { background: var(--success-bg); color: var(--success); } +.badge-warning { background: var(--warning-bg); color: var(--warning); } +.badge-danger { background: var(--danger-bg); color: var(--danger); } +.badge-info { background: var(--accent-bg); color: var(--accent); } +.badge-muted { background: var(--bg-tertiary); color: var(--text-muted); } + +/* ---------- Tags ---------- */ +.tag { + display: inline-flex; + align-items: center; + gap: 0.25rem; + padding: 0.125rem 0.5rem; + background: var(--bg-tertiary); + border: 1px solid var(--border); + border-radius: 9999px; + font-size: 0.75rem; + color: var(--text-secondary); +} + +.tag-remove { + cursor: pointer; + color: var(--text-muted); + font-size: 0.875rem; + line-height: 1; +} + +.tag-remove:hover { color: 
var(--danger); } + +/* ---------- Inputs ---------- */ +.input, .textarea, .select { + width: 100%; + padding: 0.5rem 0.75rem; + background: var(--bg-primary); + border: 1px solid var(--border); + border-radius: var(--radius); + color: var(--text-primary); + font-size: 0.875rem; + font-family: var(--font-sans); + transition: border-color var(--transition); +} + +.input:focus, .textarea:focus, .select:focus { + outline: none; + border-color: var(--accent); + box-shadow: 0 0 0 3px var(--accent-bg); +} + +.input-lg { padding: 0.75rem 1rem; font-size: 1rem; } + +.textarea { resize: vertical; min-height: 80px; } + +.select { cursor: pointer; } + +/* ---------- Search Layout (sidebar + content) ---------- */ +.search-layout { + display: grid; + grid-template-columns: 220px 1fr; + gap: 1.5rem; + align-items: start; +} + +.search-sidebar { + position: sticky; + top: 64px; + max-height: calc(100vh - 80px); + overflow-y: auto; +} + +.sidebar-section { + margin-bottom: 1.25rem; +} + +.sidebar-section-title { + font-size: 0.6875rem; + font-weight: 600; + color: var(--text-muted); + text-transform: uppercase; + letter-spacing: 0.06em; + margin-bottom: 0.5rem; + padding: 0 0.25rem; +} + +.sidebar-list { + list-style: none; +} + +.sidebar-item { + display: flex; + align-items: center; + justify-content: space-between; + padding: 0.375rem 0.5rem; + border-radius: var(--radius); + cursor: pointer; + font-size: 0.8125rem; + color: var(--text-secondary); + transition: background var(--transition), color var(--transition); +} + +.sidebar-item:hover { background: var(--bg-hover); color: var(--text-primary); } + +.sidebar-item.active { + background: var(--accent-bg); + color: var(--accent); + font-weight: 500; +} + +.sidebar-item-count { + font-size: 0.6875rem; + color: var(--text-muted); + font-family: var(--font-mono); +} + +.sidebar-item.active .sidebar-item-count { color: var(--accent); } + +/* ---------- Search Box ---------- */ +.search-box { + position: relative; + margin-bottom: 
1rem; +} + +.search-box .input { + padding-left: 2.5rem; +} + +.search-icon { + position: absolute; + left: 0.75rem; + top: 50%; + transform: translateY(-50%); + color: var(--text-muted); + font-size: 1rem; +} + +/* ---------- Search Hints ---------- */ +.search-hints { + display: flex; + gap: 0.375rem; + flex-wrap: wrap; + margin-bottom: 1rem; +} + +.hint-chip { + display: inline-flex; + align-items: center; + padding: 0.1875rem 0.5rem; + background: var(--bg-tertiary); + border: 1px solid var(--border); + border-radius: 9999px; + font-size: 0.6875rem; + color: var(--text-secondary); + cursor: pointer; + transition: background var(--transition), color var(--transition), border-color var(--transition); +} + +.hint-chip:hover { + background: var(--accent-bg); + border-color: var(--accent); + color: var(--accent); +} + +.hint-chip-tech { + border-style: dashed; +} + +/* ---------- Filters ---------- */ +.filters { + display: flex; + gap: 0.5rem; + flex-wrap: wrap; + margin-bottom: 1rem; +} + +@media (max-width: 768px) { + .search-layout { + grid-template-columns: 1fr; + } + .search-sidebar { + position: static; + max-height: none; + border-bottom: 1px solid var(--border); + padding-bottom: 1rem; + margin-bottom: 1rem; + } +} + +/* ---------- Session Card ---------- */ +.session-card { + display: flex; + flex-direction: column; + gap: 0.5rem; +} + +.session-card-header { + display: flex; + align-items: center; + gap: 0.75rem; + flex-wrap: wrap; +} + +.session-card-date { + font-size: 0.75rem; + color: var(--text-muted); + font-family: var(--font-mono); +} + +.session-card-project { + font-size: 0.75rem; + color: var(--accent); + font-weight: 500; +} + +.session-card-brief { + color: var(--text-primary); + font-size: 0.875rem; + line-height: 1.4; +} + +.session-card-footer { + display: flex; + align-items: center; + gap: 0.5rem; + flex-wrap: wrap; +} + +.session-card-meta { + font-size: 0.75rem; + color: var(--text-muted); +} + +/* ---------- Session Detail ---------- 
*/ +.detail-header { + margin-bottom: 1.5rem; +} + +.detail-title { + font-size: 1.25rem; + font-weight: 600; + margin-bottom: 0.5rem; +} + +.detail-meta { + display: flex; + gap: 1rem; + flex-wrap: wrap; + font-size: 0.8125rem; + color: var(--text-secondary); +} + +.detail-section { + margin-bottom: 1.5rem; +} + +.detail-section-title { + font-size: 0.875rem; + font-weight: 600; + color: var(--text-secondary); + text-transform: uppercase; + letter-spacing: 0.04em; + margin-bottom: 0.75rem; + padding-bottom: 0.375rem; + border-bottom: 1px solid var(--border-light); +} + +/* ---------- Messages ---------- */ +.message { + padding: 0.75rem 1rem; + border-radius: var(--radius); + margin-bottom: 0.5rem; + font-size: 0.8125rem; + line-height: 1.5; +} + +.message-user { + background: var(--accent-bg); + border-left: 3px solid var(--accent); +} + +.message-assistant { + background: var(--bg-tertiary); + border-left: 3px solid var(--success); +} + +.message-role { + font-weight: 600; + font-size: 0.6875rem; + text-transform: uppercase; + letter-spacing: 0.04em; + margin-bottom: 0.25rem; + color: var(--text-muted); +} + +/* ---------- Code Blocks ---------- */ +.code-block { + background: var(--bg-tertiary); + border: 1px solid var(--border); + border-radius: var(--radius); + overflow: hidden; + margin-bottom: 0.75rem; +} + +.code-block-header { + display: flex; + justify-content: space-between; + align-items: center; + padding: 0.375rem 0.75rem; + background: var(--bg-hover); + font-size: 0.75rem; + color: var(--text-secondary); +} + +.code-block pre { + margin: 0; + padding: 0.75rem; + overflow-x: auto; + font-family: var(--font-mono); + font-size: 0.8125rem; + line-height: 1.5; +} + +.code-block code { + background: transparent; + padding: 0; +} + +/* ---------- Analytics ---------- */ +.analytics-grid { + display: grid; + grid-template-columns: repeat(auto-fit, minmax(320px, 1fr)); + gap: 1rem; + margin-bottom: 1.5rem; +} + +.analytics-card { + background: 
var(--bg-secondary); + border: 1px solid var(--border); + border-radius: var(--radius-lg); + padding: 1rem 1.25rem; +} + +.analytics-card-title { + font-size: 0.8125rem; + font-weight: 600; + color: var(--text-secondary); + margin-bottom: 0.75rem; +} + +.stat-grid { + display: grid; + grid-template-columns: repeat(auto-fit, minmax(140px, 1fr)); + gap: 0.75rem; + margin-bottom: 1.5rem; +} + +.stat-card { + background: var(--bg-secondary); + border: 1px solid var(--border); + border-radius: var(--radius); + padding: 0.75rem 1rem; + text-align: center; +} + +.stat-value { + font-size: 1.5rem; + font-weight: 700; + color: var(--accent); + font-family: var(--font-mono); +} + +.stat-label { + font-size: 0.6875rem; + color: var(--text-muted); + text-transform: uppercase; + letter-spacing: 0.04em; +} + +/* ---------- Pagination ---------- */ +.pagination { + display: flex; + align-items: center; + justify-content: center; + gap: 0.5rem; + margin-top: 1.5rem; +} + +.pagination-info { + font-size: 0.8125rem; + color: var(--text-muted); + margin: 0 0.5rem; +} + +/* ---------- Modal ---------- */ +.modal-overlay { + position: fixed; + inset: 0; + background: rgba(0, 0, 0, 0.5); + display: flex; + align-items: center; + justify-content: center; + z-index: 200; +} + +.modal-overlay.hidden { display: none; } + +.modal { + background: var(--bg-secondary); + border: 1px solid var(--border); + border-radius: var(--radius-lg); + padding: 1.5rem; + max-width: 500px; + width: 90%; + box-shadow: var(--shadow-lg); +} + +.modal-title { + font-size: 1rem; + font-weight: 600; + margin-bottom: 0.75rem; +} + +.modal-body { + font-size: 0.875rem; + color: var(--text-secondary); + margin-bottom: 1.25rem; +} + +.modal-actions { + display: flex; + gap: 0.5rem; + justify-content: flex-end; +} + +/* ---------- Toast ---------- */ +.toast-container { + position: fixed; + bottom: 1rem; + right: 1rem; + display: flex; + flex-direction: column; + gap: 0.5rem; + z-index: 300; +} + +.toast { + padding: 
0.75rem 1rem; + border-radius: var(--radius); + font-size: 0.8125rem; + box-shadow: var(--shadow-lg); + animation: slideIn 200ms ease-out; + max-width: 360px; +} + +.toast-success { background: var(--success); color: #fff; } +.toast-error { background: var(--danger); color: #fff; } +.toast-info { background: var(--accent); color: #fff; } + +@keyframes slideIn { + from { transform: translateX(100%); opacity: 0; } + to { transform: translateX(0); opacity: 1; } +} + +/* ---------- Loading ---------- */ +.loading { + display: flex; + align-items: center; + justify-content: center; + padding: 3rem; + color: var(--text-muted); + font-size: 0.875rem; +} + +.spinner { + width: 20px; + height: 20px; + border: 2px solid var(--border); + border-top-color: var(--accent); + border-radius: 50%; + animation: spin 0.6s linear infinite; + margin-right: 0.5rem; +} + +@keyframes spin { to { transform: rotate(360deg); } } + +/* ---------- Empty State ---------- */ +.empty-state { + text-align: center; + padding: 3rem 1rem; + color: var(--text-muted); +} + +.empty-state-icon { + font-size: 2.5rem; + margin-bottom: 0.75rem; + opacity: 0.5; +} + +.empty-state-title { + font-size: 1rem; + font-weight: 600; + color: var(--text-secondary); + margin-bottom: 0.375rem; +} + +.empty-state-text { font-size: 0.8125rem; } + +/* ---------- Settings ---------- */ +.settings-section { + margin-bottom: 2rem; +} + +.settings-title { + font-size: 1rem; + font-weight: 600; + margin-bottom: 1rem; + padding-bottom: 0.5rem; + border-bottom: 1px solid var(--border); +} + +.settings-row { + display: flex; + align-items: center; + gap: 1rem; + margin-bottom: 0.75rem; +} + +.settings-label { + font-size: 0.8125rem; + color: var(--text-secondary); + min-width: 120px; +} + +/* ---------- Inline Edit ---------- */ +.inline-edit { + display: flex; + align-items: center; + gap: 0.375rem; +} + +.inline-edit-text { + cursor: pointer; + padding: 0.125rem 0.25rem; + border-radius: var(--radius); + transition: background 
var(--transition); +} + +.inline-edit-text:hover { background: var(--bg-hover); } + +/* ---------- Lists ---------- */ +.list-item { + padding: 0.5rem 0; + border-bottom: 1px solid var(--border-light); + font-size: 0.8125rem; +} + +.list-item:last-child { border-bottom: none; } + +/* ---------- Utility ---------- */ +.flex { display: flex; } +.flex-col { flex-direction: column; } +.items-center { align-items: center; } +.justify-between { justify-content: space-between; } +.gap-sm { gap: 0.375rem; } +.gap-md { gap: 0.75rem; } +.gap-lg { gap: 1.25rem; } +.mt-sm { margin-top: 0.5rem; } +.mt-md { margin-top: 1rem; } +.mt-lg { margin-top: 1.5rem; } +.mb-sm { margin-bottom: 0.5rem; } +.mb-md { margin-bottom: 1rem; } +.text-muted { color: var(--text-muted); } +.text-sm { font-size: 0.8125rem; } +.text-xs { font-size: 0.75rem; } +.font-mono { font-family: var(--font-mono); } +.truncate { overflow: hidden; text-overflow: ellipsis; white-space: nowrap; } +.hidden { display: none !important; } + +.session-list { + display: flex; + flex-direction: column; + gap: 0.75rem; +} + +/* ---------- Responsive ---------- */ +@media (max-width: 768px) { + .nav { padding: 0 0.75rem; gap: 0.75rem; } + .nav-title { display: none; } + .main { padding: 1rem; } + .analytics-grid { grid-template-columns: 1fr; } + .stat-grid { grid-template-columns: repeat(2, 1fr); } +} diff --git a/skills/context-memory/scripts/static/index.html b/skills/context-memory/scripts/static/index.html new file mode 100644 index 0000000..84f39b3 --- /dev/null +++ b/skills/context-memory/scripts/static/index.html @@ -0,0 +1,53 @@ + + + + + + Context Memory Dashboard + + + + + + + + + + + + + + + + +
+
Loading...
+
+ + +
+ + + + + + + + diff --git a/skills/context-memory/scripts/static/js/api.js b/skills/context-memory/scripts/static/js/api.js new file mode 100644 index 0000000..713b437 --- /dev/null +++ b/skills/context-memory/scripts/static/js/api.js @@ -0,0 +1,111 @@ +/** + * API client for Context Memory Dashboard. + * Wraps all REST endpoints with fetch calls. + */ + +const BASE = ''; + +async function fetchJSON(url, options = {}) { + const resp = await fetch(`${BASE}${url}`, { + headers: { 'Content-Type': 'application/json', ...options.headers }, + ...options, + }); + if (!resp.ok) { + const err = await resp.json().catch(() => ({ error: resp.statusText })); + throw new Error(err.error || `HTTP ${resp.status}`); + } + return resp.json(); +} + +// Sessions +export function listSessions({ page = 1, perPage = 20, project, sort, order } = {}) { + const params = new URLSearchParams({ page, per_page: perPage }); + if (project) params.set('project', project); + if (sort) params.set('sort', sort); + if (order) params.set('order', order); + return fetchJSON(`/api/sessions?${params}`); +} + +export function getSession(id) { + return fetchJSON(`/api/sessions/${id}`); +} + +export function updateSession(id, data) { + return fetchJSON(`/api/sessions/${id}`, { + method: 'PUT', + body: JSON.stringify(data), + }); +} + +export function deleteSession(id) { + return fetchJSON(`/api/sessions/${id}`, { method: 'DELETE' }); +} + +// Search +export function search(query, { project, detailed = false, limit = 10 } = {}) { + const params = new URLSearchParams({ q: query, limit }); + if (project) params.set('project', project); + if (detailed) params.set('detailed', 'true'); + return fetchJSON(`/api/search?${params}`); +} + +// Stats +export function getStats() { + return fetchJSON('/api/stats'); +} + +// Analytics +export function getTimeline(granularity = 'week') { + return fetchJSON(`/api/analytics/timeline?granularity=${granularity}`); +} + +export function getTopTopics(limit = 20) { + return 
fetchJSON(`/api/analytics/topics?limit=${limit}`); +} + +export function getProjects() { + return fetchJSON('/api/analytics/projects'); +} + +export function getOutcomes() { + return fetchJSON('/api/analytics/outcomes'); +} + +export function getTechnologies(limit = 15) { + return fetchJSON(`/api/analytics/technologies?limit=${limit}`); +} + +// Management +export function pruneSessions({ maxAgeDays, maxSessions, dryRun = true }) { + return fetchJSON('/api/prune', { + method: 'POST', + body: JSON.stringify({ + max_age_days: maxAgeDays || null, + max_sessions: maxSessions || null, + dry_run: dryRun, + }), + }); +} + +export function initDatabase(force = false) { + return fetchJSON('/api/init', { + method: 'POST', + body: JSON.stringify({ force }), + }); +} + +export function exportAll() { + return fetchJSON('/api/export'); +} + +// Project list (for filters) +export function listProjects() { + return fetchJSON('/api/projects'); +} + +// Search hints (topics + technologies, optionally scoped to project) +export function getHints(project) { + const params = new URLSearchParams(); + if (project) params.set('project', project); + return fetchJSON(`/api/hints?${params}`); +} diff --git a/skills/context-memory/scripts/static/js/app.js b/skills/context-memory/scripts/static/js/app.js new file mode 100644 index 0000000..1fd5c91 --- /dev/null +++ b/skills/context-memory/scripts/static/js/app.js @@ -0,0 +1,137 @@ +/** + * Context Memory Dashboard — Main App + * Hash-based SPA router and view lifecycle management. 
+ */ + +import { renderSearchView } from './views/search.js'; +import { renderSessionsView } from './views/sessions.js'; +import { renderDetailView } from './views/detail.js'; +import { renderAnalyticsView } from './views/analytics.js'; +import { renderSettingsView } from './views/settings.js'; + +const appEl = document.getElementById('app'); +const navLinks = document.querySelectorAll('.nav-link'); + +// --------------------------------------------------------------------------- +// Router +// --------------------------------------------------------------------------- + +const routes = [ + { pattern: /^\/$/, view: renderSearchView }, + { pattern: /^\/sessions$/, view: renderSessionsView }, + { pattern: /^\/session\/(\d+)$/, view: renderDetailView }, + { pattern: /^\/analytics$/, view: renderAnalyticsView }, + { pattern: /^\/settings$/, view: renderSettingsView }, +]; + +let currentCleanup = null; + +function getPath() { + const hash = window.location.hash.slice(1) || '/'; + return hash; +} + +async function navigate() { + const path = getPath(); + + // Cleanup previous view + if (typeof currentCleanup === 'function') { + currentCleanup(); + currentCleanup = null; + } + + // Update active nav link + navLinks.forEach(link => { + const route = link.dataset.route; + if (route === path || (route === '/sessions' && path.startsWith('/session/'))) { + link.classList.add('active'); + } else { + link.classList.remove('active'); + } + }); + + // Match route + for (const route of routes) { + const match = path.match(route.pattern); + if (match) { + appEl.innerHTML = '
Loading...
'; + try { + const cleanup = await route.view(appEl, ...match.slice(1)); + currentCleanup = cleanup || null; + } catch (err) { + appEl.innerHTML = `
+
+
Error
+
${escapeHtml(err.message)}
+
`; + } + return; + } + } + + // 404 + appEl.innerHTML = `
+
+
Page not found
+ +
`; +} + +window.addEventListener('hashchange', navigate); + +// --------------------------------------------------------------------------- +// Theme toggle +// --------------------------------------------------------------------------- + +const themeToggle = document.getElementById('theme-toggle'); +const savedTheme = localStorage.getItem('cm-theme') || 'dark'; +document.documentElement.setAttribute('data-theme', savedTheme); + +themeToggle.addEventListener('click', () => { + const current = document.documentElement.getAttribute('data-theme'); + const next = current === 'dark' ? 'light' : 'dark'; + document.documentElement.setAttribute('data-theme', next); + localStorage.setItem('cm-theme', next); +}); + +// --------------------------------------------------------------------------- +// Helpers (exported for views) +// --------------------------------------------------------------------------- + +export function escapeHtml(str) { + if (!str) return ''; + const div = document.createElement('div'); + div.textContent = str; + return div.innerHTML; +} + +export function formatDate(dateStr) { + if (!dateStr) return 'Unknown'; + try { + const d = new Date(dateStr); + return d.toLocaleDateString(undefined, { year: 'numeric', month: 'short', day: 'numeric' }); + } catch { + return dateStr.split('T')[0] || dateStr; + } +} + +export function projectName(path) { + if (!path) return 'Unknown'; + return path.replace(/\\/g, '/').split('/').filter(Boolean).pop() || path; +} + +export function outcomeBadge(outcome) { + const map = { + success: 'badge-success', + partial: 'badge-warning', + abandoned: 'badge-danger', + }; + const cls = map[outcome] || 'badge-muted'; + return `${escapeHtml(outcome || 'unknown')}`; +} + +// --------------------------------------------------------------------------- +// Boot +// --------------------------------------------------------------------------- + +navigate(); diff --git a/skills/context-memory/scripts/static/js/components/charts.js 
b/skills/context-memory/scripts/static/js/components/charts.js new file mode 100644 index 0000000..68250b7 --- /dev/null +++ b/skills/context-memory/scripts/static/js/components/charts.js @@ -0,0 +1,196 @@ +/** + * Chart.js wrapper functions for analytics. + */ + +const chartInstances = new Map(); + +function getCtx(canvasId) { + return document.getElementById(canvasId)?.getContext('2d'); +} + +function destroyChart(canvasId) { + if (chartInstances.has(canvasId)) { + chartInstances.get(canvasId).destroy(); + chartInstances.delete(canvasId); + } +} + +function getColors(count) { + const palette = [ + '#58a6ff', '#3fb950', '#d29922', '#f85149', '#bc8cff', + '#79c0ff', '#56d364', '#e3b341', '#ff7b72', '#d2a8ff', + '#a5d6ff', '#7ee787', '#f0c846', '#ffa198', '#e8d5ff', + ]; + const result = []; + for (let i = 0; i < count; i++) { + result.push(palette[i % palette.length]); + } + return result; +} + +function getChartDefaults() { + const style = getComputedStyle(document.documentElement); + return { + textColor: style.getPropertyValue('--text-secondary').trim() || '#8b949e', + gridColor: style.getPropertyValue('--border-light').trim() || '#21262d', + }; +} + +export function createTimelineChart(canvasId, data) { + destroyChart(canvasId); + const ctx = getCtx(canvasId); + if (!ctx) return; + + const defaults = getChartDefaults(); + const chart = new Chart(ctx, { + type: 'bar', + data: { + labels: data.map(d => d.period), + datasets: [ + { + label: 'Manual', + data: data.map(d => d.manual_count || 0), + backgroundColor: '#58a6ff', + }, + { + label: 'Auto-save', + data: data.map(d => d.auto_count || 0), + backgroundColor: '#30363d', + }, + ], + }, + options: { + responsive: true, + maintainAspectRatio: false, + plugins: { legend: { labels: { color: defaults.textColor } } }, + scales: { + x: { + stacked: true, + ticks: { color: defaults.textColor, maxTicksLimit: 12 }, + grid: { color: defaults.gridColor }, + }, + y: { + stacked: true, + beginAtZero: true, + ticks: { color: 
defaults.textColor, stepSize: 1 }, + grid: { color: defaults.gridColor }, + }, + }, + }, + }); + chartInstances.set(canvasId, chart); +} + +export function createTopicsChart(canvasId, data) { + destroyChart(canvasId); + const ctx = getCtx(canvasId); + if (!ctx) return; + + const defaults = getChartDefaults(); + const colors = getColors(data.length); + const chart = new Chart(ctx, { + type: 'bar', + data: { + labels: data.map(d => d.topic), + datasets: [{ + label: 'Sessions', + data: data.map(d => d.count), + backgroundColor: colors, + }], + }, + options: { + indexAxis: 'y', + responsive: true, + maintainAspectRatio: false, + plugins: { legend: { display: false } }, + scales: { + x: { + beginAtZero: true, + ticks: { color: defaults.textColor, stepSize: 1 }, + grid: { color: defaults.gridColor }, + }, + y: { + ticks: { color: defaults.textColor }, + grid: { display: false }, + }, + }, + }, + }); + chartInstances.set(canvasId, chart); +} + +export function createDoughnutChart(canvasId, labels, values) { + destroyChart(canvasId); + const ctx = getCtx(canvasId); + if (!ctx) return; + + const defaults = getChartDefaults(); + const colors = getColors(labels.length); + const chart = new Chart(ctx, { + type: 'doughnut', + data: { + labels: labels, + datasets: [{ + data: values, + backgroundColor: colors, + borderWidth: 0, + }], + }, + options: { + responsive: true, + maintainAspectRatio: false, + plugins: { + legend: { + position: 'bottom', + labels: { color: defaults.textColor, padding: 12 }, + }, + }, + }, + }); + chartInstances.set(canvasId, chart); +} + +export function createBarChart(canvasId, labels, values, label = 'Count') { + destroyChart(canvasId); + const ctx = getCtx(canvasId); + if (!ctx) return; + + const defaults = getChartDefaults(); + const colors = getColors(labels.length); + const chart = new Chart(ctx, { + type: 'bar', + data: { + labels: labels, + datasets: [{ + label: label, + data: values, + backgroundColor: colors, + }], + }, + options: { + 
indexAxis: 'y', + responsive: true, + maintainAspectRatio: false, + plugins: { legend: { display: false } }, + scales: { + x: { + beginAtZero: true, + ticks: { color: defaults.textColor, stepSize: 1 }, + grid: { color: defaults.gridColor }, + }, + y: { + ticks: { color: defaults.textColor }, + grid: { display: false }, + }, + }, + }, + }); + chartInstances.set(canvasId, chart); +} + +export function destroyAllCharts() { + for (const [id, chart] of chartInstances) { + chart.destroy(); + } + chartInstances.clear(); +} diff --git a/skills/context-memory/scripts/static/js/components/code-block.js b/skills/context-memory/scripts/static/js/components/code-block.js new file mode 100644 index 0000000..fbdc583 --- /dev/null +++ b/skills/context-memory/scripts/static/js/components/code-block.js @@ -0,0 +1,30 @@ +/** + * Code Block component — renders syntax-highlighted code snippets. + */ + +import { escapeHtml } from '../app.js'; + +export function renderCodeBlock(snippet) { + const lang = escapeHtml(snippet.language || ''); + const desc = escapeHtml(snippet.description || 'Code snippet'); + const filePath = escapeHtml(snippet.file_path || ''); + const code = escapeHtml(snippet.code || ''); + + return ` +
+
+ ${desc} + ${filePath ? filePath : lang} +
+
${code}
+
+ `; +} + +export function highlightAll(container) { + if (typeof hljs !== 'undefined') { + container.querySelectorAll('pre code').forEach(block => { + hljs.highlightElement(block); + }); + } +} diff --git a/skills/context-memory/scripts/static/js/components/modal.js b/skills/context-memory/scripts/static/js/components/modal.js new file mode 100644 index 0000000..1e33f9c --- /dev/null +++ b/skills/context-memory/scripts/static/js/components/modal.js @@ -0,0 +1,83 @@ +/** + * Modal component — confirmation dialogs and edit forms. + */ + +import { escapeHtml } from '../app.js'; + +const overlay = document.getElementById('modal-overlay'); + +export function showConfirm({ title, body, confirmText = 'Confirm', danger = false }) { + return new Promise(resolve => { + const btnClass = danger ? 'btn btn-danger' : 'btn btn-primary'; + overlay.innerHTML = ` + + `; + overlay.classList.remove('hidden'); + + const cancel = () => { overlay.classList.add('hidden'); resolve(false); }; + const confirm = () => { overlay.classList.add('hidden'); resolve(true); }; + + document.getElementById('modal-cancel').addEventListener('click', cancel); + document.getElementById('modal-confirm').addEventListener('click', confirm); + overlay.addEventListener('click', (e) => { + if (e.target === overlay) cancel(); + }, { once: true }); + }); +} + +export function showEditModal({ title, fields }) { + return new Promise(resolve => { + const fieldHtml = fields.map(f => { + const val = escapeHtml(f.value || ''); + if (f.type === 'textarea') { + return `
+ + +
`; + } + return `
+ + +
`; + }).join(''); + + overlay.innerHTML = ` + + `; + overlay.classList.remove('hidden'); + + const cancel = () => { overlay.classList.add('hidden'); resolve(null); }; + const save = () => { + const result = {}; + fields.forEach(f => { + result[f.key] = document.getElementById(`edit-${f.key}`).value; + }); + overlay.classList.add('hidden'); + resolve(result); + }; + + document.getElementById('modal-cancel').addEventListener('click', cancel); + document.getElementById('modal-confirm').addEventListener('click', save); + overlay.addEventListener('click', (e) => { + if (e.target === overlay) cancel(); + }, { once: true }); + }); +} + +export function hideModal() { + overlay.classList.add('hidden'); +} diff --git a/skills/context-memory/scripts/static/js/components/session-card.js b/skills/context-memory/scripts/static/js/components/session-card.js new file mode 100644 index 0000000..9813ea7 --- /dev/null +++ b/skills/context-memory/scripts/static/js/components/session-card.js @@ -0,0 +1,31 @@ +/** + * Session Card component — renders a clickable session summary card. + */ + +import { escapeHtml, formatDate, projectName, outcomeBadge } from '../app.js'; + +export function renderSessionCard(session) { + const date = formatDate(session.created_at); + const project = projectName(session.project_path); + const brief = escapeHtml(session.brief || 'No summary'); + const topics = (session.topics || []).map(t => `${escapeHtml(t)}`).join(''); + const outcome = outcomeBadge(session.outcome); + const msgCount = session.message_count || 0; + const isAuto = session.metadata && (session.metadata.auto_save === true || session.metadata === '{"auto_save": true}'); + + return ` +
+
+ ${date} + ${escapeHtml(project)} + ${outcome} + ${isAuto ? 'auto' : ''} +
+
${brief}
+ +
+ `; +} diff --git a/skills/context-memory/scripts/static/js/components/toast.js b/skills/context-memory/scripts/static/js/components/toast.js new file mode 100644 index 0000000..b866546 --- /dev/null +++ b/skills/context-memory/scripts/static/js/components/toast.js @@ -0,0 +1,18 @@ +/** + * Toast notification component. + */ + +const container = document.getElementById('toast-container'); + +export function showToast(message, type = 'info', duration = 3000) { + const el = document.createElement('div'); + el.className = `toast toast-${type}`; + el.textContent = message; + container.appendChild(el); + + setTimeout(() => { + el.style.opacity = '0'; + el.style.transition = 'opacity 200ms'; + setTimeout(() => el.remove(), 200); + }, duration); +} diff --git a/skills/context-memory/scripts/static/js/views/analytics.js b/skills/context-memory/scripts/static/js/views/analytics.js new file mode 100644 index 0000000..ef188b4 --- /dev/null +++ b/skills/context-memory/scripts/static/js/views/analytics.js @@ -0,0 +1,151 @@ +/** + * Analytics view — charts and summary statistics. + */ + +import { getStats, getTimeline, getTopTopics, getProjects, getOutcomes, getTechnologies } from '../api.js'; +import { createTimelineChart, createTopicsChart, createDoughnutChart, createBarChart, destroyAllCharts } from '../components/charts.js'; +import { escapeHtml, projectName } from '../app.js'; + +export async function renderAnalyticsView(container) { + container.innerHTML = ` +

Analytics

+ +
+ +
+
+
+
Session Timeline
+ +
+
+
+ +
+
Top Topics
+
+
+ +
+
Projects
+
+
+ +
+
Outcomes
+
+
+ +
+
Technologies
+
+
+
+ `; + + const statsGrid = document.getElementById('stats-grid'); + + // Load stats + try { + const stats = await getStats(); + if (stats.error) { + statsGrid.innerHTML = `
--
No database
`; + } else { + const dbSizeMB = stats.db_size_bytes ? (stats.db_size_bytes / 1024 / 1024).toFixed(1) : '0'; + statsGrid.innerHTML = ` +
+
${stats.sessions || 0}
+
Sessions
+
+
+
${stats.messages || 0}
+
Messages
+
+
+
${stats.code_snippets || 0}
+
Code Snippets
+
+
+
${stats.topics || 0}
+
Topics
+
+
+
${dbSizeMB} MB
+
DB Size
+
+ `; + } + } catch { + statsGrid.innerHTML = '

Could not load stats

'; + } + + // Load all charts concurrently + async function loadTimeline(granularity = 'week') { + try { + const data = await getTimeline(granularity); + createTimelineChart('chart-timeline', data.data || []); + } catch { + // Silently fail for individual charts + } + } + + const chartLoads = [ + loadTimeline('week'), + (async () => { + try { + const data = await getTopTopics(); + createTopicsChart('chart-topics', data.data || []); + } catch { /* skip */ } + })(), + (async () => { + try { + const data = await getProjects(); + const items = data.data || []; + createDoughnutChart( + 'chart-projects', + items.map(d => projectName(d.project_path)), + items.map(d => d.count), + ); + } catch { /* skip */ } + })(), + (async () => { + try { + const data = await getOutcomes(); + const items = data.data || []; + createDoughnutChart( + 'chart-outcomes', + items.map(d => d.outcome), + items.map(d => d.count), + ); + } catch { /* skip */ } + })(), + (async () => { + try { + const data = await getTechnologies(); + const items = data.data || []; + createBarChart( + 'chart-technologies', + items.map(d => d.technology), + items.map(d => d.count), + 'Sessions', + ); + } catch { /* skip */ } + })(), + ]; + + await Promise.allSettled(chartLoads); + + // Timeline granularity change + document.getElementById('timeline-granularity')?.addEventListener('change', (e) => { + loadTimeline(e.target.value); + }); + + // Cleanup charts on view exit + return () => { + destroyAllCharts(); + }; +} diff --git a/skills/context-memory/scripts/static/js/views/detail.js b/skills/context-memory/scripts/static/js/views/detail.js new file mode 100644 index 0000000..44cc980 --- /dev/null +++ b/skills/context-memory/scripts/static/js/views/detail.js @@ -0,0 +1,236 @@ +/** + * Session Detail view — full session with messages, snippets, edit, delete. 
+ */ + +import { getSession, updateSession, deleteSession } from '../api.js'; +import { renderCodeBlock, highlightAll } from '../components/code-block.js'; +import { showConfirm, showEditModal } from '../components/modal.js'; +import { showToast } from '../components/toast.js'; +import { escapeHtml, formatDate, projectName, outcomeBadge } from '../app.js'; + +export async function renderDetailView(container, idStr) { + const sessionId = parseInt(idStr); + let session; + + try { + session = await getSession(sessionId); + } catch (err) { + container.innerHTML = `
+
+
Session not found
+
${escapeHtml(err.message)}
+
`; + return; + } + + function render() { + const date = formatDate(session.created_at); + const project = projectName(session.project_path); + const outcome = outcomeBadge(session.outcome); + const topics = (session.topics || []).map(t => + `${escapeHtml(t)} ×` + ).join(''); + + const decisions = session.key_decisions || []; + const problems = session.problems_solved || []; + const messages = session.messages || []; + const snippets = session.code_snippets || []; + const techs = session.technologies || []; + + const techsArr = typeof techs === 'string' ? (() => { try { return JSON.parse(techs); } catch { return [techs]; } })() : techs; + const decisionsArr = typeof decisions === 'string' ? (() => { try { return JSON.parse(decisions); } catch { return [decisions]; } })() : decisions; + const problemsArr = typeof problems === 'string' ? (() => { try { return JSON.parse(problems); } catch { return [problems]; } })() : problems; + + container.innerHTML = ` +
+ ← Back +
+ +
+
${escapeHtml(session.brief || 'Untitled session')}
+
+ ${date} + ${escapeHtml(project)} + ${outcome} + ${session.message_count || 0} messages + ${escapeHtml(session.session_id || '')} +
+
+ +
+ + + +
+ + ${session.user_note ? ` +
+
User Note
+

${escapeHtml(session.user_note)}

+
+ ` : ''} + + ${session.detailed ? ` +
+
Detailed Summary
+

${escapeHtml(session.detailed)}

+
+ ` : ''} + +
+
Topics
+
+ ${topics} + +
+
+ + ${techsArr.length ? ` +
+
Technologies
+
+ ${techsArr.map(t => `${escapeHtml(t)}`).join('')} +
+
+ ` : ''} + + ${decisionsArr.length ? ` +
+
Key Decisions
+
    + ${decisionsArr.map(d => `
  • ${escapeHtml(d)}
  • `).join('')} +
+
+ ` : ''} + + ${problemsArr.length ? ` +
+
Problems Solved
+
    + ${problemsArr.map(p => `
  • ${escapeHtml(p)}
  • `).join('')} +
+
+ ` : ''} + + ${messages.length ? ` +
+
Messages (${messages.length})
+ ${messages.map(m => ` +
+
${escapeHtml(m.role || 'user')}
+
${escapeHtml(m.content || '')}
+
+ `).join('')} +
+ ` : ''} + + ${snippets.length ? ` +
+
Code Snippets (${snippets.length})
+ ${snippets.map(s => renderCodeBlock(s)).join('')} +
+ ` : ''} + `; + + highlightAll(container); + wireEvents(); + } + + function wireEvents() { + // Delete + document.getElementById('delete-btn')?.addEventListener('click', async () => { + const confirmed = await showConfirm({ + title: 'Delete session', + body: '

This will permanently delete this session and all its data.

', + confirmText: 'Delete', + danger: true, + }); + if (!confirmed) return; + try { + await deleteSession(sessionId); + showToast('Session deleted', 'success'); + window.location.hash = '#/sessions'; + } catch (err) { + showToast(`Delete failed: ${err.message}`, 'error'); + } + }); + + // Edit summary + document.getElementById('edit-summary-btn')?.addEventListener('click', async () => { + const result = await showEditModal({ + title: 'Edit Summary', + fields: [ + { key: 'brief', label: 'Brief', value: session.brief || '' }, + { key: 'detailed', label: 'Detailed', type: 'textarea', value: session.detailed || '' }, + { key: 'user_note', label: 'User Note', value: session.user_note || '' }, + ], + }); + if (!result) return; + try { + await updateSession(sessionId, result); + Object.assign(session, result); + render(); + showToast('Summary updated', 'success'); + } catch (err) { + showToast(`Update failed: ${err.message}`, 'error'); + } + }); + + // Edit note shortcut + document.getElementById('edit-note-btn')?.addEventListener('click', async () => { + const result = await showEditModal({ + title: 'Edit Note', + fields: [ + { key: 'user_note', label: 'User Note', type: 'textarea', value: session.user_note || '' }, + ], + }); + if (!result) return; + try { + await updateSession(sessionId, result); + session.user_note = result.user_note; + render(); + showToast('Note updated', 'success'); + } catch (err) { + showToast(`Update failed: ${err.message}`, 'error'); + } + }); + + // Remove topic + container.querySelectorAll('.tag-remove').forEach(btn => { + btn.addEventListener('click', async (e) => { + e.stopPropagation(); + const topic = btn.dataset.topic; + const newTopics = (session.topics || []).filter(t => t !== topic); + try { + await updateSession(sessionId, { topics: newTopics }); + session.topics = newTopics; + render(); + showToast(`Removed topic: ${topic}`, 'info'); + } catch (err) { + showToast(`Failed: ${err.message}`, 'error'); + } + }); + }); + + // Add topic + 
document.getElementById('add-topic-btn')?.addEventListener('click', async () => { + const result = await showEditModal({ + title: 'Add Topic', + fields: [{ key: 'topic', label: 'Topic name' }], + }); + if (!result || !result.topic.trim()) return; + const newTopic = result.topic.trim().toLowerCase(); + const newTopics = [...(session.topics || []), newTopic]; + try { + await updateSession(sessionId, { topics: newTopics }); + session.topics = newTopics; + render(); + showToast(`Added topic: ${newTopic}`, 'success'); + } catch (err) { + showToast(`Failed: ${err.message}`, 'error'); + } + }); + } + + render(); +} diff --git a/skills/context-memory/scripts/static/js/views/search.js b/skills/context-memory/scripts/static/js/views/search.js new file mode 100644 index 0000000..9f7eacc --- /dev/null +++ b/skills/context-memory/scripts/static/js/views/search.js @@ -0,0 +1,205 @@ +/** + * Search view — sidebar project list + search with hint chips. + */ + +import { search, listProjects, getHints } from '../api.js'; +import { renderSessionCard } from '../components/session-card.js'; +import { escapeHtml, projectName } from '../app.js'; + +export async function renderSearchView(container) { + let debounceTimer = null; + let abortController = null; + let selectedProject = ''; + let hints = { topics: [], technologies: [] }; + + // Load projects for sidebar + let projects = []; + try { + const resp = await listProjects(); + projects = resp.projects || []; + } catch { + // Non-critical + } + + // Build sidebar project list + function renderProjectList() { + const items = projects.map(p => { + const name = projectName(p.project_path); + const count = p.session_count || 0; + const active = selectedProject === p.project_path ? 'active' : ''; + return ``; + }).join(''); + + const allActive = selectedProject === '' ? 
'active' : ''; + return `${items}`; + } + + // Build hint chips + function renderHints() { + if (!hints.topics.length && !hints.technologies.length) { + return 'No hints available'; + } + const topicChips = hints.topics.map(t => + `${escapeHtml(t.topic)}` + ).join(''); + const techChips = hints.technologies.map(t => + `${escapeHtml(t.technology)}` + ).join(''); + return topicChips + techChips; + } + + container.innerHTML = ` +
+ + +
+ + +
+ ${renderHints()} +
+ +
+ +
+ +
+
+
+ `; + + const input = document.getElementById('search-input'); + const detailedCheck = document.getElementById('search-detailed'); + const resultsEl = document.getElementById('search-results'); + const hintsEl = document.getElementById('search-hints'); + const projectListEl = document.getElementById('project-list'); + + // Load hints for current project + async function loadHints() { + try { + hints = await getHints(selectedProject || undefined); + } catch { + hints = { topics: [], technologies: [] }; + } + hintsEl.innerHTML = renderHints(); + wireHintClicks(); + } + + // Wire hint chip clicks — fill search input with the hint text + function wireHintClicks() { + hintsEl.querySelectorAll('.hint-chip').forEach(chip => { + chip.addEventListener('click', () => { + input.value = chip.dataset.hint; + input.focus(); + doSearch(); + }); + }); + } + + async function doSearch() { + const query = input.value.trim(); + if (!query) { + resultsEl.innerHTML = `
+
🔎
+
Start typing to search
+
Or click a hint above to explore a topic
+
`; + return; + } + + if (abortController) abortController.abort(); + abortController = new AbortController(); + + resultsEl.innerHTML = '
Searching...
'; + + try { + const results = await search(query, { + project: selectedProject || undefined, + detailed: detailedCheck.checked, + }); + + if (!results.sessions || results.sessions.length === 0) { + resultsEl.innerHTML = `
+
📄
+
No results
+
Try broader terms${selectedProject ? ' or select "All projects"' : ''}
+
`; + return; + } + + resultsEl.innerHTML = ` +

${results.result_count} session(s) found

+
+ ${results.sessions.map(s => renderSessionCard(s)).join('')} +
+ `; + + resultsEl.querySelectorAll('.session-card').forEach(card => { + card.addEventListener('click', () => { + window.location.hash = `#/session/${card.dataset.id}`; + }); + }); + } catch (err) { + if (err.name === 'AbortError') return; + resultsEl.innerHTML = `
+
+
Search error
+
${escapeHtml(err.message)}
+
`; + } + } + + function debouncedSearch() { + clearTimeout(debounceTimer); + debounceTimer = setTimeout(doSearch, 300); + } + + // Wire sidebar project clicks + function wireProjectClicks() { + projectListEl.querySelectorAll('.sidebar-item').forEach(item => { + item.addEventListener('click', () => { + selectedProject = item.dataset.project; + projectListEl.innerHTML = renderProjectList(); + wireProjectClicks(); + loadHints(); + if (input.value.trim()) doSearch(); + }); + }); + } + + input.addEventListener('input', debouncedSearch); + detailedCheck.addEventListener('change', () => { if (input.value.trim()) doSearch(); }); + + wireProjectClicks(); + wireHintClicks(); + + // Load initial hints + await loadHints(); + + // Show initial empty state + doSearch(); + + return () => { + clearTimeout(debounceTimer); + if (abortController) abortController.abort(); + }; +} diff --git a/skills/context-memory/scripts/static/js/views/sessions.js b/skills/context-memory/scripts/static/js/views/sessions.js new file mode 100644 index 0000000..b239b6b --- /dev/null +++ b/skills/context-memory/scripts/static/js/views/sessions.js @@ -0,0 +1,173 @@ +/** + * Sessions browser view — paginated list with filters and bulk actions. 
+ */ + +import { listSessions, listProjects, deleteSession } from '../api.js'; +import { renderSessionCard } from '../components/session-card.js'; +import { showConfirm } from '../components/modal.js'; +import { showToast } from '../components/toast.js'; +import { escapeHtml } from '../app.js'; + +export async function renderSessionsView(container) { + let page = 1; + const perPage = 20; + let currentProject = ''; + let currentSort = 'created_at'; + let currentOrder = 'desc'; + let selectedIds = new Set(); + + // Load projects for filter + let projects = []; + try { + const resp = await listProjects(); + projects = resp.projects || []; + } catch { + // Non-critical + } + + const projectOptions = projects.map(p => { + const path = p.project_path || p; + const name = path.replace(/\\/g, '/').split('/').filter(Boolean).pop(); + return ``; + }).join(''); + + container.innerHTML = ` +
+

Sessions

+
+ +
+
+ +
+ + + +
+ +
+ + `; + + const listEl = document.getElementById('sessions-list'); + const paginationEl = document.getElementById('sessions-pagination'); + const projectSelect = document.getElementById('sessions-project'); + const sortSelect = document.getElementById('sessions-sort'); + const orderSelect = document.getElementById('sessions-order'); + const bulkDeleteBtn = document.getElementById('bulk-delete'); + + async function loadSessions() { + listEl.innerHTML = '
Loading...
'; + + try { + const data = await listSessions({ + page, perPage, project: currentProject || undefined, + sort: currentSort, order: currentOrder, + }); + + if (!data.sessions || data.sessions.length === 0) { + listEl.innerHTML = `
+
📄
+
No sessions
+
Save sessions with /remember or auto-save
+
`; + paginationEl.innerHTML = ''; + return; + } + + listEl.innerHTML = `
+ ${data.sessions.map(s => ` +
+ + ${renderSessionCard(s)} +
+ `).join('')} +
`; + + // Pagination + const totalPages = Math.ceil(data.total / perPage); + if (totalPages > 1) { + let pagHtml = ''; + if (page > 1) pagHtml += ``; + pagHtml += `Page ${page} of ${totalPages} (${data.total} sessions)`; + if (page < totalPages) pagHtml += ``; + paginationEl.innerHTML = pagHtml; + } else { + paginationEl.innerHTML = `${data.total} session(s)`; + } + + // Wire pagination + paginationEl.querySelectorAll('[data-page]').forEach(btn => { + btn.addEventListener('click', () => { + page = parseInt(btn.dataset.page); + loadSessions(); + }); + }); + + // Wire card clicks + listEl.querySelectorAll('.session-card').forEach(card => { + card.addEventListener('click', () => { + window.location.hash = `#/session/${card.dataset.id}`; + }); + }); + + // Wire checkboxes + listEl.querySelectorAll('.session-checkbox').forEach(cb => { + cb.addEventListener('click', (e) => e.stopPropagation()); + cb.addEventListener('change', () => { + const id = parseInt(cb.dataset.id); + if (cb.checked) selectedIds.add(id); + else selectedIds.delete(id); + bulkDeleteBtn.classList.toggle('hidden', selectedIds.size === 0); + bulkDeleteBtn.textContent = `Delete selected (${selectedIds.size})`; + }); + }); + } catch (err) { + listEl.innerHTML = `
+
+
Error loading sessions
+
${escapeHtml(err.message)}
+
`; + } + } + + projectSelect.addEventListener('change', () => { currentProject = projectSelect.value; page = 1; loadSessions(); }); + sortSelect.addEventListener('change', () => { currentSort = sortSelect.value; page = 1; loadSessions(); }); + orderSelect.addEventListener('change', () => { currentOrder = orderSelect.value; page = 1; loadSessions(); }); + + bulkDeleteBtn.addEventListener('click', async () => { + const confirmed = await showConfirm({ + title: 'Delete sessions', + body: `

Delete ${selectedIds.size} selected session(s)? This cannot be undone.

`, + confirmText: 'Delete', + danger: true, + }); + if (!confirmed) return; + + let deleted = 0; + for (const id of selectedIds) { + try { + await deleteSession(id); + deleted++; + } catch (err) { + showToast(`Failed to delete session ${id}: ${err.message}`, 'error'); + } + } + showToast(`Deleted ${deleted} session(s)`, 'success'); + selectedIds.clear(); + bulkDeleteBtn.classList.add('hidden'); + loadSessions(); + }); + + await loadSessions(); +} diff --git a/skills/context-memory/scripts/static/js/views/settings.js b/skills/context-memory/scripts/static/js/views/settings.js new file mode 100644 index 0000000..8cd3ca6 --- /dev/null +++ b/skills/context-memory/scripts/static/js/views/settings.js @@ -0,0 +1,209 @@ +/** + * Settings view — DB management, pruning, export, init. + */ + +import { getStats, pruneSessions, initDatabase, exportAll } from '../api.js'; +import { showConfirm } from '../components/modal.js'; +import { showToast } from '../components/toast.js'; +import { escapeHtml } from '../app.js'; + +export async function renderSettingsView(container) { + let stats = null; + try { + stats = await getStats(); + } catch { + // Will show empty state + } + + const dbExists = stats && !stats.error; + const dbSizeMB = dbExists && stats.db_size_bytes ? (stats.db_size_bytes / 1024 / 1024).toFixed(2) : '0'; + + container.innerHTML = ` +

Settings

+ + +
+
Database
+ ${dbExists ? ` +
+
+
${stats.sessions || 0}
+
Sessions
+
+
+
${stats.messages || 0}
+
Messages
+
+
+
${stats.summaries || 0}
+
Summaries
+
+
+
${stats.topics || 0}
+
Topics
+
+
+
${stats.code_snippets || 0}
+
Snippets
+
+
+
${dbSizeMB} MB
+
File Size
+
+
+

~/.claude/context-memory/context.db

+ ` : ` +

Database does not exist yet.

+ `} +
+ + +
+
Prune Sessions
+

Remove old or excess sessions to manage database size.

+ +
+ + +
+
+ + +
+
+ + +
+
+
+ + +
+
Export
+

Download all sessions as JSON.

+ +
+ + +
+
Initialize Database
+

Create or reinitialize the database. Force-init will drop and recreate all tables.

+
+ + +
+
+ `; + + const pruneAge = document.getElementById('prune-age'); + const pruneCount = document.getElementById('prune-count'); + const pruneResult = document.getElementById('prune-result'); + + // Prune preview + document.getElementById('prune-preview-btn')?.addEventListener('click', async () => { + const maxAge = pruneAge.value ? parseInt(pruneAge.value) : null; + const maxSessions = pruneCount.value ? parseInt(pruneCount.value) : null; + if (!maxAge && !maxSessions) { + showToast('Set max age or max sessions', 'error'); + return; + } + try { + const result = await pruneSessions({ maxAgeDays: maxAge, maxSessions, dryRun: true }); + if (result.pruned === 0) { + pruneResult.innerHTML = '

No sessions would be pruned.

'; + } else { + const sessions = result.sessions || []; + pruneResult.innerHTML = ` +

${result.pruned} session(s) would be removed:

+
    + ${sessions.slice(0, 10).map(s => `
  • ${escapeHtml(s.session_id)} — ${escapeHtml(s.created_at)}
  • `).join('')} + ${sessions.length > 10 ? `
  • ...and ${sessions.length - 10} more
  • ` : ''} +
+ `; + } + } catch (err) { + showToast(`Preview failed: ${err.message}`, 'error'); + } + }); + + // Prune execute + document.getElementById('prune-btn')?.addEventListener('click', async () => { + const maxAge = pruneAge.value ? parseInt(pruneAge.value) : null; + const maxSessions = pruneCount.value ? parseInt(pruneCount.value) : null; + if (!maxAge && !maxSessions) { + showToast('Set max age or max sessions', 'error'); + return; + } + const confirmed = await showConfirm({ + title: 'Prune sessions', + body: '

This will permanently delete matching sessions. Run Preview first to see what will be removed.

', + confirmText: 'Prune', + danger: true, + }); + if (!confirmed) return; + try { + const result = await pruneSessions({ maxAgeDays: maxAge, maxSessions, dryRun: false }); + showToast(`Pruned ${result.pruned} session(s)`, 'success'); + pruneResult.innerHTML = `

Pruned ${result.pruned} session(s).

`; + } catch (err) { + showToast(`Prune failed: ${err.message}`, 'error'); + } + }); + + // Export + document.getElementById('export-btn')?.addEventListener('click', async () => { + try { + showToast('Exporting...', 'info'); + const data = await exportAll(); + const blob = new Blob([JSON.stringify(data, null, 2)], { type: 'application/json' }); + const url = URL.createObjectURL(blob); + const a = document.createElement('a'); + a.href = url; + a.download = `context-memory-export-${new Date().toISOString().slice(0, 10)}.json`; + a.click(); + URL.revokeObjectURL(url); + showToast(`Exported ${data.count || 0} sessions`, 'success'); + } catch (err) { + showToast(`Export failed: ${err.message}`, 'error'); + } + }); + + // Init + document.getElementById('init-btn')?.addEventListener('click', async () => { + try { + const result = await initDatabase(false); + showToast(result.message, result.created ? 'success' : 'info'); + } catch (err) { + showToast(`Init failed: ${err.message}`, 'error'); + } + }); + + // Force init + document.getElementById('force-init-btn')?.addEventListener('click', async () => { + const confirmed = await showConfirm({ + title: 'Force reinitialize', + body: '

This will delete ALL data and recreate the database from scratch. This cannot be undone.

', + confirmText: 'Reinitialize', + danger: true, + }); + if (!confirmed) return; + + // Double confirm + const really = await showConfirm({ + title: 'Are you sure?', + body: '

All sessions, messages, summaries, and code snippets will be permanently deleted.

', + confirmText: 'Yes, delete everything', + danger: true, + }); + if (!really) return; + + try { + const result = await initDatabase(true); + showToast(result.message, 'success'); + // Reload stats + renderSettingsView(container); + } catch (err) { + showToast(`Init failed: ${err.message}`, 'error'); + } + }); +} diff --git a/tests/test_auto_save.py b/tests/test_auto_save.py index 14b4721..83259e3 100644 --- a/tests/test_auto_save.py +++ b/tests/test_auto_save.py @@ -12,7 +12,7 @@ ) AUTO_SAVE_SCRIPT = os.path.join(SCRIPTS_DIR, "auto_save.py") -from auto_save import build_brief, extract_text_content, parse_transcript # noqa: E402 +from auto_save import build_brief, extract_text_content, parse_transcript, read_hook_input # noqa: E402 # --------------------------------------------------------------------------- @@ -45,6 +45,56 @@ def test_empty_input(self): assert extract_text_content(None) == "" assert extract_text_content([]) == "" + def test_non_string_non_list_returns_empty(self): + """Non-string, non-list types (int, dict) should return empty string.""" + assert extract_text_content(42) == "" + assert extract_text_content({"key": "value"}) == "" + assert extract_text_content(True) == "" + + +# --------------------------------------------------------------------------- +# Unit tests — read_hook_input +# --------------------------------------------------------------------------- +class TestReadHookInput: + def test_valid_json(self, monkeypatch): + """Valid JSON from stdin should be parsed.""" + import io + payload = json.dumps({"session_id": "test-123", "cwd": "/tmp"}) + monkeypatch.setattr("sys.stdin", io.StringIO(payload)) + result = read_hook_input() + assert result == {"session_id": "test-123", "cwd": "/tmp"} + + def test_empty_stdin(self, monkeypatch): + """Empty stdin should return None.""" + import io + monkeypatch.setattr("sys.stdin", io.StringIO("")) + assert read_hook_input() is None + + def test_whitespace_only_stdin(self, monkeypatch): + 
"""Whitespace-only stdin should return None.""" + import io + monkeypatch.setattr("sys.stdin", io.StringIO(" \n ")) + assert read_hook_input() is None + + def test_invalid_json(self, monkeypatch): + """Invalid JSON should return None (not raise).""" + import io + monkeypatch.setattr("sys.stdin", io.StringIO("{bad json")) + assert read_hook_input() is None + + def test_closed_stdin(self, monkeypatch): + """Closed stdin should return None.""" + import io + closed_stream = io.StringIO("") + closed_stream.close() + monkeypatch.setattr("sys.stdin", closed_stream) + assert read_hook_input() is None + + def test_none_stdin(self, monkeypatch): + """None stdin should return None.""" + monkeypatch.setattr("sys.stdin", None) + assert read_hook_input() is None + # --------------------------------------------------------------------------- # Unit tests — parse_transcript @@ -99,6 +149,36 @@ def test_non_message_types_skipped(self, tmp_path): assert msgs[0]["role"] == "user" assert msgs[1]["role"] == "assistant" + def test_malformed_json_lines_skipped(self, tmp_path): + """Lines with invalid JSON should be silently skipped.""" + transcript = tmp_path / "transcript.jsonl" + content = ( + json.dumps({"type": "user", "message": {"content": "first"}}) + "\n" + + "this is not valid json\n" + + "{also bad\n" + + json.dumps({"type": "assistant", "message": {"content": "second"}}) + "\n" + ) + transcript.write_text(content, encoding="utf-8") + msgs = parse_transcript(str(transcript)) + assert len(msgs) == 2 + assert msgs[0]["content"] == "first" + assert msgs[1]["content"] == "second" + + def test_empty_lines_skipped(self, tmp_path): + """Blank lines interspersed in transcript should be skipped.""" + transcript = tmp_path / "transcript.jsonl" + content = ( + "\n" + + json.dumps({"type": "user", "message": {"content": "hello"}}) + "\n" + + "\n" + + " \n" + + json.dumps({"type": "assistant", "message": {"content": "world"}}) + "\n" + + "\n" + ) + transcript.write_text(content, 
encoding="utf-8") + msgs = parse_transcript(str(transcript)) + assert len(msgs) == 2 + def test_list_content_blocks(self, tmp_path): transcript = tmp_path / "transcript.jsonl" lines = [ diff --git a/tests/test_db_init.py b/tests/test_db_init.py index 8f1af01..cd7ce73 100644 --- a/tests/test_db_init.py +++ b/tests/test_db_init.py @@ -118,3 +118,19 @@ def test_apply_migrations_returns_final_version(self, isolated_db): conn.commit() final = db_init.apply_migrations(conn) assert final == db_init.CURRENT_SCHEMA_VERSION + + def test_apply_migrations_missing_migration_raises(self, isolated_db, monkeypatch): + """apply_migrations() should raise RuntimeError if a migration function is missing.""" + # Create a legacy DB at version 1 + with db_utils.get_connection() as conn: + legacy_sql = db_init.SCHEMA_SQL.split("-- Schema versioning")[0] + conn.executescript(legacy_sql) + conn.commit() + + # Bump CURRENT_SCHEMA_VERSION beyond what MIGRATIONS covers + monkeypatch.setattr(db_init, "CURRENT_SCHEMA_VERSION", 99) + + with db_utils.get_connection() as conn: + import pytest + with pytest.raises(RuntimeError, match="No migration found for version"): + db_init.apply_migrations(conn) diff --git a/tests/test_db_save.py b/tests/test_db_save.py index 6235aa4..899b8a2 100644 --- a/tests/test_db_save.py +++ b/tests/test_db_save.py @@ -54,6 +54,32 @@ def test_replace_messages(self, isolated_db): cursor = conn.execute("SELECT COUNT(*) FROM messages WHERE session_id = ?", (sid,)) assert cursor.fetchone()[0] == 1 + def test_append_messages(self, isolated_db): + """Appending messages (replace=False) should add to existing messages and update count.""" + db_init.init_database() + sid = db_save.save_session("test-session-1") + db_save.save_messages(sid, [{"role": "user", "content": "First"}]) + db_save.save_messages(sid, [{"role": "assistant", "content": "Second"}], replace=False) + with db_utils.get_connection(readonly=True) as conn: + msg_count = conn.execute("SELECT COUNT(*) FROM messages 
WHERE session_id = ?", (sid,)).fetchone()[0] + session_count = conn.execute("SELECT message_count FROM sessions WHERE id = ?", (sid,)).fetchone()[0] + assert msg_count == 2 + assert session_count == 2 + + def test_messages_missing_role_and_content(self, isolated_db): + """Messages with missing role/content should use defaults.""" + db_init.init_database() + sid = db_save.save_session("test-session-1") + db_save.save_messages(sid, [{"other_key": "value"}, {}]) + with db_utils.get_connection(readonly=True) as conn: + rows = conn.execute( + "SELECT role, content FROM messages WHERE session_id = ? ORDER BY sequence", (sid,) + ).fetchall() + assert len(rows) == 2 + assert rows[0]["role"] == "user" + assert rows[0]["content"] == "" + assert rows[1]["role"] == "user" + class TestSaveSummary: def test_save_summary(self, isolated_db): @@ -82,6 +108,27 @@ def test_update_summary(self, isolated_db): id2 = db_save.save_summary(sid, brief="Updated") assert id1 == id2 + def test_save_summary_with_problems_and_user_note(self, isolated_db): + """problems_solved and user_note should be stored correctly.""" + db_init.init_database() + sid = db_save.save_session("test-session-1") + summary_id = db_save.save_summary( + sid, + brief="Fixed auth issues", + problems_solved=["Token expiration", "CORS errors"], + user_note="Important fix for production", + ) + assert summary_id >= 1 + with db_utils.get_connection(readonly=True) as conn: + row = conn.execute( + "SELECT problems_solved, user_note FROM summaries WHERE id = ?", (summary_id,) + ).fetchone() + assert row["user_note"] == "Important fix for production" + import json + problems = json.loads(row["problems_solved"]) + assert "Token expiration" in problems + assert "CORS errors" in problems + class TestSaveTopics: def test_save_topics(self, isolated_db): @@ -99,6 +146,43 @@ def test_replace_topics(self, isolated_db): cursor = conn.execute("SELECT COUNT(*) FROM topics WHERE session_id = ?", (sid,)) assert cursor.fetchone()[0] == 1 + 
def test_empty_and_whitespace_topics_filtered(self, isolated_db): + """Empty strings and whitespace-only topics should be skipped.""" + db_init.init_database() + sid = db_save.save_session("test-session-1") + count = db_save.save_topics(sid, ["", " ", " ", "valid", " also-valid "]) + assert count == 2 + with db_utils.get_connection(readonly=True) as conn: + rows = conn.execute( + "SELECT topic FROM topics WHERE session_id = ? ORDER BY topic", (sid,) + ).fetchall() + topics = [r["topic"] for r in rows] + assert "valid" in topics + assert "also-valid" in topics + + def test_append_topics(self, isolated_db): + """Appending topics (replace=False) should add to existing topics.""" + db_init.init_database() + sid = db_save.save_session("test-session-1") + db_save.save_topics(sid, ["first"]) + db_save.save_topics(sid, ["second"], replace=False) + with db_utils.get_connection(readonly=True) as conn: + count = conn.execute("SELECT COUNT(*) FROM topics WHERE session_id = ?", (sid,)).fetchone()[0] + assert count == 2 + + +class TestSaveSessionMetadata: + def test_metadata_stored_as_json(self, isolated_db): + """Metadata dict should be serialized as JSON in the database.""" + db_init.init_database() + sid = db_save.save_session("meta-1", metadata={"auto_save": True, "source": "hook"}) + with db_utils.get_connection(readonly=True) as conn: + row = conn.execute("SELECT metadata FROM sessions WHERE id = ?", (sid,)).fetchone() + import json + meta = json.loads(row["metadata"]) + assert meta["auto_save"] is True + assert meta["source"] == "hook" + class TestSaveCodeSnippet: def test_save_snippet(self, isolated_db): @@ -109,6 +193,20 @@ def test_save_snippet(self, isolated_db): ) assert snippet_id >= 1 + def test_save_snippet_minimal(self, isolated_db): + """Code snippet with only required code field should save successfully.""" + db_init.init_database() + sid = db_save.save_session("test-session-1") + snippet_id = db_save.save_code_snippet(sid, code="x = 1") + assert snippet_id >= 
1 + with db_utils.get_connection(readonly=True) as conn: + row = conn.execute( + "SELECT language, description, file_path FROM code_snippets WHERE id = ?", (snippet_id,) + ).fetchone() + assert row["language"] is None + assert row["description"] is None + assert row["file_path"] is None + class TestSaveFullSession: def test_save_full_session(self, isolated_db): diff --git a/tests/test_db_search.py b/tests/test_db_search.py index a8fbfd7..223d31d 100644 --- a/tests/test_db_search.py +++ b/tests/test_db_search.py @@ -3,6 +3,7 @@ import db_init import db_save import db_search +import db_utils def _seed_data(isolated_db): @@ -39,6 +40,36 @@ def _seed_data(isolated_db): ) +def _seed_data_with_snippets(isolated_db): + """Create test data including code snippets for search tests.""" + _seed_data(isolated_db) + db_save.save_full_session( + session_id="search-session-3", + project_path="/tmp/webapp", + messages=[ + {"role": "user", "content": "Write a React component for login"}, + {"role": "assistant", "content": "Here is the LoginForm component..."}, + ], + summary={ + "brief": "Created React login form with validation", + "detailed": "Built a LoginForm component with email/password fields and client-side validation", + "key_decisions": ["Use controlled components", "Validate on blur"], + "problems_solved": ["Form validation UX"], + "technologies": ["react", "typescript"], + "outcome": "success", + }, + topics=["react", "frontend", "forms"], + code_snippets=[ + { + "code": "function LoginForm() {\n return
<div>...</div>
;\n}", + "language": "tsx", + "description": "LoginForm React component", + "file_path": "src/components/LoginForm.tsx", + } + ], + ) + + class TestSearchTier1: def test_search_returns_results(self, isolated_db): _seed_data(isolated_db) @@ -125,3 +156,194 @@ def test_search_messages_no_results(self, isolated_db): _seed_data(isolated_db) results = db_search.search_messages("xyznonexistent") assert len(results) == 0 + + +class TestSearchTier1CodeSnippets: + def test_finds_session_via_code_snippet(self, isolated_db): + """Tier 1 should find sessions matching code snippet content.""" + _seed_data_with_snippets(isolated_db) + results = db_search.search_tier1("LoginForm") + assert len(results) >= 1 + # Should find the session with the React component + session_ids = [r["session_id"] for r in results] + assert "search-session-3" in session_ids + + def test_finds_session_via_snippet_description(self, isolated_db): + """Tier 1 should find sessions matching code snippet description.""" + _seed_data_with_snippets(isolated_db) + results = db_search.search_tier1("React component") + assert len(results) >= 1 + + def test_finds_session_via_snippet_file_path(self, isolated_db): + """Tier 1 should find sessions matching code snippet file path terms.""" + _seed_data_with_snippets(isolated_db) + # FTS5 tokenizes on punctuation, so search for a path component + results = db_search.search_tier1("components") + assert len(results) >= 1 + + +class TestSearchTier1ProjectFilter: + def test_filters_by_project_path(self, isolated_db): + """Tier 1 should filter results to the specified project.""" + db_init.init_database() + db_save.save_full_session( + session_id="proj-a-1", + project_path="/tmp/project-a", + summary={"brief": "Working on authentication"}, + topics=["auth"], + ) + db_save.save_full_session( + session_id="proj-b-1", + project_path="/tmp/project-b", + summary={"brief": "Working on authentication in another project"}, + topics=["auth"], + ) + + results = 
db_search.search_tier1("authentication", project_path="/tmp/project-a") + session_ids = [r["session_id"] for r in results] + assert "proj-a-1" in session_ids + assert "proj-b-1" not in session_ids + + +class TestSearchTier2Flags: + def test_include_messages_false(self, isolated_db): + """Tier 2 with include_messages=False should omit messages key.""" + _seed_data(isolated_db) + tier1 = db_search.search_tier1("authentication") + ids = [r["id"] for r in tier1] + tier2 = db_search.search_tier2(ids, include_messages=False) + assert len(tier2) >= 1 + assert "messages" not in tier2[0] + assert "topics" in tier2[0] + assert "code_snippets" in tier2[0] + + def test_include_snippets_false(self, isolated_db): + """Tier 2 with include_snippets=False should omit code_snippets key.""" + _seed_data(isolated_db) + tier1 = db_search.search_tier1("authentication") + ids = [r["id"] for r in tier1] + tier2 = db_search.search_tier2(ids, include_snippets=False) + assert len(tier2) >= 1 + assert "code_snippets" not in tier2[0] + assert "messages" in tier2[0] + assert "topics" in tier2[0] + + def test_both_flags_false(self, isolated_db): + """Tier 2 with both flags False should still return session + summary data.""" + _seed_data(isolated_db) + tier1 = db_search.search_tier1("authentication") + ids = [r["id"] for r in tier1] + tier2 = db_search.search_tier2(ids, include_messages=False, include_snippets=False) + assert len(tier2) >= 1 + assert "messages" not in tier2[0] + assert "code_snippets" not in tier2[0] + assert "brief" in tier2[0] + assert "topics" in tier2[0] + + +class TestSearchTier2MalformedJson: + def test_malformed_key_decisions(self, isolated_db): + """Tier 2 should handle malformed JSON in key_decisions gracefully.""" + db_init.init_database() + session_db_id = db_save.save_session("malformed-session", "/tmp/test") + # Insert a summary with malformed JSON directly + with db_utils.get_connection() as conn: + conn.execute(""" + INSERT INTO summaries (session_id, brief, 
key_decisions, problems_solved, technologies) + VALUES (?, ?, ?, ?, ?) + """, (session_db_id, "Test brief", "not valid json [", "also {bad", '["valid"]')) + conn.commit() + + tier2 = db_search.search_tier2([session_db_id]) + assert len(tier2) == 1 + # Malformed fields should remain as strings (not parsed) + assert tier2[0]["key_decisions"] == "not valid json [" + assert tier2[0]["problems_solved"] == "also {bad" + # Valid JSON should be parsed + assert tier2[0]["technologies"] == ["valid"] + + def test_nonexistent_session_ids(self, isolated_db): + """Tier 2 with IDs that don't exist should return empty list.""" + db_init.init_database() + result = db_search.search_tier2([9999, 8888]) + assert result == [] + + +class TestFormatResultsMarkdownDetailed: + def test_detailed_with_messages_and_snippets(self, isolated_db): + """format_results_markdown(detailed=True) should include expandable content.""" + _seed_data_with_snippets(isolated_db) + results = db_search.full_search("React login", detailed=True) + md = db_search.format_results_markdown(results, detailed=True) + + assert "
" in md + assert "Full Context" in md + assert "### Detailed Summary" in md + assert "### Key Messages" in md + assert "### Code Snippets" in md + assert "```tsx" in md + assert "LoginForm" in md + assert "
" in md + + def test_detailed_with_decisions(self, isolated_db): + """Detailed mode should render key decisions.""" + _seed_data_with_snippets(isolated_db) + results = db_search.full_search("React login", detailed=True) + md = db_search.format_results_markdown(results, detailed=True) + + assert "**Decisions**:" in md + assert "- Use controlled components" in md + + def test_detailed_without_content_no_details_block(self, isolated_db): + """Detailed mode with no detailed/messages/snippets should not add details block.""" + db_init.init_database() + db_save.save_full_session( + session_id="bare-session", + project_path="/tmp/bare", + summary={"brief": "Bare session with no details"}, + ) + results = db_search.full_search("Bare session", detailed=True) + md = db_search.format_results_markdown(results, detailed=True) + + assert "Bare session" in md + assert "
" not in md + + def test_technologies_as_json_string(self, isolated_db): + """Technologies stored as a JSON string should be parsed for display.""" + db_init.init_database() + session_db_id = db_save.save_session("tech-session", "/tmp/test") + with db_utils.get_connection() as conn: + conn.execute(""" + INSERT INTO summaries (session_id, brief, technologies) + VALUES (?, ?, ?) + """, (session_db_id, "Tech test", '["python", "rust"]')) + conn.commit() + + results = db_search.full_search("Tech test") + md = db_search.format_results_markdown(results) + + assert "python" in md + assert "rust" in md + + def test_date_formatting(self, isolated_db): + """Dates with T separator should be displayed as date only.""" + _seed_data(isolated_db) + results = db_search.full_search("authentication") + md = db_search.format_results_markdown(results) + + # Should not contain the time portion (T...) in the header + for line in md.split("\n"): + if line.startswith("## "): + assert "T" not in line.split("|")[0] + + def test_project_name_from_path(self, isolated_db): + """Project path should be shortened to just the directory name.""" + _seed_data(isolated_db) + results = db_search.full_search("authentication") + md = db_search.format_results_markdown(results) + + assert "webapp" in md + # Full path should not appear in headers + for line in md.split("\n"): + if line.startswith("## "): + assert "/tmp/webapp" not in line diff --git a/tests/test_db_utils.py b/tests/test_db_utils.py index 6ac4dfd..0477c33 100644 --- a/tests/test_db_utils.py +++ b/tests/test_db_utils.py @@ -1,5 +1,7 @@ """Tests for database utilities.""" +from unittest.mock import patch + import db_utils @@ -19,6 +21,29 @@ def test_different_paths_different_hashes(self): h2 = db_utils.hash_project_path("/tmp/project-b") assert h1 != h2 + def test_msys2_path_normalized_on_windows(self): + """MSYS2-style /c/Users/... 
should be normalized to C:\\ on Windows.""" + with patch.object(db_utils.platform, "system", return_value="Windows"), \ + patch.object(db_utils.os.path, "abspath", return_value="C:\\c\\Users\\dev\\project"), \ + patch.object(db_utils.os.path, "normpath", return_value="C:\\c\\Users\\dev\\project"): + result = db_utils.hash_project_path("/c/Users/dev/project") + # Should produce a consistent hash (just verify it's a valid hex string) + assert isinstance(result, str) + assert len(result) == 16 + + def test_windows_case_insensitive(self): + """On Windows, paths should be case-insensitive for hashing.""" + with patch.object(db_utils.platform, "system", return_value="Windows"): + h1 = db_utils.hash_project_path("C:\\Users\\Dev\\Project") + h2 = db_utils.hash_project_path("C:\\Users\\dev\\project") + assert h1 == h2 + + def test_trailing_slash_normalized(self): + """Trailing slashes should be normalized away.""" + h1 = db_utils.hash_project_path("/tmp/myproject/") + h2 = db_utils.hash_project_path("/tmp/myproject") + assert h1 == h2 + class TestFormatFtsQuery: def test_simple_query(self): @@ -37,6 +62,37 @@ def test_empty_query(self): result = db_utils.format_fts_query("") assert result == '""' + def test_special_characters_stripped(self): + """Special characters like @#$%! should be stripped from terms.""" + result = db_utils.format_fts_query("hello@world!") + assert "@" not in result + assert "!" 
not in result + assert "helloworld" in result + + def test_multi_word_joined_with_or(self): + """Multiple words should be joined with OR.""" + result = db_utils.format_fts_query("hello world") + assert " OR " in result + assert "hello" in result + assert "world" in result + + def test_hyphen_and_underscore_preserved(self): + """Hyphens and underscores should be preserved in terms.""" + result = db_utils.format_fts_query("my-project_name") + assert "my-project_name" in result + + def test_all_special_chars_returns_empty(self): + """A query of only special characters should return empty query.""" + result = db_utils.format_fts_query("@#$%") + assert result == '""' + + def test_mixed_valid_and_empty_terms(self): + """Terms that become empty after cleaning should be skipped.""" + result = db_utils.format_fts_query("valid @#$ also-valid") + assert "valid" in result + assert "also-valid" in result + assert " OR " in result + class TestTruncateText: def test_short_text_unchanged(self): @@ -110,3 +166,30 @@ def test_get_table_count_validates(self): import pytest with pytest.raises(ValueError): db_utils.get_table_count("nonexistent_table") + + def test_get_table_count_returns_row_count(self, isolated_db): + """get_table_count should return actual row count for a populated table.""" + import db_init + import db_save + db_init.init_database() + db_save.save_session("count-test-1") + db_save.save_session("count-test-2") + assert db_utils.get_table_count("sessions") == 2 + + def test_get_table_count_empty_table(self, isolated_db): + """get_table_count should return 0 for an empty table.""" + import db_init + db_init.init_database() + assert db_utils.get_table_count("sessions") == 0 + + def test_get_table_count_no_db(self, isolated_db): + """get_table_count should return 0 when no database exists.""" + assert db_utils.get_table_count("sessions") == 0 + + def test_get_session_count(self, isolated_db): + """get_session_count should delegate to get_table_count('sessions').""" + 
import db_init + import db_save + db_init.init_database() + db_save.save_session("session-count-1") + assert db_utils.get_session_count() == 1 diff --git a/tests/test_install.py b/tests/test_install.py index fb1babf..a40bd01 100644 --- a/tests/test_install.py +++ b/tests/test_install.py @@ -181,3 +181,247 @@ def test_skips_when_mcp_not_installed(self, tmp_path): result = install.install_mcp() assert "not installed" in result + + +class TestInstallCommands: + def test_fresh_install(self, tmp_path): + """Command files should be installed when destination doesn't exist.""" + src = tmp_path / "src_commands" + src.mkdir() + (src / "remember.md").write_text("# remember", encoding="utf-8") + (src / "recall.md").write_text("# recall", encoding="utf-8") + + dst = tmp_path / "commands" + dst.mkdir() + + with patch.object(install, "COMMANDS_SRC", src), \ + patch.object(install, "COMMANDS_DST", dst): + result = install.install_commands() + + assert "remember.md: installed" in result + assert "recall.md: installed" in result + assert (dst / "remember.md").read_text(encoding="utf-8") == "# remember" + assert (dst / "recall.md").read_text(encoding="utf-8") == "# recall" + + def test_already_up_to_date(self, tmp_path): + """Identical files should be reported as up to date.""" + src = tmp_path / "src_commands" + src.mkdir() + (src / "remember.md").write_text("# same content", encoding="utf-8") + (src / "recall.md").write_text("# recall content", encoding="utf-8") + + dst = tmp_path / "commands" + dst.mkdir() + (dst / "remember.md").write_text("# same content", encoding="utf-8") + (dst / "recall.md").write_text("# recall content", encoding="utf-8") + + with patch.object(install, "COMMANDS_SRC", src), \ + patch.object(install, "COMMANDS_DST", dst): + result = install.install_commands() + + assert "already up to date" in result + + def test_creates_backup_on_update(self, tmp_path): + """Changed files should be backed up before overwriting.""" + src = tmp_path / "src_commands" + 
src.mkdir() + (src / "remember.md").write_text("# new version", encoding="utf-8") + + dst = tmp_path / "commands" + dst.mkdir() + (dst / "remember.md").write_text("# old version", encoding="utf-8") + + with patch.object(install, "COMMANDS_SRC", src), \ + patch.object(install, "COMMANDS_DST", dst): + result = install.install_commands() + + assert "updated" in result + assert "backup" in result + assert (dst / "remember.md").read_text(encoding="utf-8") == "# new version" + assert (dst / "remember.md.bak").exists() + assert (dst / "remember.md.bak").read_text(encoding="utf-8") == "# old version" + + def test_source_not_found(self, tmp_path): + """Missing source files should be skipped gracefully.""" + src = tmp_path / "src_commands" + src.mkdir() + # Only create one of the two expected files + + dst = tmp_path / "commands" + dst.mkdir() + + with patch.object(install, "COMMANDS_SRC", src), \ + patch.object(install, "COMMANDS_DST", dst): + result = install.install_commands() + + assert "source not found" in result + + +class TestInstallDb: + def test_skips_existing_db(self, tmp_path): + """install_db() should skip when database already exists.""" + db_dir = tmp_path / "context-memory" + db_dir.mkdir() + (db_dir / "context.db").write_bytes(b"fake db") + + with patch.object(install, "DB_DIR", db_dir): + result = install.install_db() + + assert result == "Database: already exists" + + def test_init_success(self, tmp_path): + """install_db() should report success when db_init.py succeeds.""" + db_dir = tmp_path / "context-memory" + db_dir.mkdir() + + with patch.object(install, "DB_DIR", db_dir), \ + patch("subprocess.run") as mock_run: + mock_run.return_value = subprocess.CompletedProcess( + args=[], returncode=0, stdout="", stderr="" + ) + result = install.install_db() + + assert result == "Database: initialized" + + def test_init_failure(self, tmp_path): + """install_db() should report failure when db_init.py fails.""" + db_dir = tmp_path / "context-memory" + 
db_dir.mkdir() + + with patch.object(install, "DB_DIR", db_dir), \ + patch("subprocess.run") as mock_run: + mock_run.return_value = subprocess.CompletedProcess( + args=[], returncode=1, stdout="", stderr="Schema error" + ) + result = install.install_db() + + assert "init failed" in result + assert "Schema error" in result + + +class TestInstallSkill: + def test_copies_skill_directory(self, tmp_path): + """install_skill() should copy the skill directory to the destination.""" + src = tmp_path / "src" / "skills" / "context-memory" + src.mkdir(parents=True) + (src / "SKILL.md").write_text("# Skill", encoding="utf-8") + scripts = src / "scripts" + scripts.mkdir() + (scripts / "db_init.py").write_text("# init", encoding="utf-8") + + dst = tmp_path / "dst" / "skills" / "context-memory" + + with patch.object(install, "SKILL_SRC", src), \ + patch.object(install, "SKILL_DST", dst): + result = install.install_skill(symlink=False) + + assert result == "Skill copied" + assert dst.exists() + assert (dst / "SKILL.md").exists() + assert (dst / "scripts" / "db_init.py").exists() + + def test_creates_parent_directories(self, tmp_path): + """install_skill() should create parent dirs if they don't exist.""" + src = tmp_path / "src" / "skills" / "context-memory" + src.mkdir(parents=True) + (src / "SKILL.md").write_text("# Skill", encoding="utf-8") + + dst = tmp_path / "deep" / "nested" / "skills" / "context-memory" + + with patch.object(install, "SKILL_SRC", src), \ + patch.object(install, "SKILL_DST", dst): + result = install.install_skill(symlink=False) + + assert result == "Skill copied" + assert dst.exists() + + def test_overwrites_existing_directory(self, tmp_path): + """install_skill() should remove and replace an existing skill directory.""" + src = tmp_path / "src" / "skills" / "context-memory" + src.mkdir(parents=True) + (src / "SKILL.md").write_text("# New version", encoding="utf-8") + + dst = tmp_path / "dst" / "skills" / "context-memory" + dst.mkdir(parents=True) + (dst / 
"SKILL.md").write_text("# Old version", encoding="utf-8") + (dst / "stale_file.txt").write_text("should be removed", encoding="utf-8") + + with patch.object(install, "SKILL_SRC", src), \ + patch.object(install, "SKILL_DST", dst): + result = install.install_skill(symlink=False) + + assert result == "Skill copied" + assert (dst / "SKILL.md").read_text(encoding="utf-8") == "# New version" + assert not (dst / "stale_file.txt").exists() + + def test_symlink_mode(self, tmp_path): + """install_skill(symlink=True) should create a symlink.""" + src = tmp_path / "src" / "skills" / "context-memory" + src.mkdir(parents=True) + (src / "SKILL.md").write_text("# Skill", encoding="utf-8") + + dst = tmp_path / "dst" / "skills" / "context-memory" + + with patch.object(install, "SKILL_SRC", src), \ + patch.object(install, "SKILL_DST", dst): + result = install.install_skill(symlink=True) + + assert result == "Skill symlinked" + assert dst.is_symlink() + assert dst.resolve() == src.resolve() + + def test_symlink_replaces_existing_symlink(self, tmp_path): + """install_skill(symlink=True) should replace an existing symlink.""" + old_target = tmp_path / "old_target" + old_target.mkdir() + + src = tmp_path / "src" / "skills" / "context-memory" + src.mkdir(parents=True) + + dst = tmp_path / "dst" / "skills" / "context-memory" + dst.parent.mkdir(parents=True) + dst.symlink_to(old_target, target_is_directory=True) + + with patch.object(install, "SKILL_SRC", src), \ + patch.object(install, "SKILL_DST", dst): + result = install.install_skill(symlink=True) + + assert result == "Skill symlinked" + assert dst.is_symlink() + assert dst.resolve() == src.resolve() + + def test_symlink_replaces_existing_directory(self, tmp_path): + """install_skill(symlink=True) should remove an existing directory and create symlink.""" + src = tmp_path / "src" / "skills" / "context-memory" + src.mkdir(parents=True) + + dst = tmp_path / "dst" / "skills" / "context-memory" + dst.mkdir(parents=True) + (dst / 
"old_file.txt").write_text("old", encoding="utf-8") + + with patch.object(install, "SKILL_SRC", src), \ + patch.object(install, "SKILL_DST", dst): + result = install.install_skill(symlink=True) + + assert result == "Skill symlinked" + assert dst.is_symlink() + + def test_ignores_pycache(self, tmp_path): + """install_skill() should not copy __pycache__ or .pyc files.""" + src = tmp_path / "src" / "skills" / "context-memory" + src.mkdir(parents=True) + (src / "SKILL.md").write_text("# Skill", encoding="utf-8") + pycache = src / "__pycache__" + pycache.mkdir() + (pycache / "module.cpython-311.pyc").write_bytes(b"\x00") + (src / "old.pyc").write_bytes(b"\x00") + + dst = tmp_path / "dst" / "skills" / "context-memory" + + with patch.object(install, "SKILL_SRC", src), \ + patch.object(install, "SKILL_DST", dst): + install.install_skill(symlink=False) + + assert (dst / "SKILL.md").exists() + assert not (dst / "__pycache__").exists() + assert not (dst / "old.pyc").exists() diff --git a/tests/test_uninstall.py b/tests/test_uninstall.py index 709098b..0652261 100644 --- a/tests/test_uninstall.py +++ b/tests/test_uninstall.py @@ -137,3 +137,206 @@ def test_no_config_file(self, tmp_path): result = uninstall.uninstall_mcp() assert "not found" in result + + +class TestUninstallData: + def test_remove_deletes_directory(self, tmp_path): + """uninstall_data(remove=True) should delete the DB directory.""" + db_dir = tmp_path / "context-memory" + db_dir.mkdir() + (db_dir / "context.db").write_bytes(b"fake db") + + with patch.object(uninstall, "DB_DIR", db_dir): + result = uninstall.uninstall_data(remove=True) + + assert result == "Database: removed" + assert not db_dir.exists() + + def test_preserve_keeps_directory(self, tmp_path): + """uninstall_data(remove=False) should preserve the DB directory.""" + db_dir = tmp_path / "context-memory" + db_dir.mkdir() + (db_dir / "context.db").write_bytes(b"fake db") + + with patch.object(uninstall, "DB_DIR", db_dir): + result = 
uninstall.uninstall_data(remove=False) + + assert "preserved" in result + assert db_dir.exists() + + def test_no_data_found(self, tmp_path): + """uninstall_data() should report no data when directory doesn't exist.""" + db_dir = tmp_path / "context-memory" + + with patch.object(uninstall, "DB_DIR", db_dir): + result = uninstall.uninstall_data(remove=True) + + assert result == "Database: no data found" + + +class TestUninstallSkill: + def test_removes_directory(self, tmp_path): + """Should remove the skill directory.""" + skill_dst = tmp_path / "skills" / "context-memory" + skill_dst.mkdir(parents=True) + (skill_dst / "SKILL.md").write_text("# Skill", encoding="utf-8") + + with patch.object(uninstall, "SKILL_DST", skill_dst): + result = uninstall.uninstall_skill() + + assert result == "Skill: removed" + assert not skill_dst.exists() + + def test_removes_symlink(self, tmp_path): + """Should remove a symlinked skill.""" + target = tmp_path / "target" + target.mkdir() + skill_dst = tmp_path / "skills" / "context-memory" + skill_dst.parent.mkdir(parents=True) + skill_dst.symlink_to(target, target_is_directory=True) + + with patch.object(uninstall, "SKILL_DST", skill_dst): + result = uninstall.uninstall_skill() + + assert result == "Skill: symlink removed" + assert not skill_dst.exists() + # Original target should still exist + assert target.exists() + + def test_not_installed(self, tmp_path): + """Should report not installed when directory doesn't exist.""" + skill_dst = tmp_path / "skills" / "context-memory" + + with patch.object(uninstall, "SKILL_DST", skill_dst): + result = uninstall.uninstall_skill() + + assert result == "Skill: not installed" + + +class TestUninstallHooks: + def _write_settings(self, path, settings): + path.write_text(json.dumps(settings, indent=2), encoding="utf-8") + + def test_removes_our_hook(self, tmp_path): + """Should remove context-memory hooks from settings.json.""" + settings_path = tmp_path / "settings.json" + settings = { + "hooks": { + 
"Stop": [ + {"hooks": [{"type": "command", "command": "python ~/.claude/skills/context-memory/scripts/auto_save.py"}]} + ] + } + } + self._write_settings(settings_path, settings) + + with patch.object(uninstall, "SETTINGS_PATH", settings_path): + result = uninstall.uninstall_hooks() + + assert result == "Hooks: removed from settings.json" + updated = json.loads(settings_path.read_text(encoding="utf-8")) + # hooks key should be cleaned up entirely + assert "hooks" not in updated + + def test_preserves_other_hooks(self, tmp_path): + """Should keep non-context-memory hooks in place.""" + settings_path = tmp_path / "settings.json" + settings = { + "hooks": { + "Stop": [ + {"hooks": [{"type": "command", "command": "python ~/.claude/skills/context-memory/scripts/auto_save.py"}]}, + {"hooks": [{"type": "command", "command": "python /some/other/plugin/hook.py"}]}, + ] + } + } + self._write_settings(settings_path, settings) + + with patch.object(uninstall, "SETTINGS_PATH", settings_path): + result = uninstall.uninstall_hooks() + + assert result == "Hooks: removed from settings.json" + updated = json.loads(settings_path.read_text(encoding="utf-8")) + assert len(updated["hooks"]["Stop"]) == 1 + assert "other/plugin" in updated["hooks"]["Stop"][0]["hooks"][0]["command"] + + def test_no_settings_file(self, tmp_path): + """Should report not found when settings.json doesn't exist.""" + settings_path = tmp_path / "settings.json" + + with patch.object(uninstall, "SETTINGS_PATH", settings_path): + result = uninstall.uninstall_hooks() + + assert result == "Hooks: settings.json not found" + + def test_malformed_settings_json(self, tmp_path): + """Should handle malformed settings.json gracefully.""" + settings_path = tmp_path / "settings.json" + settings_path.write_text("{invalid json", encoding="utf-8") + + with patch.object(uninstall, "SETTINGS_PATH", settings_path): + result = uninstall.uninstall_hooks() + + assert result == "Hooks: settings.json is malformed, skipped" + + def 
test_no_stop_hooks(self, tmp_path): + """Should report no Stop hooks when hooks section is empty.""" + settings_path = tmp_path / "settings.json" + self._write_settings(settings_path, {"hooks": {}}) + + with patch.object(uninstall, "SETTINGS_PATH", settings_path): + result = uninstall.uninstall_hooks() + + assert result == "Hooks: no Stop hooks found" + + def test_not_installed(self, tmp_path): + """Should report not installed when Stop hooks exist but none are ours.""" + settings_path = tmp_path / "settings.json" + settings = { + "hooks": { + "Stop": [ + {"hooks": [{"type": "command", "command": "python /other/hook.py"}]} + ] + } + } + self._write_settings(settings_path, settings) + + with patch.object(uninstall, "SETTINGS_PATH", settings_path): + result = uninstall.uninstall_hooks() + + assert result == "Hooks: not installed" + + def test_cleans_empty_hooks_structure(self, tmp_path): + """Should remove empty hooks/Stop keys after removing our hook.""" + settings_path = tmp_path / "settings.json" + settings = { + "hooks": { + "Stop": [ + {"hooks": [{"type": "command", "command": "python ~/.claude/skills/context-memory/scripts/auto_save.py"}]} + ] + }, + "other_setting": True, + } + self._write_settings(settings_path, settings) + + with patch.object(uninstall, "SETTINGS_PATH", settings_path): + uninstall.uninstall_hooks() + + updated = json.loads(settings_path.read_text(encoding="utf-8")) + assert "hooks" not in updated + assert updated["other_setting"] is True + + def test_matches_windows_expanded_path(self, tmp_path): + """Should recognize hooks with Windows expanded paths.""" + settings_path = tmp_path / "settings.json" + settings = { + "hooks": { + "Stop": [ + {"hooks": [{"type": "command", "command": "python C:/Users/Test/.claude/skills/context-memory/scripts/auto_save.py"}]} + ] + } + } + self._write_settings(settings_path, settings) + + with patch.object(uninstall, "SETTINGS_PATH", settings_path): + result = uninstall.uninstall_hooks() + + assert result == 
"Hooks: removed from settings.json"