diff --git a/CLAUDE.md b/CLAUDE.md index c712d6c6..bafb0d8b 100644 --- a/CLAUDE.md +++ b/CLAUDE.md @@ -50,7 +50,8 @@ JS source is plain JavaScript (ES modules) in `src/`. No transpilation step. The | `mcp.js` | MCP server exposing graph queries to AI agents; single-repo by default, `--multi-repo` to enable cross-repo access | | `graph/` | Unified graph model: `CodeGraph` class (`model.js`), algorithms (Tarjan SCC, Louvain, BFS, shortest path, centrality), classifiers (role, risk), builders (dependency, structure, temporal) | | `cycles.js` | Circular dependency detection (delegates to `graph/` subsystem) | -| `export.js` | DOT/Mermaid/JSON graph export | +| `export.js` | Graph export orchestration: loads data from DB, delegates to `presentation/export.js` serializers | +| `presentation/` | Pure output formatting: `viewer.js` (HTML renderer), `export.js` (DOT/Mermaid/GraphML/Neo4j serializers), `sequence-renderer.js` (Mermaid sequence diagrams), `table.js` (CLI table formatting), `result-formatter.js` (JSON/NDJSON output) | | `watcher.js` | Watch mode for incremental rebuilds | | `config.js` | `.codegraphrc.json` loading, env overrides, `apiKeyCommand` secret resolution | | `constants.js` | `EXTENSIONS` (derived from parser registry) and `IGNORE_DIRS` constants | @@ -68,7 +69,7 @@ JS source is plain JavaScript (ES modules) in `src/`. No transpilation step. 
The | `boundaries.js` | Architecture boundary rules with onion architecture preset | | `owners.js` | CODEOWNERS integration for ownership queries | | `snapshot.js` | SQLite DB backup and restore | -| `sequence.js` | Mermaid sequence diagram generation from call graph edges | +| `sequence.js` | Sequence diagram data generation (BFS traversal); Mermaid rendering delegated to `presentation/sequence-renderer.js` | | `paginate.js` | Pagination helpers for bounded query results | | `logger.js` | Structured logging (`warn`, `debug`, `info`, `error`) | diff --git a/docs/roadmap/ROADMAP.md b/docs/roadmap/ROADMAP.md index c5fd25c2..2fc1eaa1 100644 --- a/docs/roadmap/ROADMAP.md +++ b/docs/roadmap/ROADMAP.md @@ -944,11 +944,11 @@ src/ result-formatter.js # Structured result formatting (moved from infrastructure/) ``` -- 🔲 Extract rendering logic from `viewer.js` — keep graph data loading in domain, move formatting to presentation -- 🔲 Extract serialization from `export.js` — DOT/Mermaid/JSON writers become pure data → string transforms -- 🔲 Extract table formatting helpers used across `queries-cli.js`, `complexity`, `stats` -- 🔲 Move `result-formatter.js` from `infrastructure/` to `presentation/` (it's output formatting, not infrastructure) -- 🔲 Extract Mermaid rendering from `sequence.js` into `sequence-renderer.js` +- ✅ Extract rendering logic from `viewer.js` — keep graph data loading in domain, move formatting to presentation +- ✅ Extract serialization from `export.js` — DOT/Mermaid/JSON writers become pure data → string transforms +- ✅ Extract table formatting helpers used across `queries-cli.js`, `complexity`, `stats` +- ✅ Move `result-formatter.js` from `infrastructure/` to `presentation/` (it's output formatting, not infrastructure) +- ✅ Extract Mermaid rendering from `sequence.js` into `sequence-renderer.js` **Principle:** Domain functions return plain data objects. Presentation functions are pure transforms: `data → formatted string`. Commands wire the two together. 
diff --git a/src/export.js b/src/export.js index 4a3a7c91..8dfa768b 100644 --- a/src/export.js +++ b/src/export.js @@ -1,368 +1,254 @@ import path from 'node:path'; import { isTestFile } from './infrastructure/test-filter.js'; import { paginateResult } from './paginate.js'; +import { + renderFileLevelDOT, + renderFileLevelGraphML, + renderFileLevelMermaid, + renderFileLevelNeo4jCSV, + renderFunctionLevelDOT, + renderFunctionLevelGraphML, + renderFunctionLevelMermaid, + renderFunctionLevelNeo4jCSV, +} from './presentation/export.js'; const DEFAULT_MIN_CONFIDENCE = 0.5; -/** Escape special XML characters. */ -function escapeXml(s) { - return String(s) - .replace(/&/g, '&') - .replace(//g, '>') - .replace(/"/g, '"') - .replace(/'/g, '''); -} +// ─── Shared data loaders ───────────────────────────────────────────── -/** RFC 4180 CSV field escaping — quote fields containing commas, quotes, or newlines. */ -function escapeCsv(s) { - const str = String(s); - if (str.includes(',') || str.includes('"') || str.includes('\n') || str.includes('\r')) { - return `"${str.replace(/"/g, '""')}"`; - } - return str; +/** + * Load file-level edges from DB with filtering. + * @param {object} db + * @param {object} opts + * @param {boolean} [opts.includeKind] - Include edge_kind in SELECT DISTINCT + * @param {boolean} [opts.includeConfidence] - Include confidence (adds a column to DISTINCT — use only when needed) + * @returns {{ edges: Array, totalEdges: number }} + */ +function loadFileLevelEdges( + db, + { noTests, minConfidence, limit, includeKind = false, includeConfidence = false }, +) { + const minConf = minConfidence ?? DEFAULT_MIN_CONFIDENCE; + const kindClause = includeKind ? ', e.kind AS edge_kind' : ''; + const confidenceClause = includeConfidence ? 
', e.confidence' : ''; + let edges = db + .prepare( + ` + SELECT DISTINCT n1.file AS source, n2.file AS target${kindClause}${confidenceClause} + FROM edges e + JOIN nodes n1 ON e.source_id = n1.id + JOIN nodes n2 ON e.target_id = n2.id + WHERE n1.file != n2.file AND e.kind IN ('imports', 'imports-type', 'calls') + AND e.confidence >= ? + `, + ) + .all(minConf); + if (noTests) edges = edges.filter((e) => !isTestFile(e.source) && !isTestFile(e.target)); + const totalEdges = edges.length; + if (limit && edges.length > limit) edges = edges.slice(0, limit); + return { edges, totalEdges }; } /** - * Export the dependency graph in DOT (Graphviz) format. + * Load function-level edges from DB with filtering. + * Returns the maximal field set needed by any serializer. + * @returns {{ edges: Array, totalEdges: number }} */ -export function exportDOT(db, opts = {}) { - const fileLevel = opts.fileLevel !== false; - const noTests = opts.noTests || false; - const minConf = opts.minConfidence ?? DEFAULT_MIN_CONFIDENCE; - const edgeLimit = opts.limit; - const lines = [ - 'digraph codegraph {', - ' rankdir=LR;', - ' node [shape=box, fontname="monospace", fontsize=10];', - ' edge [color="#666666"];', - '', - ]; - - if (fileLevel) { - let edges = db - .prepare(` - SELECT DISTINCT n1.file AS source, n2.file AS target +function loadFunctionLevelEdges(db, { noTests, minConfidence, limit }) { + const minConf = minConfidence ?? 
DEFAULT_MIN_CONFIDENCE; + let edges = db + .prepare( + ` + SELECT n1.id AS source_id, n1.name AS source_name, n1.kind AS source_kind, + n1.file AS source_file, n1.line AS source_line, n1.role AS source_role, + n2.id AS target_id, n2.name AS target_name, n2.kind AS target_kind, + n2.file AS target_file, n2.line AS target_line, n2.role AS target_role, + e.kind AS edge_kind, e.confidence FROM edges e JOIN nodes n1 ON e.source_id = n1.id JOIN nodes n2 ON e.target_id = n2.id - WHERE n1.file != n2.file AND e.kind IN ('imports', 'imports-type', 'calls') + WHERE n1.kind IN ('function', 'method', 'class', 'interface', 'type', 'struct', 'enum', 'trait', 'record', 'module') + AND n2.kind IN ('function', 'method', 'class', 'interface', 'type', 'struct', 'enum', 'trait', 'record', 'module') + AND e.kind = 'calls' AND e.confidence >= ? - `) - .all(minConf); - if (noTests) edges = edges.filter((e) => !isTestFile(e.source) && !isTestFile(e.target)); - const totalFileEdges = edges.length; - if (edgeLimit && edges.length > edgeLimit) edges = edges.slice(0, edgeLimit); + `, + ) + .all(minConf); + if (noTests) + edges = edges.filter((e) => !isTestFile(e.source_file) && !isTestFile(e.target_file)); + const totalEdges = edges.length; + if (limit && edges.length > limit) edges = edges.slice(0, limit); + return { edges, totalEdges }; +} - // Try to use directory nodes from DB (built by structure analysis) - const hasDirectoryNodes = - db.prepare("SELECT COUNT(*) as c FROM nodes WHERE kind = 'directory'").get().c > 0; +/** + * Load directory groupings for file-level graphs. + * Uses DB directory nodes if available, falls back to path.dirname(). 
+ * @returns {Array<{ name: string, files: Array<{ path: string, basename: string }>, cohesion: number|null }>} + */ +function loadDirectoryGroups(db, allFiles) { + const hasDirectoryNodes = + db.prepare("SELECT COUNT(*) as c FROM nodes WHERE kind = 'directory'").get().c > 0; - const dirs = new Map(); - const allFiles = new Set(); - for (const { source, target } of edges) { - allFiles.add(source); - allFiles.add(target); - } + const dirs = new Map(); - if (hasDirectoryNodes) { - // Use DB directory structure with cohesion labels - const dbDirs = db + if (hasDirectoryNodes) { + const dbDirs = db + .prepare(` + SELECT n.id, n.name, nm.cohesion + FROM nodes n + LEFT JOIN node_metrics nm ON n.id = nm.node_id + WHERE n.kind = 'directory' + `) + .all(); + + for (const d of dbDirs) { + const containedFiles = db .prepare(` - SELECT n.id, n.name, nm.cohesion - FROM nodes n - LEFT JOIN node_metrics nm ON n.id = nm.node_id - WHERE n.kind = 'directory' + SELECT n.name FROM edges e + JOIN nodes n ON e.target_id = n.id + WHERE e.source_id = ? AND e.kind = 'contains' AND n.kind = 'file' `) - .all(); - - for (const d of dbDirs) { - const containedFiles = db - .prepare(` - SELECT n.name FROM edges e - JOIN nodes n ON e.target_id = n.id - WHERE e.source_id = ? 
AND e.kind = 'contains' AND n.kind = 'file' - `) - .all(d.id) - .map((r) => r.name) - .filter((f) => allFiles.has(f)); - - if (containedFiles.length > 0) { - dirs.set(d.name, { files: containedFiles, cohesion: d.cohesion }); - } - } - } else { - // Fallback: reconstruct from path.dirname() - for (const file of allFiles) { - const dir = path.dirname(file) || '.'; - if (!dirs.has(dir)) dirs.set(dir, { files: [], cohesion: null }); - dirs.get(dir).files.push(file); - } - } + .all(d.id) + .map((r) => r.name) + .filter((f) => allFiles.has(f)); - let clusterIdx = 0; - for (const [dir, info] of [...dirs].sort((a, b) => a[0].localeCompare(b[0]))) { - lines.push(` subgraph cluster_${clusterIdx++} {`); - const cohLabel = info.cohesion !== null ? ` (cohesion: ${info.cohesion.toFixed(2)})` : ''; - lines.push(` label="${dir}${cohLabel}";`); - lines.push(` style=dashed;`); - lines.push(` color="#999999";`); - for (const f of info.files) { - const label = path.basename(f); - lines.push(` "${f}" [label="${label}"];`); + if (containedFiles.length > 0) { + dirs.set(d.name, { files: containedFiles, cohesion: d.cohesion ?? null }); } - lines.push(` }`); - lines.push(''); - } - - for (const { source, target } of edges) { - lines.push(` "${source}" -> "${target}";`); - } - if (edgeLimit && totalFileEdges > edgeLimit) { - lines.push(` // Truncated: showing ${edges.length} of ${totalFileEdges} edges`); } } else { - let edges = db - .prepare(` - SELECT n1.name AS source_name, n1.kind AS source_kind, n1.file AS source_file, - n2.name AS target_name, n2.kind AS target_kind, n2.file AS target_file, - e.kind AS edge_kind - FROM edges e - JOIN nodes n1 ON e.source_id = n1.id - JOIN nodes n2 ON e.target_id = n2.id - WHERE n1.kind IN ('function', 'method', 'class', 'interface', 'type', 'struct', 'enum', 'trait', 'record', 'module') AND n2.kind IN ('function', 'method', 'class', 'interface', 'type', 'struct', 'enum', 'trait', 'record', 'module') - AND e.kind = 'calls' - AND e.confidence >= ? 
- `) - .all(minConf); - if (noTests) - edges = edges.filter((e) => !isTestFile(e.source_file) && !isTestFile(e.target_file)); - const totalFnEdges = edges.length; - if (edgeLimit && edges.length > edgeLimit) edges = edges.slice(0, edgeLimit); - - for (const e of edges) { - const sId = `${e.source_file}:${e.source_name}`.replace(/[^a-zA-Z0-9_]/g, '_'); - const tId = `${e.target_file}:${e.target_name}`.replace(/[^a-zA-Z0-9_]/g, '_'); - lines.push(` ${sId} [label="${e.source_name}\\n${path.basename(e.source_file)}"];`); - lines.push(` ${tId} [label="${e.target_name}\\n${path.basename(e.target_file)}"];`); - lines.push(` ${sId} -> ${tId};`); - } - if (edgeLimit && totalFnEdges > edgeLimit) { - lines.push(` // Truncated: showing ${edges.length} of ${totalFnEdges} edges`); + for (const file of allFiles) { + const dir = path.dirname(file) || '.'; + if (!dirs.has(dir)) dirs.set(dir, { files: [], cohesion: null }); + dirs.get(dir).files.push(file); } } - lines.push('}'); - return lines.join('\n'); + return [...dirs] + .sort((a, b) => a[0].localeCompare(b[0])) + .map(([name, info]) => ({ + name, + files: info.files.map((f) => ({ path: f, basename: path.basename(f) })), + cohesion: info.cohesion, + })); } -/** Escape double quotes for Mermaid labels. */ -function escapeLabel(label) { - return label.replace(/"/g, '#quot;'); +/** + * Load directory groupings for Mermaid file-level graphs (simplified — no cohesion, string arrays). + */ +function loadMermaidDirectoryGroups(db, allFiles) { + const hasDirectoryNodes = + db.prepare("SELECT COUNT(*) as c FROM nodes WHERE kind = 'directory'").get().c > 0; + + const dirs = new Map(); + + if (hasDirectoryNodes) { + const dbDirs = db.prepare("SELECT id, name FROM nodes WHERE kind = 'directory'").all(); + for (const d of dbDirs) { + const containedFiles = db + .prepare(` + SELECT n.name FROM edges e + JOIN nodes n ON e.target_id = n.id + WHERE e.source_id = ? 
AND e.kind = 'contains' AND n.kind = 'file' + `) + .all(d.id) + .map((r) => r.name) + .filter((f) => allFiles.has(f)); + if (containedFiles.length > 0) dirs.set(d.name, containedFiles); + } + } else { + for (const file of allFiles) { + const dir = path.dirname(file) || '.'; + if (!dirs.has(dir)) dirs.set(dir, []); + dirs.get(dir).push(file); + } + } + + return [...dirs] + .sort((a, b) => a[0].localeCompare(b[0])) + .map(([name, files]) => ({ name, files })); } -/** Map node kind to Mermaid shape wrapper. */ -function mermaidShape(kind, label) { - const escaped = escapeLabel(label); - switch (kind) { - case 'function': - case 'method': - return `(["${escaped}"])`; - case 'class': - case 'interface': - case 'type': - case 'struct': - case 'enum': - case 'trait': - case 'record': - return `{{"${escaped}"}}`; - case 'module': - return `[["${escaped}"]]`; - default: - return `["${escaped}"]`; +/** + * Load node roles for Mermaid function-level styling. + * @returns {Map} "file::name" → role + */ +function loadNodeRoles(db, edges) { + const roles = new Map(); + const seen = new Set(); + for (const e of edges) { + for (const [file, name] of [ + [e.source_file, e.source_name], + [e.target_file, e.target_name], + ]) { + const key = `${file}::${name}`; + if (seen.has(key)) continue; + seen.add(key); + const row = db + .prepare('SELECT role FROM nodes WHERE file = ? AND name = ? AND role IS NOT NULL LIMIT 1') + .get(file, name); + if (row?.role) roles.set(key, row.role); + } } + return roles; } -/** Map node role to Mermaid style colors. */ -const ROLE_STYLES = { - entry: 'fill:#e8f5e9,stroke:#4caf50', - core: 'fill:#e3f2fd,stroke:#2196f3', - utility: 'fill:#f5f5f5,stroke:#9e9e9e', - dead: 'fill:#ffebee,stroke:#f44336', - leaf: 'fill:#fffde7,stroke:#fdd835', -}; +// ─── Public API ────────────────────────────────────────────────────── /** - * Export the dependency graph in Mermaid format. + * Export the dependency graph in DOT (Graphviz) format. 
*/ -export function exportMermaid(db, opts = {}) { +export function exportDOT(db, opts = {}) { const fileLevel = opts.fileLevel !== false; const noTests = opts.noTests || false; - const minConf = opts.minConfidence ?? DEFAULT_MIN_CONFIDENCE; - const direction = opts.direction || 'LR'; - const edgeLimit = opts.limit; - const lines = [`flowchart ${direction}`]; - - let nodeCounter = 0; - const nodeIdMap = new Map(); - function nodeId(key) { - if (!nodeIdMap.has(key)) nodeIdMap.set(key, `n${nodeCounter++}`); - return nodeIdMap.get(key); - } + const minConfidence = opts.minConfidence; + const limit = opts.limit; if (fileLevel) { - let edges = db - .prepare(` - SELECT DISTINCT n1.file AS source, n2.file AS target, e.kind AS edge_kind - FROM edges e - JOIN nodes n1 ON e.source_id = n1.id - JOIN nodes n2 ON e.target_id = n2.id - WHERE n1.file != n2.file AND e.kind IN ('imports', 'imports-type', 'calls') - AND e.confidence >= ? - `) - .all(minConf); - if (noTests) edges = edges.filter((e) => !isTestFile(e.source) && !isTestFile(e.target)); - const totalMermaidFileEdges = edges.length; - if (edgeLimit && edges.length > edgeLimit) edges = edges.slice(0, edgeLimit); - - // Collect all files referenced in edges + const { edges, totalEdges } = loadFileLevelEdges(db, { noTests, minConfidence, limit }); const allFiles = new Set(); for (const { source, target } of edges) { allFiles.add(source); allFiles.add(target); } + const dirs = loadDirectoryGroups(db, allFiles); + return renderFileLevelDOT({ dirs, edges, totalEdges, limit }); + } - // Build directory groupings — try DB directory nodes first, fall back to path.dirname() - const dirs = new Map(); - const hasDirectoryNodes = - db.prepare("SELECT COUNT(*) as c FROM nodes WHERE kind = 'directory'").get().c > 0; - - if (hasDirectoryNodes) { - const dbDirs = db.prepare("SELECT id, name FROM nodes WHERE kind = 'directory'").all(); - for (const d of dbDirs) { - const containedFiles = db - .prepare(` - SELECT n.name FROM edges e - JOIN 
nodes n ON e.target_id = n.id - WHERE e.source_id = ? AND e.kind = 'contains' AND n.kind = 'file' - `) - .all(d.id) - .map((r) => r.name) - .filter((f) => allFiles.has(f)); - if (containedFiles.length > 0) dirs.set(d.name, containedFiles); - } - } else { - for (const file of allFiles) { - const dir = path.dirname(file) || '.'; - if (!dirs.has(dir)) dirs.set(dir, []); - dirs.get(dir).push(file); - } - } - - // Emit subgraphs - for (const [dir, files] of [...dirs].sort((a, b) => a[0].localeCompare(b[0]))) { - const sgId = dir.replace(/[^a-zA-Z0-9]/g, '_'); - lines.push(` subgraph ${sgId}["${escapeLabel(dir)}"]`); - for (const f of files) { - const nId = nodeId(f); - lines.push(` ${nId}["${escapeLabel(path.basename(f))}"]`); - } - lines.push(' end'); - } - - // Deduplicate edges per source-target pair, collecting all distinct kinds - const edgeMap = new Map(); - for (const { source, target, edge_kind } of edges) { - const key = `${source}|${target}`; - const label = edge_kind === 'imports-type' ? 
'imports' : edge_kind; - if (!edgeMap.has(key)) edgeMap.set(key, { source, target, labels: new Set() }); - edgeMap.get(key).labels.add(label); - } - - for (const { source, target, labels } of edgeMap.values()) { - lines.push(` ${nodeId(source)} -->|${[...labels].join(', ')}| ${nodeId(target)}`); - } - if (edgeLimit && totalMermaidFileEdges > edgeLimit) { - lines.push(` %% Truncated: showing ${edges.length} of ${totalMermaidFileEdges} edges`); - } - } else { - let edges = db - .prepare(` - SELECT n1.name AS source_name, n1.kind AS source_kind, n1.file AS source_file, - n2.name AS target_name, n2.kind AS target_kind, n2.file AS target_file, - e.kind AS edge_kind - FROM edges e - JOIN nodes n1 ON e.source_id = n1.id - JOIN nodes n2 ON e.target_id = n2.id - WHERE n1.kind IN ('function', 'method', 'class', 'interface', 'type', 'struct', 'enum', 'trait', 'record', 'module') - AND n2.kind IN ('function', 'method', 'class', 'interface', 'type', 'struct', 'enum', 'trait', 'record', 'module') - AND e.kind = 'calls' - AND e.confidence >= ? 
- `) - .all(minConf); - if (noTests) - edges = edges.filter((e) => !isTestFile(e.source_file) && !isTestFile(e.target_file)); - const totalMermaidFnEdges = edges.length; - if (edgeLimit && edges.length > edgeLimit) edges = edges.slice(0, edgeLimit); - - // Group nodes by file for subgraphs - const fileNodes = new Map(); - const nodeKinds = new Map(); - for (const e of edges) { - const sKey = `${e.source_file}::${e.source_name}`; - const tKey = `${e.target_file}::${e.target_name}`; - nodeId(sKey); - nodeId(tKey); - nodeKinds.set(sKey, e.source_kind); - nodeKinds.set(tKey, e.target_kind); - - if (!fileNodes.has(e.source_file)) fileNodes.set(e.source_file, new Map()); - fileNodes.get(e.source_file).set(sKey, e.source_name); - - if (!fileNodes.has(e.target_file)) fileNodes.set(e.target_file, new Map()); - fileNodes.get(e.target_file).set(tKey, e.target_name); - } - - // Emit subgraphs grouped by file - for (const [file, nodes] of [...fileNodes].sort((a, b) => a[0].localeCompare(b[0]))) { - const sgId = file.replace(/[^a-zA-Z0-9]/g, '_'); - lines.push(` subgraph ${sgId}["${escapeLabel(file)}"]`); - for (const [key, name] of nodes) { - const kind = nodeKinds.get(key); - lines.push(` ${nodeId(key)}${mermaidShape(kind, name)}`); - } - lines.push(' end'); - } + const { edges, totalEdges } = loadFunctionLevelEdges(db, { noTests, minConfidence, limit }); + return renderFunctionLevelDOT({ edges, totalEdges, limit }); +} - // Emit edges with labels - for (const e of edges) { - const sId = nodeId(`${e.source_file}::${e.source_name}`); - const tId = nodeId(`${e.target_file}::${e.target_name}`); - lines.push(` ${sId} -->|${e.edge_kind}| ${tId}`); - } - if (edgeLimit && totalMermaidFnEdges > edgeLimit) { - lines.push(` %% Truncated: showing ${edges.length} of ${totalMermaidFnEdges} edges`); - } +/** + * Export the dependency graph in Mermaid format. 
+ */ +export function exportMermaid(db, opts = {}) { + const fileLevel = opts.fileLevel !== false; + const noTests = opts.noTests || false; + const minConfidence = opts.minConfidence; + const direction = opts.direction || 'LR'; + const limit = opts.limit; - // Role styling — query roles for all referenced nodes - const allKeys = [...nodeIdMap.keys()]; - const roleStyles = []; - for (const key of allKeys) { - const colonIdx = key.indexOf('::'); - if (colonIdx === -1) continue; - const file = key.slice(0, colonIdx); - const name = key.slice(colonIdx + 2); - const row = db - .prepare('SELECT role FROM nodes WHERE file = ? AND name = ? AND role IS NOT NULL LIMIT 1') - .get(file, name); - if (row?.role && ROLE_STYLES[row.role]) { - roleStyles.push(` style ${nodeIdMap.get(key)} ${ROLE_STYLES[row.role]}`); - } + if (fileLevel) { + const { edges, totalEdges } = loadFileLevelEdges(db, { + noTests, + minConfidence, + limit, + includeKind: true, + }); + const allFiles = new Set(); + for (const { source, target } of edges) { + allFiles.add(source); + allFiles.add(target); } - lines.push(...roleStyles); + const dirs = loadMermaidDirectoryGroups(db, allFiles); + return renderFileLevelMermaid({ direction, dirs, edges, totalEdges, limit }); } - return lines.join('\n'); + const { edges, totalEdges } = loadFunctionLevelEdges(db, { noTests, minConfidence, limit }); + const roles = loadNodeRoles(db, edges); + return renderFunctionLevelMermaid({ direction, edges, roles, totalEdges, limit }); } /** @@ -400,129 +286,16 @@ export function exportJSON(db, opts = {}) { export function exportGraphML(db, opts = {}) { const fileLevel = opts.fileLevel !== false; const noTests = opts.noTests || false; - const minConf = opts.minConfidence ?? 
DEFAULT_MIN_CONFIDENCE; - const edgeLimit = opts.limit; - - const lines = [ - '', - '', - ]; + const minConfidence = opts.minConfidence; + const limit = opts.limit; if (fileLevel) { - lines.push(' '); - lines.push(' '); - lines.push(' '); - lines.push(' '); - - let edges = db - .prepare(` - SELECT DISTINCT n1.file AS source, n2.file AS target - FROM edges e - JOIN nodes n1 ON e.source_id = n1.id - JOIN nodes n2 ON e.target_id = n2.id - WHERE n1.file != n2.file AND e.kind IN ('imports', 'imports-type', 'calls') - AND e.confidence >= ? - `) - .all(minConf); - if (noTests) edges = edges.filter((e) => !isTestFile(e.source) && !isTestFile(e.target)); - if (edgeLimit && edges.length > edgeLimit) edges = edges.slice(0, edgeLimit); - - const files = new Set(); - for (const { source, target } of edges) { - files.add(source); - files.add(target); - } - - const fileIds = new Map(); - let nIdx = 0; - for (const f of files) { - const id = `n${nIdx++}`; - fileIds.set(f, id); - lines.push(` `); - lines.push(` ${escapeXml(path.basename(f))}`); - lines.push(` ${escapeXml(f)}`); - lines.push(' '); - } - - let eIdx = 0; - for (const { source, target } of edges) { - lines.push( - ` `, - ); - lines.push(' imports'); - lines.push(' '); - } - } else { - lines.push(' '); - lines.push(' '); - lines.push(' '); - lines.push(' '); - lines.push(' '); - lines.push(' '); - lines.push(' '); - lines.push(' '); - - let edges = db - .prepare(` - SELECT n1.id AS source_id, n1.name AS source_name, n1.kind AS source_kind, - n1.file AS source_file, n1.line AS source_line, n1.role AS source_role, - n2.id AS target_id, n2.name AS target_name, n2.kind AS target_kind, - n2.file AS target_file, n2.line AS target_line, n2.role AS target_role, - e.kind AS edge_kind, e.confidence - FROM edges e - JOIN nodes n1 ON e.source_id = n1.id - JOIN nodes n2 ON e.target_id = n2.id - WHERE n1.kind IN ('function', 'method', 'class', 'interface', 'type', 'struct', 'enum', 'trait', 'record', 'module') - AND n2.kind IN 
('function', 'method', 'class', 'interface', 'type', 'struct', 'enum', 'trait', 'record', 'module') - AND e.kind = 'calls' - AND e.confidence >= ? - `) - .all(minConf); - if (noTests) - edges = edges.filter((e) => !isTestFile(e.source_file) && !isTestFile(e.target_file)); - if (edgeLimit && edges.length > edgeLimit) edges = edges.slice(0, edgeLimit); - - const emittedNodes = new Set(); - function emitNode(id, name, kind, file, line, role) { - if (emittedNodes.has(id)) return; - emittedNodes.add(id); - lines.push(` `); - lines.push(` ${escapeXml(name)}`); - lines.push(` ${escapeXml(kind)}`); - lines.push(` ${escapeXml(file)}`); - lines.push(` ${line}`); - if (role) lines.push(` ${escapeXml(role)}`); - lines.push(' '); - } - - let eIdx = 0; - for (const e of edges) { - emitNode( - e.source_id, - e.source_name, - e.source_kind, - e.source_file, - e.source_line, - e.source_role, - ); - emitNode( - e.target_id, - e.target_name, - e.target_kind, - e.target_file, - e.target_line, - e.target_role, - ); - lines.push(` `); - lines.push(` ${escapeXml(e.edge_kind)}`); - lines.push(` ${e.confidence}`); - lines.push(' '); - } + const { edges } = loadFileLevelEdges(db, { noTests, minConfidence, limit }); + return renderFileLevelGraphML({ edges }); } - lines.push(' '); - lines.push(''); - return lines.join('\n'); + const { edges } = loadFunctionLevelEdges(db, { noTests, minConfidence, limit }); + return renderFunctionLevelGraphML({ edges }); } /** @@ -586,96 +359,20 @@ export function exportGraphSON(db, opts = {}) { export function exportNeo4jCSV(db, opts = {}) { const fileLevel = opts.fileLevel !== false; const noTests = opts.noTests || false; - const minConf = opts.minConfidence ?? 
DEFAULT_MIN_CONFIDENCE; - const edgeLimit = opts.limit; + const minConfidence = opts.minConfidence; + const limit = opts.limit; if (fileLevel) { - let edges = db - .prepare(` - SELECT DISTINCT n1.file AS source, n2.file AS target, e.kind, e.confidence - FROM edges e - JOIN nodes n1 ON e.source_id = n1.id - JOIN nodes n2 ON e.target_id = n2.id - WHERE n1.file != n2.file AND e.kind IN ('imports', 'imports-type', 'calls') - AND e.confidence >= ? - `) - .all(minConf); - if (noTests) edges = edges.filter((e) => !isTestFile(e.source) && !isTestFile(e.target)); - if (edgeLimit && edges.length > edgeLimit) edges = edges.slice(0, edgeLimit); - - const files = new Map(); - let idx = 0; - for (const { source, target } of edges) { - if (!files.has(source)) files.set(source, idx++); - if (!files.has(target)) files.set(target, idx++); - } - - const nodeLines = ['nodeId:ID,name,file:string,:LABEL']; - for (const [file, id] of files) { - nodeLines.push(`${id},${escapeCsv(path.basename(file))},${escapeCsv(file)},File`); - } - - const relLines = [':START_ID,:END_ID,:TYPE,confidence:float']; - for (const e of edges) { - const edgeType = e.kind.toUpperCase().replace(/-/g, '_'); - relLines.push(`${files.get(e.source)},${files.get(e.target)},${edgeType},${e.confidence}`); - } - - return { nodes: nodeLines.join('\n'), relationships: relLines.join('\n') }; - } - - let edges = db - .prepare(` - SELECT n1.id AS source_id, n1.name AS source_name, n1.kind AS source_kind, - n1.file AS source_file, n1.line AS source_line, n1.role AS source_role, - n2.id AS target_id, n2.name AS target_name, n2.kind AS target_kind, - n2.file AS target_file, n2.line AS target_line, n2.role AS target_role, - e.kind AS edge_kind, e.confidence - FROM edges e - JOIN nodes n1 ON e.source_id = n1.id - JOIN nodes n2 ON e.target_id = n2.id - WHERE n1.kind IN ('function', 'method', 'class', 'interface', 'type', 'struct', 'enum', 'trait', 'record', 'module') - AND n2.kind IN ('function', 'method', 'class', 'interface', 
'type', 'struct', 'enum', 'trait', 'record', 'module') - AND e.kind = 'calls' - AND e.confidence >= ? - `) - .all(minConf); - if (noTests) - edges = edges.filter((e) => !isTestFile(e.source_file) && !isTestFile(e.target_file)); - if (edgeLimit && edges.length > edgeLimit) edges = edges.slice(0, edgeLimit); - - const emitted = new Set(); - const nodeLines = ['nodeId:ID,name,kind,file:string,line:int,role,:LABEL']; - function emitNode(id, name, kind, file, line, role) { - if (emitted.has(id)) return; - emitted.add(id); - const label = kind.charAt(0).toUpperCase() + kind.slice(1); - nodeLines.push( - `${id},${escapeCsv(name)},${escapeCsv(kind)},${escapeCsv(file)},${line},${escapeCsv(role || '')},${label}`, - ); - } - - const relLines = [':START_ID,:END_ID,:TYPE,confidence:float']; - for (const e of edges) { - emitNode( - e.source_id, - e.source_name, - e.source_kind, - e.source_file, - e.source_line, - e.source_role, - ); - emitNode( - e.target_id, - e.target_name, - e.target_kind, - e.target_file, - e.target_line, - e.target_role, - ); - const edgeType = e.edge_kind.toUpperCase().replace(/-/g, '_'); - relLines.push(`${e.source_id},${e.target_id},${edgeType},${e.confidence}`); + const { edges } = loadFileLevelEdges(db, { + noTests, + minConfidence, + limit, + includeKind: true, + includeConfidence: true, + }); + return renderFileLevelNeo4jCSV({ edges }); } - return { nodes: nodeLines.join('\n'), relationships: relLines.join('\n') }; + const { edges } = loadFunctionLevelEdges(db, { noTests, minConfidence, limit }); + return renderFunctionLevelNeo4jCSV({ edges }); } diff --git a/src/infrastructure/result-formatter.js b/src/infrastructure/result-formatter.js index 98aa8ea1..92f7daec 100644 --- a/src/infrastructure/result-formatter.js +++ b/src/infrastructure/result-formatter.js @@ -1,21 +1,2 @@ -import { printNdjson } from '../paginate.js'; - -/** - * Shared JSON / NDJSON output dispatch for CLI wrappers. 
- * - * @param {object} data - Result object from a *Data() function - * @param {string} field - Array field name for NDJSON streaming (e.g. 'results') - * @param {object} opts - CLI options ({ json?, ndjson? }) - * @returns {boolean} true if output was handled (caller should return early) - */ -export function outputResult(data, field, opts) { - if (opts.ndjson) { - printNdjson(data, field); - return true; - } - if (opts.json) { - console.log(JSON.stringify(data, null, 2)); - return true; - } - return false; -} +// Re-export from presentation layer — this file exists for backward compatibility. +export { outputResult } from '../presentation/result-formatter.js'; diff --git a/src/presentation/colors.js b/src/presentation/colors.js new file mode 100644 index 00000000..5bfc0da9 --- /dev/null +++ b/src/presentation/colors.js @@ -0,0 +1,44 @@ +/** + * Shared color constants for the graph viewer. + * + * These live in a standalone module so both the domain layer (src/viewer.js) + * and the presentation layer (src/presentation/viewer.js) can import them + * without creating a cross-layer dependency. + */ + +export const DEFAULT_NODE_COLORS = { + function: '#4CAF50', + method: '#66BB6A', + class: '#2196F3', + interface: '#42A5F5', + type: '#7E57C2', + struct: '#FF7043', + enum: '#FFA726', + trait: '#26A69A', + record: '#EC407A', + module: '#78909C', + file: '#90A4AE', +}; + +export const DEFAULT_ROLE_COLORS = { + entry: '#e8f5e9', + core: '#e3f2fd', + utility: '#f5f5f5', + dead: '#ffebee', + leaf: '#fffde7', +}; + +export const COMMUNITY_COLORS = [ + '#4CAF50', + '#2196F3', + '#FF9800', + '#9C27B0', + '#F44336', + '#00BCD4', + '#CDDC39', + '#E91E63', + '#3F51B5', + '#FF5722', + '#009688', + '#795548', +]; diff --git a/src/presentation/export.js b/src/presentation/export.js new file mode 100644 index 00000000..6cb9b8ad --- /dev/null +++ b/src/presentation/export.js @@ -0,0 +1,444 @@ +/** + * Graph export serializers — pure data → formatted string transforms. 
import path from 'node:path';

// ─── Escape Helpers ──────────────────────────────────────────────────

/**
 * Escape the five XML-special characters so a value is safe inside XML
 * text nodes and attribute values. `&` is replaced first so later
 * entities are not double-escaped.
 *
 * NOTE(review): the replacement strings in this copy were destroyed by
 * entity unescaping; restored to the standard XML entities.
 *
 * @param {*} s - Value to escape (coerced to string)
 * @returns {string}
 */
export function escapeXml(s) {
  return String(s)
    .replace(/&/g, '&amp;')
    .replace(/</g, '&lt;')
    .replace(/>/g, '&gt;')
    .replace(/"/g, '&quot;')
    .replace(/'/g, '&apos;');
}

/**
 * RFC 4180 CSV field escaping — fields containing commas, double quotes,
 * or CR/LF are wrapped in quotes, with embedded quotes doubled.
 *
 * @param {*} s - Value to escape (coerced to string)
 * @returns {string}
 */
export function escapeCsv(s) {
  const str = String(s);
  if (str.includes(',') || str.includes('"') || str.includes('\n') || str.includes('\r')) {
    return `"${str.replaceAll('"', '""')}"`;
  }
  return str;
}

/**
 * Escape double quotes for Mermaid labels (Mermaid renders `#quot;` as a
 * literal double quote inside a quoted label).
 */
export function escapeLabel(label) {
  return label.replace(/"/g, '#quot;');
}

/**
 * Map a node kind to its Mermaid node-shape wrapper around the label:
 * stadium for callables, hexagon for type-like kinds, subroutine for
 * modules, plain box otherwise.
 *
 * @param {string} kind - Node kind (function, class, module, …)
 * @param {string} label - Display label (escaped internally)
 * @returns {string}
 */
export function mermaidShape(kind, label) {
  const escaped = escapeLabel(label);
  switch (kind) {
    case 'function':
    case 'method':
      return `(["${escaped}"])`;
    case 'class':
    case 'interface':
    case 'type':
    case 'struct':
    case 'enum':
    case 'trait':
    case 'record':
      return `{{"${escaped}"}}`;
    case 'module':
      return `[["${escaped}"]]`;
    default:
      return `["${escaped}"]`;
  }
}

/** Mermaid `style` directives keyed by node role. */
export const ROLE_STYLES = {
  entry: 'fill:#e8f5e9,stroke:#4caf50',
  core: 'fill:#e3f2fd,stroke:#2196f3',
  utility: 'fill:#f5f5f5,stroke:#9e9e9e',
  dead: 'fill:#ffebee,stroke:#f44336',
  leaf: 'fill:#fffde7,stroke:#fdd835',
};
/**
 * Render file-level graph data as DOT (Graphviz) format.
 *
 * Files are grouped into one dashed cluster per directory; the cluster
 * label carries the directory's cohesion score when available.
 *
 * @param {{ dirs: Array<{ name: string, files: Array<{ path: string, basename: string }>, cohesion: number|null }>, edges: Array<{ source: string, target: string }>, totalEdges: number, limit?: number }} data
 * @returns {string}
 */
export function renderFileLevelDOT(data) {
  const out = [
    'digraph codegraph {',
    '  rankdir=LR;',
    '  node [shape=box, fontname="monospace", fontsize=10];',
    '  edge [color="#666666"];',
    '',
  ];

  data.dirs.forEach((dir, clusterIdx) => {
    // Cohesion annotation is omitted when the metric was not computed.
    const cohesion = dir.cohesion === null ? '' : ` (cohesion: ${dir.cohesion.toFixed(2)})`;
    out.push(`  subgraph cluster_${clusterIdx} {`);
    out.push(`    label="${dir.name}${cohesion}";`);
    out.push('    style=dashed;');
    out.push('    color="#999999";');
    for (const file of dir.files) {
      out.push(`    "${file.path}" [label="${file.basename}"];`);
    }
    out.push('  }');
    out.push('');
  });

  for (const { source, target } of data.edges) {
    out.push(`  "${source}" -> "${target}";`);
  }
  if (data.limit && data.totalEdges > data.limit) {
    out.push(`  // Truncated: showing ${data.edges.length} of ${data.totalEdges} edges`);
  }

  out.push('}');
  return out.join('\n');
}
/**
 * Render function-level graph data as DOT (Graphviz) format.
 *
 * Node ids are derived from `file:name` with non-identifier characters
 * replaced by underscores; each node is declared once, on first sight.
 *
 * @param {{ edges: Array<{ source_name: string, source_file: string, target_name: string, target_file: string }>, totalEdges: number, limit?: number }} data
 * @returns {string}
 */
export function renderFunctionLevelDOT(data) {
  const sanitize = (s) => s.replace(/[^a-zA-Z0-9_]/g, '_');
  const out = [
    'digraph codegraph {',
    '  rankdir=LR;',
    '  node [shape=box, fontname="monospace", fontsize=10];',
    '  edge [color="#666666"];',
    '',
  ];

  const declared = new Set();
  // Emit a node declaration the first time an id is seen; label shows the
  // symbol name over its file's basename (literal \n inside the DOT label).
  const declare = (id, name, file) => {
    if (declared.has(id)) return;
    declared.add(id);
    out.push(`  ${id} [label="${name}\\n${path.basename(file)}"];`);
  };

  for (const edge of data.edges) {
    const from = sanitize(`${edge.source_file}:${edge.source_name}`);
    const to = sanitize(`${edge.target_file}:${edge.target_name}`);
    declare(from, edge.source_name, edge.source_file);
    declare(to, edge.target_name, edge.target_file);
    out.push(`  ${from} -> ${to};`);
  }
  if (data.limit && data.totalEdges > data.limit) {
    out.push(`  // Truncated: showing ${data.edges.length} of ${data.totalEdges} edges`);
  }

  out.push('}');
  return out.join('\n');
}
/**
 * Render file-level graph data as a Mermaid flowchart.
 *
 * Files are grouped into one subgraph per directory; parallel edges
 * between the same source/target pair are merged into a single edge whose
 * label lists every distinct edge kind.
 *
 * @param {{ direction: string, dirs: Array<{ name: string, files: string[] }>, edges: Array<{ source: string, target: string, edge_kind: string }>, totalEdges: number, limit?: number }} data
 * @returns {string}
 */
export function renderFileLevelMermaid(data) {
  const out = [`flowchart ${data.direction || 'LR'}`];

  // Lazily assign compact node ids (n0, n1, …) in first-seen order.
  const ids = new Map();
  const idOf = (key) => {
    if (!ids.has(key)) ids.set(key, `n${ids.size}`);
    return ids.get(key);
  };

  // One subgraph per directory.
  for (const dir of data.dirs) {
    const subgraphId = dir.name.replace(/[^a-zA-Z0-9]/g, '_');
    out.push(`  subgraph ${subgraphId}["${escapeLabel(dir.name)}"]`);
    for (const file of dir.files) {
      out.push(`    ${idOf(file)}["${escapeLabel(path.basename(file))}"]`);
    }
    out.push('  end');
  }

  // Merge duplicate source→target edges, collecting every distinct kind;
  // 'imports-type' is folded into plain 'imports' for display.
  const merged = new Map();
  for (const { source, target, edge_kind } of data.edges) {
    const key = `${source}|${target}`;
    if (!merged.has(key)) merged.set(key, { source, target, labels: new Set() });
    merged.get(key).labels.add(edge_kind === 'imports-type' ? 'imports' : edge_kind);
  }
  for (const { source, target, labels } of merged.values()) {
    out.push(`  ${idOf(source)} -->|${[...labels].join(', ')}| ${idOf(target)}`);
  }

  if (data.limit && data.totalEdges > data.limit) {
    out.push(`  %% Truncated: showing ${data.edges.length} of ${data.totalEdges} edges`);
  }

  return out.join('\n');
}
/**
 * Render function-level graph data as a Mermaid flowchart.
 *
 * Nodes are grouped into one subgraph per file (files sorted
 * lexicographically); node shape reflects kind and an optional `style`
 * directive reflects role.
 *
 * @param {{ direction: string, edges: Array, roles: Map, totalEdges: number, limit?: number }} data
 * @returns {string}
 */
export function renderFunctionLevelMermaid(data) {
  const out = [`flowchart ${data.direction || 'LR'}`];

  // Lazily assign compact node ids (n0, n1, …) in first-seen order:
  // source then target of each edge, in edge order.
  const ids = new Map();
  const idOf = (key) => {
    if (!ids.has(key)) ids.set(key, `n${ids.size}`);
    return ids.get(key);
  };

  // Group nodes by file and remember each node's kind for shape selection.
  const byFile = new Map();
  const kinds = new Map();
  for (const edge of data.edges) {
    for (const side of ['source', 'target']) {
      const file = edge[`${side}_file`];
      const name = edge[`${side}_name`];
      const key = `${file}::${name}`;
      idOf(key);
      kinds.set(key, edge[`${side}_kind`]);
      if (!byFile.has(file)) byFile.set(file, new Map());
      byFile.get(file).set(key, name);
    }
  }

  // Emit subgraphs grouped by file, sorted for stable output.
  for (const [file, members] of [...byFile].sort((a, b) => a[0].localeCompare(b[0]))) {
    out.push(`  subgraph ${file.replace(/[^a-zA-Z0-9]/g, '_')}["${escapeLabel(file)}"]`);
    for (const [key, name] of members) {
      out.push(`    ${idOf(key)}${mermaidShape(kinds.get(key), name)}`);
    }
    out.push('  end');
  }

  // Emit edges labeled with their kind.
  for (const edge of data.edges) {
    const from = idOf(`${edge.source_file}::${edge.source_name}`);
    const to = idOf(`${edge.target_file}::${edge.target_name}`);
    out.push(`  ${from} -->|${edge.edge_kind}| ${to}`);
  }
  if (data.limit && data.totalEdges > data.limit) {
    out.push(`  %% Truncated: showing ${data.edges.length} of ${data.totalEdges} edges`);
  }

  // Role styling is appended after all nodes/edges.
  for (const [key, id] of ids) {
    const style = ROLE_STYLES[data.roles?.get(key)];
    if (style) out.push(`  style ${id} ${style}`);
  }

  return out.join('\n');
}

// ─── GraphML Serializer ──────────────────────────────────────────────

/**
 * Render file-level graph data as GraphML (XML).
 *
 * NOTE(review): the XML string literals in this copy were destroyed by
 * entity/tag stripping. The header keys and element structure below are
 * reconstructed from the surviving interpolations (basename + path per
 * node, a literal 'imports' label per edge) — verify the key ids against
 * downstream GraphML consumers.
 *
 * @param {{ edges: Array<{ source: string, target: string }> }} data
 * @returns {string}
 */
export function renderFileLevelGraphML(data) {
  const lines = [
    '<?xml version="1.0" encoding="UTF-8"?>',
    '<graphml xmlns="http://graphml.graphdrawing.org/xmlns">',
    '  <key id="name" for="node" attr.name="name" attr.type="string"/>',
    '  <key id="path" for="node" attr.name="path" attr.type="string"/>',
    '  <key id="kind" for="edge" attr.name="kind" attr.type="string"/>',
    '  <graph id="G" edgedefault="directed">',
  ];

  // Distinct file set, from edge endpoints, in first-seen order.
  const files = new Set();
  for (const { source, target } of data.edges) {
    files.add(source);
    files.add(target);
  }

  const fileIds = new Map();
  let nIdx = 0;
  for (const f of files) {
    const id = `n${nIdx++}`;
    fileIds.set(f, id);
    lines.push(`    <node id="${id}">`);
    lines.push(`      <data key="name">${escapeXml(path.basename(f))}</data>`);
    lines.push(`      <data key="path">${escapeXml(f)}</data>`);
    lines.push('    </node>');
  }

  let eIdx = 0;
  for (const { source, target } of data.edges) {
    lines.push(
      `    <edge id="e${eIdx++}" source="${fileIds.get(source)}" target="${fileIds.get(target)}">`,
    );
    lines.push('      <data key="kind">imports</data>');
    lines.push('    </edge>');
  }

  lines.push('  </graph>');
  lines.push('</graphml>');
  return lines.join('\n');
}
/**
 * Render function-level graph data as GraphML (XML).
 *
 * NOTE(review): the XML string literals in this copy were destroyed by
 * entity/tag stripping. Header keys and element structure are
 * reconstructed from the surviving interpolations (name/kind/file/line
 * and optional role per node; kind + confidence per edge) — verify the
 * key ids against downstream GraphML consumers.
 *
 * @param {{ edges: Array }} data
 * @returns {string}
 */
export function renderFunctionLevelGraphML(data) {
  const lines = [
    '<?xml version="1.0" encoding="UTF-8"?>',
    '<graphml xmlns="http://graphml.graphdrawing.org/xmlns">',
    '  <key id="name" for="node" attr.name="name" attr.type="string"/>',
    '  <key id="kind" for="node" attr.name="kind" attr.type="string"/>',
    '  <key id="file" for="node" attr.name="file" attr.type="string"/>',
    '  <key id="line" for="node" attr.name="line" attr.type="int"/>',
    '  <key id="role" for="node" attr.name="role" attr.type="string"/>',
    '  <key id="edgekind" for="edge" attr.name="kind" attr.type="string"/>',
    '  <key id="confidence" for="edge" attr.name="confidence" attr.type="double"/>',
    '  <graph id="G" edgedefault="directed">',
  ];

  const emittedNodes = new Set();
  // Emit a <node> element once per id; role is omitted when falsy.
  function emitNode(id, name, kind, file, line, role) {
    if (emittedNodes.has(id)) return;
    emittedNodes.add(id);
    lines.push(`    <node id="${id}">`);
    lines.push(`      <data key="name">${escapeXml(name)}</data>`);
    lines.push(`      <data key="kind">${escapeXml(kind)}</data>`);
    lines.push(`      <data key="file">${escapeXml(file)}</data>`);
    lines.push(`      <data key="line">${line}</data>`);
    if (role) lines.push(`      <data key="role">${escapeXml(role)}</data>`);
    lines.push('    </node>');
  }

  let eIdx = 0;
  for (const e of data.edges) {
    emitNode(e.source_id, e.source_name, e.source_kind, e.source_file, e.source_line, e.source_role);
    emitNode(e.target_id, e.target_name, e.target_kind, e.target_file, e.target_line, e.target_role);
    lines.push(`    <edge id="e${eIdx++}" source="${e.source_id}" target="${e.target_id}">`);
    lines.push(`      <data key="edgekind">${escapeXml(e.edge_kind)}</data>`);
    lines.push(`      <data key="confidence">${e.confidence}</data>`);
    lines.push('    </edge>');
  }

  lines.push('  </graph>');
  lines.push('</graphml>');
  return lines.join('\n');
}
// ─── Neo4j CSV Serializer ────────────────────────────────────────────

/**
 * Render file-level graph data as Neo4j bulk-import CSV.
 *
 * Returns two CSV documents: a node file (one row per distinct file, with
 * a stable integer id assigned in first-seen order) and a relationship
 * file (one row per edge, typed by upper-cased edge kind).
 *
 * @param {{ edges: Array<{ source: string, target: string, edge_kind: string, confidence: number }> }} data
 * @returns {{ nodes: string, relationships: string }}
 */
export function renderFileLevelNeo4jCSV(data) {
  // Stable integer id per file, assigned in first-seen order.
  const fileIds = new Map();
  const idFor = (file) => {
    if (!fileIds.has(file)) fileIds.set(file, fileIds.size);
    return fileIds.get(file);
  };
  for (const { source, target } of data.edges) {
    idFor(source);
    idFor(target);
  }

  const nodes = ['nodeId:ID,name,file:string,:LABEL'];
  for (const [file, id] of fileIds) {
    nodes.push(`${id},${escapeCsv(path.basename(file))},${escapeCsv(file)},File`);
  }

  const relationships = [':START_ID,:END_ID,:TYPE,confidence:float'];
  for (const edge of data.edges) {
    // Neo4j relationship types: upper-case, underscores instead of dashes.
    const type = edge.edge_kind.toUpperCase().replace(/-/g, '_');
    relationships.push(`${idFor(edge.source)},${idFor(edge.target)},${type},${edge.confidence}`);
  }

  return { nodes: nodes.join('\n'), relationships: relationships.join('\n') };
}
/**
 * Render function-level graph data as Neo4j bulk-import CSV.
 *
 * Each distinct node id is emitted once, labeled with its capitalized
 * kind; each edge becomes a relationship row typed by upper-cased kind.
 *
 * @param {{ edges: Array }} data
 * @returns {{ nodes: string, relationships: string }}
 */
export function renderFunctionLevelNeo4jCSV(data) {
  const nodes = ['nodeId:ID,name,kind,file:string,line:int,role,:LABEL'];
  const relationships = [':START_ID,:END_ID,:TYPE,confidence:float'];
  const seen = new Set();

  // Emit a node row once per id; the Neo4j :LABEL is the capitalized kind.
  const addNode = (id, name, kind, file, line, role) => {
    if (seen.has(id)) return;
    seen.add(id);
    const label = kind.charAt(0).toUpperCase() + kind.slice(1);
    nodes.push(
      `${id},${escapeCsv(name)},${escapeCsv(kind)},${escapeCsv(file)},${line},${escapeCsv(role || '')},${label}`,
    );
  };

  for (const e of data.edges) {
    addNode(e.source_id, e.source_name, e.source_kind, e.source_file, e.source_line, e.source_role);
    addNode(e.target_id, e.target_name, e.target_kind, e.target_file, e.target_line, e.target_role);
    const type = e.edge_kind.toUpperCase().replace(/-/g, '_');
    relationships.push(`${e.source_id},${e.target_id},${type},${e.confidence}`);
  }

  return { nodes: nodes.join('\n'), relationships: relationships.join('\n') };
}
/**
 * Mermaid sequence diagram renderer — pure data → string transform.
 *
 * Converts sequenceData() output into Mermaid sequenceDiagram syntax.
 * No DB access, no I/O — just data in, formatted string out.
 */

/**
 * Escape characters that are significant in Mermaid sequence labels.
 *
 * NOTE(review): the angle-bracket replacements in this copy were destroyed
 * by entity stripping; reconstructed as HTML entities (which Mermaid
 * accepts in labels) — confirm against the original.
 */
function escapeMermaid(str) {
  return str
    .replace(/</g, '&lt;')
    .replace(/>/g, '&gt;')
    .replace(/:/g, '#colon;')
    .replace(/"/g, '#quot;');
}

/**
 * Convert a sequenceData() result into Mermaid sequenceDiagram syntax.
 *
 * @param {{ participants: Array<{id: string, label: string}>, messages: Array<{from: string, to: string, type: string, label: string}>, truncated: boolean, depth: number }} seqResult
 * @returns {string}
 */
export function sequenceToMermaid(seqResult) {
  const lines = ['sequenceDiagram'];

  for (const p of seqResult.participants) {
    lines.push(`  participant ${p.id} as ${escapeMermaid(p.label)}`);
  }

  for (const msg of seqResult.messages) {
    // Dashed arrow for return messages, solid for calls.
    const arrow = msg.type === 'return' ? '-->>' : '->>';
    lines.push(`  ${msg.from}${arrow}${msg.to}: ${escapeMermaid(msg.label)}`);
  }

  // Annotate truncation on the first participant so it is visible near
  // the diagram's entry point.
  if (seqResult.truncated && seqResult.participants.length > 0) {
    lines.push(
      `  note right of ${seqResult.participants[0].id}: Truncated at depth ${seqResult.depth}`,
    );
  }

  return lines.join('\n');
}
/**
 * Shared table formatting utilities for CLI output.
 *
 * Pure data → formatted string transforms. No I/O — callers handle printing.
 */

/**
 * Format a table with aligned columns: a header row, a box-drawing
 * separator row, then one line per data row.
 *
 * NOTE(review): the inter-column gap was lost to whitespace mangling in
 * this copy; reconstructed as two spaces — confirm against the original.
 *
 * @param {object} opts
 * @param {Array<{ header: string, width: number, align?: 'left'|'right' }>} opts.columns
 * @param {string[][]} opts.rows - Each row is an array of string cell values
 * @param {number} [opts.indent=2] - Leading spaces per line
 * @returns {string} Formatted table string (header + separator + data rows)
 */
export function formatTable({ columns, rows, indent = 2 }) {
  // Right-aligned cells pad on the left; everything else pads on the right.
  const pad = (text, col) =>
    col.align === 'right' ? text.padStart(col.width) : text.padEnd(col.width);
  const lead = ' '.repeat(indent);

  const out = [
    lead + columns.map((col) => pad(col.header, col)).join('  '),
    lead + columns.map((col) => '\u2500'.repeat(col.width)).join('  '),
  ];
  for (const row of rows) {
    out.push(lead + columns.map((col, i) => pad(row[i] ?? '', col)).join('  '));
  }
  return out.join('\n');
}

/**
 * Truncate a string from the end, appending '\u2026' if truncated.
 */
export function truncEnd(str, maxLen) {
  return str.length <= maxLen ? str : `${str.slice(0, maxLen - 1)}\u2026`;
}

/**
 * Truncate a string from the start, prepending '\u2026' if truncated.
 */
export function truncStart(str, maxLen) {
  return str.length <= maxLen ? str : `\u2026${str.slice(-(maxLen - 1))}`;
}
import fs from 'node:fs';
import path from 'node:path';
import { COMMUNITY_COLORS, DEFAULT_NODE_COLORS, DEFAULT_ROLE_COLORS } from './colors.js';

// Re-export color constants so existing consumers are unaffected.
export { COMMUNITY_COLORS, DEFAULT_NODE_COLORS, DEFAULT_ROLE_COLORS };

/**
 * Baseline viewer configuration. loadPlotConfig() merges user config
 * files over this object (nested sections merged one level deep).
 */
export const DEFAULT_CONFIG = {
  layout: { algorithm: 'hierarchical', direction: 'LR' },
  physics: { enabled: true, nodeDistance: 150 },
  nodeColors: DEFAULT_NODE_COLORS,
  roleColors: DEFAULT_ROLE_COLORS,
  colorBy: 'kind', // which attribute drives node coloring
  edgeStyle: { color: '#666', smooth: true },
  filter: { kinds: null, roles: null, files: null }, // null = no filtering
  title: 'Codegraph',
  seedStrategy: 'all',
  seedCount: 30,
  clusterBy: 'none',
  sizeBy: 'uniform',
  overlays: { complexity: false, risk: false },
  riskThresholds: { highBlastRadius: 10, lowMI: 40 },
};
// ─── Config Loading ──────────────────────────────────────────────────

/**
 * Load .plotDotCfg or .plotDotCfg.json from the given directory and merge
 * it over DEFAULT_CONFIG. Nested sections (layout, physics, colors, …)
 * are merged one level deep so partial user configs work. Invalid JSON is
 * deliberately ignored (best-effort: fall back to defaults).
 *
 * @param {string} dir - Directory to probe for a config file
 * @returns {object} Effective viewer configuration
 */
export function loadPlotConfig(dir) {
  for (const name of ['.plotDotCfg', '.plotDotCfg.json']) {
    const p = path.join(dir, name);
    if (!fs.existsSync(p)) continue;
    try {
      const raw = JSON.parse(fs.readFileSync(p, 'utf-8'));
      return {
        ...DEFAULT_CONFIG,
        ...raw,
        layout: { ...DEFAULT_CONFIG.layout, ...(raw.layout || {}) },
        physics: { ...DEFAULT_CONFIG.physics, ...(raw.physics || {}) },
        nodeColors: { ...DEFAULT_CONFIG.nodeColors, ...(raw.nodeColors || {}) },
        roleColors: { ...DEFAULT_CONFIG.roleColors, ...(raw.roleColors || {}) },
        edgeStyle: { ...DEFAULT_CONFIG.edgeStyle, ...(raw.edgeStyle || {}) },
        filter: { ...DEFAULT_CONFIG.filter, ...(raw.filter || {}) },
        overlays: { ...DEFAULT_CONFIG.overlays, ...(raw.overlays || {}) },
        riskThresholds: { ...DEFAULT_CONFIG.riskThresholds, ...(raw.riskThresholds || {}) },
      };
    } catch {
      // Invalid JSON — fall through and try the next candidate / defaults.
    }
  }
  return { ...DEFAULT_CONFIG };
}

// ─── Internal Helpers ────────────────────────────────────────────────

/**
 * Escape HTML-special characters for safe interpolation into markup.
 *
 * NOTE(review): the replacement strings in this copy were destroyed by
 * entity unescaping; restored to the standard HTML entities.
 *
 * @param {*} s - Value to escape (coerced to string)
 * @returns {string}
 */
export function escapeHtml(s) {
  return String(s)
    .replace(/&/g, '&amp;')
    .replace(/</g, '&lt;')
    .replace(/>/g, '&gt;')
    .replace(/"/g, '&quot;');
}

/**
 * Build vis-network options from viewer config: node/edge appearance,
 * barnesHut physics, and (when algorithm is 'hierarchical') a directed
 * hierarchical layout.
 *
 * @param {object} cfg - Viewer config (shape of DEFAULT_CONFIG)
 * @returns {object} vis-network options object
 */
export function buildLayoutOptions(cfg) {
  const opts = {
    nodes: {
      shape: 'box',
      font: { face: 'monospace', size: 12 },
    },
    edges: {
      arrows: 'to',
      color: cfg.edgeStyle.color || '#666',
      smooth: cfg.edgeStyle.smooth !== false, // only an explicit false disables smoothing
    },
    physics: {
      enabled: cfg.physics.enabled !== false,
      barnesHut: {
        gravitationalConstant: -3000,
        springLength: cfg.physics.nodeDistance || 150,
      },
    },
    interaction: {
      tooltipDelay: 200,
      hover: true,
    },
  };

  if (cfg.layout.algorithm === 'hierarchical') {
    opts.layout = {
      hierarchical: {
        enabled: true,
        direction: cfg.layout.direction || 'LR',
        sortMethod: 'directed',
        nodeSpacing: cfg.physics.nodeDistance || 150,
      },
    };
  }

  return opts;
}
self-contained interactive HTML file with vis-network. + * + * Pure transform: prepared graph data + config → HTML string. + * + * @param {{ nodes: Array, edges: Array, seedNodeIds: Array }} data - From prepareGraphData() + * @param {object} cfg - Viewer config (from loadPlotConfig or DEFAULT_CONFIG) + * @returns {string} Complete HTML document + */ +export function renderPlotHTML(data, cfg) { + const layoutOpts = buildLayoutOptions(cfg); + const title = cfg.title || 'Codegraph'; + + // Resolve effective colorBy (overlays.complexity overrides) + const effectiveColorBy = + cfg.overlays?.complexity && cfg.colorBy === 'kind' ? 'complexity' : cfg.colorBy || 'kind'; + const effectiveRisk = cfg.overlays?.risk || false; + + return ` + + + + +${escapeHtml(title)} + + + + +
+ + + + + + + +
+
+
+
+ × +
+
+
+
+ + +`; +} diff --git a/src/sequence.js b/src/sequence.js index bac6d408..5d87a1f0 100644 --- a/src/sequence.js +++ b/src/sequence.js @@ -285,41 +285,5 @@ export function sequenceData(name, dbPath, opts = {}) { } } -// ─── Mermaid formatter ─────────────────────────────────────────────── - -/** - * Escape special Mermaid characters in labels. - */ -function escapeMermaid(str) { - return str - .replace(//g, '>') - .replace(/:/g, '#colon;') - .replace(/"/g, '#quot;'); -} - -/** - * Convert sequenceData result to Mermaid sequenceDiagram syntax. - * @param {{ participants, messages, truncated }} seqResult - * @returns {string} - */ -export function sequenceToMermaid(seqResult) { - const lines = ['sequenceDiagram']; - - for (const p of seqResult.participants) { - lines.push(` participant ${p.id} as ${escapeMermaid(p.label)}`); - } - - for (const msg of seqResult.messages) { - const arrow = msg.type === 'return' ? '-->>' : '->>'; - lines.push(` ${msg.from}${arrow}${msg.to}: ${escapeMermaid(msg.label)}`); - } - - if (seqResult.truncated && seqResult.participants.length > 0) { - lines.push( - ` note right of ${seqResult.participants[0].id}: Truncated at depth ${seqResult.depth}`, - ); - } - - return lines.join('\n'); -} +// Re-export Mermaid renderer from presentation layer +export { sequenceToMermaid } from './presentation/sequence-renderer.js'; diff --git a/src/viewer.js b/src/viewer.js index 6bd94d35..fb438823 100644 --- a/src/viewer.js +++ b/src/viewer.js @@ -1,109 +1,18 @@ -import fs from 'node:fs'; import path from 'node:path'; import { louvainCommunities } from './graph/algorithms/louvain.js'; import { CodeGraph } from './graph/model.js'; import { isTestFile } from './infrastructure/test-filter.js'; +import { + COMMUNITY_COLORS, + DEFAULT_NODE_COLORS, + DEFAULT_ROLE_COLORS, +} from './presentation/colors.js'; +import { DEFAULT_CONFIG, renderPlotHTML } from './presentation/viewer.js'; -const DEFAULT_MIN_CONFIDENCE = 0.5; - -const DEFAULT_NODE_COLORS = { - function: 
'#4CAF50', - method: '#66BB6A', - class: '#2196F3', - interface: '#42A5F5', - type: '#7E57C2', - struct: '#FF7043', - enum: '#FFA726', - trait: '#26A69A', - record: '#EC407A', - module: '#78909C', - file: '#90A4AE', -}; - -const DEFAULT_ROLE_COLORS = { - entry: '#e8f5e9', - core: '#e3f2fd', - utility: '#f5f5f5', - dead: '#ffebee', - leaf: '#fffde7', -}; - -const COMMUNITY_COLORS = [ - '#4CAF50', - '#2196F3', - '#FF9800', - '#9C27B0', - '#F44336', - '#00BCD4', - '#CDDC39', - '#E91E63', - '#3F51B5', - '#FF5722', - '#009688', - '#795548', -]; - -const DEFAULT_CONFIG = { - layout: { algorithm: 'hierarchical', direction: 'LR' }, - physics: { enabled: true, nodeDistance: 150 }, - nodeColors: DEFAULT_NODE_COLORS, - roleColors: DEFAULT_ROLE_COLORS, - colorBy: 'kind', - edgeStyle: { color: '#666', smooth: true }, - filter: { kinds: null, roles: null, files: null }, - title: 'Codegraph', - seedStrategy: 'all', - seedCount: 30, - clusterBy: 'none', - sizeBy: 'uniform', - overlays: { complexity: false, risk: false }, - riskThresholds: { highBlastRadius: 10, lowMI: 40 }, -}; +// Re-export presentation utilities for backward compatibility +export { loadPlotConfig } from './presentation/viewer.js'; -/** - * Load .plotDotCfg or .plotDotCfg.json from given directory. - * Returns merged config with defaults. 
- */ -export function loadPlotConfig(dir) { - for (const name of ['.plotDotCfg', '.plotDotCfg.json']) { - const p = path.join(dir, name); - if (fs.existsSync(p)) { - try { - const raw = JSON.parse(fs.readFileSync(p, 'utf-8')); - return { - ...DEFAULT_CONFIG, - ...raw, - layout: { ...DEFAULT_CONFIG.layout, ...(raw.layout || {}) }, - physics: { ...DEFAULT_CONFIG.physics, ...(raw.physics || {}) }, - nodeColors: { - ...DEFAULT_CONFIG.nodeColors, - ...(raw.nodeColors || {}), - }, - roleColors: { - ...DEFAULT_CONFIG.roleColors, - ...(raw.roleColors || {}), - }, - edgeStyle: { - ...DEFAULT_CONFIG.edgeStyle, - ...(raw.edgeStyle || {}), - }, - filter: { ...DEFAULT_CONFIG.filter, ...(raw.filter || {}) }, - overlays: { - ...DEFAULT_CONFIG.overlays, - ...(raw.overlays || {}), - }, - riskThresholds: { - ...DEFAULT_CONFIG.riskThresholds, - ...(raw.riskThresholds || {}), - }, - }; - } catch { - // Invalid JSON — use defaults - } - } - } - return { ...DEFAULT_CONFIG }; -} +const DEFAULT_MIN_CONFIDENCE = 0.5; // ─── Data Preparation ───────────────────────────────────────────────── @@ -404,548 +313,15 @@ function prepareFileLevelData(db, noTests, minConf, cfg) { return { nodes: visNodes, edges: visEdges, seedNodeIds }; } -// ─── HTML Generation ────────────────────────────────────────────────── +// ─── HTML Generation (thin wrapper) ────────────────────────────────── /** * Generate a self-contained interactive HTML file with vis-network. + * + * Loads graph data from the DB, then delegates to the presentation layer. */ export function generatePlotHTML(db, opts = {}) { const cfg = opts.config || DEFAULT_CONFIG; const data = prepareGraphData(db, opts); - const layoutOpts = buildLayoutOptions(cfg); - const title = cfg.title || 'Codegraph'; - - // Resolve effective colorBy (overlays.complexity overrides) - const effectiveColorBy = - cfg.overlays?.complexity && cfg.colorBy === 'kind' ? 
'complexity' : cfg.colorBy || 'kind'; - const effectiveRisk = cfg.overlays?.risk || false; - - return ` - - - - -${escapeHtml(title)} - - - - -
- - - - - - - -
-
-
-
- × -
-
-
-
// ─── HTML Generation (thin wrapper) ──────────────────────────────────

/**
 * Generate a self-contained interactive HTML file with vis-network.
 *
 * Loads graph data from the DB (prepareGraphData), then delegates all
 * rendering to the presentation layer (renderPlotHTML).
 *
 * @param {object} db - Open database handle
 * @param {object} [opts] - Options; opts.config overrides DEFAULT_CONFIG
 * @returns {string} Complete HTML document
 */
export function generatePlotHTML(db, opts = {}) {
  const config = opts.config || DEFAULT_CONFIG;
  const graphData = prepareGraphData(db, opts);
  return renderPlotHTML(graphData, config);
}