diff --git a/src/cli/commands/audit.js b/src/cli/commands/audit.js index bf542b7c..c52df18e 100644 --- a/src/cli/commands/audit.js +++ b/src/cli/commands/audit.js @@ -41,6 +41,7 @@ export const command = { kind: opts.kind, noTests: ctx.resolveNoTests(opts), json: opts.json, + config: ctx.config, }); }, }; diff --git a/src/cli/commands/check.js b/src/cli/commands/check.js index cb2e16d9..a85ca995 100644 --- a/src/cli/commands/check.js +++ b/src/cli/commands/check.js @@ -41,6 +41,7 @@ export const command = { limit: opts.limit ? parseInt(opts.limit, 10) : undefined, offset: opts.offset ? parseInt(opts.offset, 10) : undefined, ndjson: opts.ndjson, + config: ctx.config, }); return; } @@ -56,6 +57,7 @@ export const command = { depth: opts.depth ? parseInt(opts.depth, 10) : undefined, noTests: ctx.resolveNoTests(opts), json: opts.json, + config: ctx.config, }); if (opts.rules) { @@ -73,6 +75,7 @@ export const command = { limit: opts.limit ? parseInt(opts.limit, 10) : undefined, offset: opts.offset ? parseInt(opts.offset, 10) : undefined, ndjson: opts.ndjson, + config: ctx.config, }); } }, diff --git a/src/cli/commands/complexity.js b/src/cli/commands/complexity.js index cf9ab4c4..f9a08c01 100644 --- a/src/cli/commands/complexity.js +++ b/src/cli/commands/complexity.js @@ -40,6 +40,7 @@ export const command = { noTests: ctx.resolveNoTests(opts), json: opts.json, ndjson: opts.ndjson, + config: ctx.config, }); }, }; diff --git a/src/cli/commands/diff-impact.js b/src/cli/commands/diff-impact.js index 15fd67c9..4cc5e253 100644 --- a/src/cli/commands/diff-impact.js +++ b/src/cli/commands/diff-impact.js @@ -25,6 +25,7 @@ export const command = { limit: opts.limit ? parseInt(opts.limit, 10) : undefined, offset: opts.offset ? 
parseInt(opts.offset, 10) : undefined, ndjson: opts.ndjson, + config: ctx.config, }); }, }; diff --git a/src/db/repository/base.js b/src/db/repository/base.js index d13ffcfa..0ab4deac 100644 --- a/src/db/repository/base.js +++ b/src/db/repository/base.js @@ -163,7 +163,7 @@ export class Repository { throw new Error('not implemented'); } - /** @returns {{ source_id: number, target_id: number }[]} */ + /** @returns {{ source_id: number, target_id: number, confidence: number|null }[]} */ getCallEdges() { throw new Error('not implemented'); } diff --git a/src/db/repository/graph-read.js b/src/db/repository/graph-read.js index 8fd284ad..5538a9d4 100644 --- a/src/db/repository/graph-read.js +++ b/src/db/repository/graph-read.js @@ -25,13 +25,13 @@ export function getCallableNodes(db) { /** * Get all 'calls' edges. * @param {object} db - * @returns {{ source_id: number, target_id: number }[]} + * @returns {{ source_id: number, target_id: number, confidence: number|null }[]} */ export function getCallEdges(db) { return cachedStmt( _getCallEdgesStmt, db, - "SELECT source_id, target_id FROM edges WHERE kind = 'calls'", + "SELECT source_id, target_id, confidence FROM edges WHERE kind = 'calls'", ).all(); } diff --git a/src/db/repository/in-memory-repository.js b/src/db/repository/in-memory-repository.js index 9d228ca1..57e6592e 100644 --- a/src/db/repository/in-memory-repository.js +++ b/src/db/repository/in-memory-repository.js @@ -489,7 +489,7 @@ export class InMemoryRepository extends Repository { getCallEdges() { return [...this.#edges.values()] .filter((e) => e.kind === 'calls') - .map((e) => ({ source_id: e.source_id, target_id: e.target_id })); + .map((e) => ({ source_id: e.source_id, target_id: e.target_id, confidence: e.confidence })); } getFileNodesAll() { diff --git a/src/domain/analysis/impact.js b/src/domain/analysis/impact.js index 01b5cef6..736d76e0 100644 --- a/src/domain/analysis/impact.js +++ b/src/domain/analysis/impact.js @@ -18,6 +18,42 @@ import { 
normalizeSymbol } from '../../shared/normalize.js'; import { paginateResult } from '../../shared/paginate.js'; import { findMatchingNodes } from './symbol-lookup.js'; +// ─── Shared BFS: transitive callers ──────────────────────────────────── + +/** + * BFS traversal to find transitive callers of a node. + * + * @param {import('better-sqlite3').Database} db - Open read-only SQLite database handle (not a Repository) + * @param {number} startId - Starting node ID + * @param {{ noTests?: boolean, maxDepth?: number, onVisit?: (caller: object, parentId: number, depth: number) => void }} options + * @returns {{ totalDependents: number, levels: Record<number, {name: string, kind: string, file: string, line: number}[]> }} + */ +export function bfsTransitiveCallers(db, startId, { noTests = false, maxDepth = 3, onVisit } = {}) { + const visited = new Set([startId]); + const levels = {}; + let frontier = [startId]; + + for (let d = 1; d <= maxDepth; d++) { + const nextFrontier = []; + for (const fid of frontier) { + const callers = findDistinctCallers(db, fid); + for (const c of callers) { + if (!visited.has(c.id) && (!noTests || !isTestFile(c.file))) { + visited.add(c.id); + nextFrontier.push(c.id); + if (!levels[d]) levels[d] = []; + levels[d].push({ name: c.name, kind: c.kind, file: c.file, line: c.line }); + if (onVisit) onVisit(c, fid, d); + } + } + } + frontier = nextFrontier; + if (frontier.length === 0) break; + } + + return { totalDependents: visited.size - 1, levels }; +} + export function impactAnalysisData(file, customDbPath, opts = {}) { const db = openReadonlyOrFail(customDbPath); try { @@ -82,31 +118,11 @@ export function fnImpactData(name, customDbPath, opts = {}) { } const results = nodes.map((node) => { - const visited = new Set([node.id]); - const levels = {}; - let frontier = [node.id]; - - for (let d = 1; d <= maxDepth; d++) { - const nextFrontier = []; - for (const fid of frontier) { - const callers = findDistinctCallers(db, fid); - for (const c of callers) { - if (!visited.has(c.id) && (!noTests ||
!isTestFile(c.file))) { - visited.add(c.id); - nextFrontier.push(c.id); - if (!levels[d]) levels[d] = []; - levels[d].push({ name: c.name, kind: c.kind, file: c.file, line: c.line }); - } - } - } - frontier = nextFrontier; - if (frontier.length === 0) break; - } - + const { levels, totalDependents } = bfsTransitiveCallers(db, node.id, { noTests, maxDepth }); return { ...normalizeSymbol(node, db, hc), levels, - totalDependents: visited.size - 1, + totalDependents, }; }); @@ -232,40 +248,27 @@ export function diffImpactData(customDbPath, opts = {}) { const allAffected = new Set(); const functionResults = affectedFunctions.map((fn) => { - const visited = new Set([fn.id]); - let frontier = [fn.id]; - let totalCallers = 0; - const levels = {}; const edges = []; const idToKey = new Map(); idToKey.set(fn.id, `${fn.file}::${fn.name}:${fn.line}`); - for (let d = 1; d <= maxDepth; d++) { - const nextFrontier = []; - for (const fid of frontier) { - const callers = findDistinctCallers(db, fid); - for (const c of callers) { - if (!visited.has(c.id) && (!noTests || !isTestFile(c.file))) { - visited.add(c.id); - nextFrontier.push(c.id); - allAffected.add(`${c.file}:${c.name}`); - const callerKey = `${c.file}::${c.name}:${c.line}`; - idToKey.set(c.id, callerKey); - if (!levels[d]) levels[d] = []; - levels[d].push({ name: c.name, kind: c.kind, file: c.file, line: c.line }); - edges.push({ from: idToKey.get(fid), to: callerKey }); - totalCallers++; - } - } - } - frontier = nextFrontier; - if (frontier.length === 0) break; - } + + const { levels, totalDependents } = bfsTransitiveCallers(db, fn.id, { + noTests, + maxDepth, + onVisit(c, parentId) { + allAffected.add(`${c.file}:${c.name}`); + const callerKey = `${c.file}::${c.name}:${c.line}`; + idToKey.set(c.id, callerKey); + edges.push({ from: idToKey.get(parentId), to: callerKey }); + }, + }); + return { name: fn.name, kind: fn.kind, file: fn.file, line: fn.line, - transitiveCallers: totalCallers, + transitiveCallers: 
totalDependents, levels, edges, }; @@ -310,8 +313,8 @@ export function diffImpactData(customDbPath, opts = {}) { let boundaryViolations = []; let boundaryViolationCount = 0; try { - const config = loadConfig(repoRoot); - const boundaryConfig = config.manifesto?.boundaries; + const cfg = opts.config || loadConfig(repoRoot); + const boundaryConfig = cfg.manifesto?.boundaries; if (boundaryConfig) { const result = evaluateBoundaries(db, boundaryConfig, { scopeFiles: [...changedRanges.keys()], diff --git a/src/features/audit.js b/src/features/audit.js index ef71ca93..7526b2c3 100644 --- a/src/features/audit.js +++ b/src/features/audit.js @@ -8,6 +8,7 @@ import path from 'node:path'; import { openReadonlyOrFail } from '../db/index.js'; +import { bfsTransitiveCallers } from '../domain/analysis/impact.js'; import { explainData } from '../domain/queries.js'; import { loadConfig } from '../infrastructure/config.js'; import { isTestFile } from '../infrastructure/test-filter.js'; @@ -17,11 +18,15 @@ import { RULE_DEFS } from './manifesto.js'; const FUNCTION_RULES = RULE_DEFS.filter((d) => d.level === 'function'); -function resolveThresholds(customDbPath) { +function resolveThresholds(customDbPath, config) { try { - const dbDir = path.dirname(customDbPath); - const repoRoot = path.resolve(dbDir, '..'); - const cfg = loadConfig(repoRoot); + const cfg = + config || + (() => { + const dbDir = path.dirname(customDbPath); + const repoRoot = path.resolve(dbDir, '..'); + return loadConfig(repoRoot); + })(); const userRules = cfg.manifesto || {}; const resolved = {}; for (const def of FUNCTION_RULES) { @@ -70,39 +75,6 @@ function checkBreaches(row, thresholds) { return breaches; } -// ─── BFS impact (inline, same algorithm as fnImpactData) ──────────── - -function computeImpact(db, nodeId, noTests, maxDepth) { - const visited = new Set([nodeId]); - const levels = {}; - let frontier = [nodeId]; - - for (let d = 1; d <= maxDepth; d++) { - const nextFrontier = []; - for (const fid of 
frontier) { - const callers = db - .prepare( - `SELECT DISTINCT n.id, n.name, n.kind, n.file, n.line - FROM edges e JOIN nodes n ON e.source_id = n.id - WHERE e.target_id = ? AND e.kind = 'calls'`, - ) - .all(fid); - for (const c of callers) { - if (!visited.has(c.id) && (!noTests || !isTestFile(c.file))) { - visited.add(c.id); - nextFrontier.push(c.id); - if (!levels[d]) levels[d] = []; - levels[d].push({ name: c.name, kind: c.kind, file: c.file, line: c.line }); - } - } - } - frontier = nextFrontier; - if (frontier.length === 0) break; - } - - return { totalDependents: visited.size - 1, levels }; -} - // ─── Phase 4.4 fields (graceful null fallback) ───────────────────── function readPhase44(db, nodeId) { @@ -147,7 +119,7 @@ export function auditData(target, customDbPath, opts = {}) { // 2. Open DB for enrichment const db = openReadonlyOrFail(customDbPath); - const thresholds = resolveThresholds(customDbPath); + const thresholds = resolveThresholds(customDbPath, opts.config); let functions; try { @@ -189,7 +161,7 @@ function enrichFunction(db, r, noTests, maxDepth, thresholds) { const nodeId = nodeRow?.id; const health = nodeId ? buildHealth(db, nodeId, thresholds) : defaultHealth(); const impact = nodeId - ? computeImpact(db, nodeId, noTests, maxDepth) + ? bfsTransitiveCallers(db, nodeId, { noTests, maxDepth }) : { totalDependents: 0, levels: {} }; const phase44 = nodeId ? readPhase44(db, nodeId) @@ -260,7 +232,7 @@ function enrichSymbol(db, sym, file, noTests, maxDepth, thresholds) { const health = nodeId ? buildHealth(db, nodeId, thresholds) : defaultHealth(); const impact = nodeId - ? computeImpact(db, nodeId, noTests, maxDepth) + ? bfsTransitiveCallers(db, nodeId, { noTests, maxDepth }) : { totalDependents: 0, levels: {} }; const phase44 = nodeId ? 
readPhase44(db, nodeId) diff --git a/src/features/check.js b/src/features/check.js index f3de3b78..4b71df2d 100644 --- a/src/features/check.js +++ b/src/features/check.js @@ -2,6 +2,7 @@ import { execFileSync } from 'node:child_process'; import fs from 'node:fs'; import path from 'node:path'; import { findDbPath, openReadonlyOrFail } from '../db/index.js'; +import { bfsTransitiveCallers } from '../domain/analysis/impact.js'; import { findCycles } from '../domain/graph/cycles.js'; import { loadConfig } from '../infrastructure/config.js'; import { isTestFile } from '../infrastructure/test-filter.js'; @@ -96,31 +97,10 @@ export function checkMaxBlastRadius(db, changedRanges, threshold, noTests, maxDe } if (!overlaps) continue; - // BFS transitive callers - const visited = new Set([def.id]); - let frontier = [def.id]; - let totalCallers = 0; - for (let d = 1; d <= maxDepth; d++) { - const nextFrontier = []; - for (const fid of frontier) { - const callers = db - .prepare( - `SELECT DISTINCT n.id, n.name, n.kind, n.file, n.line - FROM edges e JOIN nodes n ON e.source_id = n.id - WHERE e.target_id = ? AND e.kind = 'calls'`, - ) - .all(fid); - for (const c of callers) { - if (!visited.has(c.id) && (!noTests || !isTestFile(c.file))) { - visited.add(c.id); - nextFrontier.push(c.id); - totalCallers++; - } - } - } - frontier = nextFrontier; - if (frontier.length === 0) break; - } + const { totalDependents: totalCallers } = bfsTransitiveCallers(db, def.id, { + noTests, + maxDepth, + }); if (totalCallers > maxFound) maxFound = totalCallers; if (totalCallers > threshold) { @@ -240,7 +220,10 @@ export function checkData(customDbPath, opts = {}) { const maxDepth = opts.depth || 3; // Load config defaults for check predicates - const config = loadConfig(repoRoot); + // NOTE: opts.config is loaded from process.cwd() at startup (via CLI context), + // which may differ from the DB's parent repo root when --db points to an external + // project. 
This is an acceptable trade-off to avoid duplicate I/O on the hot path. + const config = opts.config || loadConfig(repoRoot); const checkConfig = config.check || {}; // Resolve which predicates are enabled: CLI flags ?? config ?? built-in defaults diff --git a/src/features/complexity.js b/src/features/complexity.js index 4e4cf35d..c5cdf62e 100644 --- a/src/features/complexity.js +++ b/src/features/complexity.js @@ -524,7 +524,7 @@ export function complexityData(customDbPath, opts = {}) { const kindFilter = opts.kind || null; // Load thresholds from config - const config = loadConfig(process.cwd()); + const config = opts.config || loadConfig(process.cwd()); const thresholds = config.manifesto?.rules || { cognitive: { warn: 15, fail: null }, cyclomatic: { warn: 10, fail: null }, diff --git a/src/features/manifesto.js b/src/features/manifesto.js index 3113122a..edae49e4 100644 --- a/src/features/manifesto.js +++ b/src/features/manifesto.js @@ -395,7 +395,7 @@ export function manifestoData(customDbPath, opts = {}) { const db = openReadonlyOrFail(customDbPath); try { - const config = loadConfig(process.cwd()); + const config = opts.config || loadConfig(process.cwd()); const rules = resolveRules(config.manifesto?.rules); const violations = []; diff --git a/src/graph/builders/dependency.js b/src/graph/builders/dependency.js index 633b4147..7024f0db 100644 --- a/src/graph/builders/dependency.js +++ b/src/graph/builders/dependency.js @@ -79,8 +79,14 @@ function buildFunctionLevelGraph(dbOrRepo, noTests, minConfidence) { let edges; if (minConfidence != null) { if (isRepo) { - // minConfidence filtering not supported by Repository — fall back to getCallEdges - edges = dbOrRepo.getCallEdges(); + // Trade-off: Repository.getCallEdges() returns all call edges, so we + // filter in JS. This is O(all call edges) rather than the SQL path's + // indexed WHERE clause. 
Acceptable for current data sizes; a dedicated + // getCallEdgesByMinConfidence(threshold) method on the Repository + // interface would be the proper fix if this becomes a bottleneck. + edges = dbOrRepo + .getCallEdges() + .filter((e) => e.confidence != null && e.confidence >= minConfidence); } else { edges = dbOrRepo .prepare("SELECT source_id, target_id FROM edges WHERE kind = 'calls' AND confidence >= ?") diff --git a/tests/graph/builders/dependency.test.js b/tests/graph/builders/dependency.test.js index 6afb68de..43fc69d4 100644 --- a/tests/graph/builders/dependency.test.js +++ b/tests/graph/builders/dependency.test.js @@ -1,7 +1,9 @@ import Database from 'better-sqlite3'; import { describe, expect, it } from 'vitest'; import { initSchema } from '../../../src/db/index.js'; +import { InMemoryRepository } from '../../../src/db/repository/in-memory-repository.js'; import { buildDependencyGraph } from '../../../src/graph/builders/dependency.js'; +import { createTestRepo } from '../../helpers/fixtures.js'; function createTestDb() { const db = new Database(':memory:'); @@ -92,3 +94,82 @@ describe('buildDependencyGraph — function-level', () => { db.close(); }); }); + +// ── InMemoryRepository dispatch path ──────────────────────────────────────── + +describe('buildDependencyGraph — file-level via InMemoryRepository', () => { + it('builds graph from file nodes and import edges', () => { + const { repo, ids } = createTestRepo() + .file('a.js') + .file('b.js') + .file('c.js') + .imports('a.js', 'b.js') + .imports('b.js', 'c.js') + .build(); + + const graph = buildDependencyGraph(repo); + expect(graph.nodeCount).toBe(3); + expect(graph.edgeCount).toBe(2); + expect(graph.hasEdge(String(ids.get('a.js')), String(ids.get('b.js')))).toBe(true); + expect(graph.hasEdge(String(ids.get('b.js')), String(ids.get('c.js')))).toBe(true); + }); + + it('excludes test files when noTests is set', () => { + const { repo } = createTestRepo() + .file('src/a.js') + .file('tests/a.test.js') + 
.imports('tests/a.test.js', 'src/a.js') + .build(); + + const graph = buildDependencyGraph(repo, { noTests: true }); + expect(graph.nodeCount).toBe(1); + }); + + it('skips self-loops', () => { + const repo = new InMemoryRepository(); + const a = repo.addNode({ name: 'a.js', kind: 'file', file: 'a.js', line: 0 }); + repo.addEdge({ source_id: a, target_id: a, kind: 'imports' }); + + const graph = buildDependencyGraph(repo); + expect(graph.edgeCount).toBe(0); + }); +}); + +describe('buildDependencyGraph — function-level via InMemoryRepository', () => { + it('builds graph from callable nodes and call edges', () => { + const { repo, ids } = createTestRepo() + .fn('foo', 'a.js', 5) + .fn('bar', 'b.js', 10) + .calls('foo', 'bar') + .build(); + + const graph = buildDependencyGraph(repo, { fileLevel: false }); + expect(graph.nodeCount).toBe(2); + expect(graph.edgeCount).toBe(1); + expect(graph.hasEdge(String(ids.get('foo')), String(ids.get('bar')))).toBe(true); + }); + + it('respects minConfidence filter', () => { + const repo = new InMemoryRepository(); + const fn1 = repo.addNode({ name: 'foo', kind: 'function', file: 'a.js', line: 5 }); + const fn2 = repo.addNode({ name: 'bar', kind: 'function', file: 'b.js', line: 10 }); + const fn3 = repo.addNode({ name: 'baz', kind: 'function', file: 'c.js', line: 15 }); + repo.addEdge({ source_id: fn1, target_id: fn2, kind: 'calls', confidence: 0.9 }); + repo.addEdge({ source_id: fn1, target_id: fn3, kind: 'calls', confidence: 0.3 }); + + const graph = buildDependencyGraph(repo, { fileLevel: false, minConfidence: 0.5 }); + expect(graph.edgeCount).toBe(1); + expect(graph.hasEdge(String(fn1), String(fn2))).toBe(true); + expect(graph.hasEdge(String(fn1), String(fn3))).toBe(false); + }); + + it('returns all call edges when minConfidence is omitted', () => { + const repo = new InMemoryRepository(); + const fn1 = repo.addNode({ name: 'foo', kind: 'function', file: 'a.js', line: 5 }); + const fn2 = repo.addNode({ name: 'bar', kind: 
'function', file: 'b.js', line: 10 }); + repo.addEdge({ source_id: fn1, target_id: fn2, kind: 'calls', confidence: 0.1 }); + + const graph = buildDependencyGraph(repo, { fileLevel: false }); + expect(graph.edgeCount).toBe(1); + }); +});