diff --git a/e2e/react-start/import-protection/.gitignore b/e2e/react-start/import-protection/.gitignore
index 62972dee371..19519d5615b 100644
--- a/e2e/react-start/import-protection/.gitignore
+++ b/e2e/react-start/import-protection/.gitignore
@@ -26,3 +26,5 @@ port-*.txt
.tanstack
webserver-*.log
+error-build-result.json
+error-build.log
diff --git a/e2e/react-start/import-protection/src/routes/__root.tsx b/e2e/react-start/import-protection/src/routes/__root.tsx
index 3b35826c674..ebdf3be21f7 100644
--- a/e2e/react-start/import-protection/src/routes/__root.tsx
+++ b/e2e/react-start/import-protection/src/routes/__root.tsx
@@ -32,6 +32,8 @@ function RootComponent() {
        <Link to="/client-only-violations">Client-Only Violations</Link>
        {' | '}
        <Link to="/client-only-jsx">Client-Only JSX</Link>
+        {' | '}
+        <Link to="/beforeload-leak">Beforeload Leak</Link>
diff --git a/e2e/react-start/import-protection/src/routes/beforeload-leak.tsx b/e2e/react-start/import-protection/src/routes/beforeload-leak.tsx
new file mode 100644
index 00000000000..fd85225d5de
--- /dev/null
+++ b/e2e/react-start/import-protection/src/routes/beforeload-leak.tsx
@@ -0,0 +1,24 @@
+import { createFileRoute } from '@tanstack/react-router'
+import { getSessionFromRequest } from '../violations/beforeload-server-leak'
+
+export const Route = createFileRoute('/beforeload-leak')({
+ // beforeLoad is NOT stripped by the compiler on the client side.
+ // It is not in splitRouteIdentNodes or deleteNodes, so this import
+ // chain survives: beforeload-leak.tsx -> beforeload-server-leak.ts
+ // -> @tanstack/react-start/server
+ // This is a TRUE POSITIVE violation in the client environment.
+ beforeLoad: () => {
+ const session = getSessionFromRequest()
+ return { session }
+ },
+ component: BeforeloadLeakRoute,
+})
+
+function BeforeloadLeakRoute() {
+  return (
+    <div>
+      <h1 data-testid="beforeload-leak-heading">Beforeload Leak</h1>
+      <div data-testid="beforeload-leak-loaded">Route loaded</div>
+    </div>
+  )
+}
diff --git a/e2e/react-start/import-protection/src/routes/index.tsx b/e2e/react-start/import-protection/src/routes/index.tsx
index 801b8faf9f5..c05ed1a7e36 100644
--- a/e2e/react-start/import-protection/src/routes/index.tsx
+++ b/e2e/react-start/import-protection/src/routes/index.tsx
@@ -13,6 +13,12 @@ import {
safeServerFn,
safeServerOnly,
} from '../violations/boundary-safe'
+import {
+ crossBoundarySafeServerFn,
+ crossBoundarySafeWithAuth,
+} from '../violations/cross-boundary-safe/usage'
+import { safeFn } from '../violations/cross-boundary-leak/safe-consumer'
+import { leakyGetSharedData } from '../violations/cross-boundary-leak/leaky-consumer'
export const Route = createFileRoute('/')({
component: Home,
@@ -31,6 +37,16 @@ function Home() {
       <div data-testid="safe-server-only">{String(typeof safeServerOnly)}</div>
       <div data-testid="safe-server-fn">{String(typeof safeServerFn)}</div>
       <div data-testid="safe-isomorphic">{String(typeof safeIsomorphic)}</div>
+      <div data-testid="cross-boundary-safe-server-fn">
+        {String(typeof crossBoundarySafeServerFn)}
+      </div>
+      <div data-testid="cross-boundary-safe-with-auth">
+        {String(typeof crossBoundarySafeWithAuth)}
+      </div>
+      <div data-testid="cross-boundary-leak-safe">{String(typeof safeFn)}</div>
+      <div data-testid="cross-boundary-leak-leaky">
+        {String(typeof leakyGetSharedData)}
+      </div>
)
}
diff --git a/e2e/react-start/import-protection/src/violations/beforeload-server-leak.ts b/e2e/react-start/import-protection/src/violations/beforeload-server-leak.ts
new file mode 100644
index 00000000000..5735a5eace8
--- /dev/null
+++ b/e2e/react-start/import-protection/src/violations/beforeload-server-leak.ts
@@ -0,0 +1,10 @@
+import { getRequest } from '@tanstack/react-start/server'
+
+// This utility wraps a denied server import and is used in a route's
+// `beforeLoad` hook. `beforeLoad` is NOT in the compiler's
+// splitRouteIdentNodes or deleteNodes lists, so it survives on the client.
+// Using this module in `beforeLoad` is therefore a TRUE POSITIVE violation.
+export function getSessionFromRequest() {
+ const req = getRequest()
+ return { sessionId: req.headers.get('x-session-id') }
+}
diff --git a/e2e/react-start/import-protection/src/violations/cross-boundary-leak/leaky-consumer.ts b/e2e/react-start/import-protection/src/violations/cross-boundary-leak/leaky-consumer.ts
new file mode 100644
index 00000000000..6628705f4f9
--- /dev/null
+++ b/e2e/react-start/import-protection/src/violations/cross-boundary-leak/leaky-consumer.ts
@@ -0,0 +1,9 @@
+import { getSharedData } from './shared-util'
+
+// Leaky: uses the shared utility OUTSIDE any compiler boundary.
+// This must still trigger a violation in the client environment
+// even if safe-consumer.ts already loaded shared-util.ts via
+// a fetchModule chain that silenced its resolveId.
+export function leakyGetSharedData() {
+ return getSharedData()
+}
diff --git a/e2e/react-start/import-protection/src/violations/cross-boundary-leak/safe-consumer.ts b/e2e/react-start/import-protection/src/violations/cross-boundary-leak/safe-consumer.ts
new file mode 100644
index 00000000000..dd22f25beec
--- /dev/null
+++ b/e2e/react-start/import-protection/src/violations/cross-boundary-leak/safe-consumer.ts
@@ -0,0 +1,9 @@
+import { createServerFn } from '@tanstack/react-start'
+import { getSharedData } from './shared-util'
+
+// Safe: uses the shared utility ONLY inside a compiler boundary.
+// The compiler strips this from the client; fetchModule adds shared-util.ts
+// to the serverFnLookupModules set.
+export const safeFn = createServerFn().handler(async () => {
+ return getSharedData()
+})
diff --git a/e2e/react-start/import-protection/src/violations/cross-boundary-leak/shared-util.ts b/e2e/react-start/import-protection/src/violations/cross-boundary-leak/shared-util.ts
new file mode 100644
index 00000000000..01410a851d5
--- /dev/null
+++ b/e2e/react-start/import-protection/src/violations/cross-boundary-leak/shared-util.ts
@@ -0,0 +1,10 @@
+import { getRequest } from '@tanstack/react-start/server'
+
+// Utility that wraps a denied server import. It is consumed by BOTH a
+// safe consumer (inside compiler boundaries) AND a leaky consumer (outside
+// any boundary). The leaky consumer must still trigger a violation even
+// if the safe consumer's fetchModule chain silences the initial resolve.
+export function getSharedData() {
+ const req = getRequest()
+ return { method: req.method }
+}
diff --git a/e2e/react-start/import-protection/src/violations/cross-boundary-safe/auth-wrapper.ts b/e2e/react-start/import-protection/src/violations/cross-boundary-safe/auth-wrapper.ts
new file mode 100644
index 00000000000..a5e06c0505e
--- /dev/null
+++ b/e2e/react-start/import-protection/src/violations/cross-boundary-safe/auth-wrapper.ts
@@ -0,0 +1,15 @@
+import { createMiddleware, createServerFn } from '@tanstack/react-start'
+import { getSessionData } from './session-util'
+
+// This middleware uses the session utility inside a compiler boundary.
+// The compiler should strip the import of session-util from the client.
+const authMiddleware = createMiddleware({ type: 'function' }).server(
+ ({ next }) => {
+ const data = getSessionData()
+ return next({ context: { session: data } })
+ },
+)
+
+// Exports a pre-configured server fn with the middleware attached.
+// This mirrors the real-world `createAuthServerFn` pattern.
+export const createAuthServerFn = createServerFn().middleware([authMiddleware])
diff --git a/e2e/react-start/import-protection/src/violations/cross-boundary-safe/session-util.ts b/e2e/react-start/import-protection/src/violations/cross-boundary-safe/session-util.ts
new file mode 100644
index 00000000000..ab44237539e
--- /dev/null
+++ b/e2e/react-start/import-protection/src/violations/cross-boundary-safe/session-util.ts
@@ -0,0 +1,13 @@
+import { getRequest } from '@tanstack/react-start/server'
+
+// This utility wraps a denied server import but does NOT contain any
+// compiler boundaries. All consumers use it ONLY inside compiler
+// boundaries (createServerFn().handler, createMiddleware().server, etc.)
+// so the compiler should prune this import from the client bundle.
+//
+// This mirrors the real-world pattern of a session utility that wraps
+// `useSession` from `@tanstack/react-start/server`.
+export function getSessionData() {
+ const req = getRequest()
+ return { method: req.method }
+}
diff --git a/e2e/react-start/import-protection/src/violations/cross-boundary-safe/usage.ts b/e2e/react-start/import-protection/src/violations/cross-boundary-safe/usage.ts
new file mode 100644
index 00000000000..8a10dac96af
--- /dev/null
+++ b/e2e/react-start/import-protection/src/violations/cross-boundary-safe/usage.ts
@@ -0,0 +1,18 @@
+import { createServerFn } from '@tanstack/react-start'
+import { getSessionData } from './session-util'
+import { createAuthServerFn } from './auth-wrapper'
+
+// Pattern 1: Direct use of session utility inside a server fn handler.
+// This mirrors login.tsx importing useAppSession and using it in
+// createServerFn().handler().
+export const crossBoundarySafeServerFn = createServerFn().handler(async () => {
+ return getSessionData()
+})
+
+// Pattern 2: Using the pre-configured server fn from auth-wrapper.
+// This mirrors user.tsx importing createAuthServerFn().handler().
+export const crossBoundarySafeWithAuth = createAuthServerFn().handler(
+ async () => {
+ return { ok: true }
+ },
+)
diff --git a/e2e/react-start/import-protection/tests/error-mode.setup.ts b/e2e/react-start/import-protection/tests/error-mode.setup.ts
index c581aad8ec9..105ac304314 100644
--- a/e2e/react-start/import-protection/tests/error-mode.setup.ts
+++ b/e2e/react-start/import-protection/tests/error-mode.setup.ts
@@ -1,24 +1,71 @@
import fs from 'node:fs'
import path from 'node:path'
-import { execSync } from 'node:child_process'
+import { execSync, spawn } from 'node:child_process'
+import { chromium } from '@playwright/test'
+import { getTestServerPort } from '@tanstack/router-e2e-utils'
+import packageJson from '../package.json' with { type: 'json' }
import type { FullConfig } from '@playwright/test'
/**
* Global setup for error-mode E2E tests.
*
- * Runs `BEHAVIOR=error pnpm build` and captures the output + exit code.
- * The build is *expected* to fail because `behavior: 'error'` causes the
- * import-protection plugin to call `this.error()` on the first violation,
- * which aborts the Vite/Rollup build with a non-zero exit code.
+ * 1. Runs `BEHAVIOR=error pnpm build` — expected to fail because the plugin
+ * calls `this.error()` on the first violation, aborting the Rollup build.
+ * Output is written to `error-build-result.json`.
*
- * Results are written to `error-build-result.json` for the spec to read.
+ * 2. Starts a dev server with `BEHAVIOR=error`, navigates all violation
+ * routes, then captures the server log. In dev mode `this.error()` causes
+ * a module-level 500 (the server stays up). Output is written to
+ * `error-dev-result.json`.
*/
-export default async function globalSetup(config: FullConfig) {
- void config
- const cwd = path.resolve(import.meta.dirname, '..')
- const outFile = path.resolve(cwd, 'error-build-result.json')
- // Clean up from previous runs.
+async function waitForHttpOk(url: string, timeoutMs: number): Promise<void> {
+ const start = Date.now()
+ // eslint-disable-next-line @typescript-eslint/no-unnecessary-condition
+ while (true) {
+ if (Date.now() - start > timeoutMs) {
+ throw new Error(`Timed out waiting for ${url}`)
+ }
+ try {
+ const res = await fetch(url, { signal: AbortSignal.timeout(1000) })
+ if (res.ok) return
+ } catch {
+ // ignore
+ }
+ await new Promise((r) => setTimeout(r, 200))
+ }
+}
+
+async function killChild(child: ReturnType<typeof spawn>): Promise<void> {
+ if (child.exitCode !== null || child.killed) return
+  await new Promise<void>((resolve) => {
+ let resolved = false
+ const done = () => {
+ if (resolved) return
+ resolved = true
+ resolve()
+ }
+ child.once('exit', done)
+ child.once('error', done)
+ try {
+ child.kill('SIGTERM')
+ } catch {
+ done()
+ return
+ }
+ setTimeout(() => {
+ try {
+ child.kill('SIGKILL')
+ } catch {
+ // ignore
+ }
+ setTimeout(done, 500)
+ }, 3000)
+ })
+}
+
+function captureBuild(cwd: string): void {
+ const outFile = path.resolve(cwd, 'error-build-result.json')
for (const f of ['error-build-result.json', 'error-build.log']) {
const p = path.resolve(cwd, f)
if (fs.existsSync(p)) fs.unlinkSync(p)
@@ -31,38 +78,99 @@ export default async function globalSetup(config: FullConfig) {
try {
const output = execSync('pnpm build', {
cwd,
- env: {
- ...process.env,
- BEHAVIOR: 'error',
- },
+ env: { ...process.env, BEHAVIOR: 'error' },
encoding: 'utf-8',
stdio: ['ignore', 'pipe', 'pipe'],
timeout: 120_000,
})
stdout = output
} catch (err: any) {
- // execSync throws on non-zero exit code — this is the *expected* path.
exitCode = err.status ?? 1
stdout = err.stdout ?? ''
stderr = err.stderr ?? ''
}
const combined = `${stdout}\n${stderr}`
-
- // Persist the log for debugging.
fs.writeFileSync(path.resolve(cwd, 'error-build.log'), combined)
-
fs.writeFileSync(
outFile,
- JSON.stringify(
- {
- exitCode,
- stdout,
- stderr,
- combined,
- },
- null,
- 2,
- ),
+ JSON.stringify({ exitCode, stdout, stderr, combined }, null, 2),
)
}
+
+const routes = [
+ '/',
+ '/leaky-server-import',
+ '/client-only-violations',
+ '/client-only-jsx',
+ '/beforeload-leak',
+]
+
+async function captureDev(cwd: string): Promise<void> {
+ const outFile = path.resolve(cwd, 'error-dev-result.json')
+ for (const f of ['error-dev-result.json', 'error-dev.log']) {
+ const p = path.resolve(cwd, f)
+ if (fs.existsSync(p)) fs.unlinkSync(p)
+ }
+
+ const port = await getTestServerPort(`${packageJson.name}_error_dev`)
+ const baseURL = `http://localhost:${port}`
+ const logFile = path.resolve(cwd, 'error-dev.log')
+
+ const out = fs.createWriteStream(logFile)
+ const child = spawn('pnpm', ['exec', 'vite', 'dev', '--port', String(port)], {
+ cwd,
+ env: {
+ ...process.env,
+ BEHAVIOR: 'error',
+ PORT: String(port),
+ VITE_SERVER_PORT: String(port),
+ VITE_NODE_ENV: 'test',
+ },
+ stdio: ['ignore', 'pipe', 'pipe'],
+ })
+
+ child.stdout?.on('data', (d: Buffer) => out.write(d))
+ child.stderr?.on('data', (d: Buffer) => out.write(d))
+
+ try {
+ await waitForHttpOk(baseURL, 30_000)
+
+ const browser = await chromium.launch()
+ try {
+ const context = await browser.newContext()
+ const page = await context.newPage()
+ for (const route of routes) {
+ try {
+ await page.goto(`${baseURL}${route}`, {
+ waitUntil: 'networkidle',
+ timeout: 15_000,
+ })
+ } catch {
+ // expected — modules fail with 500 in error mode
+ }
+ }
+ await context.close()
+ } finally {
+ await browser.close()
+ }
+
+ await new Promise((r) => setTimeout(r, 750))
+ } finally {
+ await killChild(child)
+ await new Promise((resolve) => out.end(resolve))
+ }
+
+ const combined = fs.existsSync(logFile)
+ ? fs.readFileSync(logFile, 'utf-8')
+ : ''
+ fs.writeFileSync(outFile, JSON.stringify({ combined }, null, 2))
+}
+
+export default async function globalSetup(config: FullConfig) {
+ void config
+ const cwd = path.resolve(import.meta.dirname, '..')
+
+ captureBuild(cwd)
+ await captureDev(cwd)
+}
diff --git a/e2e/react-start/import-protection/tests/error-mode.spec.ts b/e2e/react-start/import-protection/tests/error-mode.spec.ts
index 501be493c79..7e99bb3ec92 100644
--- a/e2e/react-start/import-protection/tests/error-mode.spec.ts
+++ b/e2e/react-start/import-protection/tests/error-mode.spec.ts
@@ -4,93 +4,116 @@ import { test } from '@tanstack/router-e2e-utils'
import { extractViolationsFromLog, stripAnsi } from './violations.utils'
-interface ErrorBuildResult {
- exitCode: number
- stdout: string
- stderr: string
+interface ErrorResult {
+ exitCode?: number
+ stdout?: string
+ stderr?: string
combined: string
}
-async function readBuildResult(): Promise<ErrorBuildResult> {
- const resultPath = path.resolve(
- import.meta.dirname,
- '..',
- 'error-build-result.json',
- )
- const mod = await import(resultPath, {
+async function readResult(name: string): Promise<ErrorResult> {
+ const p = path.resolve(import.meta.dirname, '..', name)
+ const mod: { default: ErrorResult } = await import(p, {
with: { type: 'json' },
- } as any)
- return mod.default as ErrorBuildResult
+ })
+ return mod.default
}
-// ---------------------------------------------------------------------------
-// Error-mode E2E tests
-//
-// When `behavior: 'error'`, the import-protection plugin calls `this.error()`
-// on the first violation it encounters, which causes the Vite/Rollup build to
-// abort with a non-zero exit code. These tests verify that behavior.
-// ---------------------------------------------------------------------------
+// Build error mode tests
test('build fails with non-zero exit code in error mode', async () => {
- const result = await readBuildResult()
+ const result = await readResult('error-build-result.json')
expect(result.exitCode).not.toBe(0)
})
test('build output contains import-protection violation', async () => {
- const result = await readBuildResult()
+ const result = await readResult('error-build-result.json')
const text = stripAnsi(result.combined)
-
- // The error output must contain the structured violation header
expect(text).toContain('[import-protection] Import denied in')
})
-test('violation mentions the environment (client or ssr)', async () => {
- const result = await readBuildResult()
+test('build violation mentions environment', async () => {
+ const result = await readResult('error-build-result.json')
const text = stripAnsi(result.combined)
-
- // At least one of the violation environments should appear
const hasClient = text.includes('Import denied in client environment')
const hasServer = text.includes('Import denied in server environment')
expect(hasClient || hasServer).toBe(true)
})
-test('violation includes importer and specifier details', async () => {
- const result = await readBuildResult()
+test('build violation includes importer and specifier', async () => {
+ const result = await readResult('error-build-result.json')
const text = stripAnsi(result.combined)
-
expect(text).toContain('Importer:')
expect(text).toContain('Import:')
})
-test('violation includes denial reason', async () => {
- const result = await readBuildResult()
+test('build violation includes denial reason', async () => {
+ const result = await readResult('error-build-result.json')
const text = stripAnsi(result.combined)
-
- // Must include one of the denial reason types
const hasFilePattern = text.includes('Denied by file pattern')
const hasSpecifierPattern = text.includes('Denied by specifier pattern')
const hasMarker = text.includes('Denied by marker')
expect(hasFilePattern || hasSpecifierPattern || hasMarker).toBe(true)
})
-test('violation includes trace information', async () => {
- const result = await readBuildResult()
+test('build violation includes trace', async () => {
+ const result = await readResult('error-build-result.json')
const text = stripAnsi(result.combined)
-
expect(text).toContain('Trace:')
- // The trace should have at least one numbered step
expect(text).toMatch(/\d+\.\s+\S+/)
})
-test('violation is parseable by extractViolationsFromLog', async () => {
- const result = await readBuildResult()
+test('build violation is parseable', async () => {
+ const result = await readResult('error-build-result.json')
const violations = extractViolationsFromLog(result.combined)
+ expect(violations.length).toBeGreaterThanOrEqual(1)
- // In error mode the build aborts after the first violation, so we expect
- // exactly one violation to be logged.
+ const v = violations[0]
+ expect(v.envType).toMatch(/^(client|server)$/)
+ expect(v.importer).toBeTruthy()
+ expect(v.specifier).toBeTruthy()
+ expect(v.type).toMatch(/^(file|specifier|marker)$/)
+ expect(v.trace.length).toBeGreaterThanOrEqual(1)
+})
+
+// Dev error mode tests
+
+test('dev server logs contain import-protection error', async () => {
+ const result = await readResult('error-dev-result.json')
+ const text = stripAnsi(result.combined)
+ expect(text).toContain('[import-protection] Import denied in')
+})
+
+test('dev error violation mentions environment', async () => {
+ const result = await readResult('error-dev-result.json')
+ const text = stripAnsi(result.combined)
+ const hasClient = text.includes('Import denied in client environment')
+ const hasServer = text.includes('Import denied in server environment')
+ expect(hasClient || hasServer).toBe(true)
+})
+
+test('dev error violation includes importer and specifier', async () => {
+ const result = await readResult('error-dev-result.json')
+ const text = stripAnsi(result.combined)
+ expect(text).toContain('Importer:')
+ expect(text).toContain('Import:')
+})
+
+test('dev error violation includes denial reason', async () => {
+ const result = await readResult('error-dev-result.json')
+ const text = stripAnsi(result.combined)
+ const hasFilePattern = text.includes('Denied by file pattern')
+ const hasSpecifierPattern = text.includes('Denied by specifier pattern')
+ const hasMarker = text.includes('Denied by marker')
+ expect(hasFilePattern || hasSpecifierPattern || hasMarker).toBe(true)
+})
+
+test('dev error violation is parseable', async () => {
+ const result = await readResult('error-dev-result.json')
+ const violations = extractViolationsFromLog(result.combined)
expect(violations.length).toBeGreaterThanOrEqual(1)
- const v = violations[0]!
+ const v = violations[0]
expect(v.envType).toMatch(/^(client|server)$/)
expect(v.importer).toBeTruthy()
expect(v.specifier).toBeTruthy()
diff --git a/e2e/react-start/import-protection/tests/import-protection.spec.ts b/e2e/react-start/import-protection/tests/import-protection.spec.ts
index 8bc40e1e9a9..57738b2952c 100644
--- a/e2e/react-start/import-protection/tests/import-protection.spec.ts
+++ b/e2e/react-start/import-protection/tests/import-protection.spec.ts
@@ -1,31 +1,10 @@
import path from 'node:path'
import { expect } from '@playwright/test'
import { test } from '@tanstack/router-e2e-utils'
-
-interface TraceStep {
- file: string
- specifier?: string
- line?: number
- column?: number
-}
-
-interface CodeSnippet {
-  lines: Array<string>
- location?: string
-}
-
-interface Violation {
- type: string
- specifier: string
- importer: string
- resolved?: string
-  trace: Array<TraceStep>
- snippet?: CodeSnippet
- envType?: string
-}
+import type { Violation } from './violations.utils'
async function readViolations(
- type: 'build' | 'dev',
+ type: 'build' | 'dev' | 'dev.cold' | 'dev.warm',
+): Promise<Array<Violation>> {
const filename = `violations.${type}.json`
const violationsPath = path.resolve(import.meta.dirname, '..', filename)
@@ -65,15 +44,12 @@ test('client-only violations route loads in mock mode', async ({ page }) => {
)
})
-test('violations.build.json is written during build', async () => {
- const violations = await readViolations('build')
- expect(violations.length).toBeGreaterThan(0)
-})
-
-test('violations.dev.json is written during dev', async () => {
- const violations = await readViolations('dev')
- expect(violations.length).toBeGreaterThan(0)
-})
+for (const mode of ['build', 'dev'] as const) {
+ test(`violations.${mode}.json is written during ${mode}`, async () => {
+ const violations = await readViolations(mode)
+ expect(violations.length).toBeGreaterThan(0)
+ })
+}
test('file-based violation: client importing .server. file', async () => {
const violations = await readViolations('build')
@@ -108,22 +84,22 @@ test('marker violation: client importing server-only marked module', async () =>
expect(markerViolation).toBeDefined()
})
-test('violations contain trace information', async () => {
- const violations = await readViolations('build')
-
- // File-based violation should have trace info
- const fileViolation = violations.find(
- (v) =>
- v.type === 'file' &&
- (v.specifier.includes('secret.server') ||
- v.resolved?.includes('secret.server')),
- )
-
- expect(fileViolation).toBeDefined()
- // The trace should show the import chain
- expect(fileViolation!.trace).toBeDefined()
- expect(fileViolation!.trace.length).toBeGreaterThanOrEqual(2)
-})
+for (const mode of ['build', 'dev'] as const) {
+ test(`violations contain trace information in ${mode}`, async () => {
+ const violations = await readViolations(mode)
+
+ const fileViolation = violations.find(
+ (v) =>
+ v.type === 'file' &&
+ (v.specifier.includes('secret.server') ||
+ v.resolved?.includes('secret.server')),
+ )
+
+ expect(fileViolation).toBeDefined()
+ expect(fileViolation!.trace).toBeDefined()
+ expect(fileViolation!.trace.length).toBeGreaterThanOrEqual(2)
+ })
+}
test('deep trace includes full chain', async () => {
const violations = await readViolations('build')
@@ -140,171 +116,153 @@ test('deep trace includes full chain', async () => {
expect(traceText).toContain('violations/edge-3')
})
-test('all trace steps include line numbers', async () => {
- const violations = await readViolations('build')
-
- // Find a violation with a multi-step trace (the deep chain)
- const v = violations.find(
- (x) => x.type === 'file' && x.importer.includes('edge-3'),
- )
- expect(v).toBeDefined()
- expect(v!.trace.length).toBeGreaterThanOrEqual(3)
-
- // Every trace step (except possibly the entry) should have a line number.
- // The entry (step 0) may not have one if it has no specifier pointing into it.
- // All non-entry steps should have line numbers since they import something.
- for (let i = 1; i < v!.trace.length; i++) {
- const step = v!.trace[i]
- expect(
- step.line,
- `trace step ${i} (${step.file}) should have a line number`,
- ).toBeDefined()
- expect(step.line).toBeGreaterThan(0)
- }
-})
-
-test('leaf trace step includes the denied import specifier', async () => {
- const violations = await readViolations('build')
-
- const v = violations.find(
- (x) => x.type === 'file' && x.importer.includes('edge-a'),
- )
- expect(v).toBeDefined()
-
- // The last trace step should be the leaf (edge-a) and include the specifier
- const last = v!.trace[v!.trace.length - 1]
- expect(last.file).toContain('edge-a')
- expect(last.specifier).toContain('secret.server')
- expect(last.line).toBeDefined()
- expect(last.line).toBeGreaterThan(0)
-})
-
-test('violation includes code snippet showing offending usage', async () => {
- const violations = await readViolations('build')
-
- // File violation for edge-a should have a code snippet
- const v = violations.find(
- (x) => x.type === 'file' && x.importer.includes('edge-a'),
- )
- expect(v).toBeDefined()
- expect(v!.snippet).toBeDefined()
- expect(v!.snippet!.lines.length).toBeGreaterThan(0)
-
- // The snippet should contain the usage site of the denied import's binding.
- // The post-compile usage finder locates where `getSecret` is called (line 9),
- // which is more useful than pointing at the import statement itself.
- const snippetText = v!.snippet!.lines.join('\n')
- expect(snippetText).toContain('getSecret')
-
- // The snippet location should be a clickable file:line:col reference
- if (v!.snippet!.location) {
- expect(v!.snippet!.location).toMatch(/:\d+:\d+/)
- }
-})
-
-test('compiler leak violation includes line/col in importer', async () => {
- const violations = await readViolations('build')
- const v = violations.find(
- (x) => x.importer.includes('compiler-leak') && x.type === 'file',
- )
- expect(v).toBeDefined()
-
- // Should be clickable-ish: path:line:col
- expect(v!.importer).toMatch(/:\d+:\d+$/)
-})
-
-test('leaky @tanstack/react-start/server import points to usage site', async () => {
- const violations = await readViolations('build')
- const v = violations.find(
- (x) =>
- x.type === 'specifier' && x.specifier === '@tanstack/react-start/server',
- )
- expect(v).toBeDefined()
-
- // Importer should include a mapped location.
- expect(v!.importer).toContain('violations/leaky-server-import')
- expect(v!.importer).toMatch(/:\d+:\d+$/)
-})
-
-test('all client-env violations are in the client environment', async () => {
- const violations = await readViolations('build')
-
- // Server-only violations (client env importing server stuff)
- const clientViolations = violations.filter((v) => v.envType === 'client')
- expect(clientViolations.length).toBeGreaterThanOrEqual(2)
-})
-
-test('dev violations include client environment violations', async () => {
- const violations = await readViolations('dev')
- expect(violations.length).toBeGreaterThan(0)
- const clientViolations = violations.filter((v) => v.envType === 'client')
- expect(clientViolations.length).toBeGreaterThanOrEqual(1)
-})
-
-test('dev violations include code snippets', async () => {
- const violations = await readViolations('dev')
-
- // Find a file-based client violation (e.g. compiler-leak or edge-a importing secret.server)
- const fileViolation = violations.find(
- (v) =>
- v.type === 'file' &&
- v.envType === 'client' &&
- (v.specifier.includes('secret.server') ||
- v.resolved?.includes('secret.server')),
- )
-
- expect(fileViolation).toBeDefined()
- expect(fileViolation!.snippet).toBeDefined()
- expect(fileViolation!.snippet!.lines.length).toBeGreaterThan(0)
+for (const mode of ['build', 'dev'] as const) {
+ test(`all trace steps include line numbers in ${mode}`, async () => {
+ const violations = await readViolations(mode)
+
+ // Find a violation with a multi-step trace
+ const v = violations.find((x) => x.type === 'file' && x.trace.length >= 3)
+ expect(v).toBeDefined()
+
+ // Every non-entry trace step should have a line number, except:
+ // - Virtual specifiers (e.g. ?tsr-split=) injected by the router plugin
+ // - routeTree.gen.ts steps (generated code, import locations unreliable)
+ // - Steps immediately after ?tsr-split= (the re-entry from the split chunk)
+ for (let i = 1; i < v!.trace.length; i++) {
+ const step = v!.trace[i]
+ if (step.specifier?.includes('?tsr-split=')) continue
+ if (step.file.includes('routeTree.gen')) continue
+ // In dev mode, the step right after a ?tsr-split= virtual step
+ // re-enters the same file — its import may not be locatable.
+ const prev = v!.trace[i - 1]
+ if (prev?.specifier?.includes('?tsr-split=')) continue
+
+ expect(
+ step.line,
+ `trace step ${i} (${step.file}) should have a line number`,
+ ).toBeDefined()
+ expect(step.line).toBeGreaterThan(0)
+ }
+ })
+}
- // The snippet should show original source (not transformed/compiled output)
- const snippetText = fileViolation!.snippet!.lines.join('\n')
- expect(snippetText).toContain('getSecret')
+for (const mode of ['build', 'dev'] as const) {
+ test(`leaf trace step includes the denied import specifier in ${mode}`, async () => {
+ const violations = await readViolations(mode)
+
+ const v = violations.find(
+ (x) =>
+ x.type === 'file' &&
+ x.envType === 'client' &&
+ (x.specifier.includes('secret.server') ||
+ x.resolved?.includes('secret.server')),
+ )
+ expect(v).toBeDefined()
+
+ const last = v!.trace[v!.trace.length - 1]
+ expect(last.specifier).toContain('secret.server')
+ expect(last.line).toBeDefined()
+ expect(last.line).toBeGreaterThan(0)
+ })
+}
- // The snippet location should be a clickable file:line:col reference
- if (fileViolation!.snippet!.location) {
- expect(fileViolation!.snippet!.location).toMatch(/:\d+:\d+/)
- }
-})
+for (const mode of ['build', 'dev'] as const) {
+ test(`violation includes code snippet showing offending usage in ${mode}`, async () => {
+ const violations = await readViolations(mode)
+
+ const v = violations.find(
+ (x) =>
+ x.type === 'file' &&
+ x.envType === 'client' &&
+ (x.specifier.includes('secret.server') ||
+ x.resolved?.includes('secret.server')),
+ )
+ expect(v).toBeDefined()
+ expect(v!.snippet).toBeDefined()
+ expect(v!.snippet!.lines.length).toBeGreaterThan(0)
+
+ const snippetText = v!.snippet!.lines.join('\n')
+ expect(snippetText).toContain('getSecret')
+
+ if (v!.snippet!.location) {
+ expect(v!.snippet!.location).toMatch(/:\d+:\d+/)
+ }
+ })
+}
-test('no violation for .server import used only inside compiler boundaries', async () => {
- const violations = await readViolations('build')
+for (const mode of ['build', 'dev'] as const) {
+ test(`compiler leak violation includes line/col in importer in ${mode}`, async () => {
+ const violations = await readViolations(mode)
+ const v = violations.find(
+ (x) => x.importer.includes('compiler-leak') && x.type === 'file',
+ )
+ expect(v).toBeDefined()
+ expect(v!.importer).toMatch(/:\d+:\d+$/)
+ })
+}
- // boundary-safe.ts imports secret.server.ts, but the import should be pruned
- // from the client build because it is only referenced inside compiler
- // boundaries (createServerFn/createServerOnlyFn/createIsomorphicFn).
- const safeHits = violations.filter(
- (v) =>
- v.envType === 'client' &&
- (v.importer.includes('boundary-safe') ||
- v.trace.some((s) => s.file.includes('boundary-safe'))),
- )
+for (const mode of ['build', 'dev'] as const) {
+ test(`leaky @tanstack/react-start/server import points to usage site in ${mode}`, async () => {
+ const violations = await readViolations(mode)
+ const v = violations.find(
+ (x) =>
+ x.type === 'specifier' &&
+ x.specifier === '@tanstack/react-start/server' &&
+ x.importer.includes('leaky-server-import'),
+ )
+ expect(v).toBeDefined()
+ expect(v!.importer).toContain('violations/leaky-server-import')
+ expect(v!.importer).toMatch(/:\d+:\d+$/)
+ })
+}
- expect(safeHits).toEqual([])
-})
+for (const mode of ['build', 'dev'] as const) {
+ test(`client-env violations exist in ${mode}`, async () => {
+ const violations = await readViolations(mode)
+ const clientViolations = violations.filter((v) => v.envType === 'client')
+ expect(clientViolations.length).toBeGreaterThanOrEqual(
+ mode === 'build' ? 2 : 1,
+ )
+ })
+}
-test('compiler-processed module has code snippet in dev', async () => {
- const violations = await readViolations('dev')
+for (const mode of ['build', 'dev'] as const) {
+ test(`no false positive for boundary-safe pattern in ${mode}`, async () => {
+ const violations = await readViolations(mode)
+
+ // boundary-safe.ts imports secret.server.ts but only uses it inside
+ // compiler boundaries (createServerFn/createServerOnlyFn/createIsomorphicFn).
+    const isBoundarySafe = (s: string) => /(?<!cross-)boundary-safe/.test(s)
+
+    const safeHits = violations.filter(
+      (v) =>
+ v.envType === 'client' &&
+ (isBoundarySafe(v.importer) ||
+ v.trace.some((s) => isBoundarySafe(s.file))),
+ )
+
+ expect(safeHits).toEqual([])
+ })
+}
- // compiler-leak.ts is processed by the Start compiler (createServerFn),
- // which shortens the output. The snippet must still show the original
- // source lines (mapped via sourcesContent in the compiler's sourcemap).
- const compilerViolation = violations.find(
- (v) => v.envType === 'client' && v.importer.includes('compiler-leak'),
- )
+for (const mode of ['build', 'dev'] as const) {
+ test(`compiler-processed module has code snippet in ${mode}`, async () => {
+ const violations = await readViolations(mode)
- expect(compilerViolation).toBeDefined()
- expect(compilerViolation!.snippet).toBeDefined()
- expect(compilerViolation!.snippet!.lines.length).toBeGreaterThan(0)
+ // compiler-leak.ts is processed by the Start compiler (createServerFn),
+ // which shortens the output. The snippet must still show the original
+ // source lines (mapped via sourcesContent in the compiler's sourcemap).
+ const compilerViolation = violations.find(
+ (v) => v.envType === 'client' && v.importer.includes('compiler-leak'),
+ )
- // The snippet should contain the original source, not compiled output
- const snippetText = compilerViolation!.snippet!.lines.join('\n')
- expect(snippetText).toContain('getSecret')
-})
+ expect(compilerViolation).toBeDefined()
+ expect(compilerViolation!.snippet).toBeDefined()
+ expect(compilerViolation!.snippet!.lines.length).toBeGreaterThan(0)
-// ---------------------------------------------------------------------------
-// Client-only violations: server (SSR) importing client-only code
-// ---------------------------------------------------------------------------
+ const snippetText = compilerViolation!.snippet!.lines.join('\n')
+ expect(snippetText).toContain('getSecret')
+ })
+}
test('file-based violation: SSR importing .client. file', async () => {
const violations = await readViolations('build')
@@ -382,38 +340,161 @@ test('build has violations in both client and SSR environments', async () => {
expect(ssrViolations.length).toBeGreaterThanOrEqual(2)
})
-test('no false positive for factory-safe middleware pattern in dev', async () => {
- const violations = await readViolations('dev')
+for (const mode of ['build', 'dev'] as const) {
+ test(`no false positive for factory-safe middleware pattern in ${mode}`, async () => {
+ const violations = await readViolations(mode)
+
+ // createSecretFactory.ts uses @tanstack/react-start/server and ../secret.server
+ // ONLY inside createMiddleware().server() callbacks. The compiler strips these
+ // on the client, so import-protection must not fire for them.
+ const factoryHits = violations.filter(
+ (v) =>
+ v.envType === 'client' &&
+ (v.importer.includes('createSecretFactory') ||
+ v.importer.includes('factory-safe') ||
+ v.trace.some(
+ (s) =>
+ s.file.includes('createSecretFactory') ||
+ s.file.includes('factory-safe'),
+ )),
+ )
+
+ expect(factoryHits).toEqual([])
+ })
+}
+
+for (const mode of ['build', 'dev'] as const) {
+ test(`no false positive for cross-boundary-safe pattern in ${mode}`, async () => {
+ const violations = await readViolations(mode)
+
+ // session-util.ts imports @tanstack/react-start/server, but it's only ever
+ // imported by usage.ts which uses it exclusively inside compiler boundaries
+ // (createServerFn().handler, createMiddleware().server). The compiler should
+ // prune the import chain from the client build.
+ const crossHits = violations.filter(
+ (v) =>
+ v.envType === 'client' &&
+ (v.importer.includes('cross-boundary-safe') ||
+ v.importer.includes('session-util') ||
+ v.trace.some(
+ (s) =>
+ s.file.includes('cross-boundary-safe') ||
+ s.file.includes('session-util'),
+ )),
+ )
+
+ expect(crossHits).toEqual([])
+ })
+}
+
+for (const mode of ['build', 'dev'] as const) {
+ test(`cross-boundary-leak: leaky consumer still produces violation in ${mode}`, async () => {
+ const violations = await readViolations(mode)
+
+ const leakHits = violations.filter(
+ (v) =>
+ v.envType === 'client' &&
+ (v.importer.includes('cross-boundary-leak') ||
+ v.importer.includes('shared-util') ||
+ v.trace.some(
+ (s) =>
+ s.file.includes('leaky-consumer') ||
+ s.file.includes('shared-util'),
+ )),
+ )
+
+ expect(leakHits.length).toBeGreaterThanOrEqual(1)
+ })
+}
- // createSecretFactory.ts uses @tanstack/react-start/server and ../secret.server
- // ONLY inside createMiddleware().server() callbacks. The compiler strips these
- // on the client, so import-protection must not fire for them.
- const factoryHits = violations.filter(
+for (const mode of ['build', 'dev'] as const) {
+ test(`beforeload-leak: server import via beforeLoad triggers client violation in ${mode}`, async () => {
+ const violations = await readViolations(mode)
+
+ const hits = violations.filter(
+ (v) =>
+ v.envType === 'client' &&
+ (v.importer.includes('beforeload-server-leak') ||
+ v.importer.includes('beforeload-leak') ||
+ v.trace.some(
+ (s) =>
+ s.file.includes('beforeload-server-leak') ||
+ s.file.includes('beforeload-leak'),
+ )),
+ )
+
+ expect(hits.length).toBeGreaterThanOrEqual(1)
+
+ if (mode === 'build') {
+ const specHit = hits.find(
+ (v) =>
+ v.type === 'specifier' &&
+ v.specifier === '@tanstack/react-start/server',
+ )
+ expect(specHit).toBeDefined()
+ }
+ })
+}
+
+test('beforeload-leak: violation trace includes the route file', async () => {
+ const violations = await readViolations('build')
+
+ const hit = violations.find(
(v) =>
v.envType === 'client' &&
- (v.importer.includes('createSecretFactory') ||
- v.importer.includes('factory-safe') ||
- v.trace.some(
- (s) =>
- s.file.includes('createSecretFactory') ||
- s.file.includes('factory-safe'),
- )),
+ v.type === 'specifier' &&
+ v.specifier === '@tanstack/react-start/server' &&
+ (v.importer.includes('beforeload-server-leak') ||
+ v.trace.some((s) => s.file.includes('beforeload-server-leak'))),
)
- expect(factoryHits).toEqual([])
+ expect(hit).toBeDefined()
+ expect(hit!.trace.length).toBeGreaterThanOrEqual(2)
+
+ // The trace should include beforeload-leak route somewhere in the chain
+ const traceFiles = hit!.trace.map((s) => s.file).join(' -> ')
+ expect(traceFiles).toContain('beforeload-leak')
})
-test('no false positive for boundary-safe pattern in dev', async () => {
- const violations = await readViolations('dev')
+// Warm-start regression tests: second navigation (cached modules) must
+// still produce the same violations as the cold run.
- // boundary-safe.ts imports secret.server.ts but only uses it inside
- // compiler boundaries (createServerFn/createServerOnlyFn/createIsomorphicFn).
- const safeHits = violations.filter(
- (v) =>
- v.envType === 'client' &&
- (v.importer.includes('boundary-safe') ||
- v.trace.some((s) => s.file.includes('boundary-safe'))),
- )
+test('warm run produces violations', async () => {
+ const warm = await readViolations('dev.warm')
+ expect(warm.length).toBeGreaterThan(0)
+})
+
+test('warm run detects the same unique violations as cold run', async () => {
+ const cold = await readViolations('dev.cold')
+ const warm = await readViolations('dev.warm')
+
+ // Deduplicate by (envType, type, specifier, importer-file) since the same
+ // logical violation can be reported multiple times via different code paths.
+ const uniqueKey = (v: Violation) =>
+ `${v.envType}|${v.type}|${v.specifier}|${v.importer.replace(/:.*/, '')}`
+
+ const coldUniq = [...new Set(cold.map(uniqueKey))].sort()
+ const warmUniq = [...new Set(warm.map(uniqueKey))].sort()
+ expect(warmUniq).toEqual(coldUniq)
+})
+
+test('warm run traces include line numbers', async () => {
+ const warm = await readViolations('dev.warm')
+
+ const v = warm.find((x) => x.type === 'file' && x.trace.length >= 3)
+ expect(v).toBeDefined()
- expect(safeHits).toEqual([])
+ for (let i = 1; i < v!.trace.length; i++) {
+ const step = v!.trace[i]
+ if (step.specifier?.includes('?tsr-split=')) continue
+ if (step.file.includes('routeTree.gen')) continue
+ const prev = v!.trace[i - 1]
+ if (prev?.specifier?.includes('?tsr-split=')) continue
+
+ expect(
+ step.line,
+ `warm trace step ${i} (${step.file}) should have a line number`,
+ ).toBeDefined()
+ expect(step.line).toBeGreaterThan(0)
+ }
})
diff --git a/e2e/react-start/import-protection/tests/violations.setup.ts b/e2e/react-start/import-protection/tests/violations.setup.ts
index 084f5d0e39a..07233d602c9 100644
--- a/e2e/react-start/import-protection/tests/violations.setup.ts
+++ b/e2e/react-start/import-protection/tests/violations.setup.ts
@@ -1,15 +1,18 @@
import fs from 'node:fs'
import path from 'node:path'
import { spawn } from 'node:child_process'
-import { chromium, type FullConfig } from '@playwright/test'
+import { chromium } from '@playwright/test'
import { getTestServerPort } from '@tanstack/router-e2e-utils'
import packageJson from '../package.json' with { type: 'json' }
import { extractViolationsFromLog } from './violations.utils'
+import type { FullConfig } from '@playwright/test'
+import type { Violation } from './violations.utils'
async function waitForHttpOk(url: string, timeoutMs: number): Promise {
const start = Date.now()
- // eslint-disable-next-line no-constant-condition
+
+ // eslint-disable-next-line @typescript-eslint/no-unnecessary-condition
while (true) {
if (Date.now() - start > timeoutMs) {
throw new Error(`Timed out waiting for ${url}`)
@@ -75,65 +78,90 @@ const routes = [
'/leaky-server-import',
'/client-only-violations',
'/client-only-jsx',
+ '/beforeload-leak',
]
-async function captureDevViolations(cwd: string): Promise {
- const port = await getTestServerPort(`${packageJson.name}_dev`)
- const baseURL = `http://localhost:${port}`
- const logFile = path.resolve(cwd, 'webserver-dev.log')
+async function navigateAllRoutes(
+ baseURL: string,
+ browser: Awaited>,
+): Promise {
+ const context = await browser.newContext()
+ const page = await context.newPage()
+
+ for (const route of routes) {
+ try {
+ await page.goto(`${baseURL}${route}`, {
+ waitUntil: 'networkidle',
+ timeout: 15_000,
+ })
+ } catch {
+ // ignore navigation errors — we only care about server logs
+ }
+ }
+
+ await context.close()
+}
- const out = fs.createWriteStream(logFile)
+/**
+ * Starts a dev server, navigates all routes, captures violations.
+ * Returns the extracted violations array.
+ */
+async function runDevPass(
+ cwd: string,
+ port: number,
+): Promise> {
+ const baseURL = `http://localhost:${port}`
+ const logChunks: Array = []
const child = startDevServer(cwd, port)
- child.stdout?.on('data', (d: Buffer) => out.write(d))
- child.stderr?.on('data', (d: Buffer) => out.write(d))
+ child.stdout?.on('data', (d: Buffer) => logChunks.push(d.toString()))
+ child.stderr?.on('data', (d: Buffer) => logChunks.push(d.toString()))
try {
await waitForHttpOk(baseURL, 30_000)
- // Use a real browser to navigate to every route. This triggers SSR
- // (server-env transforms + compiler cross-module resolution) AND client
- // module loading (client-env transforms), exactly mirroring real usage.
- // Direct HTTP fetches of module URLs do NOT trigger the compiler's
- // cross-module resolution path that surfaces certain violations.
const browser = await chromium.launch()
try {
- const context = await browser.newContext()
- const page = await context.newPage()
-
- for (const route of routes) {
- try {
- await page.goto(`${baseURL}${route}`, {
- waitUntil: 'networkidle',
- timeout: 15_000,
- })
- } catch {
- // ignore navigation errors — we only care about server logs
- }
- }
-
- await context.close()
+ await navigateAllRoutes(baseURL, browser)
} finally {
await browser.close()
}
- // Give the server a moment to flush logs.
await new Promise((r) => setTimeout(r, 750))
} finally {
await killChild(child)
- out.end()
}
- if (!fs.existsSync(logFile)) {
- fs.writeFileSync(path.resolve(cwd, 'violations.dev.json'), '[]')
- return
- }
+ const text = logChunks.join('')
+ return extractViolationsFromLog(text)
+}
+
+/**
+ * Captures dev violations in two passes:
+ * 1. Cold — fresh dev server, Vite compiles all modules from scratch.
+ * 2. Warm — restart dev server (Vite's .vite cache persists on disk),
+ * modules are pre-transformed so resolveId/transform paths differ.
+ */
+async function captureDevViolations(cwd: string): Promise {
+ const port = await getTestServerPort(`${packageJson.name}_dev`)
+
+ const coldViolations = await runDevPass(cwd, port)
- const text = fs.readFileSync(logFile, 'utf-8')
- const violations = extractViolationsFromLog(text)
fs.writeFileSync(
path.resolve(cwd, 'violations.dev.json'),
- JSON.stringify(violations, null, 2),
+ JSON.stringify(coldViolations, null, 2),
+ )
+ fs.writeFileSync(
+ path.resolve(cwd, 'violations.dev.cold.json'),
+ JSON.stringify(coldViolations, null, 2),
+ )
+
+ // Warm pass: the .vite cache from the cold run is still on disk.
+ const warmViolations = await runDevPass(cwd, port)
+
+ fs.writeFileSync(
+ path.resolve(cwd, 'violations.dev.warm.json'),
+ JSON.stringify(warmViolations, null, 2),
)
}
diff --git a/packages/start-plugin-core/src/import-protection-plugin/defaults.ts b/packages/start-plugin-core/src/import-protection-plugin/defaults.ts
index 7b69b647ee3..2d264c4ea8b 100644
--- a/packages/start-plugin-core/src/import-protection-plugin/defaults.ts
+++ b/packages/start-plugin-core/src/import-protection-plugin/defaults.ts
@@ -1,4 +1,3 @@
-import type { CompileStartFrameworkOptions } from '../types'
import type { ImportProtectionEnvRules } from '../schema'
import type { Pattern } from './utils'
@@ -7,19 +6,15 @@ export interface DefaultImportProtectionRules {
server: Required
}
+const frameworks = ['react', 'solid', 'vue'] as const
+
/**
- * Returns the default import protection rules for a given framework.
+ * Returns the default import protection rules.
+ *
+ * All three framework variants are always included so that, e.g., a React
+ * project also denies `@tanstack/solid-start/server` imports.
*/
-export function getDefaultImportProtectionRules(
- _framework: CompileStartFrameworkOptions,
-): DefaultImportProtectionRules {
- const frameworks: Array = [
- 'react',
- 'solid',
- 'vue',
- ]
-
- // Deny client importing server-specific entrypoints
+export function getDefaultImportProtectionRules(): DefaultImportProtectionRules {
const clientSpecifiers: Array = frameworks.map(
(fw) => `@tanstack/${fw}-start/server`,
)
@@ -39,16 +34,10 @@ export function getDefaultImportProtectionRules(
/**
* Marker module specifiers that restrict a file to a specific environment.
*/
-export function getMarkerSpecifiers(_framework: CompileStartFrameworkOptions): {
+export function getMarkerSpecifiers(): {
serverOnly: Array
clientOnly: Array
} {
- const frameworks: Array = [
- 'react',
- 'solid',
- 'vue',
- ]
-
return {
serverOnly: frameworks.map((fw) => `@tanstack/${fw}-start/server-only`),
clientOnly: frameworks.map((fw) => `@tanstack/${fw}-start/client-only`),
diff --git a/packages/start-plugin-core/src/import-protection-plugin/plugin.ts b/packages/start-plugin-core/src/import-protection-plugin/plugin.ts
index c01cb44eee3..65812207f97 100644
--- a/packages/start-plugin-core/src/import-protection-plugin/plugin.ts
+++ b/packages/start-plugin-core/src/import-protection-plugin/plugin.ts
@@ -1,4 +1,3 @@
-import * as path from 'pathe'
import { normalizePath } from 'vite'
import { resolveViteId } from '../utils'
@@ -11,7 +10,15 @@ import {
} from './defaults'
import { findPostCompileUsagePos } from './postCompileUsage'
import { compileMatchers, matchesAny } from './matchers'
-import { dedupePatterns, normalizeFilePath } from './utils'
+import {
+ clearNormalizeFilePathCache,
+ dedupePatterns,
+ escapeRegExp,
+ extractImportSources,
+ getOrCreate,
+ normalizeFilePath,
+ relativizePath,
+} from './utils'
import { collectMockExportNamesBySource } from './rewriteDeniedImports'
import {
MARKER_PREFIX,
@@ -30,16 +37,18 @@ import {
mockRuntimeModuleIdFromViolation,
} from './virtualModules'
import {
+ ImportLocCache,
addTraceImportLocations,
buildCodeSnippet,
buildLineIndex,
+ clearImportPatternCache,
findImportStatementLocationFromTransformed,
findPostCompileUsageLocation,
pickOriginalCodeFromSourcesContent,
} from './sourceLocation'
-import type { PluginOption } from 'vite'
+import type { PluginOption, ViteDevServer } from 'vite'
import type { CompiledMatcher } from './matchers'
-import type { ViolationInfo } from './trace'
+import type { Loc, TraceStep, ViolationInfo } from './trace'
import type {
SourceMapLike,
TransformResult,
@@ -51,10 +60,30 @@ import type {
} from '../schema'
import type { CompileStartFrameworkOptions, GetConfigFn } from '../types'
-// Re-export public API that tests and other consumers depend on.
+const SERVER_FN_LOOKUP_QUERY = '?' + SERVER_FN_LOOKUP
+const RESOLVED_MARKER_SERVER_ONLY = resolveViteId(`${MARKER_PREFIX}server-only`)
+const RESOLVED_MARKER_CLIENT_ONLY = resolveViteId(`${MARKER_PREFIX}client-only`)
+
+const IMPORT_PROTECTION_DEBUG =
+ process.env.TSR_IMPORT_PROTECTION_DEBUG === '1' ||
+ process.env.TSR_IMPORT_PROTECTION_DEBUG === 'true'
+const IMPORT_PROTECTION_DEBUG_FILTER =
+ process.env.TSR_IMPORT_PROTECTION_DEBUG_FILTER
+
+function debugLog(...args: Array) {
+ if (!IMPORT_PROTECTION_DEBUG) return
+ console.warn('[import-protection:debug]', ...args)
+}
+
+/** Check if a value matches the debug filter (when set). */
+function matchesDebugFilter(...values: Array): boolean {
+ if (!IMPORT_PROTECTION_DEBUG_FILTER) return true
+ return values.some((v) => v.includes(IMPORT_PROTECTION_DEBUG_FILTER))
+}
+
export { RESOLVED_MOCK_MODULE_ID } from './virtualModules'
export { rewriteDeniedImports } from './rewriteDeniedImports'
-export { dedupePatterns } from './utils'
+export { dedupePatterns, extractImportSources } from './utils'
export type { Pattern } from './utils'
/**
@@ -68,6 +97,9 @@ interface PluginConfig {
srcDirectory: string
framework: CompileStartFrameworkOptions
+ /** Absolute, query-free entry file ids used for trace roots. */
+ entryFiles: Array
+
effectiveBehavior: ImportProtectionBehavior
mockAccess: 'error' | 'warn' | 'off'
logMode: 'once' | 'always'
@@ -119,20 +151,65 @@ interface EnvState {
resolveCacheByFile: Map>
/** Import location cache. Key: `${importerFile}::${source}`. */
- importLocCache: Map<
- string,
- { file?: string; line: number; column: number } | null
- >
- /** Reverse index: file path → Set of importLocCache keys for that file. */
- importLocByFile: Map>
+ importLocCache: ImportLocCache
/** Deduplication of logged violations (no env prefix in key). */
seenViolations: Set
+ /**
+ * Modules transitively loaded during a `fetchModule(?SERVER_FN_LOOKUP)` call.
+ * In dev mode the compiler calls `fetchModule(id + '?' + SERVER_FN_LOOKUP)` to
+ * analyse a module's exports. The direct target carries the query parameter so
+ * `isPreTransformResolve` is `true`. But Vite also resolves the target's own
+ * imports (and their imports, etc.) with the plain file path as the importer —
+ * those would otherwise fire false-positive violations.
+ *
+ * We record every module resolved while walking a SERVER_FN_LOOKUP chain so
+ * that their child imports are also treated as pre-transform resolves.
+ */
+ serverFnLookupModules: Set
+
/** Transform result cache (code + composed sourcemap + original source). */
transformResultCache: Map
/** Reverse index: physical file path → Set of transformResultCache keys. */
transformResultKeysByFile: Map>
+
+ /** Cached provider that reads from {@link transformResultCache}. */
+ transformResultProvider: TransformResultProvider
+
+ /**
+ * Post-transform resolved imports. Populated by the transform-cache hook
+ * after resolving every import source found in the transformed code.
+ * Key: transform cache key (normalised module ID incl. query params).
+ * Value: set of resolved child file paths.
+ */
+ postTransformImports: Map>
+
+ /**
+ * Whether a `resolveId` call without an importer has been observed for this
+ * environment since `buildStart`. Vite calls `resolveId(source, undefined)`
+ * for true entry modules during a cold start. On warm start (`.vite` cache
+ * exists), Vite reuses its module graph and does NOT call `resolveId` for
+ * entries, so this stays `false`.
+ *
+ * When `false`, the import graph is considered unreliable (edges may be
+ * missing) and violations are reported immediately instead of deferred.
+ */
+ hasSeenEntry: boolean
+
+ /**
+ * Violations deferred in dev mock mode. Keyed by the violating importer's
+ * normalized file path. Violations are confirmed or discarded by the
+ * transform-cache hook once enough post-transform data is available to
+ * determine whether the importer is still reachable from an entry point.
+ */
+ pendingViolations: Map>
+}
+
+interface PendingViolation {
+ info: ViolationInfo
+ /** The mock module ID that resolveId already returned for this violation. */
+ mockReturnValue: string
}
/**
@@ -155,12 +232,115 @@ export interface ImportProtectionPluginOptions {
export function importProtectionPlugin(
opts: ImportProtectionPluginOptions,
): PluginOption {
+ let devServer: ViteDevServer | null = null
+
+ type ModuleGraphNode = {
+ id?: string | null
+ url?: string
+ importers: Set
+ }
+
+ /**
+ * Build an import trace using Vite's per-environment module graph, which
+ * is authoritative even on warm starts when the plugin's own ImportGraph
+ * may be incomplete (Vite skips resolveId for cached modules).
+ */
+ function buildTraceFromModuleGraph(
+ envName: string,
+ env: EnvState,
+ targetFile: string,
+ ): Array | null {
+ if (!devServer) return null
+ const environment = devServer.environments[envName]
+ if (!environment) return null
+
+ const file = normalizeFilePath(targetFile)
+ const start = environment.moduleGraph.getModuleById(file)
+ if (!start) return null
+
+ // Resolve a module graph node to its normalized file path once and
+ // cache the result so BFS + reconstruction don't recompute.
+ const nodeIds = new Map()
+ function nodeId(n: ModuleGraphNode): string {
+ let cached = nodeIds.get(n)
+ if (cached === undefined) {
+ cached = n.id
+ ? normalizeFilePath(n.id)
+ : n.url
+ ? normalizeFilePath(n.url)
+ : ''
+ nodeIds.set(n, cached)
+ }
+ return cached
+ }
+
+ const queue: Array = [start]
+ const visited = new Set([start])
+ const parent = new Map()
+
+ let entryRoot: ModuleGraphNode | null = null
+ let fallbackRoot: ModuleGraphNode | null = null
+ let qi = 0
+ while (qi < queue.length) {
+ const node = queue[qi++]!
+ const id = nodeId(node)
+
+ if (id && env.graph.entries.has(id)) {
+ entryRoot = node
+ break
+ }
+
+ const importers = node.importers
+ if (importers.size === 0) {
+ if (!fallbackRoot) fallbackRoot = node
+ continue
+ }
+
+ for (const imp of importers) {
+ if (visited.has(imp)) continue
+ visited.add(imp)
+ parent.set(imp, node)
+ queue.push(imp)
+ }
+ }
+
+ const root = entryRoot ?? fallbackRoot
+
+ if (!root) return null
+
+ // Reconstruct: root -> ... -> start
+ const chain: Array = []
+ let cur: ModuleGraphNode | undefined = root
+ for (let i = 0; i < config.maxTraceDepth + 2 && cur; i++) {
+ chain.push(cur)
+ if (cur === start) break
+ cur = parent.get(cur)
+ }
+
+ const steps: Array = []
+ for (let i = 0; i < chain.length; i++) {
+ const id = nodeId(chain[i]!)
+ if (!id) continue
+ let specifier: string | undefined
+ if (i + 1 < chain.length) {
+ const nextId = nodeId(chain[i + 1]!)
+ if (nextId) {
+ specifier = env.graph.reverseEdges.get(nextId)?.get(id)
+ }
+ }
+ steps.push(specifier ? { file: id, specifier } : { file: id })
+ }
+
+ return steps.length ? steps : null
+ }
+
const config: PluginConfig = {
enabled: true,
root: '',
command: 'build',
srcDirectory: '',
framework: opts.framework,
+ entryFiles: [],
effectiveBehavior: 'error',
mockAccess: 'error',
logMode: 'once',
@@ -180,51 +360,6 @@ export function importProtectionPlugin(
const envStates = new Map()
const shared: SharedState = { fileMarkerKind: new Map() }
- // ---------------------------------------------------------------------------
- // Internal helpers
- // ---------------------------------------------------------------------------
-
- /**
- * Create a per-env `importLocCache` whose `.set` method automatically
- * maintains the reverse index (`importLocByFile`) for O(1) invalidation
- * in `hotUpdate`.
- *
- * Cache keys have the format `${importerFile}::${source}`.
- */
- function createImportLocCache(
- env: EnvState,
- ): Map {
- const cache = new Map<
- string,
- { file?: string; line: number; column: number } | null
- >()
- const originalSet = cache.set.bind(cache)
- cache.set = function (key, value) {
- originalSet(key, value)
- const sepIdx = key.indexOf('::')
- if (sepIdx !== -1) {
- const file = key.slice(0, sepIdx)
- let fileKeys = env.importLocByFile.get(file)
- if (!fileKeys) {
- fileKeys = new Set()
- env.importLocByFile.set(file, fileKeys)
- }
- fileKeys.add(key)
- }
- return this
- }
- return cache
- }
-
- function getMockEdgeExports(
- env: EnvState,
- importerId: string,
- source: string,
- ): Array {
- const importerFile = normalizeFilePath(importerId)
- return env.mockExportsByImporter.get(importerFile)?.get(source) ?? []
- }
-
function getMarkerKindForFile(
fileId: string,
): 'server' | 'client' | undefined {
@@ -232,45 +367,61 @@ export function importProtectionPlugin(
return shared.fileMarkerKind.get(file)
}
+ type ViolationReporter = {
+ warn: (msg: string) => void
+ error: (msg: string) => never
+ }
+
/**
- * Build a {@link TransformResultProvider} for the given environment.
+ * Build the best available trace for a module and enrich each step with
+ * line/column locations. Tries the plugin's own ImportGraph first, then
+ * Vite's moduleGraph (authoritative on warm start), keeping whichever is
+ * longer. Annotates the last step with the denied specifier + location.
*
- * The provider reads from the transform result cache that is populated by
- * the `tanstack-start-core:import-protection-transform-cache` plugin's
- * transform hook.
+ * Shared by {@link buildViolationInfo} and {@link processPendingViolations}.
*/
- function getTransformResultProvider(env: EnvState): TransformResultProvider {
- return {
- getTransformResult(id: string) {
- // Try the full normalized ID first (preserves query params like
- // ?tsr-split=component for virtual modules).
- const fullKey = normalizePath(id)
- const exact = env.transformResultCache.get(fullKey)
- if (exact) return exact
-
- // Fall back to the query-stripped path for modules looked up by
- // their physical file path (e.g. trace steps, modules without
- // query params).
- const strippedKey = normalizeFilePath(id)
- return strippedKey !== fullKey
- ? env.transformResultCache.get(strippedKey)
- : undefined
- },
+ async function rebuildAndAnnotateTrace(
+ provider: TransformResultProvider,
+ env: EnvState,
+ envName: string,
+ normalizedImporter: string,
+ specifier: string,
+ importerLoc: Loc | undefined,
+ traceOverride?: Array,
+ ): Promise> {
+ let trace =
+ traceOverride ??
+ buildTrace(env.graph, normalizedImporter, config.maxTraceDepth)
+
+ if (config.command === 'serve') {
+ const mgTrace = buildTraceFromModuleGraph(
+ envName,
+ env,
+ normalizedImporter,
+ )
+ if (mgTrace && mgTrace.length > trace.length) {
+ trace = mgTrace
+ }
}
- }
+ await addTraceImportLocations(provider, trace, env.importLocCache)
- type ViolationReporter = {
- warn: (msg: string) => void
- error: (msg: string) => never
+ if (trace.length > 0) {
+ const last = trace[trace.length - 1]!
+ if (!last.specifier) last.specifier = specifier
+ if (importerLoc && last.line == null) {
+ last.line = importerLoc.line
+ last.column = importerLoc.column
+ }
+ }
+
+ return trace
}
/**
* Build a complete {@link ViolationInfo} with trace, location, and snippet.
*
* This is the single path that all violation types go through: specifier,
- * file, and marker. Centralizing it eliminates the duplicated sequences of
- * `buildTrace` → `addTraceImportLocations` → location lookup → annotate →
- * snippet that previously appeared 5 times in the codebase.
+ * file, and marker.
*/
async function buildViolationInfo(
provider: TransformResultProvider,
@@ -291,14 +442,8 @@ export function importProtectionPlugin(
| 'snippet'
| 'importerLoc'
>,
+ traceOverride?: Array,
): Promise {
- const trace = buildTrace(
- env.graph,
- normalizedImporter,
- config.maxTraceDepth,
- )
- await addTraceImportLocations(provider, trace, env.importLocCache)
-
const loc =
(await findPostCompileUsageLocation(
provider,
@@ -313,16 +458,15 @@ export function importProtectionPlugin(
env.importLocCache,
))
- // Annotate the last trace step with the denied import's specifier and
- // location so every trace step (including the leaf) gets file:line:col.
- if (trace.length > 0) {
- const last = trace[trace.length - 1]!
- if (!last.specifier) last.specifier = source
- if (loc && last.line == null) {
- last.line = loc.line
- last.column = loc.column
- }
- }
+ const trace = await rebuildAndAnnotateTrace(
+ provider,
+ env,
+ envName,
+ normalizedImporter,
+ source,
+ loc,
+ traceOverride,
+ )
const snippet = loc ? buildCodeSnippet(provider, importer, loc) : undefined
@@ -339,8 +483,14 @@ export function importProtectionPlugin(
}
}
- async function maybeReportMarkerViolationFromResolvedImport(
- ctx: ViolationReporter,
+ /**
+ * Check if a resolved import violates marker restrictions (e.g. importing
+ * a server-only module in the client env). If so, build and return the
+ * {@link ViolationInfo} — the caller is responsible for reporting/deferring.
+ *
+ * Returns `undefined` when the resolved import has no marker conflict.
+ */
+ async function buildMarkerViolationFromResolvedImport(
provider: TransformResultProvider,
env: EnvState,
envName: string,
@@ -349,8 +499,8 @@ export function importProtectionPlugin(
source: string,
resolvedId: string,
relativePath: string,
- opts?: { silent?: boolean },
- ): Promise | undefined> {
+ traceOverride?: Array,
+ ): Promise {
const markerKind = getMarkerKindForFile(resolvedId)
const violates =
(envType === 'client' && markerKind === 'server') ||
@@ -359,7 +509,7 @@ export function importProtectionPlugin(
const normalizedImporter = normalizeFilePath(importer)
- const info = await buildViolationInfo(
+ return buildViolationInfo(
provider,
env,
envName,
@@ -375,19 +525,8 @@ export function importProtectionPlugin(
? `Module "${relativePath}" is marked server-only but is imported in the client environment`
: `Module "${relativePath}" is marked client-only but is imported in the server environment`,
},
+ traceOverride,
)
-
- return handleViolation.call(ctx, env, info, opts)
- }
-
- function buildMockEdgeModuleId(
- env: EnvState,
- importerId: string,
- source: string,
- runtimeId: string,
- ): string {
- const exports = getMockEdgeExports(env, importerId, source)
- return makeMockEdgeModuleId(exports, source, runtimeId)
}
function getEnvType(envName: string): 'client' | 'server' {
@@ -416,7 +555,7 @@ export function importProtectionPlugin(
function getEnv(envName: string): EnvState {
let envState = envStates.get(envName)
if (!envState) {
- const importLocByFile = new Map>()
+ const transformResultCache = new Map()
envState = {
graph: new ImportGraph(),
deniedSources: new Set(),
@@ -424,50 +563,58 @@ export function importProtectionPlugin(
mockExportsByImporter: new Map(),
resolveCache: new Map(),
resolveCacheByFile: new Map(),
- importLocCache: new Map(), // placeholder, replaced below
- importLocByFile,
+ importLocCache: new ImportLocCache(),
seenViolations: new Set(),
- transformResultCache: new Map(),
+ transformResultCache,
transformResultKeysByFile: new Map(),
+ transformResultProvider: {
+ getTransformResult(id: string) {
+ const fullKey = normalizePath(id)
+ const exact = transformResultCache.get(fullKey)
+ if (exact) return exact
+ const strippedKey = normalizeFilePath(id)
+ return strippedKey !== fullKey
+ ? transformResultCache.get(strippedKey)
+ : undefined
+ },
+ },
+ postTransformImports: new Map(),
+ hasSeenEntry: false,
+ serverFnLookupModules: new Set(),
+ pendingViolations: new Map(),
}
- // Install reverse-index-maintaining importLocCache
- envState.importLocCache = createImportLocCache(envState)
envStates.set(envName, envState)
}
return envState
}
+ const shouldCheckImporterCache = new Map()
function shouldCheckImporter(importer: string): boolean {
- // Normalize for matching
- const relativePath = path.relative(config.root, importer)
+ let result = shouldCheckImporterCache.get(importer)
+ if (result !== undefined) return result
+
+ const relativePath = relativizePath(importer, config.root)
- // Check exclude first
if (
config.excludeMatchers.length > 0 &&
matchesAny(relativePath, config.excludeMatchers)
) {
- return false
- }
-
- // Check ignore importers
- if (
+ result = false
+ } else if (
config.ignoreImporterMatchers.length > 0 &&
matchesAny(relativePath, config.ignoreImporterMatchers)
) {
- return false
- }
-
- // Check include
- if (config.includeMatchers.length > 0) {
- return !!matchesAny(relativePath, config.includeMatchers)
- }
-
- // Default: check if within srcDirectory
- if (config.srcDirectory) {
- return importer.startsWith(config.srcDirectory)
+ result = false
+ } else if (config.includeMatchers.length > 0) {
+ result = !!matchesAny(relativePath, config.includeMatchers)
+ } else if (config.srcDirectory) {
+ result = importer.startsWith(config.srcDirectory)
+ } else {
+ result = true
}
- return true
+ shouldCheckImporterCache.set(importer, result)
+ return result
}
function dedupeKey(
@@ -487,12 +634,279 @@ export function importProtectionPlugin(
}
function getRelativePath(absolutePath: string): string {
- return normalizePath(path.relative(config.root, absolutePath))
+ return relativizePath(normalizePath(absolutePath), config.root)
+ }
+
+ /** Register known Start entrypoints as trace roots for all environments. */
+ function registerEntries(): void {
+ const { resolvedStartConfig } = opts.getConfig()
+ for (const envDef of opts.environments) {
+ const envState = getEnv(envDef.name)
+ if (resolvedStartConfig.routerFilePath) {
+ envState.graph.addEntry(
+ normalizePath(resolvedStartConfig.routerFilePath),
+ )
+ }
+ if (resolvedStartConfig.startFilePath) {
+ envState.graph.addEntry(
+ normalizePath(resolvedStartConfig.startFilePath),
+ )
+ }
+ }
+ }
+
+ function checkPostTransformReachability(
+ env: EnvState,
+ file: string,
+ ): 'reachable' | 'unreachable' | 'unknown' {
+ const visited = new Set<string>()
+ const queue: Array<string> = [file]
+ let hasUnknownEdge = false
+ let qi = 0
+
+ while (qi < queue.length) {
+ const current = queue[qi++]!
+ if (visited.has(current)) continue
+ visited.add(current)
+
+ if (env.graph.entries.has(current)) {
+ return 'reachable'
+ }
+
+ // Walk reverse edges
+ const importers = env.graph.reverseEdges.get(current)
+ if (!importers) continue
+
+ for (const [parent] of importers) {
+ if (visited.has(parent)) continue
+
+ // Check all code-split variants for this parent. The edge is
+ // live if ANY variant's resolved imports include `current`.
+ const keySet = env.transformResultKeysByFile.get(parent)
+ let anyVariantCached = false
+ let edgeLive = false
+
+ if (keySet) {
+ for (const k of keySet) {
+ const resolvedImports = env.postTransformImports.get(k)
+ if (resolvedImports) {
+ anyVariantCached = true
+ if (resolvedImports.has(current)) {
+ edgeLive = true
+ break
+ }
+ }
+ }
+ }
+
+ // Fallback: direct file-path key
+ if (!anyVariantCached) {
+ const resolvedImports = env.postTransformImports.get(parent)
+ if (resolvedImports) {
+ anyVariantCached = true
+ if (resolvedImports.has(current)) {
+ edgeLive = true
+ }
+ }
+ }
+
+ if (!anyVariantCached) {
+ const hasTransformResult =
+ env.transformResultCache.has(parent) ||
+ (keySet ? keySet.size > 0 : false)
+
+ if (hasTransformResult) {
+ // Transform ran but postTransformImports not yet populated
+ hasUnknownEdge = true
+ continue
+ }
+
+ // Transform never ran — Vite served from cache (warm start).
+ // Conservatively treat edge as live.
+ queue.push(parent)
+ continue
+ }
+
+ if (edgeLive) {
+ queue.push(parent)
+ }
+ }
+ }
+
+ return hasUnknownEdge ? 'unknown' : 'unreachable'
+ }
+
+ /**
+ * Process pending violations for the given environment. Called from the
+ * transform-cache hook after each module transform is cached, because new
+ * transform data may allow us to confirm or discard pending violations.
+ *
+ * @param warnFn - `this.warn` from the transform hook context
+ */
+ async function processPendingViolations(
+ env: EnvState,
+ warnFn: (msg: string) => void,
+ ): Promise<void> {
+ if (env.pendingViolations.size === 0) return
+
+ const toDelete: Array<string> = []
+
+ for (const [file, violations] of env.pendingViolations) {
+ // On warm start, skip graph reachability — confirm immediately.
+ const status = env.hasSeenEntry
+ ? checkPostTransformReachability(env, file)
+ : 'reachable'
+
+ if (status === 'reachable') {
+ for (const pv of violations) {
+ const key = dedupeKey(
+ pv.info.type,
+ pv.info.importer,
+ pv.info.specifier,
+ pv.info.resolved,
+ )
+ if (!hasSeen(env, key)) {
+ const freshTrace = await rebuildAndAnnotateTrace(
+ env.transformResultProvider,
+ env,
+ pv.info.env,
+ pv.info.importer,
+ pv.info.specifier,
+ pv.info.importerLoc,
+ )
+ if (freshTrace.length > pv.info.trace.length) {
+ pv.info.trace = freshTrace
+ }
+
+ if (config.onViolation) {
+ const result = config.onViolation(pv.info)
+ if (result === false) continue
+ }
+ warnFn(formatViolation(pv.info, config.root))
+ }
+ }
+ toDelete.push(file)
+ } else if (status === 'unreachable') {
+ toDelete.push(file)
+ }
+ // 'unknown' — keep pending for next transform-cache invocation.
+ }
+
+ for (const file of toDelete) {
+ env.pendingViolations.delete(file)
+ }
}
- // ---------------------------------------------------------------------------
- // Vite plugins
- // ---------------------------------------------------------------------------
+ /**
+ * Record a violation as pending for later confirmation via graph
+ * reachability. Called from `resolveId` when `shouldDefer` is true.
+ */
+ function deferViolation(
+ env: EnvState,
+ importerFile: string,
+ info: ViolationInfo,
+ mockReturnValue:
+ | { id: string; syntheticNamedExports: boolean }
+ | string
+ | undefined,
+ ): void {
+ getOrCreate(env.pendingViolations, importerFile, () => []).push({
+ info,
+ mockReturnValue:
+ typeof mockReturnValue === 'string'
+ ? mockReturnValue
+ : (mockReturnValue?.id ?? ''),
+ })
+ }
+
+ function handleViolation(
+ ctx: ViolationReporter,
+ env: EnvState,
+ info: ViolationInfo,
+ violationOpts?: { silent?: boolean },
+ ): { id: string; syntheticNamedExports: boolean } | string | undefined {
+ const key = dedupeKey(
+ info.type,
+ info.importer,
+ info.specifier,
+ info.resolved,
+ )
+
+ if (!violationOpts?.silent) {
+ if (config.onViolation) {
+ const result = config.onViolation(info)
+ if (result === false) {
+ return undefined
+ }
+ }
+
+ const seen = hasSeen(env, key)
+
+ if (config.effectiveBehavior === 'error') {
+ if (!seen) ctx.error(formatViolation(info, config.root))
+ return undefined
+ }
+
+ if (!seen) {
+ ctx.warn(formatViolation(info, config.root))
+ }
+ } else {
+ if (config.effectiveBehavior === 'error') {
+ return undefined
+ }
+ }
+
+ env.deniedSources.add(info.specifier)
+ getOrCreate(env.deniedEdges, info.importer, () => new Set()).add(
+ info.specifier,
+ )
+
+ if (config.command === 'serve') {
+ const runtimeId = mockRuntimeModuleIdFromViolation(
+ info,
+ config.mockAccess,
+ config.root,
+ )
+ const importerFile = normalizeFilePath(info.importer)
+ const exports =
+ env.mockExportsByImporter.get(importerFile)?.get(info.specifier) ?? []
+ return resolveViteId(
+ makeMockEdgeModuleId(exports, info.specifier, runtimeId),
+ )
+ }
+
+ // Build: Rollup uses syntheticNamedExports
+ return { id: RESOLVED_MOCK_MODULE_ID, syntheticNamedExports: true }
+ }
+
+ /**
+ * Unified violation dispatch: either defers or reports immediately.
+ *
+ * When `shouldDefer` is true, calls `handleViolation` silently to obtain
+ * the mock module ID, stores the violation as pending, and triggers
+ * `processPendingViolations`. Otherwise reports (or silences for
+ * pre-transform resolves) immediately.
+ *
+ * Returns the mock module ID / resolve result from `handleViolation`.
+ */
+ async function reportOrDeferViolation(
+ ctx: ViolationReporter,
+ env: EnvState,
+ importerFile: string,
+ info: ViolationInfo,
+ shouldDefer: boolean,
+ isPreTransformResolve: boolean,
+ ): Promise<ReturnType<typeof handleViolation>> {
+ if (shouldDefer) {
+ const result = handleViolation(ctx, env, info, { silent: true })
+ deferViolation(env, importerFile, info, result)
+ await processPendingViolations(env, ctx.warn.bind(ctx))
+ return result
+ }
+ return handleViolation(ctx, env, info, {
+ silent: isPreTransformResolve,
+ })
+ }
return [
{
@@ -513,10 +927,14 @@ export function importProtectionPlugin(
const { startConfig, resolvedStartConfig } = opts.getConfig()
config.srcDirectory = resolvedStartConfig.srcDirectory
+ config.entryFiles = [
+ resolvedStartConfig.routerFilePath,
+ resolvedStartConfig.startFilePath,
+ ].filter((f): f is string => Boolean(f))
+
const userOpts: ImportProtectionOptions | undefined =
startConfig.importProtection
- // Determine if plugin is enabled
if (userOpts?.enabled === false) {
config.enabled = false
return
@@ -524,7 +942,6 @@ export function importProtectionPlugin(
config.enabled = true
- // Determine effective behavior
if (userOpts?.behavior) {
if (typeof userOpts.behavior === 'string') {
config.effectiveBehavior = userOpts.behavior
@@ -535,38 +952,27 @@ export function importProtectionPlugin(
: (userOpts.behavior.build ?? 'error')
}
} else {
- // Defaults: dev='mock', build='error'
config.effectiveBehavior =
viteConfig.command === 'serve' ? 'mock' : 'error'
}
- // Log mode
config.logMode = userOpts?.log ?? 'once'
-
- // Mock runtime access diagnostics
config.mockAccess = userOpts?.mockAccess ?? 'error'
-
- // Max trace depth
config.maxTraceDepth = userOpts?.maxTraceDepth ?? 20
+ if (userOpts?.onViolation) {
+ const fn = userOpts.onViolation
+ config.onViolation = (info) => fn(info)
+ }
- // User callback
- config.onViolation = userOpts?.onViolation as
- | ((info: ViolationInfo) => boolean | void)
- | undefined
-
- // Get default rules
- const defaults = getDefaultImportProtectionRules(opts.framework)
+ const defaults = getDefaultImportProtectionRules()
- // Merge user rules with defaults and compile matchers per env.
- // IMPORTANT: client specifier denies for Start server entrypoints must
- // always include the framework defaults even when the user provides a
- // custom list.
+ // Client specifier denies always include framework defaults even
+ // when the user provides a custom list.
const clientSpecifiers = dedupePatterns([
...defaults.client.specifiers,
...(userOpts?.client?.specifiers ?? []),
])
- // For file patterns, user config overrides defaults.
const clientFiles = userOpts?.client?.files
? [...userOpts.client.files]
: [...defaults.client.files]
@@ -600,41 +1006,35 @@ export function importProtectionPlugin(
}
// Marker specifiers
- const markers = getMarkerSpecifiers(opts.framework)
+ const markers = getMarkerSpecifiers()
config.markerSpecifiers = {
serverOnly: new Set(markers.serverOnly),
clientOnly: new Set(markers.clientOnly),
}
+ },
- // Use known Start env entrypoints as trace roots.
- // This makes traces deterministic and prevents 1-line traces.
- for (const envDef of opts.environments) {
- const envState = getEnv(envDef.name)
-
- if (resolvedStartConfig.routerFilePath) {
- envState.graph.addEntry(
- normalizePath(resolvedStartConfig.routerFilePath),
- )
- }
- if (resolvedStartConfig.startFilePath) {
- envState.graph.addEntry(
- normalizePath(resolvedStartConfig.startFilePath),
- )
- }
- }
+ configureServer(server) {
+ devServer = server
},
buildStart() {
if (!config.enabled) return
+ // Clear memoization caches that grow unboundedly across builds
+ clearNormalizeFilePathCache()
+ clearImportPatternCache()
+ shouldCheckImporterCache.clear()
+
// Clear per-env caches
for (const envState of envStates.values()) {
envState.resolveCache.clear()
envState.resolveCacheByFile.clear()
envState.importLocCache.clear()
- envState.importLocByFile.clear()
envState.seenViolations.clear()
envState.transformResultCache.clear()
envState.transformResultKeysByFile.clear()
+ envState.postTransformImports.clear()
+ envState.hasSeenEntry = false
+ envState.serverFnLookupModules.clear()
envState.graph.clear()
envState.deniedSources.clear()
envState.deniedEdges.clear()
@@ -644,21 +1044,7 @@ export function importProtectionPlugin(
// Clear shared state
shared.fileMarkerKind.clear()
- // Re-add known entries after clearing.
- for (const envDef of opts.environments) {
- const envState = getEnv(envDef.name)
- const { resolvedStartConfig } = opts.getConfig()
- if (resolvedStartConfig.routerFilePath) {
- envState.graph.addEntry(
- normalizePath(resolvedStartConfig.routerFilePath),
- )
- }
- if (resolvedStartConfig.startFilePath) {
- envState.graph.addEntry(
- normalizePath(resolvedStartConfig.startFilePath),
- )
- }
- }
+ registerEntries()
},
hotUpdate(ctx) {
@@ -672,14 +1058,7 @@ export function importProtectionPlugin(
// Invalidate per-env caches
for (const envState of envStates.values()) {
- // Invalidate cached import locations using reverse index
- const locKeys = envState.importLocByFile.get(importerFile)
- if (locKeys) {
- for (const key of locKeys) {
- envState.importLocCache.delete(key)
- }
- envState.importLocByFile.delete(importerFile)
- }
+ envState.importLocCache.deleteByFile(importerFile)
// Invalidate resolve cache using reverse index
const resolveKeys = envState.resolveCacheByFile.get(importerFile)
@@ -694,6 +1073,8 @@ export function importProtectionPlugin(
envState.graph.invalidate(importerFile)
envState.deniedEdges.delete(importerFile)
envState.mockExportsByImporter.delete(importerFile)
+ envState.serverFnLookupModules.delete(importerFile)
+ envState.pendingViolations.delete(importerFile)
// Invalidate transform result cache for this file.
const transformKeys =
@@ -701,11 +1082,13 @@ export function importProtectionPlugin(
if (transformKeys) {
for (const key of transformKeys) {
envState.transformResultCache.delete(key)
+ envState.postTransformImports.delete(key)
}
envState.transformResultKeysByFile.delete(importerFile)
} else {
// Fallback: at least clear the physical-file entry.
envState.transformResultCache.delete(importerFile)
+ envState.postTransformImports.delete(importerFile)
}
}
}
@@ -713,13 +1096,36 @@ export function importProtectionPlugin(
},
async resolveId(source, importer, _options) {
- if (!config.enabled) return undefined
const envName = this.environment.name
const env = getEnv(envName)
const envType = getEnvType(envName)
- const provider = getTransformResultProvider(env)
+ const provider = env.transformResultProvider
+ const isScanResolve = !!(_options as Record<string, unknown>).scan
+
+ if (IMPORT_PROTECTION_DEBUG) {
+ const importerPath = importer
+ ? normalizeFilePath(importer)
+ : '(entry)'
+ const isEntryResolve = !importer
+ const filtered =
+ IMPORT_PROTECTION_DEBUG_FILTER === 'entry'
+ ? isEntryResolve
+ : matchesDebugFilter(source, importerPath)
+ if (filtered) {
+ debugLog('resolveId', {
+ env: envName,
+ envType,
+ source,
+ importer: importerPath,
+ isEntryResolve,
+ hasSeenEntry: env.hasSeenEntry,
+ command: config.command,
+ behavior: config.effectiveBehavior,
+ })
+ }
+ }
- // Internal virtual modules must resolve in dev.
+ // Internal virtual modules
if (source === MOCK_MODULE_ID) {
return RESOLVED_MOCK_MODULE_ID
}
@@ -733,115 +1139,95 @@ export function importProtectionPlugin(
return resolveViteId(source)
}
- // Skip if no importer (entry points)
if (!importer) {
- // Track entry-ish modules so traces can terminate.
- // Vite may pass virtual ids here; normalize but keep them.
env.graph.addEntry(source)
+ env.hasSeenEntry = true
return undefined
}
- // Skip virtual modules
if (source.startsWith('\0') || source.startsWith('virtual:')) {
return undefined
}
- // Two code paths resolve imports from raw (pre-compiler-transform)
- // source in dev mode:
- //
- // 1. The Start compiler calls `fetchModule(id + '?' + SERVER_FN_LOOKUP)`
- // to inspect a child module's exports. The compiler's own transform
- // is excluded for these requests, so Vite sees the original imports.
- //
- // 2. Vite's dep-optimizer scanner (`options.scan === true`) uses esbuild
- // to discover bare imports for pre-bundling. esbuild reads raw source
- // without running Vite transform hooks, so it also sees imports that
- // the compiler would normally strip.
- //
- // In both cases the imports are NOT real client-side imports. We must
- // suppress violation *reporting* (no warnings / errors) but still return
- // mock module IDs so that transitive resolution doesn't blow up.
+ const normalizedImporter = normalizeFilePath(importer)
+ const isDirectLookup = importer.includes(SERVER_FN_LOOKUP_QUERY)
+
+ if (isDirectLookup) {
+ env.serverFnLookupModules.add(normalizedImporter)
+ }
+
const isPreTransformResolve =
- importer.includes('?' + SERVER_FN_LOOKUP) ||
- !!(_options as Record<string, unknown>).scan
+ isDirectLookup ||
+ env.serverFnLookupModules.has(normalizedImporter) ||
+ isScanResolve
+
+ // Dev mock mode: defer violations until post-transform data is
+ // available, then confirm/discard via graph reachability.
+ const isDevMock =
+ config.command === 'serve' && config.effectiveBehavior === 'mock'
+
+ const shouldDefer = isDevMock && !isPreTransformResolve
// Check if this is a marker import
- if (config.markerSpecifiers.serverOnly.has(source)) {
- // Record importer as server-only
- const resolvedImporter = normalizeFilePath(importer)
- const existing = shared.fileMarkerKind.get(resolvedImporter)
- if (existing && existing !== 'server') {
+ const markerKind = config.markerSpecifiers.serverOnly.has(source)
+ ? ('server' as const)
+ : config.markerSpecifiers.clientOnly.has(source)
+ ? ('client' as const)
+ : undefined
+
+ if (markerKind) {
+ const existing = shared.fileMarkerKind.get(normalizedImporter)
+ if (existing && existing !== markerKind) {
this.error(
- `[import-protection] File "${getRelativePath(resolvedImporter)}" has both server-only and client-only markers. This is not allowed.`,
+ `[import-protection] File "${getRelativePath(normalizedImporter)}" has both server-only and client-only markers. This is not allowed.`,
)
}
- shared.fileMarkerKind.set(resolvedImporter, 'server')
+ shared.fileMarkerKind.set(normalizedImporter, markerKind)
+
+ const violatesEnv =
+ (envType === 'client' && markerKind === 'server') ||
+ (envType === 'server' && markerKind === 'client')
- // If we're in the client environment, this is a violation
- if (envType === 'client') {
+ if (violatesEnv) {
const info = await buildViolationInfo(
provider,
env,
envName,
envType,
importer,
- resolvedImporter,
+ normalizedImporter,
source,
{
type: 'marker',
- message: `Module "${getRelativePath(resolvedImporter)}" is marked server-only but is imported in the client environment`,
+ message:
+ markerKind === 'server'
+ ? `Module "${getRelativePath(normalizedImporter)}" is marked server-only but is imported in the client environment`
+ : `Module "${getRelativePath(normalizedImporter)}" is marked client-only but is imported in the server environment`,
},
)
- handleViolation.call(this, env, info, {
- silent: isPreTransformResolve,
- })
- }
-
- // Return virtual empty module
- return resolveViteId(`${MARKER_PREFIX}server-only`)
- }
-
- if (config.markerSpecifiers.clientOnly.has(source)) {
- const resolvedImporter = normalizeFilePath(importer)
- const existing = shared.fileMarkerKind.get(resolvedImporter)
- if (existing && existing !== 'client') {
- this.error(
- `[import-protection] File "${getRelativePath(resolvedImporter)}" has both server-only and client-only markers. This is not allowed.`,
- )
- }
- shared.fileMarkerKind.set(resolvedImporter, 'client')
-
- if (envType === 'server') {
- const info = await buildViolationInfo(
- provider,
+ await reportOrDeferViolation(
+ this,
env,
- envName,
- envType,
- importer,
- resolvedImporter,
- source,
- {
- type: 'marker',
- message: `Module "${getRelativePath(resolvedImporter)}" is marked client-only but is imported in the server environment`,
- },
+ normalizedImporter,
+ info,
+ shouldDefer,
+ isPreTransformResolve,
)
- handleViolation.call(this, env, info, {
- silent: isPreTransformResolve,
- })
}
- return resolveViteId(`${MARKER_PREFIX}client-only`)
+ return markerKind === 'server'
+ ? RESOLVED_MARKER_SERVER_ONLY
+ : RESOLVED_MARKER_CLIENT_ONLY
}
// Check if the importer is within our scope
- const normalizedImporter = normalizeFilePath(importer)
if (!shouldCheckImporter(normalizedImporter)) {
return undefined
}
const matchers = getRulesForEnvironment(envName)
- // 1. Specifier-based denial (fast, no resolution needed)
+ // 1. Specifier-based denial
const specifierMatch = matchesAny(source, matchers.specifiers)
if (specifierMatch) {
env.graph.addEdge(source, normalizedImporter, source)
@@ -856,46 +1242,48 @@ export function importProtectionPlugin(
{
type: 'specifier',
pattern: specifierMatch.pattern,
- message: `Import "${source}" is denied in the "${envName}" environment`,
+ message: `Import "${source}" is denied in the ${envType} environment`,
},
)
- return handleViolation.call(this, env, info, {
- silent: isPreTransformResolve,
- })
+ return reportOrDeferViolation(
+ this,
+ env,
+ normalizedImporter,
+ info,
+ shouldDefer,
+ isPreTransformResolve,
+ )
}
- // 2. Resolve the import (cached) — needed for file-based denial,
- // marker checks, and graph edge tracking.
+ // 2. Resolve the import (cached)
const cacheKey = `${normalizedImporter}:${source}`
let resolved: string | null
if (env.resolveCache.has(cacheKey)) {
- resolved = env.resolveCache.get(cacheKey) || null
+ resolved = env.resolveCache.get(cacheKey) ?? null
} else {
const result = await this.resolve(source, importer, {
skipSelf: true,
})
resolved = result ? normalizeFilePath(result.id) : null
env.resolveCache.set(cacheKey, resolved)
-
- // Maintain reverse index for O(1) hotUpdate invalidation.
- // Index by the importer so that when a file changes, all resolve
- // cache entries where it was the importer are cleared.
- let fileKeys = env.resolveCacheByFile.get(normalizedImporter)
- if (!fileKeys) {
- fileKeys = new Set()
- env.resolveCacheByFile.set(normalizedImporter, fileKeys)
- }
- fileKeys.add(cacheKey)
+ getOrCreate(
+ env.resolveCacheByFile,
+ normalizedImporter,
+ () => new Set(),
+ ).add(cacheKey)
}
if (resolved) {
const relativePath = getRelativePath(resolved)
- // Always record the edge for trace building, even when not denied.
+ // Propagate pre-transform status transitively
+ if (isPreTransformResolve && !isScanResolve) {
+ env.serverFnLookupModules.add(resolved)
+ }
+
env.graph.addEdge(resolved, normalizedImporter, source)
- // File-based denial check
const fileMatch =
matchers.files.length > 0
? matchesAny(relativePath, matchers.files)
@@ -914,17 +1302,20 @@ export function importProtectionPlugin(
type: 'file',
pattern: fileMatch.pattern,
resolved,
- message: `Import "${source}" (resolved to "${relativePath}") is denied in the "${envName}" environment`,
+ message: `Import "${source}" (resolved to "${relativePath}") is denied in the ${envType} environment`,
},
)
- return handleViolation.call(this, env, info, {
- silent: isPreTransformResolve,
- })
+ return reportOrDeferViolation(
+ this,
+ env,
+ normalizedImporter,
+ info,
+ shouldDefer,
+ isPreTransformResolve,
+ )
}
- // Marker restrictions apply regardless of explicit deny rules.
- const markerRes = await maybeReportMarkerViolationFromResolvedImport(
- this,
+ const markerInfo = await buildMarkerViolationFromResolvedImport(
provider,
env,
envName,
@@ -933,10 +1324,16 @@ export function importProtectionPlugin(
source,
resolved,
relativePath,
- { silent: isPreTransformResolve },
)
- if (markerRes !== undefined) {
- return markerRes
+ if (markerInfo) {
+ return reportOrDeferViolation(
+ this,
+ env,
+ normalizedImporter,
+ markerInfo,
+ shouldDefer,
+ isPreTransformResolve,
+ )
}
}
@@ -946,11 +1343,26 @@ export function importProtectionPlugin(
load: {
filter: {
id: new RegExp(
- `(${RESOLVED_MOCK_MODULE_ID.replace(/[.*+?^${}()|[\]\\]/g, '\\$&')}|${RESOLVED_MARKER_PREFIX.replace(/[.*+?^${}()|[\]\\]/g, '\\$&')}|${RESOLVED_MOCK_EDGE_PREFIX.replace(/[.*+?^${}()|[\]\\]/g, '\\$&')}|${RESOLVED_MOCK_RUNTIME_PREFIX.replace(/[.*+?^${}()|[\]\\]/g, '\\$&')})`,
+ [
+ RESOLVED_MOCK_MODULE_ID,
+ RESOLVED_MARKER_PREFIX,
+ RESOLVED_MOCK_EDGE_PREFIX,
+ RESOLVED_MOCK_RUNTIME_PREFIX,
+ ]
+ .map(escapeRegExp)
+ .join('|'),
),
},
handler(id) {
- if (!config.enabled) return undefined
+ if (IMPORT_PROTECTION_DEBUG) {
+ if (matchesDebugFilter(id)) {
+ debugLog('load:handler', {
+ env: this.environment.name,
+ id: normalizePath(id),
+ })
+ }
+ }
+
if (id === RESOLVED_MOCK_MODULE_ID) {
return loadSilentMockModule()
}
@@ -976,25 +1388,9 @@ export function importProtectionPlugin(
},
},
{
- // This plugin runs WITHOUT `enforce` so it executes after all
- // `enforce: 'pre'` transform hooks (including the Start compiler).
- // It captures the transformed code + composed sourcemap for every module
- // so that the `resolveId` hook (in the main plugin above) can look up
- // the importer's transform result and map violation locations back to
- // original source.
- //
- // Why not use `ctx.load()` in `resolveId`?
- // - Vite dev: `this.load()` returns a ModuleInfo proxy that throws on
- // `.code` access — code is not exposed.
- // - Rollup build: `ModuleInfo` has `.code` but NOT `.map`, so we
- // can't map generated positions back to original source.
- //
- // By caching in the transform hook we get both code and the composed
- // sourcemap that chains all the way back to the original file.
- //
- // Performance: only files under `srcDirectory` are cached because only
- // those can be importers in a violation. Third-party code in
- // node_modules is never checked.
+ // Captures transformed code + composed sourcemap for location mapping.
+ // Runs after all `enforce: 'pre'` hooks (including the Start compiler).
+ // Only files under `srcDirectory` are cached.
name: 'tanstack-start-core:import-protection-transform-cache',
applyToEnvironment(env) {
@@ -1008,33 +1404,32 @@ export function importProtectionPlugin(
include: [/\.[cm]?[tj]sx?($|\?)/],
},
},
- handler(code, id) {
- if (!config.enabled) return undefined
+ async handler(code, id) {
const envName = this.environment.name
const file = normalizeFilePath(id)
- // Only cache files that could ever be checked as an importer.
- // This reuses the same include/exclude/ignoreImporters predicate as
- // the main import-protection resolveId hook.
+ if (IMPORT_PROTECTION_DEBUG) {
+ if (matchesDebugFilter(file)) {
+ debugLog('transform-cache', {
+ env: envName,
+ id: normalizePath(id),
+ file,
+ })
+ }
+ }
+
if (!shouldCheckImporter(file)) {
return undefined
}
- // getCombinedSourcemap() returns the composed sourcemap of all
- // transform hooks that ran before this one. It includes
- // sourcesContent so we can extract original source later.
+ // getCombinedSourcemap() returns the composed sourcemap
let map: SourceMapLike | undefined
try {
map = this.getCombinedSourcemap()
} catch {
- // No sourcemap available (e.g. virtual modules or modules
- // that no prior plugin produced a map for).
map = undefined
}
- // Extract the original source from sourcesContent right here.
- // Composed sourcemaps can contain multiple sources; try to pick the
- // entry that best matches this importer.
let originalCode: string | undefined
if (map?.sourcesContent) {
originalCode = pickOriginalCodeFromSourcesContent(
@@ -1044,15 +1439,16 @@ export function importProtectionPlugin(
)
}
- // Precompute a line index for fast index->line/col conversions.
const lineIndex = buildLineIndex(code)
-
- // Key by the full normalized module ID including query params
- // (e.g. "src/routes/index.tsx?tsr-split=component") so that
- // virtual modules derived from the same physical file each get
- // their own cache entry.
const cacheKey = normalizePath(id)
+
const envState = getEnv(envName)
+
+ // Propagate SERVER_FN_LOOKUP status before import-analysis
+ if (id.includes(SERVER_FN_LOOKUP_QUERY)) {
+ envState.serverFnLookupModules.add(file)
+ }
+
envState.transformResultCache.set(cacheKey, {
code,
map,
@@ -1060,19 +1456,14 @@ export function importProtectionPlugin(
lineIndex,
})
- // Maintain reverse index so hotUpdate invalidation is O(keys for file).
- let keySet = envState.transformResultKeysByFile.get(file)
- if (!keySet) {
- keySet = new Set()
- envState.transformResultKeysByFile.set(file, keySet)
- }
+ const keySet = getOrCreate(
+ envState.transformResultKeysByFile,
+ file,
+ () => new Set(),
+ )
keySet.add(cacheKey)
- // Also store/update the stripped-path entry so that lookups by
- // physical file path (e.g. from trace steps in the import graph,
- // which normalize away query params) still find a result.
- // The last variant transformed wins, which is acceptable — trace
- // lookups are best-effort for line numbers.
+ // Also store stripped-path entry for physical-file lookups.
if (cacheKey !== file) {
envState.transformResultCache.set(file, {
code,
@@ -1083,7 +1474,29 @@ export function importProtectionPlugin(
keySet.add(file)
}
- // Return nothing — we don't modify the code.
+ // Resolve import sources to canonical paths for reachability checks.
+ const importSources = extractImportSources(code)
+ const resolvedChildren = new Set<string>()
+ for (const src of importSources) {
+ try {
+ const resolved = await this.resolve(src, id, { skipSelf: true })
+ if (resolved && !resolved.external) {
+ const resolvedPath = normalizeFilePath(resolved.id)
+ resolvedChildren.add(resolvedPath)
+ // Populate import graph edges for warm-start trace accuracy
+ envState.graph.addEdge(resolvedPath, file, src)
+ }
+ } catch {
+ // Non-fatal
+ }
+ }
+ envState.postTransformImports.set(cacheKey, resolvedChildren)
+ if (cacheKey !== file) {
+ envState.postTransformImports.set(file, resolvedChildren)
+ }
+
+ await processPendingViolations(envState, this.warn.bind(this))
+
return undefined
},
},
@@ -1115,7 +1528,6 @@ export function importProtectionPlugin(
},
},
handler(code, id) {
- if (!config.enabled) return undefined
const envName = this.environment.name
const envState = envStates.get(envName)
if (!envState) return undefined
@@ -1141,73 +1553,4 @@ export function importProtectionPlugin(
},
},
] satisfies Array<Plugin>
-
- // ---------------------------------------------------------------------------
- // Violation handling
- // ---------------------------------------------------------------------------
-
- function handleViolation(
- this: { warn: (msg: string) => void; error: (msg: string) => never },
- env: EnvState,
- info: ViolationInfo,
- opts?: { silent?: boolean },
- ): { id: string; syntheticNamedExports: boolean } | string | undefined {
- const key = dedupeKey(
- info.type,
- info.importer,
- info.specifier,
- info.resolved,
- )
-
- if (!opts?.silent) {
- // Call user callback
- if (config.onViolation) {
- const result = config.onViolation(info)
- if (result === false) {
- return undefined
- }
- }
-
- const seen = hasSeen(env, key)
-
- if (config.effectiveBehavior === 'error') {
- if (!seen) this.error(formatViolation(info, config.root))
- return undefined
- }
-
- // Mock mode: log once, but always return the mock module.
- if (!seen) {
- this.warn(formatViolation(info, config.root))
- }
- } else {
- // Silent mode: in error behavior, skip entirely (no mock needed
- // for compiler-internal lookups); in mock mode, fall through to
- // return the mock module ID without logging.
- if (config.effectiveBehavior === 'error') {
- return undefined
- }
- }
-
- env.deniedSources.add(info.specifier)
- let edgeSet = env.deniedEdges.get(info.importer)
- if (!edgeSet) {
- edgeSet = new Set()
- env.deniedEdges.set(info.importer, edgeSet)
- }
- edgeSet.add(info.specifier)
-
- if (config.command === 'serve') {
- const runtimeId = mockRuntimeModuleIdFromViolation(
- info,
- config.mockAccess,
- config.root,
- )
- return resolveViteId(
- buildMockEdgeModuleId(env, info.importer, info.specifier, runtimeId),
- )
- }
-
- // Build: Rollup can synthesize named exports.
- return { id: RESOLVED_MOCK_MODULE_ID, syntheticNamedExports: true }
- }
}
diff --git a/packages/start-plugin-core/src/import-protection-plugin/postCompileUsage.ts b/packages/start-plugin-core/src/import-protection-plugin/postCompileUsage.ts
index 109ddfe25e4..8a298cfb83f 100644
--- a/packages/start-plugin-core/src/import-protection-plugin/postCompileUsage.ts
+++ b/packages/start-plugin-core/src/import-protection-plugin/postCompileUsage.ts
@@ -1,120 +1,15 @@
+import babel from '@babel/core'
import * as t from '@babel/types'
import { parseAst } from '@tanstack/router-utils'
-export type UsagePos = { line: number; column0: number }
-
-function collectPatternBindings(
- node: t.Node | null | undefined,
- out: Set,
-): void {
- if (!node) return
- if (t.isIdentifier(node)) {
- out.add(node.name)
- return
- }
- if (t.isRestElement(node)) {
- collectPatternBindings(node.argument, out)
- return
- }
- if (t.isAssignmentPattern(node)) {
- collectPatternBindings(node.left, out)
- return
- }
- if (t.isObjectPattern(node)) {
- for (const prop of node.properties) {
- if (t.isRestElement(prop)) {
- collectPatternBindings(prop.argument, out)
- } else if (t.isObjectProperty(prop)) {
- collectPatternBindings(prop.value as t.Node, out)
- }
- }
- return
- }
- if (t.isArrayPattern(node)) {
- for (const el of node.elements) {
- collectPatternBindings(el, out)
- }
- return
- }
-}
-
-function isBindingPosition(node: t.Node, parent: t.Node | null): boolean {
- if (!parent) return false
- if (t.isFunctionDeclaration(parent) && parent.id === node) return true
- if (t.isFunctionExpression(parent) && parent.id === node) return true
- if (t.isClassDeclaration(parent) && parent.id === node) return true
- if (t.isClassExpression(parent) && parent.id === node) return true
- if (t.isVariableDeclarator(parent) && parent.id === node) return true
- if (t.isImportSpecifier(parent) && parent.local === node) return true
- if (t.isImportDefaultSpecifier(parent) && parent.local === node) return true
- if (t.isImportNamespaceSpecifier(parent) && parent.local === node) return true
- if (
- t.isObjectProperty(parent) &&
- parent.key === node &&
- !parent.computed &&
- // In `{ foo }`, the identifier is also a value reference and must count as
- // usage. Babel represents this as `shorthand: true`.
- !parent.shorthand
- )
- return true
- if (t.isObjectMethod(parent) && parent.key === node && !parent.computed)
- return true
- if (t.isExportSpecifier(parent) && parent.exported === node) return true
- return false
-}
-
-function isPreferredUsage(node: t.Node, parent: t.Node | null): boolean {
- if (!parent) return false
- if (t.isCallExpression(parent) && parent.callee === node) return true
- if (t.isNewExpression(parent) && parent.callee === node) return true
- if (t.isMemberExpression(parent) && parent.object === node) return true
- return false
-}
-
-function isScopeNode(node: t.Node): boolean {
- return (
- t.isProgram(node) ||
- t.isFunctionDeclaration(node) ||
- t.isFunctionExpression(node) ||
- t.isArrowFunctionExpression(node) ||
- t.isBlockStatement(node) ||
- t.isCatchClause(node)
- )
-}
-
-/** `var` hoists to the nearest function or program scope, not block scopes. */
-function isFunctionScopeNode(node: t.Node): boolean {
- return (
- t.isProgram(node) ||
- t.isFunctionDeclaration(node) ||
- t.isFunctionExpression(node) ||
- t.isArrowFunctionExpression(node)
- )
-}
-
-function collectScopeBindings(node: t.Node, out: Set): void {
- if (
- t.isFunctionDeclaration(node) ||
- t.isFunctionExpression(node) ||
- t.isArrowFunctionExpression(node)
- ) {
- for (const p of node.params) {
- collectPatternBindings(p, out)
- }
- return
- }
-
- if (t.isCatchClause(node)) {
- collectPatternBindings(node.param, out)
- return
- }
-}
+type UsagePos = { line: number; column0: number }
/**
* Given transformed code, returns the first "meaningful" usage position for an
* import from `source` that survives compilation.
*
- * The returned column is 0-based (Babel loc semantics).
+ * "Preferred" positions (call, new, member-access) take priority over bare
+ * identifier references. The returned column is 0-based (Babel loc semantics).
*/
export function findPostCompileUsagePos(
code: string,
@@ -122,7 +17,7 @@ export function findPostCompileUsagePos(
): UsagePos | undefined {
const ast = parseAst({ code })
- // 1) Determine local names bound from this specifier
+ // Collect local names bound from this specifier
const imported = new Set()
for (const node of ast.program.body) {
if (t.isImportDeclaration(node) && node.source.value === source) {
@@ -138,129 +33,62 @@ export function findPostCompileUsagePos(
let preferred: UsagePos | undefined
let anyUsage: UsagePos | undefined
- // Scope stack (module scope at index 0).
- // Each entry tracks bindings and whether it is a function/program scope
- // (needed for `var` hoisting).
- interface ScopeEntry {
- bindings: Set
- isFnScope: boolean
- }
- const scopes: Array = [{ bindings: new Set(), isFnScope: true }]
-
- function isShadowed(name: string): boolean {
- // Check inner scopes only
- for (let i = scopes.length - 1; i >= 1; i--) {
- if (scopes[i]!.bindings.has(name)) return true
- }
- return false
- }
-
- function record(node: t.Node, kind: 'preferred' | 'any') {
- const loc = node.loc?.start
- if (!loc) return
- const pos: UsagePos = { line: loc.line, column0: loc.column }
- if (kind === 'preferred') {
- preferred ||= pos
- } else {
- anyUsage ||= pos
- }
- }
-
- function pushScope(node: t.Node): void {
- const bindings = new Set()
- collectScopeBindings(node, bindings)
- scopes.push({ bindings, isFnScope: isFunctionScopeNode(node) })
- }
-
- function popScope(): void {
- scopes.pop()
- }
-
- /** Find the nearest function/program scope entry in the stack. */
- function nearestFnScope(): ScopeEntry {
- for (let i = scopes.length - 1; i >= 0; i--) {
- if (scopes[i]!.isFnScope) return scopes[i]!
- }
- // Should never happen (index 0 is always a function scope).
- return scopes[0]!
- }
-
- // The walker accepts AST nodes, arrays (from node children like
- // `body`, `params`, etc.), or null/undefined for optional children.
- type Walkable =
- | t.Node
- | ReadonlyArray
- | null
- | undefined
-
- function walk(node: Walkable, parent: t.Node | null) {
- if (!node) return
- if (preferred && anyUsage) return
-
- if (Array.isArray(node)) {
- for (const n of node) walk(n, parent)
- return
- }
-
- // After the array check + early return, node is guaranteed to be t.Node.
- // TypeScript doesn't narrow ReadonlyArray from the union, so we assert.
- const astNode = node as t.Node
-
- // Skip import declarations entirely
- if (t.isImportDeclaration(astNode)) return
-
- const enterScope = isScopeNode(astNode)
- if (enterScope) {
- pushScope(astNode)
- }
-
- // Add lexical bindings for variable declarations and class/function decls.
- // Note: function/class *declaration* identifiers bind in the parent scope,
- // so we register them before walking children.
- if (t.isFunctionDeclaration(astNode) && astNode.id) {
- scopes[scopes.length - 2]?.bindings.add(astNode.id.name)
- }
- if (t.isClassDeclaration(astNode) && astNode.id) {
- scopes[scopes.length - 2]?.bindings.add(astNode.id.name)
- }
- if (t.isVariableDeclarator(astNode)) {
- // `var` hoists to the nearest function/program scope, not block scope.
- const isVar = t.isVariableDeclaration(parent) && parent.kind === 'var'
- const target = isVar
- ? nearestFnScope().bindings
- : scopes[scopes.length - 1]!.bindings
- collectPatternBindings(astNode.id, target)
- }
+ // babel.traverse can throw on malformed scopes (e.g. duplicate bindings from
+ // import + const re-declaration) because parseAst doesn't attach a hub
+ try {
+ babel.traverse(ast, {
+ ImportDeclaration(path) {
+ path.skip()
+ },
+
+ Identifier(path: babel.NodePath) {
+ if (preferred && anyUsage) {
+ path.stop()
+ return
+ }
- if (t.isIdentifier(astNode) && imported.has(astNode.name)) {
- if (!isBindingPosition(astNode, parent) && !isShadowed(astNode.name)) {
- if (isPreferredUsage(astNode, parent)) {
- record(astNode, 'preferred')
+ const { node, parent, scope } = path
+ if (!imported.has(node.name)) return
+
+ // Skip binding positions (declarations, import specifiers, etc.)
+ if (path.isBindingIdentifier()) return
+
+ // Skip non-shorthand object property keys — they don't reference the import
+ if (
+ t.isObjectProperty(parent) &&
+ parent.key === node &&
+ !parent.computed &&
+ !parent.shorthand
+ )
+ return
+ if (t.isObjectMethod(parent) && parent.key === node && !parent.computed)
+ return
+ if (t.isExportSpecifier(parent) && parent.exported === node) return
+
+ // Skip if shadowed by a closer binding
+ const binding = scope.getBinding(node.name)
+ if (binding && binding.kind !== 'module') return
+
+ const loc = node.loc?.start
+ if (!loc) return
+ const pos: UsagePos = { line: loc.line, column0: loc.column }
+
+ const isPreferred =
+ (t.isCallExpression(parent) && parent.callee === node) ||
+ (t.isNewExpression(parent) && parent.callee === node) ||
+ (t.isMemberExpression(parent) && parent.object === node)
+
+ if (isPreferred) {
+ preferred ||= pos
} else {
- record(astNode, 'any')
+ anyUsage ||= pos
}
- }
- }
-
- // Iterate child properties of this AST node. We use a Record cast since
- // Babel node types don't expose an index signature, but we need to walk
- // all child properties generically.
- const record_ = astNode as unknown as Record
- for (const key of Object.keys(record_)) {
- const value = record_[key]
- if (!value) continue
- if (key === 'loc' || key === 'start' || key === 'end') continue
- if (key === 'parent') continue
- if (typeof value === 'string' || typeof value === 'number') continue
- walk(value as Walkable, astNode)
- if (preferred && anyUsage) break
- }
-
- if (enterScope) {
- popScope()
- }
+ },
+ })
+ } catch {
+ // Scope analysis failed — cannot determine usage positions reliably
+ return undefined
}
- walk(ast.program, null)
return preferred ?? anyUsage
}
diff --git a/packages/start-plugin-core/src/import-protection-plugin/rewriteDeniedImports.ts b/packages/start-plugin-core/src/import-protection-plugin/rewriteDeniedImports.ts
index 895c939094b..cba0f435596 100644
--- a/packages/start-plugin-core/src/import-protection-plugin/rewriteDeniedImports.ts
+++ b/packages/start-plugin-core/src/import-protection-plugin/rewriteDeniedImports.ts
@@ -2,14 +2,36 @@ import * as t from '@babel/types'
import { generateFromAst, parseAst } from '@tanstack/router-utils'
import { MOCK_MODULE_ID } from './virtualModules'
-
-// ---------------------------------------------------------------------------
-// Export name collection (for dev mock-edge modules)
-// ---------------------------------------------------------------------------
+import { getOrCreate } from './utils'
export function isValidExportName(name: string): boolean {
- if (name === 'default') return false
- return /^[A-Za-z_$][A-Za-z0-9_$]*$/.test(name)
+ if (name === 'default' || name.length === 0) return false
+ const first = name.charCodeAt(0)
+ // First char: A-Z (65-90), a-z (97-122), _ (95), $ (36)
+ if (
+ !(
+ (first >= 65 && first <= 90) ||
+ (first >= 97 && first <= 122) ||
+ first === 95 ||
+ first === 36
+ )
+ )
+ return false
+ for (let i = 1; i < name.length; i++) {
+ const ch = name.charCodeAt(i)
+ // Subsequent: A-Z, a-z, 0-9 (48-57), _, $
+ if (
+ !(
+ (ch >= 65 && ch <= 90) ||
+ (ch >= 97 && ch <= 122) ||
+ (ch >= 48 && ch <= 57) ||
+ ch === 95 ||
+ ch === 36
+ )
+ )
+ return false
+ }
+ return true
}
/**
@@ -23,13 +45,8 @@ export function collectMockExportNamesBySource(
const namesBySource = new Map>()
const add = (source: string, name: string) => {
- if (!isValidExportName(name)) return
- let set = namesBySource.get(source)
- if (!set) {
- set = new Set()
- namesBySource.set(source, set)
- }
- set.add(name)
+ if (name === 'default' || name.length === 0) return
+ getOrCreate(namesBySource, source, () => new Set()).add(name)
}
for (const node of ast.program.body) {
@@ -66,10 +83,6 @@ export function collectMockExportNamesBySource(
return out
}
-// ---------------------------------------------------------------------------
-// AST-based import rewriting
-// ---------------------------------------------------------------------------
-
/**
* Rewrite static imports/re-exports from denied sources using Babel AST transforms.
*
@@ -101,16 +114,13 @@ export function rewriteDeniedImports(
for (let i = ast.program.body.length - 1; i >= 0; i--) {
const node = ast.program.body[i]!
- // --- import declarations ---
if (t.isImportDeclaration(node)) {
- // Skip type-only imports
if (node.importKind === 'type') continue
if (!deniedSources.has(node.source.value)) continue
const mockVar = `__tss_deny_${mockCounter++}`
const replacements: Array = []
- // import __tss_deny_N from ''
replacements.push(
t.importDeclaration(
[t.importDefaultSpecifier(t.identifier(mockVar))],
@@ -119,18 +129,10 @@ export function rewriteDeniedImports(
)
for (const specifier of node.specifiers) {
- if (t.isImportDefaultSpecifier(specifier)) {
- // import def from 'denied' -> const def = __tss_deny_N
- replacements.push(
- t.variableDeclaration('const', [
- t.variableDeclarator(
- t.identifier(specifier.local.name),
- t.identifier(mockVar),
- ),
- ]),
- )
- } else if (t.isImportNamespaceSpecifier(specifier)) {
- // import * as ns from 'denied' -> const ns = __tss_deny_N
+ if (
+ t.isImportDefaultSpecifier(specifier) ||
+ t.isImportNamespaceSpecifier(specifier)
+ ) {
replacements.push(
t.variableDeclaration('const', [
t.variableDeclarator(
@@ -140,9 +142,7 @@ export function rewriteDeniedImports(
]),
)
} else if (t.isImportSpecifier(specifier)) {
- // Skip type-only specifiers
if (specifier.importKind === 'type') continue
- // import { a as b } from 'denied' -> const b = __tss_deny_N.a
const importedName = t.isIdentifier(specifier.imported)
? specifier.imported.name
: specifier.imported.value
@@ -165,7 +165,6 @@ export function rewriteDeniedImports(
continue
}
- // --- export { x } from 'denied' ---
if (t.isExportNamedDeclaration(node) && node.source) {
if (node.exportKind === 'type') continue
if (!deniedSources.has(node.source.value)) continue
@@ -173,15 +172,12 @@ export function rewriteDeniedImports(
const mockVar = `__tss_deny_${mockCounter++}`
const replacements: Array = []
- // import __tss_deny_N from ''
replacements.push(
t.importDeclaration(
[t.importDefaultSpecifier(t.identifier(mockVar))],
t.stringLiteral(getMockModuleId(node.source.value)),
),
)
-
- // For each re-exported specifier, create an exported const
const exportSpecifiers: Array<{
localName: string
exportedName: string
@@ -195,7 +191,6 @@ export function rewriteDeniedImports(
: specifier.exported.value
const internalVar = `__tss_reexport_${localName}`
- // const __tss_reexport_x = __tss_deny_N.x
replacements.push(
t.variableDeclaration('const', [
t.variableDeclarator(
@@ -211,7 +206,6 @@ export function rewriteDeniedImports(
}
}
- // export { __tss_reexport_x as x, ... }
if (exportSpecifiers.length > 0) {
replacements.push(
t.exportNamedDeclaration(
@@ -231,12 +225,10 @@ export function rewriteDeniedImports(
continue
}
- // --- export * from 'denied' ---
if (t.isExportAllDeclaration(node)) {
if (node.exportKind === 'type') continue
if (!deniedSources.has(node.source.value)) continue
- // Remove the star re-export entirely
ast.program.body.splice(i, 1)
modified = true
continue
diff --git a/packages/start-plugin-core/src/import-protection-plugin/sourceLocation.ts b/packages/start-plugin-core/src/import-protection-plugin/sourceLocation.ts
index a4047cea2de..aef2f3e2e3f 100644
--- a/packages/start-plugin-core/src/import-protection-plugin/sourceLocation.ts
+++ b/packages/start-plugin-core/src/import-protection-plugin/sourceLocation.ts
@@ -1,22 +1,15 @@
import { SourceMapConsumer } from 'source-map'
import * as path from 'pathe'
-import { normalizeFilePath } from './utils'
+import { escapeRegExp, getOrCreate, normalizeFilePath } from './utils'
import type { Loc } from './trace'
import type { RawSourceMap } from 'source-map'
-// ---------------------------------------------------------------------------
// Source-map type compatible with both Rollup's SourceMap and source-map's
-// RawSourceMap. We define our own structural type so that the value returned
-// by `getCombinedSourcemap()` (version: number) flows seamlessly into
-// `SourceMapConsumer` (version: string) without requiring a cast.
-// ---------------------------------------------------------------------------
+// RawSourceMap. Structural type avoids version: number vs string mismatch.
/**
* Minimal source-map shape used throughout the import-protection plugin.
- *
- * Structurally compatible with both Rollup's `SourceMap` (version: number)
- * and the `source-map` package's `RawSourceMap` (version: string).
*/
export interface SourceMapLike {
file?: string
@@ -28,21 +21,7 @@ export interface SourceMapLike {
mappings: string
}
-// ---------------------------------------------------------------------------
// Transform result provider (replaces ctx.load() which doesn't work in dev)
-// ---------------------------------------------------------------------------
-
-/**
- * A cached transform result for a single module.
- *
- * - `code` – fully-transformed source (after all plugins).
- * - `map` – composed sourcemap (chains back to the original file).
- * - `originalCode` – the untransformed source, extracted from the
- * sourcemap's `sourcesContent[0]` during the transform
- * hook. Used by {@link buildCodeSnippet} so we never
- * have to re-derive it via a flaky `sourceContentFor`
- * lookup at display time.
- */
export interface TransformResult {
code: string
map: SourceMapLike | undefined
@@ -54,29 +33,14 @@ export interface TransformResult {
/**
* Provides the transformed code and composed sourcemap for a module.
*
- * During `resolveId`, Vite's `this.load()` does NOT return code/map in dev
- * mode (the ModuleInfo proxy throws on `.code` access). Even in build mode,
- * Rollup's `ModuleInfo` has `.code` but not `.map`.
- *
- * Instead, we populate this cache from a late-running transform hook that
- * stores `{ code, map, originalCode }` for every module as it passes through
- * the pipeline. By the time `resolveId` fires for an import, the importer
- * has already been fully transformed, so the cache always has the data we
- * need.
- *
- * The `id` parameter is the **raw** module ID (may include Vite query
- * parameters like `?tsr-split=component`). Implementations should look up
- * with the full ID first, then fall back to the query-stripped path so that
- * virtual-module variants are resolved correctly without losing the base-file
- * fallback.
+ * Populated from a late-running transform hook. By the time `resolveId`
+ * fires for an import, the importer has already been fully transformed.
*/
export interface TransformResultProvider {
getTransformResult: (id: string) => TransformResult | undefined
}
-// ---------------------------------------------------------------------------
// Index → line/column conversion
-// ---------------------------------------------------------------------------
export type LineIndex = {
offsets: Array
@@ -107,53 +71,18 @@ function indexToLineColWithIndex(
lineIndex: LineIndex,
idx: number,
): { line: number; column0: number } {
- let line = 1
-
const offsets = lineIndex.offsets
const ub = upperBound(offsets, idx)
const lineIdx = Math.max(0, ub - 1)
- line = lineIdx + 1
+ const line = lineIdx + 1
const lineStart = offsets[lineIdx] ?? 0
return { line, column0: Math.max(0, idx - lineStart) }
}
-// ---------------------------------------------------------------------------
-// Pick the best original source from sourcesContent
-// ---------------------------------------------------------------------------
-
-function suffixSegmentScore(a: string, b: string): number {
- const aSeg = a.split('/').filter(Boolean)
- const bSeg = b.split('/').filter(Boolean)
- let score = 0
- for (
- let i = aSeg.length - 1, j = bSeg.length - 1;
- i >= 0 && j >= 0;
- i--, j--
- ) {
- if (aSeg[i] !== bSeg[j]) break
- score++
- }
- return score
-}
-
-function normalizeSourceCandidate(
- source: string,
- root: string,
- sourceRoot: string | undefined,
-): string {
- // Prefer resolving relative source paths against root/sourceRoot when present.
- if (!source) return ''
- if (path.isAbsolute(source)) return normalizeFilePath(source)
- const base = sourceRoot ? path.resolve(root, sourceRoot) : root
- return normalizeFilePath(path.resolve(base, source))
-}
-
/**
- * Pick the most-likely original source text for `importerFile`.
- *
- * Sourcemaps can contain multiple sources (composed maps), so `sourcesContent[0]`
- * is not guaranteed to represent the importer.
+ * Pick the most-likely original source text for `importerFile` from
+ * a sourcemap that may contain multiple sources.
*/
export function pickOriginalCodeFromSourcesContent(
map: SourceMapLike | undefined,
@@ -166,6 +95,9 @@ export function pickOriginalCodeFromSourcesContent(
const file = normalizeFilePath(importerFile)
const sourceRoot = map.sourceRoot
+ const fileSeg = file.split('/').filter(Boolean)
+
+ const resolveBase = sourceRoot ? path.resolve(root, sourceRoot) : root
let bestIdx = -1
let bestScore = -1
@@ -176,21 +108,32 @@ export function pickOriginalCodeFromSourcesContent(
const src = map.sources[i] ?? ''
- // Exact match via raw normalized source.
const normalizedSrc = normalizeFilePath(src)
if (normalizedSrc === file) {
return content
}
- // Exact match via resolved absolute candidate.
- const resolved = normalizeSourceCandidate(src, root, sourceRoot)
+ let resolved: string
+ if (!src) {
+ resolved = ''
+ } else if (path.isAbsolute(src)) {
+ resolved = normalizeFilePath(src)
+ } else {
+ resolved = normalizeFilePath(path.resolve(resolveBase, src))
+ }
if (resolved === file) {
return content
}
+ // Count matching path segments from the end.
+ const normalizedSrcSeg = normalizedSrc.split('/').filter(Boolean)
+ const resolvedSeg =
+ resolved !== normalizedSrc
+ ? resolved.split('/').filter(Boolean)
+ : normalizedSrcSeg
const score = Math.max(
- suffixSegmentScore(normalizedSrc, file),
- suffixSegmentScore(resolved, file),
+ segmentSuffixScore(normalizedSrcSeg, fileSeg),
+ segmentSuffixScore(resolvedSeg, fileSeg),
)
if (score > bestScore) {
@@ -199,21 +142,28 @@ export function pickOriginalCodeFromSourcesContent(
}
}
- // Require at least a basename match; otherwise fall back to index 0.
if (bestIdx !== -1 && bestScore >= 1) {
- const best = map.sourcesContent[bestIdx]
- return typeof best === 'string' ? best : undefined
+ return map.sourcesContent[bestIdx] ?? undefined
}
- const fallback = map.sourcesContent[0]
- return typeof fallback === 'string' ? fallback : undefined
+ return map.sourcesContent[0] ?? undefined
}
-// ---------------------------------------------------------------------------
-// Sourcemap: generated → original mapping
-// ---------------------------------------------------------------------------
+/** Count matching path segments from the end of `aSeg` against `bSeg`. */
+function segmentSuffixScore(aSeg: Array, bSeg: Array): number {
+ let score = 0
+ for (
+ let i = aSeg.length - 1, j = bSeg.length - 1;
+ i >= 0 && j >= 0;
+ i--, j--
+ ) {
+ if (aSeg[i] !== bSeg[j]) break
+ score++
+ }
+ return score
+}
-export async function mapGeneratedToOriginal(
+async function mapGeneratedToOriginal(
map: SourceMapLike | undefined,
generated: { line: number; column0: number },
fallbackFile: string,
@@ -244,28 +194,32 @@ export async function mapGeneratedToOriginal(
}
}
} catch {
- // Invalid or malformed sourcemap — fall through to fallback.
+ // Malformed sourcemap
}
return fallback
}
-// Cache SourceMapConsumer per sourcemap object.
const consumerCache = new WeakMap