diff --git a/benchmark/cluster/echo.js b/benchmark/cluster/echo.js index 71ded75c9d2572..a3f103bc0b8987 100644 --- a/benchmark/cluster/echo.js +++ b/benchmark/cluster/echo.js @@ -8,7 +8,7 @@ if (cluster.isPrimary) { payload: ['string', 'object'], sendsPerBroadcast: [1, 10], serialization: ['json', 'advanced'], - n: [1e5], + n: [1e3], }); function main({ diff --git a/doc/api/cli.md b/doc/api/cli.md index 5e61ba21283697..35620859d64a9f 100644 --- a/doc/api/cli.md +++ b/doc/api/cli.md @@ -3254,6 +3254,11 @@ added: v22.1.0 Enable the [module compile cache][] for the Node.js instance. See the documentation of [module compile cache][] for details. +### `NODE_COMPILE_CACHE_PORTABLE=1` + +When set to 1, the [module compile cache][] can be reused across different directory +locations as long as the module layout relative to the cache directory remains the same. + ### `NODE_DEBUG=module[,…]` + +Type: Documentation-only + +Allowing a [`fs.Dir`][] object to be closed on garbage collection is +deprecated. In the future, doing so might result in a thrown error that will +terminate the process. + +Please ensure that all `fs.Dir` objects are explicitly closed using +`Dir.prototype.close()` or `using` keyword: + +```mjs +import { opendir } from 'node:fs/promises'; + +{ + await using dir = await opendir('/async/disposable/directory'); +} // Closed by dir[Symbol.asyncDispose]() + +{ + using dir = await opendir('/sync/disposable/directory'); +} // Closed by dir[Symbol.dispose]() + +{ + const dir = await opendir('/unconditionally/iterated/directory'); + for await (const entry of dir) { + // process an entry + } // Closed by iterator +} + +{ + let dir; + try { + dir = await opendir('/legacy/closeable/directory'); + } finally { + await dir?.close(); + } +} +``` + [DEP0142]: #dep0142-repl_builtinlibs [NIST SP 800-38D]: https://nvlpubs.nist.gov/nistpubs/Legacy/SP/nistspecialpublication800-38d.pdf [RFC 6066]: https://tools.ietf.org/html/rfc6066#section-3 @@ -4083,6 +4129,7 @@ an internal nodejs implementation rather than a public facing API, use `node:htt [`ecdh.setPublicKey()`]: crypto.md#ecdhsetpublickeypublickey-encoding [`emitter.listenerCount(eventName)`]: events.md#emitterlistenercounteventname-listener [`events.listenerCount(emitter, eventName)`]: events.md#eventslistenercountemitter-eventname +[`fs.Dir`]: fs.md#class-fsdir [`fs.FileHandle`]: fs.md#class-filehandle [`fs.access()`]: fs.md#fsaccesspath-mode-callback [`fs.appendFile()`]: fs.md#fsappendfilepath-data-options-callback diff --git a/doc/api/module.md b/doc/api/module.md index f1e55c733ea4cc..a2c7a4be7901c3 100644 --- a/doc/api/module.md +++ b/doc/api/module.md @@ -399,6 +399,28 @@ the [`NODE_COMPILE_CACHE=dir`][] environment variable if it's set, or defaults to `path.join(os.tmpdir(), 'node-compile-cache')` otherwise. To locate the compile cache directory used by a running Node.js instance, use [`module.getCompileCacheDir()`][]. +By default, caches are invalidated when the absolute paths of the modules being +cached are changed. To keep the cache working after moving the +project directory, enable portable compile cache. This allows previously compiled +modules to be reused across different directory locations as long as the layout relative +to the cache directory remains the same. This would be done on a best-effort basis. If +Node.js cannot compute the location of a module relative to the cache directory, the module +will not be cached. + +There are two ways to enable the portable mode: + +1. 
Using the portable option in module.enableCompileCache(): + + ```js + // Non-portable cache (default): cache breaks if project is moved + module.enableCompileCache({ path: '/path/to/cache/storage/dir' }); + + // Portable cache: cache works after the project is moved + module.enableCompileCache({ path: '/path/to/cache/storage/dir', portable: true }); + ``` + +2. Setting the environment variable: [`NODE_COMPILE_CACHE_PORTABLE=1`][] + Currently when using the compile cache with [V8 JavaScript code coverage][], the coverage being collected by V8 may be less precise in functions that are deserialized from the code cache. It's recommended to turn this off when @@ -1789,6 +1811,7 @@ returned object contains the following keys: [`--import`]: cli.md#--importmodule [`--require`]: cli.md#-r---require-module [`NODE_COMPILE_CACHE=dir`]: cli.md#node_compile_cachedir +[`NODE_COMPILE_CACHE_PORTABLE=1`]: cli.md#node_compile_cache_portable1 [`NODE_DISABLE_COMPILE_CACHE=1`]: cli.md#node_disable_compile_cache1 [`NODE_V8_COVERAGE=dir`]: cli.md#node_v8_coveragedir [`SourceMap`]: #class-modulesourcemap diff --git a/doc/node.1 b/doc/node.1 index 6210cbf42b26d4..f672b6a3cdaf09 100644 --- a/doc/node.1 +++ b/doc/node.1 @@ -719,6 +719,13 @@ Enable the .Sy module compile cache for the Node.js instance. . +.It Ev NODE_COMPILE_CACHE_PORTABLE +When set to '1' or 'true', the +.Sy module compile cache +will be hit as long as the location of the modules relative to the cache directory remain +consistent. This can be used in conjunction with .Ev NODE_COMPILE_CACHE +to enable portable on-disk caching. +. .It Ev NODE_DEBUG Ar modules... Comma-separated list of core modules that should print debug information. . diff --git a/lib/internal/crypto/aes.js b/lib/internal/crypto/aes.js index 0abffe85c9881b..0474060d394c99 100644 --- a/lib/internal/crypto/aes.js +++ b/lib/internal/crypto/aes.js @@ -5,7 +5,6 @@ const { ArrayBufferPrototypeSlice, ArrayFrom, ArrayPrototypePush, - PromiseReject, SafeSet, TypedArrayPrototypeSlice, } = primordials; @@ -144,7 +143,7 @@ function asyncAesKwCipher(mode, key, data) { getVariant('AES-KW', key[kAlgorithm].length))); } -function asyncAesGcmCipher(mode, key, data, algorithm) { +async function asyncAesGcmCipher(mode, key, data, algorithm) { const { tagLength = 128 } = algorithm; const tagByteLength = tagLength / 8; @@ -160,9 +159,9 @@ function asyncAesGcmCipher(mode, key, data, algorithm) { // > If *plaintext* has a length less than *tagLength* bits, then `throw` // > an `OperationError`. 
if (tagByteLength > tag.byteLength) { - return PromiseReject(lazyDOMException( + throw lazyDOMException( 'The provided data is too small.', - 'OperationError')); + 'OperationError'); } data = slice(data, 0, -tagByteLength); @@ -173,7 +172,7 @@ function asyncAesGcmCipher(mode, key, data, algorithm) { break; } - return jobPromise(() => new AESCipherJob( + return await jobPromise(() => new AESCipherJob( kCryptoJobAsync, mode, key[kKeyObject][kHandle], @@ -184,7 +183,7 @@ function asyncAesGcmCipher(mode, key, data, algorithm) { algorithm.additionalData)); } -function asyncAesOcbCipher(mode, key, data, algorithm) { +async function asyncAesOcbCipher(mode, key, data, algorithm) { const { tagLength = 128 } = algorithm; const tagByteLength = tagLength / 8; @@ -197,9 +196,9 @@ function asyncAesOcbCipher(mode, key, data, algorithm) { // Similar to GCM, OCB requires the tag to be present for decryption if (tagByteLength > tag.byteLength) { - return PromiseReject(lazyDOMException( + throw lazyDOMException( 'The provided data is too small.', - 'OperationError')); + 'OperationError'); } data = slice(data, 0, -tagByteLength); @@ -210,7 +209,7 @@ function asyncAesOcbCipher(mode, key, data, algorithm) { break; } - return jobPromise(() => new AESCipherJob( + return await jobPromise(() => new AESCipherJob( kCryptoJobAsync, mode, key[kKeyObject][kHandle], @@ -245,12 +244,15 @@ async function aesGenerateKey(algorithm, extractable, keyUsages) { 'SyntaxError'); } - const key = await generateKey('aes', { length }).catch((err) => { + let key; + try { + key = await generateKey('aes', { length }); + } catch (err) { throw lazyDOMException( 'The operation failed for an operation-specific reason' + `[${err.message}]`, { name: 'OperationError', cause: err }); - }); + } return new InternalCryptoKey( key, diff --git a/lib/internal/crypto/cfrg.js b/lib/internal/crypto/cfrg.js index 9380e2a3e746ac..c5bbaae90cf595 100644 --- a/lib/internal/crypto/cfrg.js +++ b/lib/internal/crypto/cfrg.js @@ -149,11 +149,14 @@ async function cfrgGenerateKey(algorithm, extractable, keyUsages) { break; } - const keyPair = await generateKeyPair(genKeyType).catch((err) => { + let keyPair; + try { + keyPair = await generateKeyPair(genKeyType); + } catch (err) { throw lazyDOMException( 'The operation failed for an operation-specific reason', { name: 'OperationError', cause: err }); - }); + } let publicUsages; let privateUsages; @@ -340,14 +343,14 @@ function cfrgImportKey( extractable); } -function eddsaSignVerify(key, data, algorithm, signature) { +async function eddsaSignVerify(key, data, algorithm, signature) { const mode = signature === undefined ? kSignJobModeSign : kSignJobModeVerify; const type = mode === kSignJobModeSign ? 
'private' : 'public'; if (key[kKeyType] !== type) throw lazyDOMException(`Key must be a ${type} key`, 'InvalidAccessError'); - return jobPromise(() => new SignJob( + return await jobPromise(() => new SignJob( kCryptoJobAsync, mode, key[kKeyObject][kHandle], diff --git a/lib/internal/crypto/chacha20_poly1305.js b/lib/internal/crypto/chacha20_poly1305.js index bcc778b24d7738..0979d7aaddbb61 100644 --- a/lib/internal/crypto/chacha20_poly1305.js +++ b/lib/internal/crypto/chacha20_poly1305.js @@ -4,7 +4,6 @@ const { ArrayBufferIsView, ArrayBufferPrototypeSlice, ArrayFrom, - PromiseReject, SafeSet, TypedArrayPrototypeSlice, } = primordials; @@ -47,7 +46,7 @@ function validateKeyLength(length) { throw lazyDOMException('Invalid key length', 'DataError'); } -function c20pCipher(mode, key, data, algorithm) { +async function c20pCipher(mode, key, data, algorithm) { let tag; switch (mode) { case kWebCryptoCipherDecrypt: { @@ -55,9 +54,9 @@ function c20pCipher(mode, key, data, algorithm) { TypedArrayPrototypeSlice : ArrayBufferPrototypeSlice; if (data.byteLength < 16) { - return PromiseReject(lazyDOMException( + throw lazyDOMException( 'The provided data is too small.', - 'OperationError')); + 'OperationError'); } tag = slice(data, -16); @@ -69,7 +68,7 @@ function c20pCipher(mode, key, data, algorithm) { break; } - return jobPromise(() => new ChaCha20Poly1305CipherJob( + return await jobPromise(() => new ChaCha20Poly1305CipherJob( kCryptoJobAsync, mode, key[kKeyObject][kHandle], @@ -91,12 +90,15 @@ async function c20pGenerateKey(algorithm, extractable, keyUsages) { 'SyntaxError'); } - const keyData = await randomBytes(32).catch((err) => { + let keyData; + try { + keyData = await randomBytes(32); + } catch (err) { throw lazyDOMException( 'The operation failed for an operation-specific reason' + `[${err.message}]`, { name: 'OperationError', cause: err }); - }); + } return new InternalCryptoKey( createSecretKey(keyData), diff --git a/lib/internal/crypto/ec.js b/lib/internal/crypto/ec.js index c417aa09670f0b..dd7997c82cbf91 100644 --- a/lib/internal/crypto/ec.js +++ b/lib/internal/crypto/ec.js @@ -97,11 +97,14 @@ async function ecGenerateKey(algorithm, extractable, keyUsages) { // Fall through } - const keypair = await generateKeyPair('ec', { namedCurve }).catch((err) => { + let keyPair; + try { + keyPair = await generateKeyPair('ec', { namedCurve }); + } catch (err) { throw lazyDOMException( 'The operation failed for an operation-specific reason', { name: 'OperationError', cause: err }); - }); + } let publicUsages; let privateUsages; @@ -120,14 +123,14 @@ async function ecGenerateKey(algorithm, extractable, keyUsages) { const publicKey = new InternalCryptoKey( - keypair.publicKey, + keyPair.publicKey, keyAlgorithm, publicUsages, true); const privateKey = new InternalCryptoKey( - keypair.privateKey, + keyPair.privateKey, keyAlgorithm, privateUsages, extractable); @@ -281,7 +284,7 @@ function ecImportKey( extractable); } -function ecdsaSignVerify(key, data, { name, hash }, signature) { +async function ecdsaSignVerify(key, data, { name, hash }, signature) { const mode = signature === undefined ? kSignJobModeSign : kSignJobModeVerify; const type = mode === kSignJobModeSign ? 
'private' : 'public'; @@ -290,7 +293,7 @@ function ecdsaSignVerify(key, data, { name, hash }, signature) { const hashname = normalizeHashName(hash.name); - return jobPromise(() => new SignJob( + return await jobPromise(() => new SignJob( kCryptoJobAsync, mode, key[kKeyObject][kHandle], diff --git a/lib/internal/crypto/hash.js b/lib/internal/crypto/hash.js index e4d94da1c5ee96..ef8020ebb587bf 100644 --- a/lib/internal/crypto/hash.js +++ b/lib/internal/crypto/hash.js @@ -221,7 +221,7 @@ async function asyncDigest(algorithm, data) { case 'cSHAKE128': // Fall through case 'cSHAKE256': - return jobPromise(() => new HashJob( + return await jobPromise(() => new HashJob( kCryptoJobAsync, normalizeHashName(algorithm.name), data, diff --git a/lib/internal/crypto/mac.js b/lib/internal/crypto/mac.js index 0564f6c19d285f..a31c3ddb0d9484 100644 --- a/lib/internal/crypto/mac.js +++ b/lib/internal/crypto/mac.js @@ -64,11 +64,14 @@ async function hmacGenerateKey(algorithm, extractable, keyUsages) { 'SyntaxError'); } - const key = await generateKey('hmac', { length }).catch((err) => { + let key; + try { + key = await generateKey('hmac', { length }); + } catch (err) { throw lazyDOMException( 'The operation failed for an operation-specific reason', { name: 'OperationError', cause: err }); - }); + } return new InternalCryptoKey( key, @@ -94,12 +97,15 @@ async function kmacGenerateKey(algorithm, extractable, keyUsages) { 'SyntaxError'); } - const keyData = await randomBytes(length / 8).catch((err) => { + let keyData; + try { + keyData = await randomBytes(length / 8); + } catch (err) { throw lazyDOMException( 'The operation failed for an operation-specific reason' + `[${err.message}]`, { name: 'OperationError', cause: err }); - }); + } return new InternalCryptoKey( createSecretKey(keyData), diff --git a/lib/internal/crypto/ml_dsa.js b/lib/internal/crypto/ml_dsa.js index f1e6594f9e1a68..ebe3bfe3d17ca0 100644 --- a/lib/internal/crypto/ml_dsa.js +++ b/lib/internal/crypto/ml_dsa.js @@ -88,11 +88,14 @@ async function mlDsaGenerateKey(algorithm, extractable, keyUsages) { 'SyntaxError'); } - const keyPair = await generateKeyPair(name.toLowerCase()).catch((err) => { + let keyPair; + try { + keyPair = await generateKeyPair(name.toLowerCase()); + } catch (err) { throw lazyDOMException( 'The operation failed for an operation-specific reason', { name: 'OperationError', cause: err }); - }); + } const publicUsages = getUsagesUnion(usageSet, 'verify'); const privateUsages = getUsagesUnion(usageSet, 'sign'); @@ -284,14 +287,14 @@ function mlDsaImportKey( extractable); } -function mlDsaSignVerify(key, data, algorithm, signature) { +async function mlDsaSignVerify(key, data, algorithm, signature) { const mode = signature === undefined ? kSignJobModeSign : kSignJobModeVerify; const type = mode === kSignJobModeSign ? 
'private' : 'public'; if (key[kKeyType] !== type) throw lazyDOMException(`Key must be a ${type} key`, 'InvalidAccessError'); - return jobPromise(() => new SignJob( + return await jobPromise(() => new SignJob( kCryptoJobAsync, mode, key[kKeyObject][kHandle], diff --git a/lib/internal/crypto/ml_kem.js b/lib/internal/crypto/ml_kem.js index 5f6efc01125a4b..f6eb76cef10b20 100644 --- a/lib/internal/crypto/ml_kem.js +++ b/lib/internal/crypto/ml_kem.js @@ -59,11 +59,14 @@ async function mlKemGenerateKey(algorithm, extractable, keyUsages) { 'SyntaxError'); } - const keyPair = await generateKeyPair(name.toLowerCase()).catch((err) => { + let keyPair; + try { + keyPair = await generateKeyPair(name.toLowerCase()); + } catch (err) { throw lazyDOMException( 'The operation failed for an operation-specific reason', { name: 'OperationError', cause: err }); - }); + } const publicUsages = getUsagesUnion(usageSet, 'encapsulateBits', 'encapsulateKey'); const privateUsages = getUsagesUnion(usageSet, 'decapsulateBits', 'decapsulateKey'); diff --git a/lib/internal/crypto/rsa.js b/lib/internal/crypto/rsa.js index e3567a98c41878..c6b3985dbaee66 100644 --- a/lib/internal/crypto/rsa.js +++ b/lib/internal/crypto/rsa.js @@ -93,7 +93,7 @@ function validateRsaOaepAlgorithm(algorithm) { } } -function rsaOaepCipher(mode, key, data, algorithm) { +async function rsaOaepCipher(mode, key, data, algorithm) { validateRsaOaepAlgorithm(algorithm); const type = mode === kWebCryptoCipherEncrypt ? 'public' : 'private'; @@ -103,7 +103,7 @@ function rsaOaepCipher(mode, key, data, algorithm) { 'InvalidAccessError'); } - return jobPromise(() => new RSACipherJob( + return await jobPromise(() => new RSACipherJob( kCryptoJobAsync, mode, key[kKeyObject][kHandle], @@ -150,14 +150,17 @@ async function rsaKeyGenerate( } } - const keypair = await generateKeyPair('rsa', { - modulusLength, - publicExponent: publicExponentConverted, - }).catch((err) => { + let keyPair; + try { + keyPair = await generateKeyPair('rsa', { + modulusLength, + publicExponent: publicExponentConverted, + }); + } catch (err) { throw lazyDOMException( 'The operation failed for an operation-specific reason', { name: 'OperationError', cause: err }); - }); + } const keyAlgorithm = { name, @@ -183,14 +186,14 @@ async function rsaKeyGenerate( const publicKey = new InternalCryptoKey( - keypair.publicKey, + keyPair.publicKey, keyAlgorithm, publicUsages, true); const privateKey = new InternalCryptoKey( - keypair.privateKey, + keyPair.privateKey, keyAlgorithm, privateUsages, extractable); @@ -327,14 +330,14 @@ function rsaImportKey( }, keyUsages, extractable); } -function rsaSignVerify(key, data, { saltLength }, signature) { +async function rsaSignVerify(key, data, { saltLength }, signature) { const mode = signature === undefined ? kSignJobModeSign : kSignJobModeVerify; const type = mode === kSignJobModeSign ? 
'private' : 'public'; if (key[kKeyType] !== type) throw lazyDOMException(`Key must be a ${type} key`, 'InvalidAccessError'); - return jobPromise(() => { + return await jobPromise(() => { if (key[kAlgorithm].name === 'RSA-PSS') { validateInt32( saltLength, diff --git a/lib/internal/crypto/webcrypto.js b/lib/internal/crypto/webcrypto.js index ba5632c24df7ef..869c07ef87fbe6 100644 --- a/lib/internal/crypto/webcrypto.js +++ b/lib/internal/crypto/webcrypto.js @@ -84,7 +84,7 @@ async function digest(algorithm, data) { algorithm = normalizeAlgorithm(algorithm, 'digest'); - return ReflectApply(asyncDigest, this, [algorithm, data]); + return await ReflectApply(asyncDigest, this, [algorithm, data]); } function randomUUID() { @@ -246,20 +246,20 @@ async function deriveBits(algorithm, baseKey, length = null) { case 'X448': // Fall through case 'ECDH': - return require('internal/crypto/diffiehellman') + return await require('internal/crypto/diffiehellman') .ecdhDeriveBits(algorithm, baseKey, length); case 'HKDF': - return require('internal/crypto/hkdf') + return await require('internal/crypto/hkdf') .hkdfDeriveBits(algorithm, baseKey, length); case 'PBKDF2': - return require('internal/crypto/pbkdf2') + return await require('internal/crypto/pbkdf2') .pbkdf2DeriveBits(algorithm, baseKey, length); case 'Argon2d': // Fall through case 'Argon2i': // Fall through case 'Argon2id': - return require('internal/crypto/argon2') + return await require('internal/crypto/argon2') .argon2DeriveBits(algorithm, baseKey, length); } throw lazyDOMException('Unrecognized algorithm name', 'NotSupportedError'); @@ -391,12 +391,12 @@ async function exportKeySpki(key) { case 'RSA-PSS': // Fall through case 'RSA-OAEP': - return require('internal/crypto/rsa') + return await require('internal/crypto/rsa') .rsaExportKey(key, kWebCryptoKeyFormatSPKI); case 'ECDSA': // Fall through case 'ECDH': - return require('internal/crypto/ec') + return await require('internal/crypto/ec') .ecExportKey(key, kWebCryptoKeyFormatSPKI); case 'Ed25519': // Fall through @@ -405,13 +405,14 @@ async function exportKeySpki(key) { case 'X25519': // Fall through case 'X448': - return require('internal/crypto/cfrg') + return await require('internal/crypto/cfrg') .cfrgExportKey(key, kWebCryptoKeyFormatSPKI); case 'ML-DSA-44': // Fall through case 'ML-DSA-65': // Fall through case 'ML-DSA-87': + // Note: mlDsaExportKey does not return a Promise. return require('internal/crypto/ml_dsa') .mlDsaExportKey(key, kWebCryptoKeyFormatSPKI); case 'ML-KEM-512': @@ -419,6 +420,7 @@ async function exportKeySpki(key) { case 'ML-KEM-768': // Fall through case 'ML-KEM-1024': + // Note: mlKemExportKey does not return a Promise. 
return require('internal/crypto/ml_kem') .mlKemExportKey(key, kWebCryptoKeyFormatSPKI); default: @@ -433,12 +435,12 @@ async function exportKeyPkcs8(key) { case 'RSA-PSS': // Fall through case 'RSA-OAEP': - return require('internal/crypto/rsa') + return await require('internal/crypto/rsa') .rsaExportKey(key, kWebCryptoKeyFormatPKCS8); case 'ECDSA': // Fall through case 'ECDH': - return require('internal/crypto/ec') + return await require('internal/crypto/ec') .ecExportKey(key, kWebCryptoKeyFormatPKCS8); case 'Ed25519': // Fall through @@ -447,13 +449,14 @@ async function exportKeyPkcs8(key) { case 'X25519': // Fall through case 'X448': - return require('internal/crypto/cfrg') + return await require('internal/crypto/cfrg') .cfrgExportKey(key, kWebCryptoKeyFormatPKCS8); case 'ML-DSA-44': // Fall through case 'ML-DSA-65': // Fall through case 'ML-DSA-87': + // Note: mlDsaExportKey does not return a Promise. return require('internal/crypto/ml_dsa') .mlDsaExportKey(key, kWebCryptoKeyFormatPKCS8); case 'ML-KEM-512': @@ -461,6 +464,7 @@ async function exportKeyPkcs8(key) { case 'ML-KEM-768': // Fall through case 'ML-KEM-1024': + // Note: mlKemExportKey does not return a Promise. return require('internal/crypto/ml_kem') .mlKemExportKey(key, kWebCryptoKeyFormatPKCS8); default: @@ -473,7 +477,7 @@ async function exportKeyRawPublic(key, format) { case 'ECDSA': // Fall through case 'ECDH': - return require('internal/crypto/ec') + return await require('internal/crypto/ec') .ecExportKey(key, kWebCryptoKeyFormatRaw); case 'Ed25519': // Fall through @@ -482,7 +486,7 @@ async function exportKeyRawPublic(key, format) { case 'X25519': // Fall through case 'X448': - return require('internal/crypto/cfrg') + return await require('internal/crypto/cfrg') .cfrgExportKey(key, kWebCryptoKeyFormatRaw); case 'ML-DSA-44': // Fall through @@ -493,6 +497,7 @@ async function exportKeyRawPublic(key, format) { if (format !== 'raw-public') { return undefined; } + // Note: mlDsaExportKey does not return a Promise. return require('internal/crypto/ml_dsa') .mlDsaExportKey(key, kWebCryptoKeyFormatRaw); } @@ -505,6 +510,7 @@ async function exportKeyRawPublic(key, format) { if (format !== 'raw-public') { return undefined; } + // Note: mlKemExportKey does not return a Promise. return require('internal/crypto/ml_kem') .mlKemExportKey(key, kWebCryptoKeyFormatRaw); } @@ -520,6 +526,7 @@ async function exportKeyRawSeed(key) { case 'ML-DSA-65': // Fall through case 'ML-DSA-87': + // Note: mlDsaExportKey does not return a Promise. return require('internal/crypto/ml_dsa') .mlDsaExportKey(key, kWebCryptoKeyFormatRaw); case 'ML-KEM-512': @@ -527,6 +534,7 @@ async function exportKeyRawSeed(key) { case 'ML-KEM-768': // Fall through case 'ML-KEM-1024': + // Note: mlKemExportKey does not return a Promise. 
return require('internal/crypto/ml_kem') .mlKemExportKey(key, kWebCryptoKeyFormatRaw); default: @@ -933,7 +941,7 @@ async function wrapKey(format, key, wrappingKey, algorithm) { } } - return cipherOrWrap( + return await cipherOrWrap( kWebCryptoCipherEncrypt, algorithm, wrappingKey, @@ -1022,7 +1030,7 @@ async function unwrapKey( ); } -function signVerify(algorithm, key, data, signature) { +async function signVerify(algorithm, key, data, signature) { let usage = 'sign'; if (signature !== undefined) { usage = 'verify'; @@ -1040,31 +1048,31 @@ function signVerify(algorithm, key, data, signature) { case 'RSA-PSS': // Fall through case 'RSASSA-PKCS1-v1_5': - return require('internal/crypto/rsa') + return await require('internal/crypto/rsa') .rsaSignVerify(key, data, algorithm, signature); case 'ECDSA': - return require('internal/crypto/ec') + return await require('internal/crypto/ec') .ecdsaSignVerify(key, data, algorithm, signature); case 'Ed25519': // Fall through case 'Ed448': // Fall through - return require('internal/crypto/cfrg') + return await require('internal/crypto/cfrg') .eddsaSignVerify(key, data, algorithm, signature); case 'HMAC': - return require('internal/crypto/mac') + return await require('internal/crypto/mac') .hmacSignVerify(key, data, algorithm, signature); case 'ML-DSA-44': // Fall through case 'ML-DSA-65': // Fall through case 'ML-DSA-87': - return require('internal/crypto/ml_dsa') + return await require('internal/crypto/ml_dsa') .mlDsaSignVerify(key, data, algorithm, signature); case 'KMAC128': // Fall through case 'KMAC256': - return require('internal/crypto/mac') + return await require('internal/crypto/mac') .kmacSignVerify(key, data, algorithm, signature); } throw lazyDOMException('Unrecognized algorithm name', 'NotSupportedError'); @@ -1089,7 +1097,7 @@ async function sign(algorithm, key, data) { context: '3rd argument', }); - return signVerify(algorithm, key, data); + return await signVerify(algorithm, key, data); } async function verify(algorithm, key, signature, data) { @@ -1115,7 +1123,7 @@ async function verify(algorithm, key, signature, data) { context: '4th argument', }); - return signVerify(algorithm, key, data, signature); + return await signVerify(algorithm, key, data, signature); } async function cipherOrWrap(mode, algorithm, key, data, op) { @@ -1138,7 +1146,7 @@ async function cipherOrWrap(mode, algorithm, key, data, op) { switch (algorithm.name) { case 'RSA-OAEP': - return require('internal/crypto/rsa') + return await require('internal/crypto/rsa') .rsaCipher(mode, key, data, algorithm); case 'AES-CTR': // Fall through @@ -1147,14 +1155,14 @@ async function cipherOrWrap(mode, algorithm, key, data, op) { case 'AES-GCM': // Fall through case 'AES-OCB': - return require('internal/crypto/aes') + return await require('internal/crypto/aes') .aesCipher(mode, key, data, algorithm); case 'ChaCha20-Poly1305': - return require('internal/crypto/chacha20_poly1305') + return await require('internal/crypto/chacha20_poly1305') .c20pCipher(mode, key, data, algorithm); case 'AES-KW': if (op === 'wrapKey' || op === 'unwrapKey') { - return require('internal/crypto/aes') + return await require('internal/crypto/aes') .aesCipher(mode, key, data, algorithm); } } @@ -1181,7 +1189,13 @@ async function encrypt(algorithm, key, data) { }); algorithm = normalizeAlgorithm(algorithm, 'encrypt'); - return cipherOrWrap(kWebCryptoCipherEncrypt, algorithm, key, data, 'encrypt'); + return await cipherOrWrap( + kWebCryptoCipherEncrypt, + algorithm, + key, + data, + 'encrypt', + ); } async 
function decrypt(algorithm, key, data) { @@ -1204,7 +1218,13 @@ async function decrypt(algorithm, key, data) { }); algorithm = normalizeAlgorithm(algorithm, 'decrypt'); - return cipherOrWrap(kWebCryptoCipherDecrypt, algorithm, key, data, 'decrypt'); + return await cipherOrWrap( + kWebCryptoCipherDecrypt, + algorithm, + key, + data, + 'decrypt', + ); } // Implements https://wicg.github.io/webcrypto-modern-algos/#SubtleCrypto-method-getPublicKey @@ -1267,7 +1287,7 @@ async function encapsulateBits(encapsulationAlgorithm, encapsulationKey) { case 'ML-KEM-512': case 'ML-KEM-768': case 'ML-KEM-1024': - return require('internal/crypto/ml_kem') + return await require('internal/crypto/ml_kem') .mlKemEncapsulate(encapsulationKey); } @@ -1381,7 +1401,7 @@ async function decapsulateBits(decapsulationAlgorithm, decapsulationKey, ciphert case 'ML-KEM-512': case 'ML-KEM-768': case 'ML-KEM-1024': - return require('internal/crypto/ml_kem') + return await require('internal/crypto/ml_kem') .mlKemDecapsulate(decapsulationKey, ciphertext); } diff --git a/lib/internal/modules/helpers.js b/lib/internal/modules/helpers.js index 9f66578a459e7c..1ccae074e0405b 100644 --- a/lib/internal/modules/helpers.js +++ b/lib/internal/modules/helpers.js @@ -402,18 +402,31 @@ function stringify(body) { } /** - * Enable on-disk compiled cache for all user modules being complied in the current Node.js instance + * Enable on-disk compiled cache for all user modules being compiled in the current Node.js instance * after this method is called. - * If cacheDir is undefined, defaults to the NODE_MODULE_CACHE environment variable. - * If NODE_MODULE_CACHE isn't set, default to path.join(os.tmpdir(), 'node-compile-cache'). - * @param {string|undefined} cacheDir + * This method accepts either: + * - A string `cacheDir`: the path to the cache directory. + * - An options object `{path?: string, portable?: boolean}`: + * - `path`: A string path to the cache directory. + * - `portable`: If `portable` is true, the cache directory will be considered relative. Defaults to false. + * If cache path is undefined, it defaults to the NODE_MODULE_CACHE environment variable. + * If `NODE_MODULE_CACHE` isn't set, it defaults to `path.join(os.tmpdir(), 'node-compile-cache')`. 
+ * @param {string | { path?: string, portable?: boolean } | undefined} options * @returns {{status: number, message?: string, directory?: string}} */ -function enableCompileCache(cacheDir) { +function enableCompileCache(options) { + let cacheDir; + let portable = false; + + if (typeof options === 'object' && options !== null) { + ({ path: cacheDir, portable = false } = options); + } else { + cacheDir = options; + } if (cacheDir === undefined) { cacheDir = join(lazyTmpdir(), 'node-compile-cache'); } - const nativeResult = _enableCompileCache(cacheDir); + const nativeResult = _enableCompileCache(cacheDir, portable); const result = { status: nativeResult[0] }; if (nativeResult[1]) { result.message = nativeResult[1]; diff --git a/lib/internal/url.js b/lib/internal/url.js index 9105940b2a45a0..a1473fdac8aba3 100644 --- a/lib/internal/url.js +++ b/lib/internal/url.js @@ -91,6 +91,8 @@ const { Buffer } = require('buffer'); const { validateFunction, + validateObject, + kValidateObjectAllowObjects, } = require('internal/validators'); const { percentDecode } = require('internal/data_url'); @@ -1431,6 +1433,7 @@ function domainToUnicode(domain) { * @returns {Record} */ function urlToHttpOptions(url) { + validateObject(url, 'url', kValidateObjectAllowObjects); const { hostname, pathname, port, username, password, search } = url; const options = { __proto__: null, diff --git a/lib/sqlite.js b/lib/sqlite.js index b011fd0921b0a8..6d6ada72008f1c 100644 --- a/lib/sqlite.js +++ b/lib/sqlite.js @@ -1,19 +1,6 @@ 'use strict'; -const { - SymbolDispose, -} = primordials; const { emitExperimentalWarning } = require('internal/util'); -const binding = internalBinding('sqlite'); emitExperimentalWarning('SQLite'); -// TODO(cjihrig): Move this to C++ once Symbol.dispose reaches Stage 4. -binding.DatabaseSync.prototype[SymbolDispose] = function() { - try { - this.close(); - } catch { - // Ignore errors. - } -}; - -module.exports = binding; +module.exports = internalBinding('sqlite'); diff --git a/src/compile_cache.cc b/src/compile_cache.cc index d2efbd9655bee6..5e2c56cb925cc8 100644 --- a/src/compile_cache.cc +++ b/src/compile_cache.cc @@ -13,6 +13,9 @@ #include // getuid #endif +#ifdef _WIN32 +#include +#endif namespace node { using v8::Function; @@ -223,13 +226,52 @@ void CompileCacheHandler::ReadCacheFile(CompileCacheEntry* entry) { Debug(" success, size=%d\n", total_read); } +static std::string GetRelativePath(std::string_view path, + std::string_view base) { +// On Windows, the native encoding is UTF-16, so we need to convert +// the paths to wide strings before using std::filesystem::path. +// On other platforms, std::filesystem::path can handle UTF-8 directly. 
+#ifdef _WIN32 + std::filesystem::path module_path( + ConvertToWideString(std::string(path), CP_UTF8)); + std::filesystem::path base_path( + ConvertToWideString(std::string(base), CP_UTF8)); +#else + std::filesystem::path module_path(path); + std::filesystem::path base_path(base); +#endif + std::filesystem::path relative = module_path.lexically_relative(base_path); + auto u8str = relative.u8string(); + return std::string(u8str.begin(), u8str.end()); +} + CompileCacheEntry* CompileCacheHandler::GetOrInsert(Local code, Local filename, CachedCodeType type) { DCHECK(!compile_cache_dir_.empty()); + Environment* env = Environment::GetCurrent(isolate_->GetCurrentContext()); Utf8Value filename_utf8(isolate_, filename); - uint32_t key = GetCacheKey(filename_utf8.ToStringView(), type); + std::string file_path = filename_utf8.ToString(); + // If the portable cache is enabled and it seems possible to compute the + // relative position from an absolute path, we use the relative position + // in the cache key. + if (portable_ == EnableOption::PORTABLE && IsAbsoluteFilePath(file_path)) { + // Normalize the path to ensure it is consistent. + std::string normalized_file_path = NormalizeFileURLOrPath(env, file_path); + if (normalized_file_path.empty()) { + return nullptr; + } + std::string relative_path = + GetRelativePath(normalized_file_path, normalized_compile_cache_dir_); + if (!relative_path.empty()) { + file_path = relative_path; + Debug("[compile cache] using relative path %s from %s\n", + file_path.c_str(), + compile_cache_dir_.c_str()); + } + } + uint32_t key = GetCacheKey(file_path, type); // TODO(joyeecheung): don't encode this again into UTF8. If we read the // UTF8 content on disk as raw buffer (from the JS layer, while watching out @@ -500,7 +542,8 @@ CompileCacheHandler::CompileCacheHandler(Environment* env) // - $NODE_VERSION-$ARCH-$CACHE_DATA_VERSION_TAG-$UID // - $FILENAME_AND_MODULE_TYPE_HASH.cache: a hash of filename + module type CompileCacheEnableResult CompileCacheHandler::Enable(Environment* env, - const std::string& dir) { + const std::string& dir, + EnableOption option) { std::string cache_tag = GetCacheVersionTag(); std::string absolute_cache_dir_base = PathResolve(env, {dir}); std::string cache_dir_with_tag = @@ -548,6 +591,11 @@ CompileCacheEnableResult CompileCacheHandler::Enable(Environment* env, result.cache_directory = absolute_cache_dir_base; compile_cache_dir_ = cache_dir_with_tag; + portable_ = option; + if (option == EnableOption::PORTABLE) { + normalized_compile_cache_dir_ = + NormalizeFileURLOrPath(env, compile_cache_dir_); + } result.status = CompileCacheEnableStatus::ENABLED; return result; } diff --git a/src/compile_cache.h b/src/compile_cache.h index 72910084e18bca..36d40c68974895 100644 --- a/src/compile_cache.h +++ b/src/compile_cache.h @@ -62,10 +62,14 @@ struct CompileCacheEnableResult { std::string message; // Set in case of failure. 
}; +enum class EnableOption : uint8_t { DEFAULT, PORTABLE }; + class CompileCacheHandler { public: explicit CompileCacheHandler(Environment* env); - CompileCacheEnableResult Enable(Environment* env, const std::string& dir); + CompileCacheEnableResult Enable(Environment* env, + const std::string& dir, + EnableOption option = EnableOption::DEFAULT); void Persist(); @@ -103,6 +107,8 @@ class CompileCacheHandler { bool is_debug_ = false; std::string compile_cache_dir_; + std::string normalized_compile_cache_dir_; + EnableOption portable_ = EnableOption::DEFAULT; std::unordered_map> compiler_cache_store_; }; diff --git a/src/env.cc b/src/env.cc index 53f0bf7fc1e5c8..6a6b224dcc4065 100644 --- a/src/env.cc +++ b/src/env.cc @@ -1122,11 +1122,21 @@ void Environment::InitializeCompileCache() { dir_from_env.empty()) { return; } - EnableCompileCache(dir_from_env); + std::string portable_env; + bool portable = credentials::SafeGetenv( + "NODE_COMPILE_CACHE_PORTABLE", &portable_env, this) && + !portable_env.empty() && portable_env == "1"; + if (portable) { + Debug(this, + DebugCategory::COMPILE_CACHE, + "[compile cache] using relative path\n"); + } + EnableCompileCache(dir_from_env, + portable ? EnableOption::PORTABLE : EnableOption::DEFAULT); } CompileCacheEnableResult Environment::EnableCompileCache( - const std::string& cache_dir) { + const std::string& cache_dir, EnableOption option) { CompileCacheEnableResult result; std::string disable_env; if (credentials::SafeGetenv( @@ -1143,7 +1153,7 @@ CompileCacheEnableResult Environment::EnableCompileCache( if (!compile_cache_handler_) { std::unique_ptr handler = std::make_unique(this); - result = handler->Enable(this, cache_dir); + result = handler->Enable(this, cache_dir, option); if (result.status == CompileCacheEnableStatus::ENABLED) { compile_cache_handler_ = std::move(handler); AtExit( diff --git a/src/env.h b/src/env.h index f3a2d221f4bb52..ed2253f1fc3cf8 100644 --- a/src/env.h +++ b/src/env.h @@ -1023,7 +1023,8 @@ class Environment final : public MemoryRetainer { void InitializeCompileCache(); // Enable built-in compile cache if it has not yet been enabled. // The cache will be persisted to disk on exit. 
- CompileCacheEnableResult EnableCompileCache(const std::string& cache_dir); + CompileCacheEnableResult EnableCompileCache(const std::string& cache_dir, + EnableOption option); void FlushCompileCache(); void RunAndClearNativeImmediates(bool only_refed = false); diff --git a/src/node_modules.cc b/src/node_modules.cc index 9de75dda832d70..82b061c0cab735 100644 --- a/src/node_modules.cc +++ b/src/node_modules.cc @@ -501,8 +501,14 @@ void EnableCompileCache(const FunctionCallbackInfo& args) { THROW_ERR_INVALID_ARG_TYPE(env, "cacheDir should be a string"); return; } + + EnableOption option = EnableOption::DEFAULT; + if (args.Length() > 1 && args[1]->IsTrue()) { + option = EnableOption::PORTABLE; + } + Utf8Value value(isolate, args[0]); - CompileCacheEnableResult result = env->EnableCompileCache(*value); + CompileCacheEnableResult result = env->EnableCompileCache(*value, option); Local values[3]; values[0] = v8::Integer::New(isolate, static_cast(result.status)); if (ToV8Value(context, result.message).ToLocal(&values[1]) && diff --git a/src/node_sqlite.cc b/src/node_sqlite.cc index 0df20be77ce860..9140c63440c88a 100644 --- a/src/node_sqlite.cc +++ b/src/node_sqlite.cc @@ -1080,6 +1080,14 @@ void DatabaseSync::Close(const FunctionCallbackInfo& args) { db->connection_ = nullptr; } +void DatabaseSync::Dispose(const v8::FunctionCallbackInfo& args) { + v8::TryCatch try_catch(args.GetIsolate()); + Close(args); + if (try_catch.HasCaught()) { + CHECK(try_catch.CanContinue()); + } +} + void DatabaseSync::Prepare(const FunctionCallbackInfo& args) { DatabaseSync* db; ASSIGN_OR_RETURN_UNWRAP(&db, args.This()); @@ -2629,6 +2637,7 @@ Local Session::GetConstructorTemplate(Environment* env) { SetProtoMethod( isolate, tmpl, "patchset", Session::Changeset); SetProtoMethod(isolate, tmpl, "close", Session::Close); + SetProtoDispose(isolate, tmpl, Session::Dispose); env->set_sqlite_session_constructor_template(tmpl); } return tmpl; @@ -2673,6 +2682,14 @@ void Session::Close(const FunctionCallbackInfo& args) { session->Delete(); } +void Session::Dispose(const v8::FunctionCallbackInfo& args) { + v8::TryCatch try_catch(args.GetIsolate()); + Close(args); + if (try_catch.HasCaught()) { + CHECK(try_catch.CanContinue()); + } +} + void Session::Delete() { if (!database_ || !database_->connection_ || session_ == nullptr) return; sqlite3session_delete(session_); @@ -2708,6 +2725,7 @@ static void Initialize(Local target, SetProtoMethod(isolate, db_tmpl, "open", DatabaseSync::Open); SetProtoMethod(isolate, db_tmpl, "close", DatabaseSync::Close); + SetProtoDispose(isolate, db_tmpl, DatabaseSync::Dispose); SetProtoMethod(isolate, db_tmpl, "prepare", DatabaseSync::Prepare); SetProtoMethod(isolate, db_tmpl, "exec", DatabaseSync::Exec); SetProtoMethod(isolate, db_tmpl, "function", DatabaseSync::CustomFunction); @@ -2745,6 +2763,8 @@ static void Initialize(Local target, target, "StatementSync", StatementSync::GetConstructorTemplate(env)); + SetConstructorFunction( + context, target, "Session", Session::GetConstructorTemplate(env)); target->Set(context, env->constants_string(), constants).Check(); diff --git a/src/node_sqlite.h b/src/node_sqlite.h index 3a9f08c16573b2..5bc6366398406b 100644 --- a/src/node_sqlite.h +++ b/src/node_sqlite.h @@ -92,6 +92,7 @@ class DatabaseSync : public BaseObject { static void IsTransactionGetter( const v8::FunctionCallbackInfo& args); static void Close(const v8::FunctionCallbackInfo& args); + static void Dispose(const v8::FunctionCallbackInfo& args); static void Prepare(const v8::FunctionCallbackInfo& 
args); static void Exec(const v8::FunctionCallbackInfo& args); static void Location(const v8::FunctionCallbackInfo& args); @@ -230,6 +231,7 @@ class Session : public BaseObject { template static void Changeset(const v8::FunctionCallbackInfo& args); static void Close(const v8::FunctionCallbackInfo& args); + static void Dispose(const v8::FunctionCallbackInfo& args); static v8::Local GetConstructorTemplate( Environment* env); static BaseObjectPtr Create(Environment* env, diff --git a/src/path.cc b/src/path.cc index 1f88e38a857171..582786a77ce6f4 100644 --- a/src/path.cc +++ b/src/path.cc @@ -1,8 +1,10 @@ #include "path.h" #include #include +#include "ada.h" #include "env-inl.h" #include "node_internals.h" +#include "node_url.h" namespace node { @@ -88,6 +90,10 @@ std::string NormalizeString(const std::string_view path, } #ifdef _WIN32 +constexpr bool IsWindowsDriveLetter(const std::string_view path) noexcept { + return path.size() > 2 && IsWindowsDeviceRoot(path[0]) && + (path[1] == ':' && (path[2] == '/' || path[2] == '\\')); +} constexpr bool IsWindowsDeviceRoot(const char c) noexcept { return (c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z'); } @@ -333,4 +339,44 @@ void FromNamespacedPath(std::string* path) { #endif } +// Check if a path looks like an absolute path or file URL. +bool IsAbsoluteFilePath(std::string_view path) { + if (path.rfind("file://", 0) == 0) { + return true; + } +#ifdef _WIN32 + if (path.size() > 0 && path[0] == '\\') return true; + if (IsWindowsDriveLetter(path)) return true; +#endif + if (path.size() > 0 && path[0] == '/') return true; + return false; +} + +// Normalizes paths by resolving file URLs and converting to a consistent +// format with forward slashes. +std::string NormalizeFileURLOrPath(Environment* env, std::string_view path) { + std::string normalized_string(path); + constexpr std::string_view file_scheme = "file://"; + if (normalized_string.rfind(file_scheme, 0) == 0) { + auto out = ada::parse(normalized_string); + auto file_path = url::FileURLToPath(env, *out); + if (!file_path.has_value()) { + return std::string(); + } + normalized_string = file_path.value(); + } + normalized_string = NormalizeString(normalized_string, false, "/"); +#ifdef _WIN32 + if (IsWindowsDriveLetter(normalized_string)) { + normalized_string[0] = ToLower(normalized_string[0]); + } + for (char& c : normalized_string) { + if (c == '\\') { + c = '/'; + } + } +#endif + return normalized_string; +} + } // namespace node diff --git a/src/path.h b/src/path.h index 2045e7b44a9bb1..67f21eba2a6279 100644 --- a/src/path.h +++ b/src/path.h @@ -18,9 +18,12 @@ std::string NormalizeString(const std::string_view path, std::string PathResolve(Environment* env, const std::vector& paths); +std::string NormalizeFileURLOrPath(Environment* env, std::string_view path); +bool IsAbsoluteFilePath(std::string_view path); #ifdef _WIN32 constexpr bool IsWindowsDeviceRoot(const char c) noexcept; +constexpr bool IsWindowsDriveLetter(const std::string_view path) noexcept; #endif // _WIN32 void ToNamespacedPath(Environment* env, BufferValue* path); diff --git a/src/util.cc b/src/util.cc index 78326b56eab457..660cfff6b8a0c5 100644 --- a/src/util.cc +++ b/src/util.cc @@ -598,6 +598,32 @@ void SetMethodNoSideEffect(Isolate* isolate, that->Set(name_string, t); } +void SetProtoDispose(v8::Isolate* isolate, + v8::Local that, + v8::FunctionCallback callback) { + Local signature = v8::Signature::New(isolate, that); + Local t = + NewFunctionTemplate(isolate, + callback, + signature, + v8::ConstructorBehavior::kThrow, + 
v8::SideEffectType::kHasSideEffect); + that->PrototypeTemplate()->Set(v8::Symbol::GetDispose(isolate), t); +} + +void SetProtoAsyncDispose(v8::Isolate* isolate, + v8::Local that, + v8::FunctionCallback callback) { + Local signature = v8::Signature::New(isolate, that); + Local t = + NewFunctionTemplate(isolate, + callback, + signature, + v8::ConstructorBehavior::kThrow, + v8::SideEffectType::kHasSideEffect); + that->PrototypeTemplate()->Set(v8::Symbol::GetAsyncDispose(isolate), t); +} + void SetProtoMethod(v8::Isolate* isolate, Local that, const std::string_view name, diff --git a/src/util.h b/src/util.h index 9eb7034e378f0d..b6f49bcf8e7eab 100644 --- a/src/util.h +++ b/src/util.h @@ -936,6 +936,16 @@ void SetMethodNoSideEffect(v8::Isolate* isolate, const std::string_view name, v8::FunctionCallback callback); +// Set the Symbol.dispose method on the prototype of the class. +void SetProtoDispose(v8::Isolate* isolate, + v8::Local that, + v8::FunctionCallback callback); + +// Set the Symbol.asyncDispose method on the prototype of the class. +void SetProtoAsyncDispose(v8::Isolate* isolate, + v8::Local that, + v8::FunctionCallback callback); + enum class SetConstructorFunctionFlag { NONE, SET_CLASS_NAME, diff --git a/test/parallel/test-compile-cache-api-error.js b/test/parallel/test-compile-cache-api-error.js index 580c8f756a0f04..a5dd2a1b76d3cd 100644 --- a/test/parallel/test-compile-cache-api-error.js +++ b/test/parallel/test-compile-cache-api-error.js @@ -6,6 +6,6 @@ require('../common'); const { enableCompileCache } = require('module'); const assert = require('assert'); -for (const invalid of [0, null, false, () => {}, {}, []]) { +for (const invalid of [0, null, false, 1, NaN, true, Symbol(0)]) { assert.throws(() => enableCompileCache(invalid), { code: 'ERR_INVALID_ARG_TYPE' }); } diff --git a/test/parallel/test-compile-cache-api-portable.js b/test/parallel/test-compile-cache-api-portable.js new file mode 100644 index 00000000000000..d31433e1a6a1f4 --- /dev/null +++ b/test/parallel/test-compile-cache-api-portable.js @@ -0,0 +1,106 @@ +'use strict'; + +// This tests module.enableCompileCache({ path, portable: true }) works +// and supports portable paths across directory relocations. 
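+//
+// A minimal sketch of the API shape exercised by wrapper.js below (the
+// option names come from this change; the paths are illustrative):
+//
+//   const { enableCompileCache } = require('module');
+//   enableCompileCache({ path: '.compile_cache_dir', portable: true });
+//
+// With `portable: true`, cache keys are derived from module paths relative
+// to the cache directory, so the cache keeps hitting after the project
+// directory is moved.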
+ +require('../common'); +const { spawnSyncAndAssert } = require('../common/child_process'); +const assert = require('assert'); +const fs = require('fs'); +const tmpdir = require('../common/tmpdir'); +const path = require('path'); + +tmpdir.refresh(); +const workDir = path.join(tmpdir.path, 'work'); +const cacheRel = '.compile_cache_dir'; +fs.mkdirSync(workDir, { recursive: true }); + +const wrapper = path.join(workDir, 'wrapper.js'); +const target = path.join(workDir, 'target.js'); + +fs.writeFileSync( + wrapper, + ` + const { enableCompileCache, getCompileCacheDir } = require('module'); + console.log('dir before enableCompileCache:', getCompileCacheDir()); + enableCompileCache({ path: '${cacheRel}', portable: true }); + console.log('dir after enableCompileCache:', getCompileCacheDir()); +` +); + +fs.writeFileSync(target, ''); + +// First run +{ + spawnSyncAndAssert( + process.execPath, + ['-r', wrapper, target], + { + env: { + ...process.env, + NODE_DEBUG_NATIVE: 'COMPILE_CACHE', + }, + cwd: workDir, + }, + { + stdout(output) { + console.log(output); + assert.match(output, /dir before enableCompileCache: undefined/); + assert.match( + output, + /dir after enableCompileCache: .+\.compile_cache_dir/ + ); + return true; + }, + stderr(output) { + assert.match( + output, + /target\.js was not initialized, initializing the in-memory entry/ + ); + assert.match(output, /writing cache for .*target\.js.*success/); + return true; + }, + } + ); +} + +// Second run — moved directory, but same relative cache path +{ + const movedWorkDir = `${workDir}_moved`; + fs.renameSync(workDir, movedWorkDir); + + spawnSyncAndAssert( + process.execPath, + [ + '-r', + path.join(movedWorkDir, 'wrapper.js'), + path.join(movedWorkDir, 'target.js'), + ], + { + env: { + ...process.env, + NODE_DEBUG_NATIVE: 'COMPILE_CACHE', + }, + cwd: movedWorkDir, + }, + { + stdout(output) { + console.log(output); + assert.match(output, /dir before enableCompileCache: undefined/); + assert.match( + output, + /dir after enableCompileCache: .+\.compile_cache_dir/ + ); + return true; + }, + stderr(output) { + assert.match( + output, + /cache for .*target\.js was accepted, keeping the in-memory entry/ + ); + assert.match(output, /.*skip .*target\.js because cache was the same/); + return true; + }, + } + ); +} diff --git a/test/parallel/test-compile-cache-portable-esm.js b/test/parallel/test-compile-cache-portable-esm.js new file mode 100644 index 00000000000000..18766b6810eab1 --- /dev/null +++ b/test/parallel/test-compile-cache-portable-esm.js @@ -0,0 +1,84 @@ +'use strict'; + +// This tests NODE_COMPILE_CACHE works after moving directory and unusual characters in path are handled correctly. 
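+//
+// Rough command-line equivalent of the two runs below, assuming a
+// POSIX-like shell (paths are illustrative):
+//
+//   NODE_COMPILE_CACHE=.compile_cache_dir NODE_COMPILE_CACHE_PORTABLE=1 node message.mjs
+//   mv work work_moved && cd work_moved && \
+//     NODE_COMPILE_CACHE=.compile_cache_dir NODE_COMPILE_CACHE_PORTABLE=1 node message.mjs
+//
+// The second run is expected to hit the cache written by the first.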
+ +require('../common'); +const { spawnSyncAndAssert } = require('../common/child_process'); +const assert = require('assert'); +const tmpdir = require('../common/tmpdir'); +const fs = require('fs'); +const path = require('path'); + +tmpdir.refresh(); + +const workDir = path.join(tmpdir.path, 'work'); +const cacheRel = '.compile_cache_dir'; +fs.mkdirSync(workDir, { recursive: true }); + +const script = path.join(workDir, 'message.mjs'); +fs.writeFileSync( + script, + ` + export const message = 'A message'; + ` +); + +{ + spawnSyncAndAssert( + process.execPath, + [script], + { + env: { + ...process.env, + NODE_DEBUG_NATIVE: 'COMPILE_CACHE', + NODE_COMPILE_CACHE: cacheRel, + NODE_COMPILE_CACHE_PORTABLE: '1', + }, + cwd: workDir, + }, + { + stderr(output) { + console.log(output); + assert.match( + output, + /message\.mjs was not initialized, initializing the in-memory entry/ + ); + assert.match(output, /writing cache for .*message\.mjs.*success/); + return true; + }, + } + ); + + // Move the working directory and run again + const movedWorkDir = `${workDir}_moved`; + fs.renameSync(workDir, movedWorkDir); + + + spawnSyncAndAssert( + process.execPath, + [[path.join(movedWorkDir, 'message.mjs')]], + { + env: { + ...process.env, + NODE_DEBUG_NATIVE: 'COMPILE_CACHE', + NODE_COMPILE_CACHE: cacheRel, + NODE_COMPILE_CACHE_PORTABLE: '1', + }, + cwd: movedWorkDir, + }, + { + stderr(output) { + console.log(output); + assert.match( + output, + /cache for .*message\.mjs was accepted, keeping the in-memory entry/ + ); + assert.match( + output, + /.*skip .*message\.mjs because cache was the same/ + ); + return true; + }, + } + ); +} diff --git a/test/parallel/test-compile-cache-portable.js b/test/parallel/test-compile-cache-portable.js new file mode 100644 index 00000000000000..b5978b5b0c62e5 --- /dev/null +++ b/test/parallel/test-compile-cache-portable.js @@ -0,0 +1,75 @@ +'use strict'; + +// This tests NODE_COMPILE_CACHE works with the NODE_COMPILE_CACHE_PORTABLE +// environment variable. 
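+//
+// Note: as implemented in src/env.cc in this change, only the literal value
+// '1' enables portable mode, e.g. (illustrative invocation):
+//
+//   NODE_COMPILE_CACHE=.compile_cache_dir NODE_COMPILE_CACHE_PORTABLE=1 node script.js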
+ +require('../common'); +const { spawnSyncAndAssert } = require('../common/child_process'); +const assert = require('assert'); +const fs = require('fs'); +const tmpdir = require('../common/tmpdir'); +const path = require('path'); + +tmpdir.refresh(); +const workDir = path.join(tmpdir.path, 'work'); +const cacheRel = '.compile_cache_dir'; +fs.mkdirSync(workDir, { recursive: true }); + +const script = path.join(workDir, 'script.js'); +fs.writeFileSync(script, ''); + +// First run +{ + spawnSyncAndAssert( + process.execPath, + [script], + { + env: { + ...process.env, + NODE_DEBUG_NATIVE: 'COMPILE_CACHE', + NODE_COMPILE_CACHE: cacheRel, + NODE_COMPILE_CACHE_PORTABLE: '1', + }, + cwd: workDir, + }, + { + stderr(output) { + console.log(output); + assert.match( + output, + /script\.js was not initialized, initializing the in-memory entry/ + ); + assert.match(output, /writing cache for .*script\.js.*success/); + return true; + }, + } + ); + + // Move the working directory and run again + const movedWorkDir = `${workDir}_moved`; + fs.renameSync(workDir, movedWorkDir); + spawnSyncAndAssert( + process.execPath, + [[path.join(movedWorkDir, 'script.js')]], + { + env: { + ...process.env, + NODE_DEBUG_NATIVE: 'COMPILE_CACHE', + NODE_COMPILE_CACHE: cacheRel, + NODE_COMPILE_CACHE_PORTABLE: '1', + }, + cwd: movedWorkDir, + }, + { + stderr(output) { + console.log(output); + assert.match( + output, + /cache for .*script\.js was accepted, keeping the in-memory entry/ + ); + assert.match(output, /.*skip .*script\.js because cache was the same/); + return true; + }, + } + ); +} diff --git a/test/parallel/test-sqlite-session.js b/test/parallel/test-sqlite-session.js index 6c638b8e4a3965..1fe78c6ec6622a 100644 --- a/test/parallel/test-sqlite-session.js +++ b/test/parallel/test-sqlite-session.js @@ -540,3 +540,18 @@ test('session.close() - closing twice', (t) => { message: 'session is not open' }); }); + +test('session supports ERM', (t) => { + const database = new DatabaseSync(':memory:'); + let afterDisposeSession; + { + using session = database.createSession(); + afterDisposeSession = session; + const changeset = session.changeset(); + t.assert.ok(changeset instanceof Uint8Array); + t.assert.strictEqual(changeset.length, 0); + } + t.assert.throws(() => afterDisposeSession.changeset(), { + message: /session is not open/, + }); +}); diff --git a/test/parallel/test-url-urltooptions.js b/test/parallel/test-url-urltooptions.js index cc4838eeecb00f..8e7e5dc89409a1 100644 --- a/test/parallel/test-url-urltooptions.js +++ b/test/parallel/test-url-urltooptions.js @@ -35,3 +35,12 @@ assert.strictEqual(copiedOpts.pathname, undefined); assert.strictEqual(copiedOpts.search, undefined); assert.strictEqual(copiedOpts.hash, undefined); assert.strictEqual(copiedOpts.href, undefined); + +// Test when passed an invalid argument +assert.throws(() => { + urlToHttpOptions('http://127.0.0.1'); +}, { + code: 'ERR_INVALID_ARG_TYPE', + message: 'The "url" argument must be of type object. Received type string (\'http://127.0.0.1\')', + name: 'TypeError' +});