Files
node/test/code-cache/test-code-cache.js
Joyee Cheung 7778c035a0 src: use STL containers instead of v8 values for static module data
Instead of putting the source code and the cache in v8::Objects,
put them in per-process std::maps. This has the following benefits:

- It's slightly lighter in weight compared to storing things on the
  v8 heap. It may also be slightly faster, since the previous v8::Object
  was already in dictionary mode - though the difference is very small
  given that the number of native modules is limited.
- The source and code cache generation templates are now much simpler
  since they just initialize static arrays and manipulate STL
  constructs.
- The static native module data can be accessed independently of any
  Environment or Isolate, and it's easy to look it up from the
  C++ side.
- It's now impossible to mutate the source code used to compile
  native modules from the JS land since it's completely separate
  from the v8 heap. We can still get the constant strings from
  process.binding('natives') but that's all.
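
A rough sketch of the approach (illustrative only, not the actual node
source; the names below are hypothetical):

    // Per-process static module data kept in STL containers instead of
    // v8::Objects; it can be queried without an Isolate or Environment.
    #include <cstddef>
    #include <cstdint>
    #include <map>
    #include <set>
    #include <string>
    #include <vector>

    struct ModuleSource {
      const char* data;  // emitted by js2c.py as a static array
      size_t length;
    };

    static std::map<std::string, ModuleSource> module_sources;
    static std::map<std::string, std::vector<uint8_t>> module_code_cache;
    static std::set<std::string> native_module_without_cache;

    // Lookup is plain C++ map access, no v8 handles involved.
    const ModuleSource* FindModuleSource(const std::string& id) {
      auto it = module_sources.find(id);
      return it == module_sources.end() ? nullptr : &it->second;
    }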

A few drive-by fixes:

- Remove DecorateErrorStack in LookupAndCompile - We don't need to
  capture and decorate the exception when we encounter errors during
  native module compilation, as those errors should be syntax errors
  and v8 is able to decorate them well. We use
  CompileFunctionInContext, so there is no need to worry about
  wrappers either.
- The code cache could be rejected when node is started with v8 flags.
  Instead of aborting in that case, simply keep a record in the
  native_module_without_cache set (see the sketch after this list).
- Refactor js2c.py a bit, reduce code duplication and inline Render()
  to make the one-byte/two-byte special treatment easier to read.
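
A minimal sketch of the cache-rejection bookkeeping (again with
hypothetical names, not the actual node source):

    // When v8 rejects the embedded code cache (e.g. because the process
    // was started with v8 flags that invalidate it), record the module
    // id instead of aborting, so the usage can be inspected later.
    #include <set>
    #include <string>

    static std::set<std::string> native_module_with_cache;
    static std::set<std::string> native_module_without_cache;

    void RecordCacheUsage(const std::string& id, bool cache_accepted) {
      if (cache_accepted)
        native_module_with_cache.insert(id);
      else
        native_module_without_cache.insert(id);
    }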

PR-URL: https://github.com/nodejs/node/pull/24384
Fixes: https://github.com/Remove
Reviewed-By: Anna Henningsen <anna@addaleax.net>
Reviewed-By: Franziska Hinkelmann <franziska.hinkelmann@gmail.com>
Reviewed-By: Tiancheng "Timothy" Gu <timothygu99@gmail.com>
Reviewed-By: James M Snell <jasnell@gmail.com>
2018-11-20 01:17:15 +08:00

69 lines
1.9 KiB
JavaScript

'use strict';
// Flags: --expose-internals
// This test verifies that if the binary is compiled with code cache,
// the cache is used when built-in modules are compiled.
// Otherwise, it verifies that no cache is used when compiling builtins.
require('../common');
const assert = require('assert');
const {
  cachableBuiltins,
  cannotUseCache
} = require('internal/bootstrap/cache');
const {
  isMainThread
} = require('worker_threads');
const {
  internalBinding
} = require('internal/test/binding');
const {
  getCacheUsage
} = internalBinding('native_module');

for (const key of cachableBuiltins) {
  if (!isMainThread && key === 'trace_events') {
    continue;  // Cannot load trace_events in workers
  }
  require(key);
}

// The computation has to be delayed until we have done loading modules
const {
  compiledWithoutCache,
  compiledWithCache
} = getCacheUsage();

const loadedModules = process.moduleLoadList
  .filter((m) => m.startsWith('NativeModule'))
  .map((m) => m.replace('NativeModule ', ''));
// If the binary is not configured with code cache, verify that the builtins
// are all compiled without cache and that we are doing the bookkeeping right.
if (process.config.variables.node_code_cache_path === undefined) {
  console.log('The binary is not configured with code cache');
  assert.deepStrictEqual(compiledWithCache, new Set());
  for (const key of loadedModules) {
    assert(compiledWithoutCache.has(key),
           `"${key}" should've been compiled without code cache`);
  }
} else {
  console.log('The binary is configured with code cache');
  assert.strictEqual(
    typeof process.config.variables.node_code_cache_path,
    'string'
  );

  for (const key of loadedModules) {
    if (cannotUseCache.includes(key)) {
      assert(compiledWithoutCache.has(key),
             `"${key}" should've been compiled without code cache`);
    } else {
      assert(compiledWithCache.has(key),
             `"${key}" should've been compiled with code cache`);
    }
  }
}