Module Resolution
In Production Environment
In the production build process, Vite
leverages the capabilities of rollup
to generate build artifacts. The following is the rollup
build process:
// Simplified Graph#build (rollup): builds the module dependency graph,
// sorts modules by execution order, then marks included statements
// (tree shaking) before moving to the GENERATE phase.
async function build() {
  timeStart('generate module graph', 2);
  // Step 1: resolve, load and parse every module reachable from the entries.
  await this.generateModuleGraph();
  timeEnd('generate module graph', 2);
  timeStart('sort modules', 2);
  this.phase = BuildPhase.ANALYSE;
  // Step 2: order modules by their execution order.
  this.sortModules();
  timeEnd('sort modules', 2);
  timeStart('mark included statements', 2);
  // Step 3: tree shaking — only statements marked as included are emitted.
  this.includeStatements();
  timeEnd('mark included statements', 2);
  this.phase = BuildPhase.GENERATE;
}
// Simplified rollupInternal: fires the parallel `buildStart` hook for all
// plugins, then kicks off the module-graph build above.
async function rollupInternal(rawInputOptions, watcher) {
  await graph.pluginDriver.hookParallel('buildStart', [inputOptions]);
  await graph.build();
}
Simply put, it can be divided into generating the module dependency graph
, sorting modules by execution order
, and tree shaking processing
. The module resolution
process in this section is the first step (generating module dependencies
).
The module resolution process can be briefly summarized as determining the build source
, retrieving the source code
, transforming the source code
, building the module context and initializing the AST instance
, collecting and building child dependencies
, and determining dependency relationships
.
Determining the Build Source
From the source code perspective, the build source consists of two parts. The first part uses this.options.input
as the entry point for resolution, as shown below:
// Simplified Graph#generateModuleGraph: resolves the entry modules from
// `options.input`, then partitions every known module into internal
// (`Module`) and external (`ExternalModule`) lists.
async function generateModuleGraph() {
  ({
    entryModules: this.entryModules,
    implicitEntryModules: this.implicitEntryModules
  } = await this.moduleLoader.addEntryModules(
    normalizeEntryModules(this.options.input),
    true
  ));
  // Rollup cannot build anything without at least one entry.
  if (this.entryModules.length === 0) {
    throw new Error('You must supply options.input to rollup');
  }
  /**
   * modulesById contains the modules associated with this.emitFile and this.options.input
   */
  for (const module of this.modulesById.values()) {
    if (module instanceof Module) {
      this.modules.push(module);
    } else {
      // External modules are collected separately and are not bundled.
      this.externalModules.push(module);
    }
  }
}
The other part is injecting entry modules through emitChunk
, with simplified code as follows:
// Simplified ModuleLoader: turns an emitted chunk description into an
// additional entry module of the build.
class ModuleLoader {
  /**
   * Registers an entry module emitted by a plugin (emitFile with
   * type 'chunk') and returns the loaded module.
   */
  async emitChunk({
    fileName,
    id,
    importer,
    name,
    implicitlyLoadedAfterOneOf,
    preserveSignature
  }) {
    const unresolvedModule = {
      fileName: fileName || null,
      id,
      importer,
      name: name || null
    };
    // An implicitly-loaded entry is ordered after the listed modules;
    // otherwise the chunk is added as a regular (non-user-defined) entry.
    const module = implicitlyLoadedAfterOneOf
      ? await this.addEntryWithImplicitDependants(
          unresolvedModule,
          implicitlyLoadedAfterOneOf
        )
      : (await this.addEntryModules([unresolvedModule], false))
          .newEntryModules[0];
    if (preserveSignature != null) {
      module.preserveSignature = preserveSignature;
    }
    return module;
  }
}
// Simplified FileEmitter: validates emitted files and routes them either to
// emitAsset or to emitChunk (which delegates to ModuleLoader#emitChunk).
class FileEmitter {
  // Arrow-function class field so that plugins can call emitFile detached
  // from the instance without losing `this`.
  emitFile = emittedFile => {
    if (!hasValidType(emittedFile)) {
      return error(
        errFailedValidation(
          `Emitted files must be of type "asset" or "chunk", received "${emittedFile && emittedFile.type}".`
        )
      );
    }
    if (!hasValidName(emittedFile)) {
      return error(
        errFailedValidation(
          `The "fileName" or "name" properties of emitted files must be strings that are neither absolute nor relative paths, received "${emittedFile.fileName || emittedFile.name}".`
        )
      );
    }
    if (emittedFile.type === 'chunk') {
      return this.emitChunk(emittedFile);
    }
    return this.emitAsset(emittedFile);
  };
  emitChunk(emittedChunk) {
    // Chunks can only be emitted while modules are still being loaded/parsed.
    if (this.graph.phase > BuildPhase.LOAD_AND_PARSE) {
      return error(errInvalidRollupPhaseForChunkEmission());
    }
    if (typeof emittedChunk.id !== 'string') {
      return error(
        errFailedValidation(
          `Emitted chunks need to have a valid string id, received "${emittedChunk.id}"`
        )
      );
    }
    const consumedChunk = {
      fileName: emittedChunk.fileName,
      module: null,
      name: emittedChunk.name || emittedChunk.id,
      type: 'chunk'
    };
    // Module loading is asynchronous; the placeholder is filled in later.
    this.graph.moduleLoader
      .emitChunk(emittedChunk)
      .then(module => (consumedChunk.module = module))
      .catch(() => {
        // Avoid unhandled Promise rejection as the error will be thrown later
        // once module loading has finished
      });
    // The returned reference id lets plugins refer to the chunk before it exists.
    return this.assignReferenceId(consumedChunk, emittedChunk.id);
  }
}
// Simplified PluginDriver constructor: binds fileEmitter.emitFile and builds
// one dedicated context per plugin — the object a hook sees as `this`.
class PluginDriver {
  constructor() {
    this.emitFile = this.fileEmitter.emitFile.bind(this.fileEmitter);
    this.pluginContexts = new Map(
      this.plugins.map(plugin => [
        plugin,
        getPluginContext(
          plugin,
          pluginCache,
          graph,
          options,
          this.fileEmitter,
          existingPluginNames
        )
      ])
    );
  }
}
// Pedagogical excerpt: the `transform` hook invocation with the internals of
// getPluginContext spliced inline, to show how emitAsset/emitChunk/emitFile
// are wired into the per-plugin context. NOTE(review): in real rollup these
// assignments live in separate functions; they are flattened here for reading.
async function transform(source, module, pluginDriver, warn) {
  code = await pluginDriver.hookReduceArg0(
    'transform',
    [curSource, id],
    transformReducer,
    (pluginContext, plugin) => {
      pluginName = plugin.name;
      // Base context built once per plugin by the driver.
      pluginContext = this.pluginContexts.get(plugin);
      // Inside getPluginContext: deprecated emitAsset/emitChunk wrappers plus
      // the modern emitFile, all backed by the shared fileEmitter.
      pluginContext = {
        emitAsset: getDeprecatedContextHandler(
          (name, source) =>
            fileEmitter.emitFile({ name, source, type: 'asset' }),
          'emitAsset',
          'emitFile',
          plugin.name,
          true,
          options
        ),
        emitChunk: getDeprecatedContextHandler(
          (id, options) =>
            fileEmitter.emitFile({
              id,
              name: options && options.name,
              type: 'chunk'
            }),
          'emitChunk',
          'emitFile',
          plugin.name,
          true,
          options
        ),
        emitFile: fileEmitter.emitFile.bind(fileEmitter)
      };
      // transform-specific wrapper: record emitted files so they can be
      // re-emitted when the hook result is served from cache.
      return {
        ...pluginContext,
        emitAsset(name, source) {
          emittedFiles.push({ name, source, type: 'asset' });
          return pluginContext.emitAsset(name, source);
        },
        emitChunk(id, options) {
          emittedFiles.push({
            id,
            name: options && options.name,
            type: 'chunk'
          });
          return pluginContext.emitChunk(id, options);
        },
        emitFile(emittedFile) {
          emittedFiles.push(emittedFile);
          return pluginDriver.emitFile(emittedFile);
        }
      };
    }
  );
}
To briefly summarize the code above: when a plugin hook executes, a plugin context is injected, and that context exposes the `fileEmitter.emitFile` capability. Internally, `emitFile` still delegates to `this.graph.moduleLoader.emitChunk` — that is, it ultimately calls the `emitChunk` method of the `ModuleLoader` module.
Let us take the following example to explore how a production build uses the `emitChunk` capability to register an entry point for the module build:
In the vite-plugin-federation
module federation plugin, you can find that if you expose modules, then in the buildStart
stage (before generateModuleGraph
) it executes emitFile
to emit the __remoteEntryHelper__
virtual module as the entry point for building the process.
// Excerpt from vite-plugin-federation: when modules are exposed, the plugin
// emits the __remoteEntryHelper__ virtual module as an extra chunk entry
// during buildStart (i.e. before generateModuleGraph runs).
function prodExposePlugin(options) {
  return {
    name: 'originjs:expose-production',
    buildStart() {
      // if we don't expose any modules, there is no need to emit file
      if (parsedOptions.prodExpose.length > 0) {
        this.emitFile({
          fileName: `${builderInfo.assetsDir ? builderInfo.assetsDir + '/' : ''}${options.filename}`,
          type: 'chunk',
          id: '__remoteEntryHelper__',
          // keep the module's exact export signature in the output chunk
          preserveSignature: 'strict'
        });
      }
    }
  };
}
Therefore, the `__remoteEntryHelper__` module above is built earlier than in the first case (which uses `this.options.input` as the entry point).
At this point we know that modules registered as entry points via `this.options.input` and `this.emitFile` each generate an independent chunk. Observing the build output, a dynamic `import()` can also produce an independent chunk — so how is that achieved?
First, we need to understand that rollup
distinguishes between static modules
and dynamic modules
for child dependencies of the current module. In the module resolution process, it is roughly the same, but in generating chunk
it will distinguish between specific dynamic modules
and do separate packaging processing.
Simplified code process as follows:
// Simplified chunk assignment: dynamic entry modules (targets of `import()`)
// are treated as chunk entries alongside the regular entry modules.
function getChunkAssignments(entryModules, manualChunkAliasByEntry) {
  // ...
  const { dependentEntryPointsByModule, dynamicEntryModules } =
    analyzeModuleGraph(entryModules);
  chunkDefinitions.push(
    ...createChunks(
      [...entryModules, ...dynamicEntryModules],
      assignedEntryPointsByModule
    )
  );
  // ...
  return chunkDefinitions;
}
// Simplified Bundle#generateChunks: one Chunk instance per chunk assignment,
// then chunks are linked and facade chunks are generated where needed.
async function generateChunks() {
  for (const { alias, modules } of getChunkAssignments(
    this.graph.entryModules,
    manualChunkAliasByEntry
  )) {
    // Keep the modules inside a chunk in execution order.
    sortByExecutionOrder(modules);
    const chunk = new Chunk(
      modules,
      this.inputOptions,
      this.outputOptions,
      this.unsetOptions,
      this.pluginDriver,
      this.graph.modulesById,
      chunkByModule,
      this.facadeChunkByModule,
      this.includedNamespaces,
      alias
    );
    chunks.push(chunk);
    for (const module of modules) {
      chunkByModule.set(module, chunk);
    }
  }
  // Resolve inter-chunk dependencies after every chunk exists.
  for (const chunk of chunks) {
    chunk.link();
  }
  const facades = [];
  for (const chunk of chunks) {
    facades.push(...chunk.generateFacades());
  }
  return [...chunks, ...facades];
}
From the above we can see that chunks are generated from two kinds of modules: `entryModules` (built from `this.options.input` and `this.emitFile`) and `dynamicEntryModules` (modules that are the targets of `import()`).
Retrieving the Source Code
Simplified code:
// Simplified ModuleLoader#addModuleSource: obtains a module's source either
// from a plugin `load` hook or, as a fallback, from the file system.
async function addModuleSource(id, importer, module) {
  timeStart('load modules', 3);
  let source;
  try {
    /**
     * readQueue:
     * limits the number of concurrent asynchronous tasks (options.maxParallelFileReads)
     */
    source = await this.readQueue.run(async () => {
      var _a;
      // hookFirst returns the first non-nullish `load` result; only when no
      // plugin handles the id is the file read from disk.
      return (_a = await this.pluginDriver.hookFirst('load', [id])) !==
        null && _a !== void 0
        ? _a
        : await promises.readFile(id, 'utf8');
    });
  } catch (err) {
    // ...
  }
  // ...
}
It can be clearly seen that it first executes all plugins' load
hook, if there is a return value, it is the result of loading. If there is no return value, it relies on fs
ability to read local files.
TIP
Why is a `load` hook needed at all? Usually reading local files with `fs` is sufficient, and most of the time that is indeed the case. The `load` hook largely exists to serve virtual modules. The `@originjs/vite-plugin-federation` module federation plugin uses a large number of virtual modules, such as `virtualFile.__federation__`, `virtualFile.__federation_lib_semver`, `virtualFile.__federation_fn_import`, `virtualFile.__remoteEntryHelper__`, etc. `load` is executed first because `vite` cannot know in advance whether the module being resolved is a virtual module or a real module; so it runs `load` first — if a plugin returns a value, the module is a virtual module, otherwise it is a real module.
Transforming the Source Code
await transform(
sourceDescription,
module,
this.pluginDriver,
this.options.onwarn
);
// Simplified rollup `transform`: runs every plugin's `transform` hook as a
// reduce chain over the source code, collecting sourcemaps and emitted files.
async function transform(source, module, pluginDriver, warn) {
  const id = module.id;
  // Chain of sourcemaps produced by successive transform hooks.
  const sourcemapChain = [];
  const originalSourcemap =
    source.map === null ? null : decodedSourcemap(source.map);
  const originalCode = source.code;
  let ast = source.ast;
  const transformDependencies = [];
  // Files emitted from inside a transform hook; stored on the module so they
  // can be re-emitted when the hook is skipped due to caching.
  const emittedFiles = [];
  let customTransformCache = false;
  const useCustomTransformCache = () => (customTransformCache = true);
  let pluginName = '';
  const curSource = source.code;
  // Format code
  // Reducer: normalizes each hook's return value (string | object | nullish)
  // into the next `code` input and records its sourcemap.
  function transformReducer(previousCode, result, plugin) {
    let code;
    let map;
    if (typeof result === 'string') {
      code = result;
    } else if (result && typeof result === 'object') {
      module.updateOptions(result);
      if (result.code == null) {
        if (result.map || result.ast) {
          warn(errNoTransformMapOrAstWithoutCode(plugin.name));
        }
        // No code returned: keep the previous plugin's output.
        return previousCode;
      }
      ({ code, map, ast } = result);
    } else {
      return previousCode;
    }
    // strict null check allows 'null' maps to not be pushed to the chain,
    // while 'undefined' gets the missing map warning
    if (map !== null) {
      sourcemapChain.push(
        decodedSourcemap(
          typeof map === 'string' ? JSON.parse(map) : map
        ) || {
          missing: true,
          plugin: plugin.name
        }
      );
    }
    return code;
  }
  let code;
  try {
    code = await pluginDriver.hookReduceArg0(
      'transform',
      [curSource, id],
      transformReducer,
      // Context factory: wraps the plugin context so every emit call is also
      // recorded in `emittedFiles`.
      (pluginContext, plugin) => {
        pluginName = plugin.name;
        return {
          ...pluginContext,
          emitAsset(name, source) {
            emittedFiles.push({ name, source, type: 'asset' });
            return pluginContext.emitAsset(name, source);
          },
          emitChunk(id, options) {
            emittedFiles.push({
              id,
              name: options && options.name,
              type: 'chunk'
            });
            return pluginContext.emitChunk(id, options);
          },
          emitFile(emittedFile) {
            emittedFiles.push(emittedFile);
            return pluginDriver.emitFile(emittedFile);
          }
          // ...
        };
      }
    );
  } catch (err) {
    throwPluginError(err, pluginName, { hook: 'transform', id });
  }
  if (!customTransformCache) {
    // files emitted by a transform hook need to be emitted again if the hook is skipped
    if (emittedFiles.length) module.transformFiles = emittedFiles;
  }
  return {
    code
    // ...
  };
}
/**
 * Plugin driver, globally unique instance.
 * Initialized in the build dependency graph instance:
 * class Graph {
 *   constructor () {
 *     this.pluginDriver = new PluginDriver(this, options, options.plugins, this.pluginCache);
 *     this.acornParser = Parser.extend(...options.acornInjectPlugins);
 *     this.moduleLoader = new ModuleLoader(this, this.modulesById, this.options, this.pluginDriver);
 *   }
 * }
 * async function rollupInternal(rawInputOptions, watcher) {
 *   const graph = new Graph(inputOptions, watcher);
 * }
 */
class PluginDriver {
  /**
   * Runs `hookName` on every plugin in sequence, threading the first
   * argument (`arg0`, e.g. the source code) through `reduce` so each
   * plugin transforms the previous plugin's output.
   */
  hookReduceArg0(hookName, [arg0, ...rest], reduce, replaceContext) {
    let promise = Promise.resolve(arg0);
    for (const plugin of this.plugins) {
      promise = promise.then(arg0 => {
        const args = [arg0, ...rest];
        const hookPromise = this.runHook(
          hookName,
          args,
          plugin,
          false,
          replaceContext
        );
        // If the current plugin does not do any processing (returns undefined or null), then pass the current source to the next plugin for chain processing.
        if (!hookPromise) return arg0;
        /**
         * Each plugin has its specific execution context (pluginContexts)
         */
        return hookPromise.then(result =>
          reduce.call(this.pluginContexts.get(plugin), arg0, result, plugin)
        );
      });
    }
    return promise;
  }
  /**
   * Invokes a single plugin's hook with the plugin's own context
   * (optionally replaced via `hookContext`) and tracks pending async hooks.
   */
  runHook(hookName, args, plugin, permitValues, hookContext) {
    const hook = plugin[hookName];
    if (!hook) return undefined;
    let context = this.pluginContexts.get(plugin);
    if (hookContext) {
      context = hookContext(context, plugin);
    }
    let action = null;
    return Promise.resolve()
      .then(() => {
        // permit values allows values to be returned instead of a functional hook
        if (typeof hook !== 'function') {
          if (permitValues) return hook;
          return throwInvalidHookError(hookName, plugin.name);
        }
        const hookResult = hook.apply(context, args);
        if (!hookResult || !hookResult.then) {
          // short circuit for non-thenables and non-Promises
          return hookResult;
        }
        // Track pending hook actions to properly error out when
        // unfulfilled promises cause rollup to abruptly and confusingly
        // exit with a successful 0 return code but without producing any
        // output, errors or warnings.
        action = [plugin.name, hookName, args];
        this.unfulfilledActions.add(action);
        // Although it would be more elegant to just return hookResult here
        // and put the .then() handler just above the .catch() handler below,
        // doing so would subtly change the defacto async event dispatch order
        // which at least one test and some plugins in the wild may depend on.
        return Promise.resolve(hookResult).then(result => {
          // action was fulfilled
          this.unfulfilledActions.delete(action);
          return result;
        });
      })
      .catch(err => {
        if (action !== null) {
          // action considered to be fulfilled since error being handled
          this.unfulfilledActions.delete(action);
        }
        return throwPluginError(err, plugin.name, { hook: hookName });
      });
  }
}
Each plugin's `hookName` hook is processed in a chain. If a plugin does no processing, the current source is passed unchanged to the next plugin; if it does, the plugin's return value is normalized and the processed source is passed on to the next plugin, until all plugins have executed and the final processed source is returned.
WARNING
TODO: For detailed content about Vite
injecting core plugins, a separate chapter will be dedicated to explain.
Building the Module Context and Initializing the AST Instance
After `transform`, `code` is a pure `js` module, so the `acorn` parser can be used to parse `code` into an `ast`. Note that when `esbuild` is used for the dependency pre-bundling process, user plugins are not applied; you can extend the `esbuild` build capability by configuring `config.optimizeDeps.esbuildOptions.plugins`.
// Simplified Module#setSource: stores the transformed code, parses it to an
// AST if needed, and builds the module's scope, namespace and Program node.
function setSource({
  ast,
  code,
  customTransformCache,
  originalCode,
  originalSourcemap,
  resolvedIds,
  sourcemapChain,
  transformDependencies,
  transformFiles,
  ...moduleOptions
}) {
  this.info.code = code;
  this.originalCode = originalCode;
  if (!ast) {
    /**
     * Leveraging the `acorn` ability to parse `code` to `ast`.
     */
    ast = this.tryParse();
  }
  timeEnd('generate ast', 3);
  this.resolvedIds = resolvedIds || Object.create(null);
  // By default, `id` is the file name. Custom resolvers and loaders
  // can change that, but it makes sense to use it for the source file name
  const fileName = this.id;
  // MagicString allows precise string edits while keeping sourcemaps intact.
  this.magicString = new MagicString(code, {
    filename: this.excludeFromSourcemap ? null : fileName,
    indentExclusionRanges: []
  });
  timeStart('analyse ast', 3);
  /**
   * Initialize the ast context: the capabilities injected into AST processing
   * (e.g. addImport/addExport callbacks that nodes call during initialise).
   */
  this.astContext = {
    addDynamicImport: this.addDynamicImport.bind(this),
    addExport: this.addExport.bind(this),
    addImport: this.addImport.bind(this),
    code,
    fileName,
    getExports: this.getExports.bind(this),
    getModuleName: this.basename.bind(this),
    getReexports: this.getReexports.bind(this),
    magicString: this.magicString,
    module: this
    // ...
  };
  /**
   * this.graph.scope = new GlobalScope();
   * Builds the top-level scope of the current module, inheriting from the
   * global scope. In JS, scopes can be divided into global scope, function
   * scope, eval scope and block scope (es6); when a related ast node is
   * encountered, a new scope is built that inherits from its parent scope.
   */
  this.scope = new ModuleScope(this.graph.scope, this.astContext);
  this.namespace = new NamespaceVariable(this.astContext);
  // Program is the root node constructor; it recursively instantiates
  // the whole node tree from the acorn ast.
  this.ast = new Program(
    ast,
    { context: this.astContext, type: 'Module' },
    this.scope
  );
  this.info.ast = ast;
  timeEnd('analyse ast', 3);
}
The most noteworthy part above is the module `ast` build process. `rollup` internally implements a large number of node constructors; the `ast` generated by `acorn` is traversed recursively to instantiate the corresponding node constructors.
// Base class for all of rollup's AST node constructors. The constructor
// performs the three phases discussed below: create scope, parse child
// nodes, then initialise the node itself.
class NodeBase extends ExpressionEntity {
  constructor(esTreeNode, parent, parentScope) {
    super();
    /**
     * Nodes can apply custom deoptimizations once they become part of the
     * executed code. To do this, they must initialize this as false, implement
     * applyDeoptimizations and call this from include and hasEffects if they have
     * custom handlers
     */
    this.deoptimized = false;
    this.esTreeNode = esTreeNode;
    // Cached list of child-bearing keys for this esTree node type.
    this.keys = keys[esTreeNode.type] || getAndCreateKeys(esTreeNode);
    this.parent = parent;
    this.context = parent.context;
    // Build executable context
    this.createScope(parentScope);
    // Instantiate node constructor according to ast type
    this.parseNode(esTreeNode);
    // Initialize node constructor instance according to ast node information
    this.initialise();
    this.context.magicString.addSourcemapLocation(this.start);
    this.context.magicString.addSourcemapLocation(this.end);
  }
}
From the above code, it can be clearly understood that the build ast
process mainly executes Build executable context
, Instantiate node constructor
, Initialize node constructor instance
.
Build executable context
The scope is either kept consistent with the parent scope or a new scope is built. From `createScope` we can see that a new scope is built when the following node types are encountered.
// Node types that introduce a new scope; every other node type simply
// reuses its parent scope.
// block scope (unless the parent, e.g. a function body, prevents it)
class BlockStatement extends NodeBase {
  createScope(parentScope) {
    this.scope = this.parent.preventChildBlockScope
      ? parentScope
      : new BlockScope(parentScope);
  }
}
// for in scope
class ForInStatement extends NodeBase {
  createScope(parentScope) {
    this.scope = new BlockScope(parentScope);
  }
}
// for of scope
class ForOfStatement extends NodeBase {
  createScope(parentScope) {
    this.scope = new BlockScope(parentScope);
  }
}
// for scope
class ForStatement extends NodeBase {
  createScope(parentScope) {
    this.scope = new BlockScope(parentScope);
  }
}
// static block scope
class StaticBlock extends NodeBase {
  createScope(parentScope) {
    this.scope = new BlockScope(parentScope);
  }
}
// switch scope
class SwitchStatement extends NodeBase {
  createScope(parentScope) {
    this.scope = new BlockScope(parentScope);
  }
}
// arrow function expression scope
class ArrowFunctionExpression extends FunctionBase {
  createScope(parentScope) {
    this.scope = new ReturnValueScope(parentScope, this.context);
  }
}
// function scope
class FunctionNode extends FunctionBase {
  createScope(parentScope) {
    this.scope = new FunctionScope(parentScope, this.context);
  }
}
// catch clause scope (the caught binding lives here)
class CatchClause extends NodeBase {
  createScope(parentScope) {
    this.scope = new CatchScope(parentScope, this.context);
  }
}
// class body scope
class ClassBody extends NodeBase {
  createScope(parentScope) {
    this.scope = new ClassBodyScope(parentScope, this.parent, this.context);
  }
}
// class declaration/expression scope
class ClassNode extends NodeBase {
  createScope(parentScope) {
    this.scope = new ChildScope(parentScope);
  }
}
The scope constructor function ultimately inherits from the Scope$1
base class. In different scenarios, it will build the corresponding scope, and subsequent declarations will build variable
objects and store them in the corresponding context.
Instantiate node constructor
Recursively parse ast
and instantiate corresponding nodes. When reading this block of source code, it is recommended to use AST Explorer to assist reading, which can clearly show which ast node
corresponds to which code
segment.
// NodeBase#parseNode: walks the raw esTree node's properties and recursively
// instantiates a rollup node constructor for every child ast node.
class NodeBase extends ExpressionEntity {
  parseNode(esTreeNode) {
    for (const [key, value] of Object.entries(esTreeNode)) {
      // That way, we can override this function to add custom initialisation and then call super.parseNode
      // Processed key does not need to be processed again
      if (this.hasOwnProperty(key)) continue;
      // Special ast node processing.
      // Keys starting with '_' carry rollup-internal metadata (annotations,
      // invalid comments), not child nodes.
      if (key.charCodeAt(0) === 95 /* _ */) {
        if (key === ANNOTATION_KEY) {
          this.annotations = value;
        } else if (key === INVALID_COMMENT_KEY) {
          for (const { start, end } of value)
            this.context.magicString.remove(start, end);
        }
      } else if (typeof value !== 'object' || value === null) {
        // If the value is a basic data type or null
        this[key] = value;
      } else if (Array.isArray(value)) {
        // If the value is an array, then instantiate nodes according to the type of each ast node.
        this[key] = [];
        for (const child of value) {
          this[key].push(
            child === null
              ? null
              : new (this.context.getNodeConstructor(child.type))(
                  child,
                  this,
                  this.scope
                )
          );
        }
      } else {
        // If the value is an object, then instantiate nodes according to the ast node type.
        this[key] = new (this.context.getNodeConstructor(value.type))(
          value,
          this,
          this.scope
        );
      }
    }
  }
}
From the above code we can see that the process traverses the current `ast node`'s data; when a value is an object (an array indicates multiple child `ast node`s, a plain object indicates a single child `ast node`), the corresponding child `ast node` object is instantiated. Each child node's instantiation follows the same procedure for its own children, so recursion instantiates every `ast node`.
TIP
The `parseNode` process recursively collects the data of all child `ast node`s of the current `ast node` and instantiates the corresponding `ast` node objects. It is also the basis for the `initialise` initialization process that follows.
Initialize node constructor instance
This is also the most important stage of the initialization process. The concrete work depends on the `ast` node type, so let us analyze a specific example.
const hello = 'world';
Converted to JSON
structure:
{
"type": "Program",
"start": 0,
"end": 22,
"body": [
{
"type": "VariableDeclaration",
"start": 0,
"end": 22,
"declarations": [
{
"type": "VariableDeclarator",
"start": 6,
"end": 21,
"id": {
"type": "Identifier",
"start": 6,
"end": 11,
"name": "hello"
},
"init": {
"type": "Literal",
"start": 14,
"end": 21,
"value": "world",
"raw": "'world'"
}
}
],
"kind": "const"
}
],
"sourceType": "module"
}
From the recursive process, it can be seen that the first execution of initialise
is the ast node
structure:
{
"type": "Identifier",
"start": 6,
"end": 11,
"name": "hello"
}
For the Identifier
node, no initialization is required in the initialization.
The second execution of ast node
is the Literal
node, the structure is as follows:
{
"type": "Literal",
"start": 14,
"end": 21,
"value": "world",
"raw": "'world'"
}
For the Literal
node, the initialization process handles the simplified logic as follows:
// Returns the description of the members (methods/properties) available on
// a literal of the given primitive type; unknown types get an empty table.
function getLiteralMembersForValue(value) {
  switch (typeof value) {
    case 'boolean':
      return literalBooleanMembers;
    case 'number':
      return literalNumberMembers;
    case 'string':
      return literalStringMembers;
  }
  return Object.create(null);
}
// Literal initialisation just records the member table for its value type.
class Literal extends NodeBase {
  initialise() {
    this.members = getLiteralMembersForValue(this.value);
  }
}
From the code logic, it will return the literal description information according to the literal type.
The third execution of ast node
is the VariableDeclarator
node, the structure is as follows:
{
"type": "VariableDeclarator",
"start": 6,
"end": 21,
"id": {
"type": "Identifier",
"start": 6,
"end": 11,
"name": "hello"
},
"init": {
"type": "Literal",
"start": 14,
"end": 21,
"value": "world",
"raw": "'world'"
}
}
For the VariableDeclarator
node, no initialization is required in the initialization.
The fourth execution of ast node
is the VariableDeclaration
node, the structure is as follows:
{
"type": "VariableDeclaration",
"start": 0,
"end": 22,
"declarations": [
{
"type": "VariableDeclarator",
"start": 6,
"end": 21,
"id": {
"type": "Identifier",
"start": 6,
"end": 11,
"name": "hello"
},
"init": {
"type": "Literal",
"start": 14,
"end": 21,
"value": "world",
"raw": "'world'"
}
}
],
"kind": "const"
}
For the VariableDeclaration
node, the simplified logic is as follows:
// Base scope class: registers a declared identifier in this scope,
// reusing the existing variable when the name was already declared.
class Scope$1 {
  addDeclaration(identifier, context, init, _isHoisted) {
    const name = identifier.name;
    let variable = this.variables.get(name);
    if (variable) {
      // Re-declaration of an existing name (e.g. `var x` twice).
      variable.addDeclaration(identifier, init);
    } else {
      variable = new LocalVariable(
        identifier.name,
        identifier,
        init || UNDEFINED_EXPRESSION,
        context
      );
      this.variables.set(name, variable);
    }
    return variable;
  }
}
// Identifier#declare: registers the identifier in the appropriate scope
// depending on the declaration keyword (var/function/let/const/class/parameter).
class Identifier extends NodeBase {
  declare(kind, init) {
    let variable;
    const { treeshake } = this.context.options;
    switch (kind) {
      case 'var':
        // `var` is hoisted (last argument true).
        variable = this.scope.addDeclaration(
          this,
          this.context,
          init,
          true
        );
        if (treeshake && treeshake.correctVarValueBeforeDeclaration) {
          // Necessary to make sure the init is deoptimized. We cannot call deoptimizePath here.
          variable.markInitializersForDeoptimization();
        }
        break;
      case 'function':
        // in strict mode, functions are only hoisted within a scope but not across block scopes
        variable = this.scope.addDeclaration(
          this,
          this.context,
          init,
          false
        );
        break;
      case 'let':
      case 'const':
      case 'class':
        // Block-scoped declarations are never hoisted.
        variable = this.scope.addDeclaration(
          this,
          this.context,
          init,
          false
        );
        break;
      case 'parameter':
        variable = this.scope.addParameterDeclaration(this);
        break;
      /* istanbul ignore next */
      default:
        /* istanbul ignore next */
        throw new Error(
          `Internal Error: Unexpected identifier kind ${kind}.`
        );
    }
    variable.kind = kind;
    return [(this.variable = variable)];
  }
}
// A declarator (`hello = 'world'`) delegates to its id Identifier, passing
// the initializer (or UNDEFINED for `let x;`-style declarations).
class VariableDeclarator extends NodeBase {
  declareDeclarator(kind) {
    this.id.declare(kind, this.init || UNDEFINED_EXPRESSION);
  }
}
// A declaration statement declares each of its declarators with its own
// keyword kind ('var' | 'let' | 'const').
class VariableDeclaration extends NodeBase {
  initialise() {
    for (const declarator of this.declarations) {
      declarator.declareDeclarator(this.kind);
    }
  }
}
From the above code logic, all variables declared in the current `statement` are registered into the current `scope` context; of course, the processing logic differs depending on the declaration keyword.
Collecting and Building Child Dependencies
Before recursively instantiating ast node
, we will find that we inject the following capabilities into the context
// Full astContext (from Module#setSource): the capabilities exposed to AST
// nodes. addDynamicImport/addExport/addImport are the hooks through which
// nodes report the module's dependencies during initialise.
this.astContext = {
  addDynamicImport: this.addDynamicImport.bind(this),
  addExport: this.addExport.bind(this),
  addImport: this.addImport.bind(this),
  addImportMeta: this.addImportMeta.bind(this),
  code,
  deoptimizationTracker: this.graph.deoptimizationTracker,
  error: this.error.bind(this),
  fileName,
  getExports: this.getExports.bind(this),
  getModuleExecIndex: () => this.execIndex,
  getModuleName: this.basename.bind(this),
  // Maps an esTree node type to its rollup node constructor.
  getNodeConstructor: name =>
    nodeConstructors[name] || nodeConstructors.UnknownNode,
  getReexports: this.getReexports.bind(this),
  importDescriptions: this.importDescriptions,
  includeAllExports: () => this.includeAllExports(true),
  includeDynamicImport: this.includeDynamicImport.bind(this),
  includeVariableInModule: this.includeVariableInModule.bind(this),
  magicString: this.magicString,
  module: this,
  moduleContext: this.context,
  options: this.options,
  requestTreeshakingPass: () => (this.graph.needsTreeshakingPass = true),
  traceExport: name => this.getVariableForExportName(name)[0],
  traceVariable: this.traceVariable.bind(this),
  usesTopLevelAwait: false,
  warn: this.warn.bind(this)
};
// Instantiating Program recursively builds the whole node tree, which calls
// back into the context above as import/export nodes are initialised.
this.ast = new Program(
  ast,
  { context: this.astContext, type: 'Module' },
  this.scope
);
For addDynamicImport
, addExport
, addImport
, it needs to focus on the specific implementation source code.
For addDynamicImport
processing:
In the parsing process, the processing flow is as follows:
// Module#addDynamicImport: records a dynamic `import()` found in the AST.
// The argument is narrowed to a plain string when statically analyzable.
class Module {
  addDynamicImport(node) {
    let argument = node.source;
    // Template string import(`react`)
    if (argument instanceof TemplateLiteral) {
      // Only a template with a single static part can be resolved statically.
      if (argument.quasis.length === 1 && argument.quasis[0].value.cooked) {
        argument = argument.quasis[0].value.cooked;
      }
    }
    // String import('react')
    else if (
      argument instanceof Literal &&
      typeof argument.value === 'string'
    ) {
      argument = argument.value;
    }
    // id/resolution are filled in later, during dependency resolution.
    this.dynamicImports.push({
      argument,
      id: null,
      node,
      resolution: null
    });
  }
}
When processing ImportExpression
type ast
node, it will enter this process
import('demo');
// An `import(...)` expression node reports itself to the owning module.
class ImportExpression extends NodeBase {
  initialise() {
    this.context.addDynamicImport(this);
  }
}
It can be seen that when an `ImportExpression` node is initialized, the dynamic import information of the current `module` is added to its `dynamicImports` array.
For addExport
processing:
The processing of `addExport` is more complex than the above. `export` declarations can be divided into the following kinds:
- Re-export method, corresponding to the `ExportAllDeclaration` type `ast node`.
/**
 * Re-exports all named exports plus the default export of the demo module
 * under the namespace `demo`.
 * Can be considered equivalent to:
 * == demo.js ==
 * export const a = 1, b = 2;
 * export default function Demo () {}
 *
 * == index.js ==
 * import Demo, { a, b } from './demo.js';
 * export { a, b, Demo };
 *
 * OR
 *
 * == index.js ==
 * import { a, b, default as Demo } from './demo.js';
 * export { a, b, Demo };
 */
export * as demo from 'demo';
/**
 * Re-exports only the named exports of the demo module, so importers cannot
 * use the default import form ( import demo from './demo.js' ).
 * Can be considered equivalent to:
 * == demo.js ==
 * export const a = 1, b = 2;
 * export default function Demo () {}
 *
 * == index.js ==
 * import { a, b } from './demo.js';
 * export { a, b };
 */
export * from 'demo';
In the parsing process, the processing flow is as follows:
// Module#addExport, re-export branch: records the source module and either a
// namespaced re-export description or an export-all source.
class Module {
  addExport(node) {
    if (node instanceof ExportAllDeclaration) {
      const source = node.source.value;
      // `sources` drives child-dependency resolution for this module.
      this.sources.add(source);
      if (node.exported) {
        // export * as name from './other'
        const name = node.exported.name;
        this.reexportDescriptions.set(name, {
          localName: '*',
          module: null,
          source,
          start: node.start
        });
      } else {
        // export * from './other'
        this.exportAllSources.add(source);
      }
    }
    // ...
  }
}
When processing ExportAllDeclaration
type ast
node, it will enter this process
// An `export * [as name] from ...` node reports itself to the owning module.
class ExportAllDeclaration extends NodeBase {
  initialise() {
    this.context.addExport(this);
  }
}
- Default export, corresponding to the `ExportDefaultDeclaration` type `ast node`.
export default 'demo';
In the parsing process, the processing flow is as follows:
// Module#addExport, default-export branch: registers the export under the
// reserved name 'default'.
class Module {
  addExport(node) {
    if (node instanceof ExportDefaultDeclaration) {
      // export default foo;
      this.exports.set('default', {
        identifier: node.variable.getAssignedVariableName(),
        localName: 'default'
      });
    }
    // ...
  }
}
When processing an `ExportDefaultDeclaration` type `ast` node, it will enter this process
// An `export default ...` node: declares the exported value as a variable in
// its scope (named after the declaration, or the module when anonymous),
// then reports itself to the owning module.
class ExportDefaultDeclaration extends NodeBase {
  initialise() {
    const declaration = this.declaration;
    this.declarationName =
      (declaration.id && declaration.id.name) || this.declaration.name;
    this.variable = this.scope.addExportDefaultDeclaration(
      this.declarationName || this.context.getModuleName(),
      this,
      this.context
    );
    this.context.addExport(this);
  }
}
- Named export, corresponding to the `ExportNamedDeclaration` type `ast node`.
export { demo } from 'demo';
export var demo = 1,
foo = 2;
export function demo() {}
export { demo };
In the parsing process, the processing flow is as follows:
// Module#addExport, named-export branch: handles `export {x} from`,
// `export var/function ...`, and bare `export { ... }` specifier lists.
class Module {
  addExport(node) {
    if (node.source instanceof Literal) {
      // export { name } from './other'
      const source = node.source.value;
      this.sources.add(source);
      for (const specifier of node.specifiers) {
        const name = specifier.exported.name;
        this.reexportDescriptions.set(name, {
          localName: specifier.local.name,
          module: null,
          source,
          start: specifier.start
        });
      }
    } else if (node.declaration) {
      const declaration = node.declaration;
      if (declaration instanceof VariableDeclaration) {
        // export var { foo, bar } = ...
        // export var foo = 1, bar = 2;
        for (const declarator of declaration.declarations) {
          // A destructuring pattern may bind several names at once.
          for (const localName of extractAssignedNames(declarator.id)) {
            this.exports.set(localName, { identifier: null, localName });
          }
        }
      } else {
        // export function foo () {}
        const localName = declaration.id.name;
        this.exports.set(localName, { identifier: null, localName });
      }
    } else {
      // export { foo, bar, baz }
      for (const specifier of node.specifiers) {
        const localName = specifier.local.name;
        const exportedName = specifier.exported.name;
        this.exports.set(exportedName, { identifier: null, localName });
      }
    }
  }
  // ...
}
When processing an ExportNamedDeclaration AST node, it will enter this process:
class ExportNamedDeclaration extends NodeBase {
initialise() {
this.context.addExport(this);
}
}
For addImport
processing:
In the parsing process, the processing flow is as follows:
class Module {
addImport(node) {
const source = node.source.value;
this.sources.add(source);
for (const specifier of node.specifiers) {
const isDefault = specifier.type === ImportDefaultSpecifier$1;
const isNamespace = specifier.type === ImportNamespaceSpecifier$1;
const name = isDefault
? 'default'
: isNamespace
? '*'
: specifier.imported.name;
this.importDescriptions.set(specifier.local.name, {
module: null,
name,
source,
start: specifier.start
});
}
}
}
When processing an ImportDeclaration AST node, it will enter this process:
import demo from 'demo';
import { a, default as demo } from 'demo';
import * as demo from 'demo';
class ImportDeclaration extends NodeBase {
initialise() {
this.context.addImport(this);
}
}
TIP
Child dependency collection summary: before recursively walking the AST, rollup injects a context (astContext) into the module; this context provides the ability to collect dependency information. While recursing through the AST, rollup detects import/export nodes and, via the context (astContext), extracts their information and injects the child dependency information into the module.
With child dependency collection complete, the next step is the child module build process.
From the source code, it can be seen that after the current module is built, child module path resolution is executed.
Simplified code as follows:
class ModuleLoader {
getResolveStaticDependencyPromises(module) {
return Array.from(module.sources, async source => [
source,
(module.resolvedIds[source] =
module.resolvedIds[source] ||
this.handleResolveId(
await this.resolveId(source, module.id, EMPTY_OBJECT, false),
source,
module.id
))
]);
}
getResolveDynamicImportPromises(module) {
return module.dynamicImports.map(async dynamicImport => {
const resolvedId = await this.resolveDynamicImport(
module,
typeof dynamicImport.argument === 'string'
? dynamicImport.argument
: dynamicImport.argument.esTreeNode,
module.id
);
if (resolvedId && typeof resolvedId === 'object') {
dynamicImport.id = resolvedId.id;
}
return [dynamicImport, resolvedId];
});
}
async fetchModule(
{ id, meta, moduleSideEffects, syntheticNamedExports },
importer,
isEntry,
isPreload
) {
const loadPromise = this.addModuleSource(id, importer, module).then(
() => [
// Get child dependency module path
this.getResolveStaticDependencyPromises(module),
// Get child dependency module dynamic path
this.getResolveDynamicImportPromises(module),
// Load child module dependency path resolution completed flag, which triggers moduleParsed hook ( meaning current module parsing completed (itself module + child dependency module path) )。
loadAndResolveDependenciesPromise
]
);
const loadAndResolveDependenciesPromise = waitForDependencyResolution(
loadPromise
).then(() =>
this.pluginDriver.hookParallel('moduleParsed', [module.info])
);
// ...
}
}
Once the child dependency module paths are resolved, the child module build stage begins.
Simplified code as follows:
class ModuleLoader {
async fetchModule(
{ id, meta, moduleSideEffects, syntheticNamedExports },
importer,
isEntry,
isPreload
) {
// ...
const resolveDependencyPromises = await loadPromise;
if (!isPreload) {
// Build all child module dependencies of the current module.
await this.fetchModuleDependencies(
module,
...resolveDependencyPromises
);
}
// ...
}
async fetchModuleDependencies(
module,
resolveStaticDependencyPromises,
resolveDynamicDependencyPromises,
loadAndResolveDependenciesPromise
) {
// If the current module is already in the child dependency module parsing stage, no subsequent processing is performed.
if (this.modulesWithLoadedDependencies.has(module)) {
return;
}
// Mark the current module as entering the child dependency module build stage.
this.modulesWithLoadedDependencies.add(module);
// rollup distinguishes between import methods (static import or dynamic import) when building child dependency modules.
await Promise.all([
this.fetchStaticDependencies(module, resolveStaticDependencyPromises),
this.fetchDynamicDependencies(
module,
resolveDynamicDependencyPromises
)
]);
// ...
}
fetchResolvedDependency(source, importer, resolvedId) {
// For external modules, there is a special processing plan, no fetchModule process is needed.
if (resolvedId.external) {
const { external, id, moduleSideEffects, meta } = resolvedId;
if (!this.modulesById.has(id)) {
this.modulesById.set(
id,
new ExternalModule(
this.options,
id,
moduleSideEffects,
meta,
external !== 'absolute' && isAbsolute(id)
)
);
}
const externalModule = this.modulesById.get(id);
if (!(externalModule instanceof ExternalModule)) {
return error(errInternalIdCannotBeExternal(source, importer));
}
return Promise.resolve(externalModule);
}
return this.fetchModule(resolvedId, importer, false, false);
}
async fetchStaticDependencies(module, resolveStaticDependencyPromises) {
for (const dependency of await Promise.all(
resolveStaticDependencyPromises.map(resolveStaticDependencyPromise =>
resolveStaticDependencyPromise.then(([source, resolvedId]) =>
this.fetchResolvedDependency(source, module.id, resolvedId)
)
)
)) {
// Current module binds dependency relationship with child dependency module, i.e. module module depends on which static module.
module.dependencies.add(dependency);
// Child dependency module binds dependency relationship with parent module, i.e. dependency module is referenced by which static module.
dependency.importers.push(module.id);
}
// If the module does not need treeshaking processing, mark all child dependency modules of the current module importedFromNotTreeshaken = true.
if (
!this.options.treeshake ||
module.info.moduleSideEffects === 'no-treeshake'
) {
for (const dependency of module.dependencies) {
if (dependency instanceof Module) {
dependency.importedFromNotTreeshaken = true;
}
}
}
}
async fetchDynamicDependencies(module, resolveDynamicImportPromises) {
const dependencies = await Promise.all(
resolveDynamicImportPromises.map(resolveDynamicImportPromise =>
resolveDynamicImportPromise.then(
async ([dynamicImport, resolvedId]) => {
// If the parsing path does not exist, no subsequent process is needed
if (resolvedId === null) return null;
if (typeof resolvedId === 'string') {
dynamicImport.resolution = resolvedId;
return null;
}
return (dynamicImport.resolution =
await this.fetchResolvedDependency(
relativeId(resolvedId.id),
module.id,
resolvedId
));
}
)
)
);
for (const dependency of dependencies) {
if (dependency) {
// Current module binds dependency relationship with child dependency module, i.e. module module depends on which dynamic module.
module.dynamicDependencies.add(dependency);
// Child dependency module binds dependency relationship with parent module, i.e. dependency module is referenced by which static module.
dependency.dynamicImporters.push(module.id);
}
}
}
}
Summary
Static imports and dynamic imports are the entry points for resolving the current module's child dependencies. rollup distinguishes between the two import methods, which also enables independent chunk generation later. Ultimately, it still recursively (depth-first) calls this.fetchModule(resolvedId, importer, false, false) to load and build child dependency module information. Note that the isEntry argument passed to fetchModule for a child dependency module is false, indicating that the module is not an entry module.
Determining Dependency Relationships
After concurrently fetching the child dependency modules, the relationships between a module and its child dependencies can be bound. Statically imported and dynamically imported modules are bound through different attributes.
Simplified code as follows:
class ModuleLoader {
async fetchStaticDependencies(module, resolveStaticDependencyPromises) {
for (const dependency of await Promise.all(
resolveStaticDependencyPromises.map(resolveStaticDependencyPromise =>
resolveStaticDependencyPromise.then(([source, resolvedId]) =>
this.fetchResolvedDependency(source, module.id, resolvedId)
)
)
)) {
// Current module binds dependency relationship with child dependency module, i.e. module module depends on which static module.
module.dependencies.add(dependency);
// Child dependency module binds dependency relationship with parent module, i.e. dependency module is referenced by which static module.
dependency.importers.push(module.id);
}
// ...
}
async fetchDynamicDependencies(module, resolveDynamicImportPromises) {
// ...
for (const dependency of dependencies) {
if (dependency) {
// Current module binds dependency relationship with child dependency module, i.e. module module depends on which dynamic module.
module.dynamicDependencies.add(dependency);
// Child dependency module binds dependency relationship with parent module, i.e. dependency module is referenced by which static module.
dependency.dynamicImporters.push(module.id);
}
}
}
}