Diffstat (limited to 'node_modules/webpack/lib/optimize')
-rw-r--r--  node_modules/webpack/lib/optimize/AggressiveMergingPlugin.js | 87
-rw-r--r--  node_modules/webpack/lib/optimize/AggressiveSplittingPlugin.js | 294
-rw-r--r--  node_modules/webpack/lib/optimize/ChunkModuleIdRangePlugin.js | 66
-rw-r--r--  node_modules/webpack/lib/optimize/ConcatenatedModule.js | 1477
-rw-r--r--  node_modules/webpack/lib/optimize/EnsureChunkConditionsPlugin.js | 70
-rw-r--r--  node_modules/webpack/lib/optimize/FlagIncludedChunksPlugin.js | 99
-rw-r--r--  node_modules/webpack/lib/optimize/LimitChunkCountPlugin.js | 231
-rw-r--r--  node_modules/webpack/lib/optimize/MergeDuplicateChunksPlugin.js | 78
-rw-r--r--  node_modules/webpack/lib/optimize/MinChunkSizePlugin.js | 82
-rw-r--r--  node_modules/webpack/lib/optimize/MinMaxSizeWarning.js | 29
-rw-r--r--  node_modules/webpack/lib/optimize/ModuleConcatenationPlugin.js | 485
-rw-r--r--  node_modules/webpack/lib/optimize/NaturalChunkOrderPlugin.js | 41
-rw-r--r--  node_modules/webpack/lib/optimize/OccurrenceChunkOrderPlugin.js | 66
-rw-r--r--  node_modules/webpack/lib/optimize/OccurrenceModuleOrderPlugin.js | 112
-rw-r--r--  node_modules/webpack/lib/optimize/OccurrenceOrderPlugin.js | 135
-rw-r--r--  node_modules/webpack/lib/optimize/RemoveEmptyChunksPlugin.js | 42
-rw-r--r--  node_modules/webpack/lib/optimize/RemoveParentModulesPlugin.js | 127
-rw-r--r--  node_modules/webpack/lib/optimize/RuntimeChunkPlugin.js | 41
-rw-r--r--  node_modules/webpack/lib/optimize/SideEffectsFlagPlugin.js | 352
-rw-r--r--  node_modules/webpack/lib/optimize/SplitChunksPlugin.js | 968
20 files changed, 0 insertions, 4882 deletions
diff --git a/node_modules/webpack/lib/optimize/AggressiveMergingPlugin.js b/node_modules/webpack/lib/optimize/AggressiveMergingPlugin.js
deleted file mode 100644
index e3a4c37..0000000
--- a/node_modules/webpack/lib/optimize/AggressiveMergingPlugin.js
+++ /dev/null
@@ -1,87 +0,0 @@
-/*
- MIT License http://www.opensource.org/licenses/mit-license.php
- Author Tobias Koppers @sokra
-*/
-"use strict";
-
-class AggressiveMergingPlugin {
- constructor(options) {
- if (
- (options !== undefined && typeof options !== "object") ||
- Array.isArray(options)
- ) {
- throw new Error(
- "Argument should be an options object. To use defaults, pass in nothing.\nFor more info on options, see https://webpack.js.org/plugins/"
- );
- }
- this.options = options || {};
- }
-
- apply(compiler) {
- const options = this.options;
- const minSizeReduce = options.minSizeReduce || 1.5;
-
- compiler.hooks.thisCompilation.tap(
- "AggressiveMergingPlugin",
- compilation => {
- compilation.hooks.optimizeChunksAdvanced.tap(
- "AggressiveMergingPlugin",
- chunks => {
- let combinations = [];
- chunks.forEach((a, idx) => {
- if (a.canBeInitial()) return;
- for (let i = 0; i < idx; i++) {
- const b = chunks[i];
- if (b.canBeInitial()) continue;
- combinations.push({
- a,
- b,
- improvement: undefined
- });
- }
- });
-
- for (const pair of combinations) {
- const a = pair.b.size({
- chunkOverhead: 0
- });
- const b = pair.a.size({
- chunkOverhead: 0
- });
- const ab = pair.b.integratedSize(pair.a, {
- chunkOverhead: 0
- });
- let newSize;
- if (ab === false) {
- pair.improvement = false;
- return;
- } else {
- newSize = ab;
- }
-
- pair.improvement = (a + b) / newSize;
- }
- combinations = combinations.filter(pair => {
- return pair.improvement !== false;
- });
- combinations.sort((a, b) => {
- return b.improvement - a.improvement;
- });
-
- const pair = combinations[0];
-
- if (!pair) return;
- if (pair.improvement < minSizeReduce) return;
-
- if (pair.b.integrate(pair.a, "aggressive-merge")) {
- chunks.splice(chunks.indexOf(pair.a), 1);
- return true;
- }
- }
- );
- }
- );
- }
-}
-
-module.exports = AggressiveMergingPlugin;
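
For reference, the plugin deleted above is the one exposed as webpack.optimize.AggressiveMergingPlugin in webpack 4. A minimal usage sketch (the option name and its 1.5 default come from the constructor above; the surrounding config shape is assumed):

// webpack.config.js -- sketch only
const webpack = require("webpack");

module.exports = {
	// ...
	plugins: [
		// merge non-initial chunks whenever merging shrinks total size by >= 1.5x
		new webpack.optimize.AggressiveMergingPlugin({ minSizeReduce: 1.5 })
	]
};
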
diff --git a/node_modules/webpack/lib/optimize/AggressiveSplittingPlugin.js b/node_modules/webpack/lib/optimize/AggressiveSplittingPlugin.js
deleted file mode 100644
index 3843878..0000000
--- a/node_modules/webpack/lib/optimize/AggressiveSplittingPlugin.js
+++ /dev/null
@@ -1,294 +0,0 @@
-/*
- MIT License http://www.opensource.org/licenses/mit-license.php
- Author Tobias Koppers @sokra
-*/
-"use strict";
-
-const identifierUtils = require("../util/identifier");
-const { intersect } = require("../util/SetHelpers");
-const validateOptions = require("schema-utils");
-const schema = require("../../schemas/plugins/optimize/AggressiveSplittingPlugin.json");
-
-/** @typedef {import("../../declarations/plugins/optimize/AggressiveSplittingPlugin").AggressiveSplittingPluginOptions} AggressiveSplittingPluginOptions */
-
-const moveModuleBetween = (oldChunk, newChunk) => {
- return module => {
- oldChunk.moveModule(module, newChunk);
- };
-};
-
-const isNotAEntryModule = entryModule => {
- return module => {
- return entryModule !== module;
- };
-};
-
-class AggressiveSplittingPlugin {
- /**
- * @param {AggressiveSplittingPluginOptions=} options options object
- */
- constructor(options) {
- if (!options) options = {};
-
- validateOptions(schema, options, "Aggressive Splitting Plugin");
-
- this.options = options;
- if (typeof this.options.minSize !== "number") {
- this.options.minSize = 30 * 1024;
- }
- if (typeof this.options.maxSize !== "number") {
- this.options.maxSize = 50 * 1024;
- }
- if (typeof this.options.chunkOverhead !== "number") {
- this.options.chunkOverhead = 0;
- }
- if (typeof this.options.entryChunkMultiplicator !== "number") {
- this.options.entryChunkMultiplicator = 1;
- }
- }
- apply(compiler) {
- compiler.hooks.thisCompilation.tap(
- "AggressiveSplittingPlugin",
- compilation => {
- let needAdditionalSeal = false;
- let newSplits;
- let fromAggressiveSplittingSet;
- let chunkSplitDataMap;
- compilation.hooks.optimize.tap("AggressiveSplittingPlugin", () => {
- newSplits = [];
- fromAggressiveSplittingSet = new Set();
- chunkSplitDataMap = new Map();
- });
- compilation.hooks.optimizeChunksAdvanced.tap(
- "AggressiveSplittingPlugin",
- chunks => {
- // Precompute stuff
- const nameToModuleMap = new Map();
- const moduleToNameMap = new Map();
- for (const m of compilation.modules) {
- const name = identifierUtils.makePathsRelative(
- compiler.context,
- m.identifier(),
- compilation.cache
- );
- nameToModuleMap.set(name, m);
- moduleToNameMap.set(m, name);
- }
-
- // Check used chunk ids
- const usedIds = new Set();
- for (const chunk of chunks) {
- usedIds.add(chunk.id);
- }
-
- const recordedSplits =
- (compilation.records && compilation.records.aggressiveSplits) ||
- [];
- const usedSplits = newSplits
- ? recordedSplits.concat(newSplits)
- : recordedSplits;
-
- const minSize = this.options.minSize;
- const maxSize = this.options.maxSize;
-
- const applySplit = splitData => {
- // Cannot split if id is already taken
- if (splitData.id !== undefined && usedIds.has(splitData.id)) {
- return false;
- }
-
- // Get module objects from names
- const selectedModules = splitData.modules.map(name =>
- nameToModuleMap.get(name)
- );
-
-							// Do the modules exist at all?
- if (!selectedModules.every(Boolean)) return false;
-
- // Check if size matches (faster than waiting for hash)
- const size = selectedModules.reduce(
- (sum, m) => sum + m.size(),
- 0
- );
- if (size !== splitData.size) return false;
-
- // get chunks with all modules
- const selectedChunks = intersect(
- selectedModules.map(m => new Set(m.chunksIterable))
- );
-
- // No relevant chunks found
- if (selectedChunks.size === 0) return false;
-
- // The found chunk is already the split or similar
- if (
- selectedChunks.size === 1 &&
- Array.from(selectedChunks)[0].getNumberOfModules() ===
- selectedModules.length
- ) {
- const chunk = Array.from(selectedChunks)[0];
- if (fromAggressiveSplittingSet.has(chunk)) return false;
- fromAggressiveSplittingSet.add(chunk);
- chunkSplitDataMap.set(chunk, splitData);
- return true;
- }
-
- // split the chunk into two parts
- const newChunk = compilation.addChunk();
- newChunk.chunkReason = "aggressive splitted";
- for (const chunk of selectedChunks) {
- selectedModules.forEach(moveModuleBetween(chunk, newChunk));
- chunk.split(newChunk);
- chunk.name = null;
- }
- fromAggressiveSplittingSet.add(newChunk);
- chunkSplitDataMap.set(newChunk, splitData);
-
- if (splitData.id !== null && splitData.id !== undefined) {
- newChunk.id = splitData.id;
- }
- return true;
- };
-
- // try to restore to recorded splitting
- let changed = false;
- for (let j = 0; j < usedSplits.length; j++) {
- const splitData = usedSplits[j];
- if (applySplit(splitData)) changed = true;
- }
-
-						// for any chunk which isn't split yet, split it and create a new entry
- // start with the biggest chunk
- const sortedChunks = chunks.slice().sort((a, b) => {
- const diff1 = b.modulesSize() - a.modulesSize();
- if (diff1) return diff1;
- const diff2 = a.getNumberOfModules() - b.getNumberOfModules();
- if (diff2) return diff2;
- const modulesA = Array.from(a.modulesIterable);
- const modulesB = Array.from(b.modulesIterable);
- modulesA.sort();
- modulesB.sort();
- const aI = modulesA[Symbol.iterator]();
- const bI = modulesB[Symbol.iterator]();
- // eslint-disable-next-line no-constant-condition
- while (true) {
- const aItem = aI.next();
- const bItem = bI.next();
- if (aItem.done) return 0;
- const aModuleIdentifier = aItem.value.identifier();
- const bModuleIdentifier = bItem.value.identifier();
- if (aModuleIdentifier > bModuleIdentifier) return -1;
- if (aModuleIdentifier < bModuleIdentifier) return 1;
- }
- });
- for (const chunk of sortedChunks) {
- if (fromAggressiveSplittingSet.has(chunk)) continue;
- const size = chunk.modulesSize();
- if (size > maxSize && chunk.getNumberOfModules() > 1) {
- const modules = chunk
- .getModules()
- .filter(isNotAEntryModule(chunk.entryModule))
- .sort((a, b) => {
- a = a.identifier();
- b = b.identifier();
- if (a > b) return 1;
- if (a < b) return -1;
- return 0;
- });
- const selectedModules = [];
- let selectedModulesSize = 0;
- for (let k = 0; k < modules.length; k++) {
- const module = modules[k];
- const newSize = selectedModulesSize + module.size();
- if (newSize > maxSize && selectedModulesSize >= minSize) {
- break;
- }
- selectedModulesSize = newSize;
- selectedModules.push(module);
- }
- if (selectedModules.length === 0) continue;
- const splitData = {
- modules: selectedModules
- .map(m => moduleToNameMap.get(m))
- .sort(),
- size: selectedModulesSize
- };
-
- if (applySplit(splitData)) {
- newSplits = (newSplits || []).concat(splitData);
- changed = true;
- }
- }
- }
- if (changed) return true;
- }
- );
- compilation.hooks.recordHash.tap(
- "AggressiveSplittingPlugin",
- records => {
- // 4. save made splittings to records
- const allSplits = new Set();
- const invalidSplits = new Set();
-
- // Check if some splittings are invalid
- // We remove invalid splittings and try again
- for (const chunk of compilation.chunks) {
- const splitData = chunkSplitDataMap.get(chunk);
- if (splitData !== undefined) {
- if (splitData.hash && chunk.hash !== splitData.hash) {
-									// Split was successful, but the hashes don't match
- // We can throw away the split since it's useless now
- invalidSplits.add(splitData);
- }
- }
- }
-
- if (invalidSplits.size > 0) {
- records.aggressiveSplits = records.aggressiveSplits.filter(
- splitData => !invalidSplits.has(splitData)
- );
- needAdditionalSeal = true;
- } else {
- // set hash and id values on all (new) splittings
- for (const chunk of compilation.chunks) {
- const splitData = chunkSplitDataMap.get(chunk);
- if (splitData !== undefined) {
- splitData.hash = chunk.hash;
- splitData.id = chunk.id;
- allSplits.add(splitData);
- // set flag for stats
- chunk.recorded = true;
- }
- }
-
-							// Also add all unused historical splits (after the used ones)
- // They can still be used in some future compilation
- const recordedSplits =
- compilation.records && compilation.records.aggressiveSplits;
- if (recordedSplits) {
- for (const splitData of recordedSplits) {
- if (!invalidSplits.has(splitData)) allSplits.add(splitData);
- }
- }
-
- // record all splits
- records.aggressiveSplits = Array.from(allSplits);
-
- needAdditionalSeal = false;
- }
- }
- );
- compilation.hooks.needAdditionalSeal.tap(
- "AggressiveSplittingPlugin",
- () => {
- if (needAdditionalSeal) {
- needAdditionalSeal = false;
- return true;
- }
- }
- );
- }
- );
- }
-}
-module.exports = AggressiveSplittingPlugin;
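
The splitting plugin above keys its behavior off compilation.records.aggressiveSplits, so in practice it is paired with a persisted records file. A usage sketch under that assumption (the size defaults match the constructor above):

// webpack.config.js -- sketch only
const path = require("path");
const webpack = require("webpack");

module.exports = {
	// ...
	// persisted records keep split ids and hashes stable across builds
	recordsPath: path.join(__dirname, "webpack-records.json"),
	plugins: [
		new webpack.optimize.AggressiveSplittingPlugin({
			minSize: 30 * 1024, // do not create splits smaller than ~30 KiB
			maxSize: 50 * 1024 // keep splitting chunks larger than ~50 KiB
		})
	]
};
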
diff --git a/node_modules/webpack/lib/optimize/ChunkModuleIdRangePlugin.js b/node_modules/webpack/lib/optimize/ChunkModuleIdRangePlugin.js
deleted file mode 100644
index 9e3abd3..0000000
--- a/node_modules/webpack/lib/optimize/ChunkModuleIdRangePlugin.js
+++ /dev/null
@@ -1,66 +0,0 @@
-/*
- MIT License http://www.opensource.org/licenses/mit-license.php
- Author Tobias Koppers @sokra
-*/
-"use strict";
-
-const sortByIndex = (a, b) => {
- return a.index - b.index;
-};
-
-const sortByIndex2 = (a, b) => {
- return a.index2 - b.index2;
-};
-
-class ChunkModuleIdRangePlugin {
- constructor(options) {
- this.options = options;
- }
-
- apply(compiler) {
- const options = this.options;
- compiler.hooks.compilation.tap("ChunkModuleIdRangePlugin", compilation => {
- compilation.hooks.moduleIds.tap("ChunkModuleIdRangePlugin", modules => {
- const chunk = compilation.chunks.find(
- chunk => chunk.name === options.name
- );
- if (!chunk) {
- throw new Error(
- `ChunkModuleIdRangePlugin: Chunk with name '${options.name}"' was not found`
- );
- }
-
- let chunkModules;
- if (options.order) {
- chunkModules = Array.from(chunk.modulesIterable);
- switch (options.order) {
- case "index":
- chunkModules.sort(sortByIndex);
- break;
- case "index2":
- chunkModules.sort(sortByIndex2);
- break;
- default:
- throw new Error(
- "ChunkModuleIdRangePlugin: unexpected value of order"
- );
- }
- } else {
- chunkModules = modules.filter(m => {
- return m.chunksIterable.has(chunk);
- });
- }
-
- let currentId = options.start || 0;
- for (let i = 0; i < chunkModules.length; i++) {
- const m = chunkModules[i];
- if (m.id === null) {
- m.id = currentId++;
- }
- if (options.end && currentId > options.end) break;
- }
- });
- });
- }
-}
-module.exports = ChunkModuleIdRangePlugin;
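
The constructor above only reads name, order ("index" or "index2"), start and end, so usage reduces to a small options object. A sketch (the chunk name "vendor" and the id range are illustrative):

// webpack.config.js -- sketch only
const ChunkModuleIdRangePlugin = require("webpack/lib/optimize/ChunkModuleIdRangePlugin");

module.exports = {
	// ...
	plugins: [
		new ChunkModuleIdRangePlugin({
			name: "vendor", // chunk whose modules receive ids from the range
			order: "index", // or "index2"; omit to keep the incoming module order
			start: 100,
			end: 150
		})
	]
};
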
diff --git a/node_modules/webpack/lib/optimize/ConcatenatedModule.js b/node_modules/webpack/lib/optimize/ConcatenatedModule.js
deleted file mode 100644
index df4d216..0000000
--- a/node_modules/webpack/lib/optimize/ConcatenatedModule.js
+++ /dev/null
@@ -1,1477 +0,0 @@
-/*
- MIT License http://www.opensource.org/licenses/mit-license.php
- Author Tobias Koppers @sokra
-*/
-"use strict";
-
-const Module = require("../Module");
-const Template = require("../Template");
-const Parser = require("../Parser");
-const eslintScope = require("eslint-scope");
-const { ConcatSource, ReplaceSource } = require("webpack-sources");
-const DependencyReference = require("../dependencies/DependencyReference");
-const HarmonyImportDependency = require("../dependencies/HarmonyImportDependency");
-const HarmonyImportSideEffectDependency = require("../dependencies/HarmonyImportSideEffectDependency");
-const HarmonyImportSpecifierDependency = require("../dependencies/HarmonyImportSpecifierDependency");
-const HarmonyExportSpecifierDependency = require("../dependencies/HarmonyExportSpecifierDependency");
-const HarmonyExportExpressionDependency = require("../dependencies/HarmonyExportExpressionDependency");
-const HarmonyExportImportedSpecifierDependency = require("../dependencies/HarmonyExportImportedSpecifierDependency");
-const HarmonyCompatibilityDependency = require("../dependencies/HarmonyCompatibilityDependency");
-const createHash = require("../util/createHash");
-
-/** @typedef {import("../Dependency")} Dependency */
-/** @typedef {import("../Compilation")} Compilation */
-/** @typedef {import("../util/createHash").Hash} Hash */
-/** @typedef {import("../RequestShortener")} RequestShortener */
-
-const joinIterableWithComma = iterable => {
- // This is more performant than Array.from().join(", ")
- // as it doesn't create an array
- let str = "";
- let first = true;
- for (const item of iterable) {
- if (first) {
- first = false;
- } else {
- str += ", ";
- }
- str += item;
- }
- return str;
-};
-
-/**
- * @typedef {Object} ConcatenationEntry
- * @property {"concatenated" | "external"} type
- * @property {Module} module
- */
-
-const ensureNsObjSource = (
- info,
- moduleToInfoMap,
- requestShortener,
- strictHarmonyModule
-) => {
- if (!info.hasNamespaceObject) {
- info.hasNamespaceObject = true;
- const name = info.exportMap.get(true);
- const nsObj = [`var ${name} = {};`, `__webpack_require__.r(${name});`];
- for (const exportName of info.module.buildMeta.providedExports) {
- const finalName = getFinalName(
- info,
- exportName,
- moduleToInfoMap,
- requestShortener,
- false,
- strictHarmonyModule
- );
- nsObj.push(
- `__webpack_require__.d(${name}, ${JSON.stringify(
- exportName
- )}, function() { return ${finalName}; });`
- );
- }
- info.namespaceObjectSource = nsObj.join("\n") + "\n";
- }
-};
-
-const getExternalImport = (
- importedModule,
- info,
- exportName,
- asCall,
- strictHarmonyModule
-) => {
- const used = importedModule.isUsed(exportName);
- if (!used) return "/* unused reexport */undefined";
- const comment =
- used !== exportName ? ` ${Template.toNormalComment(exportName)}` : "";
- switch (importedModule.buildMeta.exportsType) {
- case "named":
- if (exportName === "default") {
- return info.name;
- } else if (exportName === true) {
- info.interopNamespaceObjectUsed = true;
- return info.interopNamespaceObjectName;
- } else {
- break;
- }
- case "namespace":
- if (exportName === true) {
- return info.name;
- } else {
- break;
- }
- default:
- if (strictHarmonyModule) {
- if (exportName === "default") {
- return info.name;
- } else if (exportName === true) {
- info.interopNamespaceObjectUsed = true;
- return info.interopNamespaceObjectName;
- } else {
- return "/* non-default import from non-esm module */undefined";
- }
- } else {
- if (exportName === "default") {
- info.interopDefaultAccessUsed = true;
- return asCall
- ? `${info.interopDefaultAccessName}()`
- : `${info.interopDefaultAccessName}.a`;
- } else if (exportName === true) {
- return info.name;
- } else {
- break;
- }
- }
- }
- const reference = `${info.name}[${JSON.stringify(used)}${comment}]`;
- if (asCall) return `Object(${reference})`;
- return reference;
-};
-
-const getFinalName = (
- info,
- exportName,
- moduleToInfoMap,
- requestShortener,
- asCall,
- strictHarmonyModule,
- alreadyVisited = new Set()
-) => {
- switch (info.type) {
- case "concatenated": {
- const directExport = info.exportMap.get(exportName);
- if (directExport) {
- if (exportName === true) {
- ensureNsObjSource(
- info,
- moduleToInfoMap,
- requestShortener,
- strictHarmonyModule
- );
- } else if (!info.module.isUsed(exportName)) {
- return "/* unused export */ undefined";
- }
- if (info.globalExports.has(directExport)) {
- return directExport;
- }
- const name = info.internalNames.get(directExport);
- if (!name) {
- throw new Error(
- `The export "${directExport}" in "${info.module.readableIdentifier(
- requestShortener
- )}" has no internal name`
- );
- }
- return name;
- }
- const reexport = info.reexportMap.get(exportName);
- if (reexport) {
- if (alreadyVisited.has(reexport)) {
- throw new Error(
- `Circular reexports ${Array.from(
- alreadyVisited,
- e =>
- `"${e.module.readableIdentifier(requestShortener)}".${
- e.exportName
- }`
- ).join(
- " --> "
- )} -(circular)-> "${reexport.module.readableIdentifier(
- requestShortener
- )}".${reexport.exportName}`
- );
- }
- alreadyVisited.add(reexport);
- const refInfo = moduleToInfoMap.get(reexport.module);
- if (refInfo) {
- // module is in the concatenation
- return getFinalName(
- refInfo,
- reexport.exportName,
- moduleToInfoMap,
- requestShortener,
- asCall,
- strictHarmonyModule,
- alreadyVisited
- );
- }
- }
- const problem =
- `Cannot get final name for export "${exportName}" in "${info.module.readableIdentifier(
- requestShortener
- )}"` +
- ` (known exports: ${Array.from(info.exportMap.keys())
- .filter(name => name !== true)
- .join(" ")}, ` +
- `known reexports: ${Array.from(info.reexportMap.keys()).join(" ")})`;
- return `${Template.toNormalComment(problem)} undefined`;
- }
- case "external": {
- const importedModule = info.module;
- return getExternalImport(
- importedModule,
- info,
- exportName,
- asCall,
- strictHarmonyModule
- );
- }
- }
-};
-
-const addScopeSymbols1 = (s, nameSet, scopeSet) => {
- let scope = s;
- while (scope) {
- if (scopeSet.has(scope)) break;
- scopeSet.add(scope);
- for (const variable of scope.variables) {
- nameSet.add(variable.name);
- }
- scope = scope.upper;
- }
-};
-
-const addScopeSymbols2 = (s, nameSet, scopeSet1, scopeSet2) => {
- let scope = s;
- while (scope) {
- if (scopeSet1.has(scope)) break;
- if (scopeSet2.has(scope)) break;
- scopeSet1.add(scope);
- for (const variable of scope.variables) {
- nameSet.add(variable.name);
- }
- scope = scope.upper;
- }
-};
-
-const getAllReferences = variable => {
- let set = variable.references;
- // Look for inner scope variables too (like in class Foo { t() { Foo } })
- const identifiers = new Set(variable.identifiers);
- for (const scope of variable.scope.childScopes) {
- for (const innerVar of scope.variables) {
- if (innerVar.identifiers.some(id => identifiers.has(id))) {
- set = set.concat(innerVar.references);
- break;
- }
- }
- }
- return set;
-};
-
-const getPathInAst = (ast, node) => {
- if (ast === node) {
- return [];
- }
-
- const nr = node.range;
-
- const enterNode = n => {
- if (!n) return undefined;
- const r = n.range;
- if (r) {
- if (r[0] <= nr[0] && r[1] >= nr[1]) {
- const path = getPathInAst(n, node);
- if (path) {
- path.push(n);
- return path;
- }
- }
- }
- return undefined;
- };
-
- var i;
- if (Array.isArray(ast)) {
- for (i = 0; i < ast.length; i++) {
- const enterResult = enterNode(ast[i]);
- if (enterResult !== undefined) return enterResult;
- }
- } else if (ast && typeof ast === "object") {
- const keys = Object.keys(ast);
- for (i = 0; i < keys.length; i++) {
- const value = ast[keys[i]];
- if (Array.isArray(value)) {
- const pathResult = getPathInAst(value, node);
- if (pathResult !== undefined) return pathResult;
- } else if (value && typeof value === "object") {
- const enterResult = enterNode(value);
- if (enterResult !== undefined) return enterResult;
- }
- }
- }
-};
-
-const getHarmonyExportImportedSpecifierDependencyExports = dep => {
- const importModule = dep._module;
- if (!importModule) return [];
- if (dep._id) {
- // export { named } from "module"
- return [
- {
- name: dep.name,
- id: dep._id,
- module: importModule
- }
- ];
- }
- if (dep.name) {
- // export * as abc from "module"
- return [
- {
- name: dep.name,
- id: true,
- module: importModule
- }
- ];
- }
- // export * from "module"
- return importModule.buildMeta.providedExports
- .filter(exp => exp !== "default" && !dep.activeExports.has(exp))
- .map(exp => {
- return {
- name: exp,
- id: exp,
- module: importModule
- };
- });
-};
-
-class ConcatenatedModule extends Module {
- constructor(rootModule, modules, concatenationList) {
- super("javascript/esm", null);
- super.setChunks(rootModule._chunks);
-
- // Info from Factory
- this.rootModule = rootModule;
- this.factoryMeta = rootModule.factoryMeta;
-
- // Info from Compilation
- this.index = rootModule.index;
- this.index2 = rootModule.index2;
- this.depth = rootModule.depth;
-
- // Info from Optimization
- this.used = rootModule.used;
- this.usedExports = rootModule.usedExports;
-
- // Info from Build
- this.buildInfo = {
- strict: true,
- cacheable: modules.every(m => m.buildInfo.cacheable),
- moduleArgument: rootModule.buildInfo.moduleArgument,
- exportsArgument: rootModule.buildInfo.exportsArgument,
- fileDependencies: new Set(),
- contextDependencies: new Set(),
- assets: undefined
- };
- this.built = modules.some(m => m.built);
- this.buildMeta = rootModule.buildMeta;
-
- // Caching
- this._numberOfConcatenatedModules = modules.length;
-
- // Graph
- const modulesSet = new Set(modules);
- this.reasons = rootModule.reasons.filter(
- reason =>
- !(reason.dependency instanceof HarmonyImportDependency) ||
- !modulesSet.has(reason.module)
- );
-
- this.dependencies = [];
- this.blocks = [];
-
- this.warnings = [];
- this.errors = [];
- this._orderedConcatenationList =
- concatenationList ||
- ConcatenatedModule.createConcatenationList(rootModule, modulesSet, null);
- for (const info of this._orderedConcatenationList) {
- if (info.type === "concatenated") {
- const m = info.module;
-
- // populate dependencies
- for (const d of m.dependencies.filter(
- dep =>
- !(dep instanceof HarmonyImportDependency) ||
- !modulesSet.has(dep._module)
- )) {
- this.dependencies.push(d);
- }
- // populate blocks
- for (const d of m.blocks) {
- this.blocks.push(d);
- }
- // populate file dependencies
- if (m.buildInfo.fileDependencies) {
- for (const file of m.buildInfo.fileDependencies) {
- this.buildInfo.fileDependencies.add(file);
- }
- }
- // populate context dependencies
- if (m.buildInfo.contextDependencies) {
- for (const context of m.buildInfo.contextDependencies) {
- this.buildInfo.contextDependencies.add(context);
- }
- }
- // populate warnings
- for (const warning of m.warnings) {
- this.warnings.push(warning);
- }
- // populate errors
- for (const error of m.errors) {
- this.errors.push(error);
- }
-
- if (m.buildInfo.assets) {
- if (this.buildInfo.assets === undefined) {
- this.buildInfo.assets = Object.create(null);
- }
- Object.assign(this.buildInfo.assets, m.buildInfo.assets);
- }
- if (m.buildInfo.assetsInfo) {
- if (this.buildInfo.assetsInfo === undefined) {
- this.buildInfo.assetsInfo = new Map();
- }
- for (const [key, value] of m.buildInfo.assetsInfo) {
- this.buildInfo.assetsInfo.set(key, value);
- }
- }
- }
- }
- this._identifier = this._createIdentifier();
- }
-
- get modules() {
- return this._orderedConcatenationList
- .filter(info => info.type === "concatenated")
- .map(info => info.module);
- }
-
- identifier() {
- return this._identifier;
- }
-
- readableIdentifier(requestShortener) {
- return (
- this.rootModule.readableIdentifier(requestShortener) +
- ` + ${this._numberOfConcatenatedModules - 1} modules`
- );
- }
-
- libIdent(options) {
- return this.rootModule.libIdent(options);
- }
-
- nameForCondition() {
- return this.rootModule.nameForCondition();
- }
-
- build(options, compilation, resolver, fs, callback) {
- throw new Error("Cannot build this module. It should be already built.");
- }
-
- size() {
- // Guess size from embedded modules
- return this._orderedConcatenationList.reduce((sum, info) => {
- switch (info.type) {
- case "concatenated":
- return sum + info.module.size();
- case "external":
- return sum + 5;
- }
- return sum;
- }, 0);
- }
-
- /**
- * @param {Module} rootModule the root of the concatenation
- * @param {Set<Module>} modulesSet a set of modules which should be concatenated
- * @param {Compilation} compilation the compilation context
- * @returns {ConcatenationEntry[]} concatenation list
- */
- static createConcatenationList(rootModule, modulesSet, compilation) {
- const list = [];
- const set = new Set();
-
- /**
- * @param {Module} module a module
- * @returns {(function(): Module)[]} imported modules in order
- */
- const getConcatenatedImports = module => {
- /** @type {WeakMap<DependencyReference, Dependency>} */
- const map = new WeakMap();
- const references = module.dependencies
- .filter(dep => dep instanceof HarmonyImportDependency)
- .map(dep => {
- const ref = compilation.getDependencyReference(module, dep);
- if (ref) map.set(ref, dep);
- return ref;
- })
- .filter(ref => ref);
- DependencyReference.sort(references);
- // TODO webpack 5: remove this hack, see also DependencyReference
- return references.map(ref => {
- const dep = map.get(ref);
- return () => compilation.getDependencyReference(module, dep).module;
- });
- };
-
- const enterModule = getModule => {
- const module = getModule();
- if (!module) return;
- if (set.has(module)) return;
- set.add(module);
- if (modulesSet.has(module)) {
- const imports = getConcatenatedImports(module);
- imports.forEach(enterModule);
- list.push({
- type: "concatenated",
- module
- });
- } else {
- list.push({
- type: "external",
- get module() {
- // We need to use a getter here, because the module in the dependency
- // could be replaced by some other process (i. e. also replaced with a
- // concatenated module)
- return getModule();
- }
- });
- }
- };
-
- enterModule(() => rootModule);
-
- return list;
- }
-
- _createIdentifier() {
- let orderedConcatenationListIdentifiers = "";
- for (let i = 0; i < this._orderedConcatenationList.length; i++) {
- if (this._orderedConcatenationList[i].type === "concatenated") {
- orderedConcatenationListIdentifiers += this._orderedConcatenationList[
- i
- ].module.identifier();
- orderedConcatenationListIdentifiers += " ";
- }
- }
- const hash = createHash("md4");
- hash.update(orderedConcatenationListIdentifiers);
- return this.rootModule.identifier() + " " + hash.digest("hex");
- }
-
- source(dependencyTemplates, runtimeTemplate) {
- const requestShortener = runtimeTemplate.requestShortener;
- // Metainfo for each module
- const modulesWithInfo = this._orderedConcatenationList.map((info, idx) => {
- switch (info.type) {
- case "concatenated": {
- const exportMap = new Map();
- const reexportMap = new Map();
- for (const dep of info.module.dependencies) {
- if (dep instanceof HarmonyExportSpecifierDependency) {
- if (!exportMap.has(dep.name)) {
- exportMap.set(dep.name, dep.id);
- }
- } else if (dep instanceof HarmonyExportExpressionDependency) {
- if (!exportMap.has("default")) {
- exportMap.set("default", "__WEBPACK_MODULE_DEFAULT_EXPORT__");
- }
- } else if (
- dep instanceof HarmonyExportImportedSpecifierDependency
- ) {
- const exportName = dep.name;
- const importName = dep._id;
- const importedModule = dep._module;
- if (exportName && importName) {
- if (!reexportMap.has(exportName)) {
- reexportMap.set(exportName, {
- module: importedModule,
- exportName: importName,
- dependency: dep
- });
- }
- } else if (exportName) {
- if (!reexportMap.has(exportName)) {
- reexportMap.set(exportName, {
- module: importedModule,
- exportName: true,
- dependency: dep
- });
- }
- } else if (importedModule) {
- for (const name of importedModule.buildMeta.providedExports) {
- if (dep.activeExports.has(name) || name === "default") {
- continue;
- }
- if (!reexportMap.has(name)) {
- reexportMap.set(name, {
- module: importedModule,
- exportName: name,
- dependency: dep
- });
- }
- }
- }
- }
- }
- return {
- type: "concatenated",
- module: info.module,
- index: idx,
- ast: undefined,
- internalSource: undefined,
- source: undefined,
- globalScope: undefined,
- moduleScope: undefined,
- internalNames: new Map(),
- globalExports: new Set(),
- exportMap: exportMap,
- reexportMap: reexportMap,
- hasNamespaceObject: false,
- namespaceObjectSource: null
- };
- }
- case "external":
- return {
- type: "external",
- module: info.module,
- index: idx,
- name: undefined,
- interopNamespaceObjectUsed: false,
- interopNamespaceObjectName: undefined,
- interopDefaultAccessUsed: false,
- interopDefaultAccessName: undefined
- };
- default:
- throw new Error(`Unsupported concatenation entry type ${info.type}`);
- }
- });
-
- // Create mapping from module to info
- const moduleToInfoMap = new Map();
- for (const m of modulesWithInfo) {
- moduleToInfoMap.set(m.module, m);
- }
-
- // Configure template decorators for dependencies
- const innerDependencyTemplates = new Map(dependencyTemplates);
-
- innerDependencyTemplates.set(
- HarmonyImportSpecifierDependency,
- new HarmonyImportSpecifierDependencyConcatenatedTemplate(
- dependencyTemplates.get(HarmonyImportSpecifierDependency),
- moduleToInfoMap
- )
- );
- innerDependencyTemplates.set(
- HarmonyImportSideEffectDependency,
- new HarmonyImportSideEffectDependencyConcatenatedTemplate(
- dependencyTemplates.get(HarmonyImportSideEffectDependency),
- moduleToInfoMap
- )
- );
- innerDependencyTemplates.set(
- HarmonyExportSpecifierDependency,
- new NullTemplate()
- );
- innerDependencyTemplates.set(
- HarmonyExportExpressionDependency,
- new HarmonyExportExpressionDependencyConcatenatedTemplate(
- dependencyTemplates.get(HarmonyExportExpressionDependency),
- this.rootModule
- )
- );
- innerDependencyTemplates.set(
- HarmonyExportImportedSpecifierDependency,
- new NullTemplate()
- );
- innerDependencyTemplates.set(
- HarmonyCompatibilityDependency,
- new NullTemplate()
- );
-
- // Must use full identifier in our cache here to ensure that the source
- // is updated should our dependencies list change.
- // TODO webpack 5 refactor
- innerDependencyTemplates.set(
- "hash",
- innerDependencyTemplates.get("hash") + this.identifier()
- );
-
- // Generate source code and analyse scopes
- // Prepare a ReplaceSource for the final source
- for (const info of modulesWithInfo) {
- if (info.type === "concatenated") {
- const m = info.module;
- const source = m.source(innerDependencyTemplates, runtimeTemplate);
- const code = source.source();
- let ast;
- try {
- ast = Parser.parse(code, {
- sourceType: "module"
- });
- } catch (err) {
- if (
- err.loc &&
- typeof err.loc === "object" &&
- typeof err.loc.line === "number"
- ) {
- const lineNumber = err.loc.line;
- const lines = code.split("\n");
- err.message +=
- "\n| " +
- lines
- .slice(Math.max(0, lineNumber - 3), lineNumber + 2)
- .join("\n| ");
- }
- throw err;
- }
- const scopeManager = eslintScope.analyze(ast, {
- ecmaVersion: 6,
- sourceType: "module",
- optimistic: true,
- ignoreEval: true,
- impliedStrict: true
- });
- const globalScope = scopeManager.acquire(ast);
- const moduleScope = globalScope.childScopes[0];
- const resultSource = new ReplaceSource(source);
- info.ast = ast;
- info.internalSource = source;
- info.source = resultSource;
- info.globalScope = globalScope;
- info.moduleScope = moduleScope;
- }
- }
-
- // List of all used names to avoid conflicts
- const allUsedNames = new Set([
- "__WEBPACK_MODULE_DEFAULT_EXPORT__", // avoid using this internal name
-
- "abstract",
- "arguments",
- "async",
- "await",
- "boolean",
- "break",
- "byte",
- "case",
- "catch",
- "char",
- "class",
- "const",
- "continue",
- "debugger",
- "default",
- "delete",
- "do",
- "double",
- "else",
- "enum",
- "eval",
- "export",
- "extends",
- "false",
- "final",
- "finally",
- "float",
- "for",
- "function",
- "goto",
- "if",
- "implements",
- "import",
- "in",
- "instanceof",
- "int",
- "interface",
- "let",
- "long",
- "native",
- "new",
- "null",
- "package",
- "private",
- "protected",
- "public",
- "return",
- "short",
- "static",
- "super",
- "switch",
- "synchronized",
- "this",
- "throw",
- "throws",
- "transient",
- "true",
- "try",
- "typeof",
- "var",
- "void",
- "volatile",
- "while",
- "with",
- "yield",
-
- "module",
- "__dirname",
- "__filename",
- "exports",
-
- "Array",
- "Date",
- "eval",
- "function",
- "hasOwnProperty",
- "Infinity",
- "isFinite",
- "isNaN",
- "isPrototypeOf",
- "length",
- "Math",
- "NaN",
- "name",
- "Number",
- "Object",
- "prototype",
- "String",
- "toString",
- "undefined",
- "valueOf",
-
- "alert",
- "all",
- "anchor",
- "anchors",
- "area",
- "assign",
- "blur",
- "button",
- "checkbox",
- "clearInterval",
- "clearTimeout",
- "clientInformation",
- "close",
- "closed",
- "confirm",
- "constructor",
- "crypto",
- "decodeURI",
- "decodeURIComponent",
- "defaultStatus",
- "document",
- "element",
- "elements",
- "embed",
- "embeds",
- "encodeURI",
- "encodeURIComponent",
- "escape",
- "event",
- "fileUpload",
- "focus",
- "form",
- "forms",
- "frame",
- "innerHeight",
- "innerWidth",
- "layer",
- "layers",
- "link",
- "location",
- "mimeTypes",
- "navigate",
- "navigator",
- "frames",
- "frameRate",
- "hidden",
- "history",
- "image",
- "images",
- "offscreenBuffering",
- "open",
- "opener",
- "option",
- "outerHeight",
- "outerWidth",
- "packages",
- "pageXOffset",
- "pageYOffset",
- "parent",
- "parseFloat",
- "parseInt",
- "password",
- "pkcs11",
- "plugin",
- "prompt",
- "propertyIsEnum",
- "radio",
- "reset",
- "screenX",
- "screenY",
- "scroll",
- "secure",
- "select",
- "self",
- "setInterval",
- "setTimeout",
- "status",
- "submit",
- "taint",
- "text",
- "textarea",
- "top",
- "unescape",
- "untaint",
- "window",
-
- "onblur",
- "onclick",
- "onerror",
- "onfocus",
- "onkeydown",
- "onkeypress",
- "onkeyup",
- "onmouseover",
- "onload",
- "onmouseup",
- "onmousedown",
- "onsubmit"
- ]);
-
- // Set of already checked scopes
- const alreadyCheckedScopes = new Set();
-
- // get all global names
- for (const info of modulesWithInfo) {
- const superClassExpressions = [];
-
- // ignore symbols from moduleScope
- if (info.moduleScope) {
- alreadyCheckedScopes.add(info.moduleScope);
-
-				// The super class expression in class scopes behaves weirdly
-				// We store the ranges of all super class expressions to make
-				// renaming work correctly
- for (const childScope of info.moduleScope.childScopes) {
- if (childScope.type !== "class") continue;
- if (!childScope.block.superClass) continue;
- superClassExpressions.push({
- range: childScope.block.superClass.range,
- variables: childScope.variables
- });
- }
- }
-
- // add global symbols
- if (info.globalScope) {
- for (const reference of info.globalScope.through) {
- const name = reference.identifier.name;
- if (
- /^__WEBPACK_MODULE_REFERENCE__\d+_([\da-f]+|ns)(_call)?(_strict)?__$/.test(
- name
- )
- ) {
- for (const expr of superClassExpressions) {
- if (
- expr.range[0] <= reference.identifier.range[0] &&
- expr.range[1] >= reference.identifier.range[1]
- ) {
- for (const variable of expr.variables) {
- allUsedNames.add(variable.name);
- }
- }
- }
- addScopeSymbols1(
- reference.from,
- allUsedNames,
- alreadyCheckedScopes
- );
- } else {
- allUsedNames.add(name);
- }
- }
- }
-
- // add exported globals
- if (info.type === "concatenated") {
- const variables = new Set();
- for (const variable of info.moduleScope.variables) {
- variables.add(variable.name);
- }
- for (const [, variable] of info.exportMap) {
- if (!variables.has(variable)) {
- info.globalExports.add(variable);
- }
- }
- }
- }
-
- // generate names for symbols
- for (const info of modulesWithInfo) {
- switch (info.type) {
- case "concatenated": {
- const namespaceObjectName = this.findNewName(
- "namespaceObject",
- allUsedNames,
- null,
- info.module.readableIdentifier(requestShortener)
- );
- allUsedNames.add(namespaceObjectName);
- info.internalNames.set(namespaceObjectName, namespaceObjectName);
- info.exportMap.set(true, namespaceObjectName);
- for (const variable of info.moduleScope.variables) {
- const name = variable.name;
- if (allUsedNames.has(name)) {
- const references = getAllReferences(variable);
- const symbolsInReferences = new Set();
- const alreadyCheckedInnerScopes = new Set();
- for (const ref of references) {
- addScopeSymbols2(
- ref.from,
- symbolsInReferences,
- alreadyCheckedInnerScopes,
- alreadyCheckedScopes
- );
- }
- const newName = this.findNewName(
- name,
- allUsedNames,
- symbolsInReferences,
- info.module.readableIdentifier(requestShortener)
- );
- allUsedNames.add(newName);
- info.internalNames.set(name, newName);
- const source = info.source;
- const allIdentifiers = new Set(
- references.map(r => r.identifier).concat(variable.identifiers)
- );
- for (const identifier of allIdentifiers) {
- const r = identifier.range;
- const path = getPathInAst(info.ast, identifier);
- if (
- path &&
- path.length > 1 &&
- path[1].type === "Property" &&
- path[1].shorthand
- ) {
- source.insert(r[1], `: ${newName}`);
- } else {
- source.replace(r[0], r[1] - 1, newName);
- }
- }
- } else {
- allUsedNames.add(name);
- info.internalNames.set(name, name);
- }
- }
- break;
- }
- case "external": {
- const externalName = this.findNewName(
- "",
- allUsedNames,
- null,
- info.module.readableIdentifier(requestShortener)
- );
- allUsedNames.add(externalName);
- info.name = externalName;
- if (
- info.module.buildMeta.exportsType === "named" ||
- !info.module.buildMeta.exportsType
- ) {
- const externalNameInterop = this.findNewName(
- "namespaceObject",
- allUsedNames,
- null,
- info.module.readableIdentifier(requestShortener)
- );
- allUsedNames.add(externalNameInterop);
- info.interopNamespaceObjectName = externalNameInterop;
- }
- if (!info.module.buildMeta.exportsType) {
- const externalNameInterop = this.findNewName(
- "default",
- allUsedNames,
- null,
- info.module.readableIdentifier(requestShortener)
- );
- allUsedNames.add(externalNameInterop);
- info.interopDefaultAccessName = externalNameInterop;
- }
- break;
- }
- }
- }
-
-		// Find and replace references to modules
- for (const info of modulesWithInfo) {
- if (info.type === "concatenated") {
- for (const reference of info.globalScope.through) {
- const name = reference.identifier.name;
- const match = /^__WEBPACK_MODULE_REFERENCE__(\d+)_([\da-f]+|ns)(_call)?(_strict)?__$/.exec(
- name
- );
- if (match) {
- const referencedModule = modulesWithInfo[+match[1]];
- let exportName;
- if (match[2] === "ns") {
- exportName = true;
- } else {
- const exportData = match[2];
- exportName = Buffer.from(exportData, "hex").toString("utf-8");
- }
- const asCall = !!match[3];
- const strictHarmonyModule = !!match[4];
- const finalName = getFinalName(
- referencedModule,
- exportName,
- moduleToInfoMap,
- requestShortener,
- asCall,
- strictHarmonyModule
- );
- const r = reference.identifier.range;
- const source = info.source;
- source.replace(r[0], r[1] - 1, finalName);
- }
- }
- }
- }
-
- // Map with all root exposed used exports
- /** @type {Map<string, function(RequestShortener): string>} */
- const exportsMap = new Map();
-
- // Set with all root exposed unused exports
- /** @type {Set<string>} */
- const unusedExports = new Set();
-
- for (const dep of this.rootModule.dependencies) {
- if (dep instanceof HarmonyExportSpecifierDependency) {
- const used = this.rootModule.isUsed(dep.name);
- if (used) {
- const info = moduleToInfoMap.get(this.rootModule);
- if (!exportsMap.has(used)) {
- exportsMap.set(
- used,
- () => `/* binding */ ${info.internalNames.get(dep.id)}`
- );
- }
- } else {
- unusedExports.add(dep.name || "namespace");
- }
- } else if (dep instanceof HarmonyExportImportedSpecifierDependency) {
- const exportDefs = getHarmonyExportImportedSpecifierDependencyExports(
- dep
- );
- for (const def of exportDefs) {
- const info = moduleToInfoMap.get(def.module);
- const used = dep.originModule.isUsed(def.name);
- if (used) {
- if (!exportsMap.has(used)) {
- exportsMap.set(used, requestShortener => {
- const finalName = getFinalName(
- info,
- def.id,
- moduleToInfoMap,
- requestShortener,
- false,
- this.rootModule.buildMeta.strictHarmonyModule
- );
- return `/* reexport */ ${finalName}`;
- });
- }
- } else {
- unusedExports.add(def.name);
- }
- }
- }
- }
-
- const result = new ConcatSource();
-
- // add harmony compatibility flag (must be first because of possible circular dependencies)
- const usedExports = this.rootModule.usedExports;
- if (usedExports === true || usedExports === null) {
- result.add(`// ESM COMPAT FLAG\n`);
- result.add(
- runtimeTemplate.defineEsModuleFlagStatement({
- exportsArgument: this.exportsArgument
- })
- );
- }
-
- // define exports
- if (exportsMap.size > 0) {
- result.add(`\n// EXPORTS\n`);
- for (const [key, value] of exportsMap) {
- result.add(
- `__webpack_require__.d(${this.exportsArgument}, ${JSON.stringify(
- key
- )}, function() { return ${value(requestShortener)}; });\n`
- );
- }
- }
-
- // list unused exports
- if (unusedExports.size > 0) {
- result.add(
- `\n// UNUSED EXPORTS: ${joinIterableWithComma(unusedExports)}\n`
- );
- }
-
- // define required namespace objects (must be before evaluation modules)
- for (const info of modulesWithInfo) {
- if (info.namespaceObjectSource) {
- result.add(
- `\n// NAMESPACE OBJECT: ${info.module.readableIdentifier(
- requestShortener
- )}\n`
- );
- result.add(info.namespaceObjectSource);
- }
- }
-
- // evaluate modules in order
- for (const info of modulesWithInfo) {
- switch (info.type) {
- case "concatenated":
- result.add(
- `\n// CONCATENATED MODULE: ${info.module.readableIdentifier(
- requestShortener
- )}\n`
- );
- result.add(info.source);
- break;
- case "external":
- result.add(
- `\n// EXTERNAL MODULE: ${info.module.readableIdentifier(
- requestShortener
- )}\n`
- );
- result.add(
- `var ${info.name} = __webpack_require__(${JSON.stringify(
- info.module.id
- )});\n`
- );
- if (info.interopNamespaceObjectUsed) {
- if (info.module.buildMeta.exportsType === "named") {
- result.add(
- `var ${info.interopNamespaceObjectName} = /*#__PURE__*/__webpack_require__.t(${info.name}, 2);\n`
- );
- } else if (!info.module.buildMeta.exportsType) {
- result.add(
- `var ${info.interopNamespaceObjectName} = /*#__PURE__*/__webpack_require__.t(${info.name});\n`
- );
- }
- }
- if (info.interopDefaultAccessUsed) {
- result.add(
- `var ${info.interopDefaultAccessName} = /*#__PURE__*/__webpack_require__.n(${info.name});\n`
- );
- }
- break;
- default:
- throw new Error(`Unsupported concatenation entry type ${info.type}`);
- }
- }
-
- return result;
- }
-
- findNewName(oldName, usedNamed1, usedNamed2, extraInfo) {
- let name = oldName;
-
- if (name === "__WEBPACK_MODULE_DEFAULT_EXPORT__") name = "";
-
- // Remove uncool stuff
- extraInfo = extraInfo.replace(
- /\.+\/|(\/index)?\.([a-zA-Z0-9]{1,4})($|\s|\?)|\s*\+\s*\d+\s*modules/g,
- ""
- );
-
- const splittedInfo = extraInfo.split("/");
- while (splittedInfo.length) {
- name = splittedInfo.pop() + (name ? "_" + name : "");
- const nameIdent = Template.toIdentifier(name);
- if (
- !usedNamed1.has(nameIdent) &&
- (!usedNamed2 || !usedNamed2.has(nameIdent))
- )
- return nameIdent;
- }
-
- let i = 0;
- let nameWithNumber = Template.toIdentifier(`${name}_${i}`);
- while (
- usedNamed1.has(nameWithNumber) ||
- (usedNamed2 && usedNamed2.has(nameWithNumber))
- ) {
- i++;
- nameWithNumber = Template.toIdentifier(`${name}_${i}`);
- }
- return nameWithNumber;
- }
-
- /**
- * @param {Hash} hash the hash used to track dependencies
- * @returns {void}
- */
- updateHash(hash) {
- for (const info of this._orderedConcatenationList) {
- switch (info.type) {
- case "concatenated":
- info.module.updateHash(hash);
- break;
- case "external":
- hash.update(`${info.module.id}`);
- break;
- }
- }
- super.updateHash(hash);
- }
-}
-
-class HarmonyImportSpecifierDependencyConcatenatedTemplate {
- constructor(originalTemplate, modulesMap) {
- this.originalTemplate = originalTemplate;
- this.modulesMap = modulesMap;
- }
-
- getHarmonyInitOrder(dep) {
- const module = dep._module;
- const info = this.modulesMap.get(module);
- if (!info) {
- return this.originalTemplate.getHarmonyInitOrder(dep);
- }
- return NaN;
- }
-
- harmonyInit(dep, source, runtimeTemplate, dependencyTemplates) {
- const module = dep._module;
- const info = this.modulesMap.get(module);
- if (!info) {
- this.originalTemplate.harmonyInit(
- dep,
- source,
- runtimeTemplate,
- dependencyTemplates
- );
- return;
- }
- }
-
- apply(dep, source, runtime, dependencyTemplates) {
- const module = dep._module;
- const info = this.modulesMap.get(module);
- if (!info) {
- this.originalTemplate.apply(dep, source, runtime, dependencyTemplates);
- return;
- }
- let content;
- const callFlag = dep.call ? "_call" : "";
- const strictFlag = dep.originModule.buildMeta.strictHarmonyModule
- ? "_strict"
- : "";
- if (dep._id === null) {
- content = `__WEBPACK_MODULE_REFERENCE__${info.index}_ns${strictFlag}__`;
- } else if (dep.namespaceObjectAsContext) {
- content = `__WEBPACK_MODULE_REFERENCE__${
- info.index
- }_ns${strictFlag}__[${JSON.stringify(dep._id)}]`;
- } else {
- const exportData = Buffer.from(dep._id, "utf-8").toString("hex");
- content = `__WEBPACK_MODULE_REFERENCE__${info.index}_${exportData}${callFlag}${strictFlag}__`;
- }
- if (dep.shorthand) {
- content = dep.name + ": " + content;
- }
- source.replace(dep.range[0], dep.range[1] - 1, content);
- }
-}
-
-class HarmonyImportSideEffectDependencyConcatenatedTemplate {
- constructor(originalTemplate, modulesMap) {
- this.originalTemplate = originalTemplate;
- this.modulesMap = modulesMap;
- }
-
- getHarmonyInitOrder(dep) {
- const module = dep._module;
- const info = this.modulesMap.get(module);
- if (!info) {
- return this.originalTemplate.getHarmonyInitOrder(dep);
- }
- return NaN;
- }
-
- harmonyInit(dep, source, runtime, dependencyTemplates) {
- const module = dep._module;
- const info = this.modulesMap.get(module);
- if (!info) {
- this.originalTemplate.harmonyInit(
- dep,
- source,
- runtime,
- dependencyTemplates
- );
- return;
- }
- }
-
- apply(dep, source, runtime, dependencyTemplates) {
- const module = dep._module;
- const info = this.modulesMap.get(module);
- if (!info) {
- this.originalTemplate.apply(dep, source, runtime, dependencyTemplates);
- return;
- }
- }
-}
-
-class HarmonyExportExpressionDependencyConcatenatedTemplate {
- constructor(originalTemplate, rootModule) {
- this.originalTemplate = originalTemplate;
- this.rootModule = rootModule;
- }
-
- apply(dep, source, runtime, dependencyTemplates) {
- let content =
- "/* harmony default export */ var __WEBPACK_MODULE_DEFAULT_EXPORT__ = ";
- if (dep.originModule === this.rootModule) {
- const used = dep.originModule.isUsed("default");
- const exportsName = dep.originModule.exportsArgument;
- if (used) content += `${exportsName}[${JSON.stringify(used)}] = `;
- }
-
- if (dep.range) {
- source.replace(
- dep.rangeStatement[0],
- dep.range[0] - 1,
- content + "(" + dep.prefix
- );
- source.replace(dep.range[1], dep.rangeStatement[1] - 1, ");");
- return;
- }
-
- source.replace(
- dep.rangeStatement[0],
- dep.rangeStatement[1] - 1,
- content + dep.prefix
- );
- }
-}
-
-class NullTemplate {
- apply() {}
-}
-
-module.exports = ConcatenatedModule;
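
ConcatenatedModule is an internal module type: instances are created by the ModuleConcatenationPlugin listed in the diffstat, not constructed from user configuration. A sketch of how the feature is switched on in a webpack 4 config (production mode enables it by default):

// webpack.config.js -- sketch only
module.exports = {
	mode: "production", // enables module concatenation (scope hoisting) by default
	optimization: {
		// explicit opt-in; this is what ultimately emits the
		// "// CONCATENATED MODULE: ..." banners produced by source() above
		concatenateModules: true
	}
};
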
diff --git a/node_modules/webpack/lib/optimize/EnsureChunkConditionsPlugin.js b/node_modules/webpack/lib/optimize/EnsureChunkConditionsPlugin.js
deleted file mode 100644
index 5d05ec8..0000000
--- a/node_modules/webpack/lib/optimize/EnsureChunkConditionsPlugin.js
+++ /dev/null
@@ -1,70 +0,0 @@
-/*
- MIT License http://www.opensource.org/licenses/mit-license.php
- Author Tobias Koppers @sokra
-*/
-"use strict";
-
-const GraphHelpers = require("../GraphHelpers");
-
-class EnsureChunkConditionsPlugin {
- apply(compiler) {
- compiler.hooks.compilation.tap(
- "EnsureChunkConditionsPlugin",
- compilation => {
- const handler = chunks => {
- let changed = false;
- for (const module of compilation.modules) {
- if (!module.chunkCondition) continue;
- const sourceChunks = new Set();
- const chunkGroups = new Set();
- for (const chunk of module.chunksIterable) {
- if (!module.chunkCondition(chunk)) {
- sourceChunks.add(chunk);
- for (const group of chunk.groupsIterable) {
- chunkGroups.add(group);
- }
- }
- }
- if (sourceChunks.size === 0) continue;
- const targetChunks = new Set();
- chunkGroupLoop: for (const chunkGroup of chunkGroups) {
- // Can module be placed in a chunk of this group?
- for (const chunk of chunkGroup.chunks) {
- if (module.chunkCondition(chunk)) {
- targetChunks.add(chunk);
- continue chunkGroupLoop;
- }
- }
- // We reached the entrypoint: fail
- if (chunkGroup.isInitial()) {
- throw new Error(
- "Cannot fullfil chunk condition of " + module.identifier()
- );
- }
- // Try placing in all parents
- for (const group of chunkGroup.parentsIterable) {
- chunkGroups.add(group);
- }
- }
- for (const sourceChunk of sourceChunks) {
- GraphHelpers.disconnectChunkAndModule(sourceChunk, module);
- }
- for (const targetChunk of targetChunks) {
- GraphHelpers.connectChunkAndModule(targetChunk, module);
- }
- }
- if (changed) return true;
- };
- compilation.hooks.optimizeChunksBasic.tap(
- "EnsureChunkConditionsPlugin",
- handler
- );
- compilation.hooks.optimizeExtractedChunksBasic.tap(
- "EnsureChunkConditionsPlugin",
- handler
- );
- }
- );
- }
-}
-module.exports = EnsureChunkConditionsPlugin;
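
The handler above depends on an optional per-module chunkCondition predicate. A conceptual sketch of such a module (illustrative only; the predicate shown is an assumption, not the exact condition upstream external/DLL modules use):

// Sketch: a module that is only allowed in initial chunks.
// EnsureChunkConditionsPlugin pulls it out of every chunk where the predicate
// fails and re-attaches it to the closest parent chunk that passes, throwing
// once it reaches an entrypoint without finding one.
class OnlyInInitialChunksModule {
	chunkCondition(chunk) {
		return chunk.canBeInitial(); // assumed condition for illustration
	}
}
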
diff --git a/node_modules/webpack/lib/optimize/FlagIncludedChunksPlugin.js b/node_modules/webpack/lib/optimize/FlagIncludedChunksPlugin.js
deleted file mode 100644
index 1890f05..0000000
--- a/node_modules/webpack/lib/optimize/FlagIncludedChunksPlugin.js
+++ /dev/null
@@ -1,99 +0,0 @@
-/*
- MIT License http://www.opensource.org/licenses/mit-license.php
- Author Tobias Koppers @sokra
-*/
-"use strict";
-
-class FlagIncludedChunksPlugin {
- apply(compiler) {
- compiler.hooks.compilation.tap("FlagIncludedChunksPlugin", compilation => {
- compilation.hooks.optimizeChunkIds.tap(
- "FlagIncludedChunksPlugin",
- chunks => {
- // prepare two bit integers for each module
- // 2^31 is the max number represented as SMI in v8
- // we want the bits distributed this way:
-					// the bit 2^31 is pretty rare and only one module should get it
- // so it has a probability of 1 / modulesCount
- // the first bit (2^0) is the easiest and every module could get it
- // if it doesn't get a better bit
- // from bit 2^n to 2^(n+1) there is a probability of p
- // so 1 / modulesCount == p^31
- // <=> p = sqrt31(1 / modulesCount)
- // so we use a modulo of 1 / sqrt31(1 / modulesCount)
- const moduleBits = new WeakMap();
- const modulesCount = compilation.modules.length;
-
- // precalculate the modulo values for each bit
- const modulo = 1 / Math.pow(1 / modulesCount, 1 / 31);
- const modulos = Array.from(
- { length: 31 },
- (x, i) => Math.pow(modulo, i) | 0
- );
-
- // iterate all modules to generate bit values
- let i = 0;
- for (const module of compilation.modules) {
- let bit = 30;
- while (i % modulos[bit] !== 0) {
- bit--;
- }
- moduleBits.set(module, 1 << bit);
- i++;
- }
-
-					// iterate all chunks to generate bitmaps
- const chunkModulesHash = new WeakMap();
- for (const chunk of chunks) {
- let hash = 0;
- for (const module of chunk.modulesIterable) {
- hash |= moduleBits.get(module);
- }
- chunkModulesHash.set(chunk, hash);
- }
-
- for (const chunkA of chunks) {
- const chunkAHash = chunkModulesHash.get(chunkA);
- const chunkAModulesCount = chunkA.getNumberOfModules();
- if (chunkAModulesCount === 0) continue;
- let bestModule = undefined;
- for (const module of chunkA.modulesIterable) {
- if (
- bestModule === undefined ||
- bestModule.getNumberOfChunks() > module.getNumberOfChunks()
- )
- bestModule = module;
- }
- loopB: for (const chunkB of bestModule.chunksIterable) {
- // as we iterate the same iterables twice
- // skip if we find ourselves
- if (chunkA === chunkB) continue;
-
- const chunkBModulesCount = chunkB.getNumberOfModules();
-
- // ids for empty chunks are not included
- if (chunkBModulesCount === 0) continue;
-
- // instead of swapping A and B just bail
- // as we loop twice the current A will be B and B then A
- if (chunkAModulesCount > chunkBModulesCount) continue;
-
- // is chunkA in chunkB?
-
- // we do a cheap check for the hash value
- const chunkBHash = chunkModulesHash.get(chunkB);
- if ((chunkBHash & chunkAHash) !== chunkAHash) continue;
-
- // compare all modules
- for (const m of chunkA.modulesIterable) {
- if (!chunkB.containsModule(m)) continue loopB;
- }
- chunkB.ids.push(chunkA.id);
- }
- }
- }
- );
- });
- }
-}
-module.exports = FlagIncludedChunksPlugin;
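
The probability comment at the top of the tap above is easier to follow with concrete numbers. A small worked example (hypothetical modulesCount of 1000) reproducing the modulo table:

// p = sqrt31(1 / modulesCount); bit n is handed out to every (1/p)^n-th module,
// so bit 2^0 is available to every module while the top bit stays rare.
const modulesCount = 1000; // hypothetical
const modulo = 1 / Math.pow(1 / modulesCount, 1 / 31); // ~1.25
const modulos = Array.from({ length: 31 }, (x, i) => Math.pow(modulo, i) | 0);
console.log(modulos[0]); // 1 -> every module index qualifies for bit 2^0
console.log(modulos[30]); // ~800 -> only indices divisible by ~800 get the top bit
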
diff --git a/node_modules/webpack/lib/optimize/LimitChunkCountPlugin.js b/node_modules/webpack/lib/optimize/LimitChunkCountPlugin.js
deleted file mode 100644
index 87f2849..0000000
--- a/node_modules/webpack/lib/optimize/LimitChunkCountPlugin.js
+++ /dev/null
@@ -1,231 +0,0 @@
-/*
- MIT License http://www.opensource.org/licenses/mit-license.php
- Author Tobias Koppers @sokra
-*/
-"use strict";
-
-const validateOptions = require("schema-utils");
-const schema = require("../../schemas/plugins/optimize/LimitChunkCountPlugin.json");
-const LazyBucketSortedSet = require("../util/LazyBucketSortedSet");
-
-/** @typedef {import("../../declarations/plugins/optimize/LimitChunkCountPlugin").LimitChunkCountPluginOptions} LimitChunkCountPluginOptions */
-/** @typedef {import("../Chunk")} Chunk */
-/** @typedef {import("../Compiler")} Compiler */
-
-/**
- * @typedef {Object} ChunkCombination
- * @property {boolean} deleted this is set to true when combination was removed
- * @property {number} sizeDiff
- * @property {number} integratedSize
- * @property {Chunk} a
- * @property {Chunk} b
- * @property {number} aIdx
- * @property {number} bIdx
- * @property {number} aSize
- * @property {number} bSize
- */
-
-const addToSetMap = (map, key, value) => {
- const set = map.get(key);
- if (set === undefined) {
- map.set(key, new Set([value]));
- } else {
- set.add(value);
- }
-};
-
-class LimitChunkCountPlugin {
- /**
- * @param {LimitChunkCountPluginOptions=} options options object
- */
- constructor(options) {
- if (!options) options = {};
-
- validateOptions(schema, options, "Limit Chunk Count Plugin");
- this.options = options;
- }
-
- /**
- * @param {Compiler} compiler the webpack compiler
- * @returns {void}
- */
- apply(compiler) {
- const options = this.options;
- compiler.hooks.compilation.tap("LimitChunkCountPlugin", compilation => {
- compilation.hooks.optimizeChunksAdvanced.tap(
- "LimitChunkCountPlugin",
- chunks => {
- const maxChunks = options.maxChunks;
- if (!maxChunks) return;
- if (maxChunks < 1) return;
- if (chunks.length <= maxChunks) return;
-
- let remainingChunksToMerge = chunks.length - maxChunks;
-
- // order chunks in a deterministic way
- const orderedChunks = chunks.slice().sort((a, b) => a.compareTo(b));
-
- // create a lazy sorted data structure to keep all combinations
- // this is large. Size = chunks * (chunks - 1) / 2
- // It uses a multi layer bucket sort plus normal sort in the last layer
- // It's also lazy so only accessed buckets are sorted
- const combinations = new LazyBucketSortedSet(
- // Layer 1: ordered by largest size benefit
- c => c.sizeDiff,
- (a, b) => b - a,
- // Layer 2: ordered by smallest combined size
- c => c.integratedSize,
- (a, b) => a - b,
- // Layer 3: ordered by position difference in orderedChunk (-> to be deterministic)
- c => c.bIdx - c.aIdx,
- (a, b) => a - b,
- // Layer 4: ordered by position in orderedChunk (-> to be deterministic)
- (a, b) => a.bIdx - b.bIdx
- );
-
-					// we keep a mapping from chunk to all combinations
-					// but this mapping is not kept up-to-date with deletions
-					// so the `deleted` flag needs to be considered when iterating this
- /** @type {Map<Chunk, Set<ChunkCombination>>} */
- const combinationsByChunk = new Map();
-
- orderedChunks.forEach((b, bIdx) => {
- // create combination pairs with size and integrated size
- for (let aIdx = 0; aIdx < bIdx; aIdx++) {
- const a = orderedChunks[aIdx];
- const integratedSize = a.integratedSize(b, options);
-
- // filter pairs that do not have an integratedSize
- // meaning they can NOT be integrated!
- if (integratedSize === false) continue;
-
- const aSize = a.size(options);
- const bSize = b.size(options);
- const c = {
- deleted: false,
- sizeDiff: aSize + bSize - integratedSize,
- integratedSize,
- a,
- b,
- aIdx,
- bIdx,
- aSize,
- bSize
- };
- combinations.add(c);
- addToSetMap(combinationsByChunk, a, c);
- addToSetMap(combinationsByChunk, b, c);
- }
- return combinations;
- });
-
- // list of modified chunks during this run
- // combinations affected by this change are skipped to allow
-					// further optimizations
- /** @type {Set<Chunk>} */
- const modifiedChunks = new Set();
-
- let changed = false;
- // eslint-disable-next-line no-constant-condition
- loop: while (true) {
- const combination = combinations.popFirst();
- if (combination === undefined) break;
-
- combination.deleted = true;
- const { a, b, integratedSize } = combination;
-
- // skip over pair when
- // one of the already merged chunks is a parent of one of the chunks
- if (modifiedChunks.size > 0) {
- const queue = new Set(a.groupsIterable);
- for (const group of b.groupsIterable) {
- queue.add(group);
- }
- for (const group of queue) {
- for (const mChunk of modifiedChunks) {
- if (mChunk !== a && mChunk !== b && mChunk.isInGroup(group)) {
-									// This is a potential pair which needs recalculation
-									// We can't do that now, but it may merge before the following pairs,
-									// so we leave space for it and consider the chunks as modified,
-									// just for the worst case
- remainingChunksToMerge--;
- if (remainingChunksToMerge <= 0) break loop;
- modifiedChunks.add(a);
- modifiedChunks.add(b);
- continue loop;
- }
- }
- for (const parent of group.parentsIterable) {
- queue.add(parent);
- }
- }
- }
-
- // merge the chunks
- if (a.integrate(b, "limit")) {
- chunks.splice(chunks.indexOf(b), 1);
-
-							// flag chunk a as modified as further optimizations are possible for all children here
- modifiedChunks.add(a);
-
- changed = true;
- remainingChunksToMerge--;
- if (remainingChunksToMerge <= 0) break;
-
- // Update all affected combinations
- // delete all combination with the removed chunk
- // we will use combinations with the kept chunk instead
- for (const combination of combinationsByChunk.get(b)) {
- if (combination.deleted) continue;
- combination.deleted = true;
- combinations.delete(combination);
- }
-
- // Update combinations with the kept chunk with new sizes
- for (const combination of combinationsByChunk.get(a)) {
- if (combination.deleted) continue;
- if (combination.a === a) {
- // Update size
- const newIntegratedSize = a.integratedSize(
- combination.b,
- options
- );
- if (newIntegratedSize === false) {
- combination.deleted = true;
- combinations.delete(combination);
- continue;
- }
- const finishUpdate = combinations.startUpdate(combination);
- combination.integratedSize = newIntegratedSize;
- combination.aSize = integratedSize;
- combination.sizeDiff =
- combination.bSize + integratedSize - newIntegratedSize;
- finishUpdate();
- } else if (combination.b === a) {
- // Update size
- const newIntegratedSize = combination.a.integratedSize(
- a,
- options
- );
- if (newIntegratedSize === false) {
- combination.deleted = true;
- combinations.delete(combination);
- continue;
- }
- const finishUpdate = combinations.startUpdate(combination);
- combination.integratedSize = newIntegratedSize;
- combination.bSize = integratedSize;
- combination.sizeDiff =
- integratedSize + combination.aSize - newIntegratedSize;
- finishUpdate();
- }
- }
- }
- }
- if (changed) return true;
- }
- );
- });
- }
-}
-module.exports = LimitChunkCountPlugin;
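For reference, a minimal sketch of registering the plugin above by hand, assuming a webpack 4 install where the file still exists at this path; `maxChunks` is the option consumed in the tap above:

// webpack.config.js (sketch, not a definitive recommendation)
const LimitChunkCountPlugin = require("webpack/lib/optimize/LimitChunkCountPlugin");

module.exports = {
  mode: "production",
  entry: "./src/index.js",
  plugins: [
    // keep merging the pair with the largest size benefit until at most 5 chunks remain
    new LimitChunkCountPlugin({ maxChunks: 5 })
  ]
};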
diff --git a/node_modules/webpack/lib/optimize/MergeDuplicateChunksPlugin.js b/node_modules/webpack/lib/optimize/MergeDuplicateChunksPlugin.js
deleted file mode 100644
index 1c3e23a..0000000
--- a/node_modules/webpack/lib/optimize/MergeDuplicateChunksPlugin.js
+++ /dev/null
@@ -1,78 +0,0 @@
-/*
- MIT License http://www.opensource.org/licenses/mit-license.php
- Author Tobias Koppers @sokra
-*/
-"use strict";
-
-class MergeDuplicateChunksPlugin {
- apply(compiler) {
- compiler.hooks.compilation.tap(
- "MergeDuplicateChunksPlugin",
- compilation => {
- compilation.hooks.optimizeChunksBasic.tap(
- "MergeDuplicateChunksPlugin",
- chunks => {
- // remember already tested chunks for performance
- const notDuplicates = new Set();
-
- // for each chunk
- for (const chunk of chunks) {
-						// track a Set of all chunks that could be duplicates
- let possibleDuplicates;
- for (const module of chunk.modulesIterable) {
- if (possibleDuplicates === undefined) {
- // when possibleDuplicates is not yet set,
- // create a new Set from chunks of the current module
- // including only chunks with the same number of modules
- for (const dup of module.chunksIterable) {
- if (
- dup !== chunk &&
- chunk.getNumberOfModules() === dup.getNumberOfModules() &&
- !notDuplicates.has(dup)
- ) {
-									// delay allocating the new Set until here, to reduce memory pressure
- if (possibleDuplicates === undefined) {
- possibleDuplicates = new Set();
- }
- possibleDuplicates.add(dup);
- }
- }
-							// when no possible duplicate was found we can break here
- if (possibleDuplicates === undefined) break;
- } else {
- // validate existing possible duplicates
- for (const dup of possibleDuplicates) {
- // remove possible duplicate when module is not contained
- if (!dup.containsModule(module)) {
- possibleDuplicates.delete(dup);
- }
- }
-							// when all chunks have been removed we can break here
- if (possibleDuplicates.size === 0) break;
- }
- }
-
- // when we found duplicates
- if (
- possibleDuplicates !== undefined &&
- possibleDuplicates.size > 0
- ) {
- for (const otherChunk of possibleDuplicates) {
- if (otherChunk.hasRuntime() !== chunk.hasRuntime()) continue;
- // merge them
- if (chunk.integrate(otherChunk, "duplicate")) {
- chunks.splice(chunks.indexOf(otherChunk), 1);
- }
- }
- }
-
- // don't check already processed chunks twice
- notDuplicates.add(chunk);
- }
- }
- );
- }
- );
- }
-}
-module.exports = MergeDuplicateChunksPlugin;
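This plugin takes no options and is applied by webpack itself; a sketch of the switch that controls it, assuming webpack 4's `optimization.mergeDuplicateChunks` flag:

// webpack.config.js (sketch)
module.exports = {
  optimization: {
    // set to false to keep structurally identical chunks separate
    mergeDuplicateChunks: true
  }
};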
diff --git a/node_modules/webpack/lib/optimize/MinChunkSizePlugin.js b/node_modules/webpack/lib/optimize/MinChunkSizePlugin.js
deleted file mode 100644
index 144af27..0000000
--- a/node_modules/webpack/lib/optimize/MinChunkSizePlugin.js
+++ /dev/null
@@ -1,82 +0,0 @@
-/*
- MIT License http://www.opensource.org/licenses/mit-license.php
- Author Tobias Koppers @sokra
-*/
-"use strict";
-
-const validateOptions = require("schema-utils");
-const schema = require("../../schemas/plugins/optimize/MinChunkSizePlugin.json");
-
-/** @typedef {import("../../declarations/plugins/optimize/MinChunkSizePlugin").MinChunkSizePluginOptions} MinChunkSizePluginOptions */
-
-class MinChunkSizePlugin {
- /**
- * @param {MinChunkSizePluginOptions} options options object
- */
- constructor(options) {
- validateOptions(schema, options, "Min Chunk Size Plugin");
- this.options = options;
- }
-
- apply(compiler) {
- const options = this.options;
- const minChunkSize = options.minChunkSize;
- compiler.hooks.compilation.tap("MinChunkSizePlugin", compilation => {
- compilation.hooks.optimizeChunksAdvanced.tap(
- "MinChunkSizePlugin",
- chunks => {
- const equalOptions = {
- chunkOverhead: 1,
- entryChunkMultiplicator: 1
- };
-
- const sortedSizeFilteredExtendedPairCombinations = chunks
- .reduce((combinations, a, idx) => {
- // create combination pairs
- for (let i = 0; i < idx; i++) {
- const b = chunks[i];
- combinations.push([b, a]);
- }
- return combinations;
- }, [])
- .filter(pair => {
-						// check if one of the chunk sizes is smaller than the minChunkSize
- const p0SmallerThanMinChunkSize =
- pair[0].size(equalOptions) < minChunkSize;
- const p1SmallerThanMinChunkSize =
- pair[1].size(equalOptions) < minChunkSize;
- return p0SmallerThanMinChunkSize || p1SmallerThanMinChunkSize;
- })
- .map(pair => {
- // extend combination pairs with size and integrated size
- const a = pair[0].size(options);
- const b = pair[1].size(options);
- const ab = pair[0].integratedSize(pair[1], options);
- return [a + b - ab, ab, pair[0], pair[1]];
- })
- .filter(pair => {
- // filter pairs that do not have an integratedSize
- // meaning they can NOT be integrated!
- return pair[1] !== false;
- })
- .sort((a, b) => {
-						// sadly JavaScript does an in-place sort here
- // sort by size
- const diff = b[0] - a[0];
- if (diff !== 0) return diff;
- return a[1] - b[1];
- });
-
- if (sortedSizeFilteredExtendedPairCombinations.length === 0) return;
-
- const pair = sortedSizeFilteredExtendedPairCombinations[0];
-
- pair[2].integrate(pair[3], "min-size");
- chunks.splice(chunks.indexOf(pair[3]), 1);
- return true;
- }
- );
- });
- }
-}
-module.exports = MinChunkSizePlugin;
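A minimal usage sketch, assuming a webpack 4 install; `minChunkSize` is the only option read above and is compared against chunk sizes computed with `chunkOverhead: 1`:

// webpack.config.js (sketch)
const MinChunkSizePlugin = require("webpack/lib/optimize/MinChunkSizePlugin");

module.exports = {
  plugins: [
    // repeatedly merge the best-scoring pair until no chunk is smaller than ~10 kB
    new MinChunkSizePlugin({ minChunkSize: 10000 })
  ]
};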
diff --git a/node_modules/webpack/lib/optimize/MinMaxSizeWarning.js b/node_modules/webpack/lib/optimize/MinMaxSizeWarning.js
deleted file mode 100644
index 255e918..0000000
--- a/node_modules/webpack/lib/optimize/MinMaxSizeWarning.js
+++ /dev/null
@@ -1,29 +0,0 @@
-/*
- MIT License http://www.opensource.org/licenses/mit-license.php
- Author Tobias Koppers @sokra
-*/
-"use strict";
-
-const WebpackError = require("../WebpackError");
-const SizeFormatHelpers = require("../SizeFormatHelpers");
-
-class MinMaxSizeWarning extends WebpackError {
- constructor(keys, minSize, maxSize) {
- let keysMessage = "Fallback cache group";
- if (keys) {
- keysMessage =
- keys.length > 1
- ? `Cache groups ${keys.sort().join(", ")}`
- : `Cache group ${keys[0]}`;
- }
- super(
- `SplitChunksPlugin\n` +
- `${keysMessage}\n` +
- `Configured minSize (${SizeFormatHelpers.formatSize(minSize)}) is ` +
- `bigger than maxSize (${SizeFormatHelpers.formatSize(maxSize)}).\n` +
-				"This seems to be an invalid optimization.splitChunks configuration."
- );
- }
-}
-
-module.exports = MinMaxSizeWarning;
diff --git a/node_modules/webpack/lib/optimize/ModuleConcatenationPlugin.js b/node_modules/webpack/lib/optimize/ModuleConcatenationPlugin.js
deleted file mode 100644
index 0c5bfd3..0000000
--- a/node_modules/webpack/lib/optimize/ModuleConcatenationPlugin.js
+++ /dev/null
@@ -1,485 +0,0 @@
-/*
- MIT License http://www.opensource.org/licenses/mit-license.php
- Author Tobias Koppers @sokra
-*/
-"use strict";
-
-const HarmonyImportDependency = require("../dependencies/HarmonyImportDependency");
-const ModuleHotAcceptDependency = require("../dependencies/ModuleHotAcceptDependency");
-const ModuleHotDeclineDependency = require("../dependencies/ModuleHotDeclineDependency");
-const ConcatenatedModule = require("./ConcatenatedModule");
-const HarmonyCompatibilityDependency = require("../dependencies/HarmonyCompatibilityDependency");
-const StackedSetMap = require("../util/StackedSetMap");
-
-const formatBailoutReason = msg => {
- return "ModuleConcatenation bailout: " + msg;
-};
-
-class ModuleConcatenationPlugin {
- constructor(options) {
- if (typeof options !== "object") options = {};
- this.options = options;
- }
-
- apply(compiler) {
- compiler.hooks.compilation.tap(
- "ModuleConcatenationPlugin",
- (compilation, { normalModuleFactory }) => {
- const handler = (parser, parserOptions) => {
- parser.hooks.call.for("eval").tap("ModuleConcatenationPlugin", () => {
- // Because of variable renaming we can't use modules with eval.
- parser.state.module.buildMeta.moduleConcatenationBailout = "eval()";
- });
- };
-
- normalModuleFactory.hooks.parser
- .for("javascript/auto")
- .tap("ModuleConcatenationPlugin", handler);
- normalModuleFactory.hooks.parser
- .for("javascript/dynamic")
- .tap("ModuleConcatenationPlugin", handler);
- normalModuleFactory.hooks.parser
- .for("javascript/esm")
- .tap("ModuleConcatenationPlugin", handler);
-
- const bailoutReasonMap = new Map();
-
- const setBailoutReason = (module, reason) => {
- bailoutReasonMap.set(module, reason);
- module.optimizationBailout.push(
- typeof reason === "function"
- ? rs => formatBailoutReason(reason(rs))
- : formatBailoutReason(reason)
- );
- };
-
- const getBailoutReason = (module, requestShortener) => {
- const reason = bailoutReasonMap.get(module);
- if (typeof reason === "function") return reason(requestShortener);
- return reason;
- };
-
- compilation.hooks.optimizeChunkModules.tap(
- "ModuleConcatenationPlugin",
- (allChunks, modules) => {
- const relevantModules = [];
- const possibleInners = new Set();
- for (const module of modules) {
- // Only harmony modules are valid for optimization
- if (
- !module.buildMeta ||
- module.buildMeta.exportsType !== "namespace" ||
- !module.dependencies.some(
- d => d instanceof HarmonyCompatibilityDependency
- )
- ) {
- setBailoutReason(module, "Module is not an ECMAScript module");
- continue;
- }
-
- // Some expressions are not compatible with module concatenation
- // because they may produce unexpected results. The plugin bails out
- // if some were detected upfront.
- if (
- module.buildMeta &&
- module.buildMeta.moduleConcatenationBailout
- ) {
- setBailoutReason(
- module,
- `Module uses ${module.buildMeta.moduleConcatenationBailout}`
- );
- continue;
- }
-
- // Exports must be known (and not dynamic)
- if (!Array.isArray(module.buildMeta.providedExports)) {
- setBailoutReason(module, "Module exports are unknown");
- continue;
- }
-
- // Using dependency variables is not possible as this wraps the code in a function
- if (module.variables.length > 0) {
- setBailoutReason(
- module,
- `Module uses injected variables (${module.variables
- .map(v => v.name)
- .join(", ")})`
- );
- continue;
- }
-
-							// Hot Module Replacement needs its own module to work correctly
- if (
- module.dependencies.some(
- dep =>
- dep instanceof ModuleHotAcceptDependency ||
- dep instanceof ModuleHotDeclineDependency
- )
- ) {
- setBailoutReason(module, "Module uses Hot Module Replacement");
- continue;
- }
-
- relevantModules.push(module);
-
-							// Module must not be an entry point
- if (module.isEntryModule()) {
- setBailoutReason(module, "Module is an entry point");
- continue;
- }
-
-							// Module must be in at least one chunk (we don't want to do useless work)
- if (module.getNumberOfChunks() === 0) {
- setBailoutReason(module, "Module is not in any chunk");
- continue;
- }
-
- // Module must only be used by Harmony Imports
- const nonHarmonyReasons = module.reasons.filter(
- reason =>
- !reason.dependency ||
- !(reason.dependency instanceof HarmonyImportDependency)
- );
- if (nonHarmonyReasons.length > 0) {
- const importingModules = new Set(
- nonHarmonyReasons.map(r => r.module).filter(Boolean)
- );
- const importingExplanations = new Set(
- nonHarmonyReasons.map(r => r.explanation).filter(Boolean)
- );
- const importingModuleTypes = new Map(
- Array.from(importingModules).map(
- m => /** @type {[string, Set]} */ ([
- m,
- new Set(
- nonHarmonyReasons
- .filter(r => r.module === m)
- .map(r => r.dependency.type)
- .sort()
- )
- ])
- )
- );
- setBailoutReason(module, requestShortener => {
- const names = Array.from(importingModules)
- .map(
- m =>
- `${m.readableIdentifier(
- requestShortener
- )} (referenced with ${Array.from(
- importingModuleTypes.get(m)
- ).join(", ")})`
- )
- .sort();
- const explanations = Array.from(importingExplanations).sort();
- if (names.length > 0 && explanations.length === 0) {
- return `Module is referenced from these modules with unsupported syntax: ${names.join(
- ", "
- )}`;
- } else if (names.length === 0 && explanations.length > 0) {
- return `Module is referenced by: ${explanations.join(
- ", "
- )}`;
- } else if (names.length > 0 && explanations.length > 0) {
- return `Module is referenced from these modules with unsupported syntax: ${names.join(
- ", "
- )} and by: ${explanations.join(", ")}`;
- } else {
-									return "Module is referenced in an unsupported way";
- }
- });
- continue;
- }
-
- possibleInners.add(module);
- }
- // sort by depth
- // modules with lower depth are more likely suited as roots
- // this improves performance, because modules already selected as inner are skipped
- relevantModules.sort((a, b) => {
- return a.depth - b.depth;
- });
- const concatConfigurations = [];
- const usedAsInner = new Set();
- for (const currentRoot of relevantModules) {
- // when used by another configuration as inner:
- // the other configuration is better and we can skip this one
- if (usedAsInner.has(currentRoot)) continue;
-
- // create a configuration with the root
- const currentConfiguration = new ConcatConfiguration(currentRoot);
-
- // cache failures to add modules
- const failureCache = new Map();
-
- // try to add all imports
- for (const imp of this._getImports(compilation, currentRoot)) {
- const problem = this._tryToAdd(
- compilation,
- currentConfiguration,
- imp,
- possibleInners,
- failureCache
- );
- if (problem) {
- failureCache.set(imp, problem);
- currentConfiguration.addWarning(imp, problem);
- }
- }
- if (!currentConfiguration.isEmpty()) {
- concatConfigurations.push(currentConfiguration);
- for (const module of currentConfiguration.getModules()) {
- if (module !== currentConfiguration.rootModule) {
- usedAsInner.add(module);
- }
- }
- }
- }
- // HACK: Sort configurations by length and start with the longest one
-						// to get the biggest groups possible. Used modules are marked with usedModules
- // TODO: Allow to reuse existing configuration while trying to add dependencies.
- // This would improve performance. O(n^2) -> O(n)
- concatConfigurations.sort((a, b) => {
- return b.modules.size - a.modules.size;
- });
- const usedModules = new Set();
- for (const concatConfiguration of concatConfigurations) {
- if (usedModules.has(concatConfiguration.rootModule)) continue;
- const modules = concatConfiguration.getModules();
- const rootModule = concatConfiguration.rootModule;
- const newModule = new ConcatenatedModule(
- rootModule,
- Array.from(modules),
- ConcatenatedModule.createConcatenationList(
- rootModule,
- modules,
- compilation
- )
- );
- for (const warning of concatConfiguration.getWarningsSorted()) {
- newModule.optimizationBailout.push(requestShortener => {
- const reason = getBailoutReason(warning[0], requestShortener);
- const reasonWithPrefix = reason ? ` (<- ${reason})` : "";
- if (warning[0] === warning[1]) {
- return formatBailoutReason(
- `Cannot concat with ${warning[0].readableIdentifier(
- requestShortener
- )}${reasonWithPrefix}`
- );
- } else {
- return formatBailoutReason(
- `Cannot concat with ${warning[0].readableIdentifier(
- requestShortener
- )} because of ${warning[1].readableIdentifier(
- requestShortener
- )}${reasonWithPrefix}`
- );
- }
- });
- }
- const chunks = concatConfiguration.rootModule.getChunks();
- for (const m of modules) {
- usedModules.add(m);
- for (const chunk of chunks) {
- chunk.removeModule(m);
- }
- }
- for (const chunk of chunks) {
- chunk.addModule(newModule);
- newModule.addChunk(chunk);
- }
- for (const chunk of allChunks) {
- if (chunk.entryModule === concatConfiguration.rootModule) {
- chunk.entryModule = newModule;
- }
- }
- compilation.modules.push(newModule);
- for (const reason of newModule.reasons) {
- if (reason.dependency.module === concatConfiguration.rootModule)
- reason.dependency.module = newModule;
- if (
- reason.dependency.redirectedModule ===
- concatConfiguration.rootModule
- )
- reason.dependency.redirectedModule = newModule;
- }
- // TODO: remove when LTS node version contains fixed v8 version
- // @see https://github.com/webpack/webpack/pull/6613
- // Turbofan does not correctly inline for-of loops with polymorphic input arrays.
- // Work around issue by using a standard for loop and assigning dep.module.reasons
- for (let i = 0; i < newModule.dependencies.length; i++) {
- let dep = newModule.dependencies[i];
- if (dep.module) {
- let reasons = dep.module.reasons;
- for (let j = 0; j < reasons.length; j++) {
- let reason = reasons[j];
- if (reason.dependency === dep) {
- reason.module = newModule;
- }
- }
- }
- }
- }
- compilation.modules = compilation.modules.filter(
- m => !usedModules.has(m)
- );
- }
- );
- }
- );
- }
-
- _getImports(compilation, module) {
- return new Set(
- module.dependencies
-
- // Get reference info only for harmony Dependencies
- .map(dep => {
- if (!(dep instanceof HarmonyImportDependency)) return null;
- if (!compilation) return dep.getReference();
- return compilation.getDependencyReference(module, dep);
- })
-
- // Reference is valid and has a module
- // Dependencies are simple enough to concat them
- .filter(
- ref =>
- ref &&
- ref.module &&
- (Array.isArray(ref.importedNames) ||
- Array.isArray(ref.module.buildMeta.providedExports))
- )
-
- // Take the imported module
- .map(ref => ref.module)
- );
- }
-
- _tryToAdd(compilation, config, module, possibleModules, failureCache) {
- const cacheEntry = failureCache.get(module);
- if (cacheEntry) {
- return cacheEntry;
- }
-
- // Already added?
- if (config.has(module)) {
- return null;
- }
-
- // Not possible to add?
- if (!possibleModules.has(module)) {
- failureCache.set(module, module); // cache failures for performance
- return module;
- }
-
- // module must be in the same chunks
- if (!config.rootModule.hasEqualsChunks(module)) {
- failureCache.set(module, module); // cache failures for performance
- return module;
- }
-
- // Clone config to make experimental changes
- const testConfig = config.clone();
-
- // Add the module
- testConfig.add(module);
-
- // Every module which depends on the added module must be in the configuration too.
- for (const reason of module.reasons) {
- // Modules that are not used can be ignored
- if (
- reason.module.factoryMeta.sideEffectFree &&
- reason.module.used === false
- )
- continue;
-
- const problem = this._tryToAdd(
- compilation,
- testConfig,
- reason.module,
- possibleModules,
- failureCache
- );
- if (problem) {
- failureCache.set(module, problem); // cache failures for performance
- return problem;
- }
- }
-
- // Commit experimental changes
- config.set(testConfig);
-
- // Eagerly try to add imports too if possible
- for (const imp of this._getImports(compilation, module)) {
- const problem = this._tryToAdd(
- compilation,
- config,
- imp,
- possibleModules,
- failureCache
- );
- if (problem) {
- config.addWarning(imp, problem);
- }
- }
- return null;
- }
-}
-
-class ConcatConfiguration {
- constructor(rootModule, cloneFrom) {
- this.rootModule = rootModule;
- if (cloneFrom) {
- this.modules = cloneFrom.modules.createChild(5);
- this.warnings = cloneFrom.warnings.createChild(5);
- } else {
- this.modules = new StackedSetMap();
- this.modules.add(rootModule);
- this.warnings = new StackedSetMap();
- }
- }
-
- add(module) {
- this.modules.add(module);
- }
-
- has(module) {
- return this.modules.has(module);
- }
-
- isEmpty() {
- return this.modules.size === 1;
- }
-
- addWarning(module, problem) {
- this.warnings.set(module, problem);
- }
-
- getWarningsSorted() {
- return new Map(
- this.warnings.asPairArray().sort((a, b) => {
- const ai = a[0].identifier();
- const bi = b[0].identifier();
- if (ai < bi) return -1;
- if (ai > bi) return 1;
- return 0;
- })
- );
- }
-
- getModules() {
- return this.modules.asSet();
- }
-
- clone() {
- return new ConcatConfiguration(this.rootModule, this);
- }
-
- set(config) {
- this.rootModule = config.rootModule;
- this.modules = config.modules;
- this.warnings = config.warnings;
- }
-}
-
-module.exports = ModuleConcatenationPlugin;
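A sketch of turning the scope hoisting above on outside of production mode and surfacing the bailout messages pushed via `optimizationBailout`, assuming webpack 4's `optimization.concatenateModules` flag and the `stats.optimizationBailout` switch:

// webpack.config.js (sketch)
module.exports = {
  mode: "development",
  optimization: {
    concatenateModules: true // apply ModuleConcatenationPlugin outside production
  },
  stats: {
    optimizationBailout: true // print the "ModuleConcatenation bailout: ..." reasons
  }
};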
diff --git a/node_modules/webpack/lib/optimize/NaturalChunkOrderPlugin.js b/node_modules/webpack/lib/optimize/NaturalChunkOrderPlugin.js
deleted file mode 100644
index 00f8010..0000000
--- a/node_modules/webpack/lib/optimize/NaturalChunkOrderPlugin.js
+++ /dev/null
@@ -1,41 +0,0 @@
-/*
- MIT License http://www.opensource.org/licenses/mit-license.php
- Author Tobias Koppers @sokra
-*/
-"use strict";
-
-/** @typedef {import("../Compiler")} Compiler */
-
-class NaturalChunkOrderPlugin {
- /**
- * @param {Compiler} compiler webpack compiler
- * @returns {void}
- */
- apply(compiler) {
- compiler.hooks.compilation.tap("NaturalChunkOrderPlugin", compilation => {
- compilation.hooks.optimizeChunkOrder.tap(
- "NaturalChunkOrderPlugin",
- chunks => {
- chunks.sort((chunkA, chunkB) => {
- const a = chunkA.modulesIterable[Symbol.iterator]();
- const b = chunkB.modulesIterable[Symbol.iterator]();
- // eslint-disable-next-line no-constant-condition
- while (true) {
- const aItem = a.next();
- const bItem = b.next();
- if (aItem.done && bItem.done) return 0;
- if (aItem.done) return -1;
- if (bItem.done) return 1;
- const aModuleId = aItem.value.id;
- const bModuleId = bItem.value.id;
- if (aModuleId < bModuleId) return -1;
- if (aModuleId > bModuleId) return 1;
- }
- });
- }
- );
- });
- }
-}
-
-module.exports = NaturalChunkOrderPlugin;
diff --git a/node_modules/webpack/lib/optimize/OccurrenceChunkOrderPlugin.js b/node_modules/webpack/lib/optimize/OccurrenceChunkOrderPlugin.js
deleted file mode 100644
index 4f7ec9a..0000000
--- a/node_modules/webpack/lib/optimize/OccurrenceChunkOrderPlugin.js
+++ /dev/null
@@ -1,66 +0,0 @@
-/*
- MIT License http://www.opensource.org/licenses/mit-license.php
- Author Tobias Koppers @sokra
-*/
-"use strict";
-
-const validateOptions = require("schema-utils");
-const schema = require("../../schemas/plugins/optimize/OccurrenceOrderChunkIdsPlugin.json");
-
-/** @typedef {import("../../declarations/plugins/optimize/OccurrenceOrderChunkIdsPlugin").OccurrenceOrderChunkIdsPluginOptions} OccurrenceOrderChunkIdsPluginOptions */
-
-class OccurrenceOrderChunkIdsPlugin {
- /**
- * @param {OccurrenceOrderChunkIdsPluginOptions=} options options object
- */
- constructor(options = {}) {
- validateOptions(schema, options, "Occurrence Order Chunk Ids Plugin");
- this.options = options;
- }
-
- apply(compiler) {
- const prioritiseInitial = this.options.prioritiseInitial;
- compiler.hooks.compilation.tap(
- "OccurrenceOrderChunkIdsPlugin",
- compilation => {
- compilation.hooks.optimizeChunkOrder.tap(
- "OccurrenceOrderChunkIdsPlugin",
- chunks => {
- const occursInInitialChunksMap = new Map();
- const originalOrder = new Map();
-
- let i = 0;
- for (const c of chunks) {
- let occurs = 0;
- for (const chunkGroup of c.groupsIterable) {
- for (const parent of chunkGroup.parentsIterable) {
- if (parent.isInitial()) occurs++;
- }
- }
- occursInInitialChunksMap.set(c, occurs);
- originalOrder.set(c, i++);
- }
-
- chunks.sort((a, b) => {
- if (prioritiseInitial) {
- const aEntryOccurs = occursInInitialChunksMap.get(a);
- const bEntryOccurs = occursInInitialChunksMap.get(b);
- if (aEntryOccurs > bEntryOccurs) return -1;
- if (aEntryOccurs < bEntryOccurs) return 1;
- }
- const aOccurs = a.getNumberOfGroups();
- const bOccurs = b.getNumberOfGroups();
- if (aOccurs > bOccurs) return -1;
- if (aOccurs < bOccurs) return 1;
- const orgA = originalOrder.get(a);
- const orgB = originalOrder.get(b);
- return orgA - orgB;
- });
- }
- );
- }
- );
- }
-}
-
-module.exports = OccurrenceOrderChunkIdsPlugin;
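A minimal usage sketch, assuming a webpack 4 install (note the class is exported from OccurrenceChunkOrderPlugin.js under the name OccurrenceOrderChunkIdsPlugin); `prioritiseInitial` is the only option used above:

// webpack.config.js (sketch)
const OccurrenceOrderChunkIdsPlugin = require("webpack/lib/optimize/OccurrenceChunkOrderPlugin");

module.exports = {
  plugins: [
    // order chunks so those occurring in the most (initial) chunk groups come first,
    // which lets later id assignment tend to give them the shortest ids
    new OccurrenceOrderChunkIdsPlugin({ prioritiseInitial: true })
  ]
};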
diff --git a/node_modules/webpack/lib/optimize/OccurrenceModuleOrderPlugin.js b/node_modules/webpack/lib/optimize/OccurrenceModuleOrderPlugin.js
deleted file mode 100644
index cb9a3e8..0000000
--- a/node_modules/webpack/lib/optimize/OccurrenceModuleOrderPlugin.js
+++ /dev/null
@@ -1,112 +0,0 @@
-/*
- MIT License http://www.opensource.org/licenses/mit-license.php
- Author Tobias Koppers @sokra
-*/
-"use strict";
-
-const validateOptions = require("schema-utils");
-const schema = require("../../schemas/plugins/optimize/OccurrenceOrderModuleIdsPlugin.json");
-
-/** @typedef {import("../../declarations/plugins/optimize/OccurrenceOrderModuleIdsPlugin").OccurrenceOrderModuleIdsPluginOptions} OccurrenceOrderModuleIdsPluginOptions */
-
-class OccurrenceOrderModuleIdsPlugin {
- /**
- * @param {OccurrenceOrderModuleIdsPluginOptions=} options options object
- */
- constructor(options = {}) {
- validateOptions(schema, options, "Occurrence Order Module Ids Plugin");
- this.options = options;
- }
-
- apply(compiler) {
- const prioritiseInitial = this.options.prioritiseInitial;
- compiler.hooks.compilation.tap(
- "OccurrenceOrderModuleIdsPlugin",
- compilation => {
- compilation.hooks.optimizeModuleOrder.tap(
- "OccurrenceOrderModuleIdsPlugin",
- modules => {
- const occursInInitialChunksMap = new Map();
- const occursInAllChunksMap = new Map();
-
- const initialChunkChunkMap = new Map();
- const entryCountMap = new Map();
- for (const m of modules) {
- let initial = 0;
- let entry = 0;
- for (const c of m.chunksIterable) {
- if (c.canBeInitial()) initial++;
- if (c.entryModule === m) entry++;
- }
- initialChunkChunkMap.set(m, initial);
- entryCountMap.set(m, entry);
- }
-
- const countOccursInEntry = (sum, r) => {
- if (!r.module) {
- return sum;
- }
- const count = initialChunkChunkMap.get(r.module);
- if (!count) {
- return sum;
- }
- return sum + count;
- };
- const countOccurs = (sum, r) => {
- if (!r.module) {
- return sum;
- }
- let factor = 1;
- if (typeof r.dependency.getNumberOfIdOccurrences === "function") {
- factor = r.dependency.getNumberOfIdOccurrences();
- }
- if (factor === 0) {
- return sum;
- }
- return sum + factor * r.module.getNumberOfChunks();
- };
-
- if (prioritiseInitial) {
- for (const m of modules) {
- const result =
- m.reasons.reduce(countOccursInEntry, 0) +
- initialChunkChunkMap.get(m) +
- entryCountMap.get(m);
- occursInInitialChunksMap.set(m, result);
- }
- }
-
- const originalOrder = new Map();
- let i = 0;
- for (const m of modules) {
- const result =
- m.reasons.reduce(countOccurs, 0) +
- m.getNumberOfChunks() +
- entryCountMap.get(m);
- occursInAllChunksMap.set(m, result);
- originalOrder.set(m, i++);
- }
-
- modules.sort((a, b) => {
- if (prioritiseInitial) {
- const aEntryOccurs = occursInInitialChunksMap.get(a);
- const bEntryOccurs = occursInInitialChunksMap.get(b);
- if (aEntryOccurs > bEntryOccurs) return -1;
- if (aEntryOccurs < bEntryOccurs) return 1;
- }
- const aOccurs = occursInAllChunksMap.get(a);
- const bOccurs = occursInAllChunksMap.get(b);
- if (aOccurs > bOccurs) return -1;
- if (aOccurs < bOccurs) return 1;
- const orgA = originalOrder.get(a);
- const orgB = originalOrder.get(b);
- return orgA - orgB;
- });
- }
- );
- }
- );
- }
-}
-
-module.exports = OccurrenceOrderModuleIdsPlugin;
diff --git a/node_modules/webpack/lib/optimize/OccurrenceOrderPlugin.js b/node_modules/webpack/lib/optimize/OccurrenceOrderPlugin.js
deleted file mode 100644
index c73ec8e..0000000
--- a/node_modules/webpack/lib/optimize/OccurrenceOrderPlugin.js
+++ /dev/null
@@ -1,135 +0,0 @@
-/*
- MIT License http://www.opensource.org/licenses/mit-license.php
- Author Tobias Koppers @sokra
-*/
-"use strict";
-
-// TODO webpack 5 remove this plugin
-// It has been split into separate plugins for modules and chunks
-class OccurrenceOrderPlugin {
- constructor(preferEntry) {
- if (preferEntry !== undefined && typeof preferEntry !== "boolean") {
- throw new Error(
- "Argument should be a boolean.\nFor more info on this plugin, see https://webpack.js.org/plugins/"
- );
- }
- this.preferEntry = preferEntry;
- }
- apply(compiler) {
- const preferEntry = this.preferEntry;
- compiler.hooks.compilation.tap("OccurrenceOrderPlugin", compilation => {
- compilation.hooks.optimizeModuleOrder.tap(
- "OccurrenceOrderPlugin",
- modules => {
- const occursInInitialChunksMap = new Map();
- const occursInAllChunksMap = new Map();
-
- const initialChunkChunkMap = new Map();
- const entryCountMap = new Map();
- for (const m of modules) {
- let initial = 0;
- let entry = 0;
- for (const c of m.chunksIterable) {
- if (c.canBeInitial()) initial++;
- if (c.entryModule === m) entry++;
- }
- initialChunkChunkMap.set(m, initial);
- entryCountMap.set(m, entry);
- }
-
- const countOccursInEntry = (sum, r) => {
- if (!r.module) {
- return sum;
- }
- return sum + initialChunkChunkMap.get(r.module);
- };
- const countOccurs = (sum, r) => {
- if (!r.module) {
- return sum;
- }
- let factor = 1;
- if (typeof r.dependency.getNumberOfIdOccurrences === "function") {
- factor = r.dependency.getNumberOfIdOccurrences();
- }
- if (factor === 0) {
- return sum;
- }
- return sum + factor * r.module.getNumberOfChunks();
- };
-
- if (preferEntry) {
- for (const m of modules) {
- const result =
- m.reasons.reduce(countOccursInEntry, 0) +
- initialChunkChunkMap.get(m) +
- entryCountMap.get(m);
- occursInInitialChunksMap.set(m, result);
- }
- }
-
- const originalOrder = new Map();
- let i = 0;
- for (const m of modules) {
- const result =
- m.reasons.reduce(countOccurs, 0) +
- m.getNumberOfChunks() +
- entryCountMap.get(m);
- occursInAllChunksMap.set(m, result);
- originalOrder.set(m, i++);
- }
-
- modules.sort((a, b) => {
- if (preferEntry) {
- const aEntryOccurs = occursInInitialChunksMap.get(a);
- const bEntryOccurs = occursInInitialChunksMap.get(b);
- if (aEntryOccurs > bEntryOccurs) return -1;
- if (aEntryOccurs < bEntryOccurs) return 1;
- }
- const aOccurs = occursInAllChunksMap.get(a);
- const bOccurs = occursInAllChunksMap.get(b);
- if (aOccurs > bOccurs) return -1;
- if (aOccurs < bOccurs) return 1;
- const orgA = originalOrder.get(a);
- const orgB = originalOrder.get(b);
- return orgA - orgB;
- });
- }
- );
- compilation.hooks.optimizeChunkOrder.tap(
- "OccurrenceOrderPlugin",
- chunks => {
- const occursInInitialChunksMap = new Map();
- const originalOrder = new Map();
-
- let i = 0;
- for (const c of chunks) {
- let occurs = 0;
- for (const chunkGroup of c.groupsIterable) {
- for (const parent of chunkGroup.parentsIterable) {
- if (parent.isInitial()) occurs++;
- }
- }
- occursInInitialChunksMap.set(c, occurs);
- originalOrder.set(c, i++);
- }
-
- chunks.sort((a, b) => {
- const aEntryOccurs = occursInInitialChunksMap.get(a);
- const bEntryOccurs = occursInInitialChunksMap.get(b);
- if (aEntryOccurs > bEntryOccurs) return -1;
- if (aEntryOccurs < bEntryOccurs) return 1;
- const aOccurs = a.getNumberOfGroups();
- const bOccurs = b.getNumberOfGroups();
- if (aOccurs > bOccurs) return -1;
- if (aOccurs < bOccurs) return 1;
- const orgA = originalOrder.get(a);
- const orgB = originalOrder.get(b);
- return orgA - orgB;
- });
- }
- );
- });
- }
-}
-
-module.exports = OccurrenceOrderPlugin;
diff --git a/node_modules/webpack/lib/optimize/RemoveEmptyChunksPlugin.js b/node_modules/webpack/lib/optimize/RemoveEmptyChunksPlugin.js
deleted file mode 100644
index 42ba24a..0000000
--- a/node_modules/webpack/lib/optimize/RemoveEmptyChunksPlugin.js
+++ /dev/null
@@ -1,42 +0,0 @@
-/*
- MIT License http://www.opensource.org/licenses/mit-license.php
- Author Tobias Koppers @sokra
-*/
-"use strict";
-
-class RemoveEmptyChunksPlugin {
- apply(compiler) {
- compiler.hooks.compilation.tap("RemoveEmptyChunksPlugin", compilation => {
- const handler = chunks => {
- for (let i = chunks.length - 1; i >= 0; i--) {
- const chunk = chunks[i];
- if (
- chunk.isEmpty() &&
- !chunk.hasRuntime() &&
- !chunk.hasEntryModule()
- ) {
- chunk.remove("empty");
- chunks.splice(i, 1);
- }
- }
- };
- compilation.hooks.optimizeChunksBasic.tap(
- "RemoveEmptyChunksPlugin",
- handler
- );
- compilation.hooks.optimizeChunksAdvanced.tap(
- "RemoveEmptyChunksPlugin",
- handler
- );
- compilation.hooks.optimizeExtractedChunksBasic.tap(
- "RemoveEmptyChunksPlugin",
- handler
- );
- compilation.hooks.optimizeExtractedChunksAdvanced.tap(
- "RemoveEmptyChunksPlugin",
- handler
- );
- });
- }
-}
-module.exports = RemoveEmptyChunksPlugin;
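Like several plugins in this directory, this one is normally wired up internally from the `optimization` options rather than instantiated by hand; a sketch of the relevant flags, assuming webpack 4 option names:

// webpack.config.js (sketch)
module.exports = {
  optimization: {
    removeEmptyChunks: true,      // RemoveEmptyChunksPlugin (above)
    removeAvailableModules: true, // RemoveParentModulesPlugin (below)
    flagIncludedChunks: true,     // FlagIncludedChunksPlugin
    occurrenceOrder: true         // the occurrence-order id plugins
  }
};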
diff --git a/node_modules/webpack/lib/optimize/RemoveParentModulesPlugin.js b/node_modules/webpack/lib/optimize/RemoveParentModulesPlugin.js
deleted file mode 100644
index 7fff592..0000000
--- a/node_modules/webpack/lib/optimize/RemoveParentModulesPlugin.js
+++ /dev/null
@@ -1,127 +0,0 @@
-/*
- MIT License http://www.opensource.org/licenses/mit-license.php
- Author Tobias Koppers @sokra
-*/
-"use strict";
-
-const Queue = require("../util/Queue");
-const { intersect } = require("../util/SetHelpers");
-
-const getParentChunksWithModule = (currentChunk, module) => {
- const chunks = [];
- const stack = new Set(currentChunk.parentsIterable);
-
- for (const chunk of stack) {
- if (chunk.containsModule(module)) {
- chunks.push(chunk);
- } else {
- for (const parent of chunk.parentsIterable) {
- stack.add(parent);
- }
- }
- }
-
- return chunks;
-};
-
-class RemoveParentModulesPlugin {
- apply(compiler) {
- compiler.hooks.compilation.tap("RemoveParentModulesPlugin", compilation => {
- const handler = (chunks, chunkGroups) => {
- const queue = new Queue();
- const availableModulesMap = new WeakMap();
-
- for (const chunkGroup of compilation.entrypoints.values()) {
- // initialize available modules for chunks without parents
- availableModulesMap.set(chunkGroup, new Set());
- for (const child of chunkGroup.childrenIterable) {
- queue.enqueue(child);
- }
- }
-
- while (queue.length > 0) {
- const chunkGroup = queue.dequeue();
- let availableModules = availableModulesMap.get(chunkGroup);
- let changed = false;
- for (const parent of chunkGroup.parentsIterable) {
- const availableModulesInParent = availableModulesMap.get(parent);
- if (availableModulesInParent !== undefined) {
- // If we know the available modules in parent: process these
- if (availableModules === undefined) {
-							// if we have no info of our own yet: create a new entry
- availableModules = new Set(availableModulesInParent);
- for (const chunk of parent.chunks) {
- for (const m of chunk.modulesIterable) {
- availableModules.add(m);
- }
- }
- availableModulesMap.set(chunkGroup, availableModules);
- changed = true;
- } else {
- for (const m of availableModules) {
- if (
- !parent.containsModule(m) &&
- !availableModulesInParent.has(m)
- ) {
- availableModules.delete(m);
- changed = true;
- }
- }
- }
- }
- }
- if (changed) {
- // if something changed: enqueue our children
- for (const child of chunkGroup.childrenIterable) {
- queue.enqueue(child);
- }
- }
- }
-
- // now we have available modules for every chunk
- for (const chunk of chunks) {
- const availableModulesSets = Array.from(
- chunk.groupsIterable,
- chunkGroup => availableModulesMap.get(chunkGroup)
- );
- if (availableModulesSets.some(s => s === undefined)) continue; // No info about this chunk group
- const availableModules =
- availableModulesSets.length === 1
- ? availableModulesSets[0]
- : intersect(availableModulesSets);
- const numberOfModules = chunk.getNumberOfModules();
- const toRemove = new Set();
- if (numberOfModules < availableModules.size) {
- for (const m of chunk.modulesIterable) {
- if (availableModules.has(m)) {
- toRemove.add(m);
- }
- }
- } else {
- for (const m of availableModules) {
- if (chunk.containsModule(m)) {
- toRemove.add(m);
- }
- }
- }
- for (const module of toRemove) {
- module.rewriteChunkInReasons(
- chunk,
- getParentChunksWithModule(chunk, module)
- );
- chunk.removeModule(module);
- }
- }
- };
- compilation.hooks.optimizeChunksBasic.tap(
- "RemoveParentModulesPlugin",
- handler
- );
- compilation.hooks.optimizeExtractedChunksBasic.tap(
- "RemoveParentModulesPlugin",
- handler
- );
- });
- }
-}
-module.exports = RemoveParentModulesPlugin;
diff --git a/node_modules/webpack/lib/optimize/RuntimeChunkPlugin.js b/node_modules/webpack/lib/optimize/RuntimeChunkPlugin.js
deleted file mode 100644
index 1325dfc..0000000
--- a/node_modules/webpack/lib/optimize/RuntimeChunkPlugin.js
+++ /dev/null
@@ -1,41 +0,0 @@
-/*
- MIT License http://www.opensource.org/licenses/mit-license.php
- Author Tobias Koppers @sokra
-*/
-"use strict";
-
-module.exports = class RuntimeChunkPlugin {
- constructor(options) {
- this.options = Object.assign(
- {
- name: entrypoint => `runtime~${entrypoint.name}`
- },
- options
- );
- }
-
- apply(compiler) {
- compiler.hooks.thisCompilation.tap("RuntimeChunkPlugin", compilation => {
- compilation.hooks.optimizeChunksAdvanced.tap("RuntimeChunkPlugin", () => {
- for (const entrypoint of compilation.entrypoints.values()) {
- const chunk = entrypoint.getRuntimeChunk();
- let name = this.options.name;
- if (typeof name === "function") {
- name = name(entrypoint);
- }
- if (
- chunk.getNumberOfModules() > 0 ||
- !chunk.preventIntegration ||
- chunk.name !== name
- ) {
- const newChunk = compilation.addChunk(name);
- newChunk.preventIntegration = true;
- entrypoint.unshiftChunk(newChunk);
- newChunk.addGroup(entrypoint);
- entrypoint.setRuntimeChunk(newChunk);
- }
- }
- });
- });
- }
-};
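A sketch of how the `name` option above is usually supplied, assuming webpack 4's `optimization.runtimeChunk` shorthand; the object form reaches the constructor, where `name` may be a string or a function of the entrypoint:

// webpack.config.js (sketch)
module.exports = {
  optimization: {
    runtimeChunk: {
      // mirrors the default shown in the constructor above
      name: entrypoint => `runtime~${entrypoint.name}`
    }
  }
};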
diff --git a/node_modules/webpack/lib/optimize/SideEffectsFlagPlugin.js b/node_modules/webpack/lib/optimize/SideEffectsFlagPlugin.js
deleted file mode 100644
index 5db780c..0000000
--- a/node_modules/webpack/lib/optimize/SideEffectsFlagPlugin.js
+++ /dev/null
@@ -1,352 +0,0 @@
-/*
- MIT License http://www.opensource.org/licenses/mit-license.php
- Author Tobias Koppers @sokra
-*/
-"use strict";
-
-const mm = require("micromatch");
-const HarmonyExportImportedSpecifierDependency = require("../dependencies/HarmonyExportImportedSpecifierDependency");
-const HarmonyImportSideEffectDependency = require("../dependencies/HarmonyImportSideEffectDependency");
-const HarmonyImportSpecifierDependency = require("../dependencies/HarmonyImportSpecifierDependency");
-
-/** @typedef {import("../Module")} Module */
-/** @typedef {import("../Dependency")} Dependency */
-
-/**
- * @typedef {Object} ExportInModule
- * @property {Module} module the module
- * @property {string} exportName the name of the export
- * @property {boolean} checked if the export is conditional
- */
-
-/**
- * @typedef {Object} ReexportInfo
- * @property {Map<string, ExportInModule[]>} static
- * @property {Map<Module, Set<string>>} dynamic
- */
-
-/**
- * @param {ReexportInfo} info info object
- * @param {string} exportName name of export
- * @returns {ExportInModule | undefined} static export
- */
-const getMappingFromInfo = (info, exportName) => {
- const staticMappings = info.static.get(exportName);
- if (staticMappings !== undefined) {
- if (staticMappings.length === 1) return staticMappings[0];
- return undefined;
- }
- const dynamicMappings = Array.from(info.dynamic).filter(
- ([_, ignored]) => !ignored.has(exportName)
- );
- if (dynamicMappings.length === 1) {
- return {
- module: dynamicMappings[0][0],
- exportName,
- checked: true
- };
- }
- return undefined;
-};
-
-/**
- * @param {ReexportInfo} info info object
- * @param {string} exportName name of export of source module
- * @param {Module} module the target module
- * @param {string} innerExportName name of export of target module
- * @param {boolean} checked true, if existence of target module is checked
- */
-const addStaticReexport = (
- info,
- exportName,
- module,
- innerExportName,
- checked
-) => {
- let mappings = info.static.get(exportName);
- if (mappings !== undefined) {
- for (const mapping of mappings) {
- if (mapping.module === module && mapping.exportName === innerExportName) {
- mapping.checked = mapping.checked && checked;
- return;
- }
- }
- } else {
- mappings = [];
- info.static.set(exportName, mappings);
- }
- mappings.push({
- module,
- exportName: innerExportName,
- checked
- });
-};
-
-/**
- * @param {ReexportInfo} info info object
- * @param {Module} module the reexport module
- * @param {Set<string>} ignored ignore list
- * @returns {void}
- */
-const addDynamicReexport = (info, module, ignored) => {
- const existingList = info.dynamic.get(module);
- if (existingList !== undefined) {
- for (const key of existingList) {
- if (!ignored.has(key)) existingList.delete(key);
- }
- } else {
- info.dynamic.set(module, new Set(ignored));
- }
-};
-
-class SideEffectsFlagPlugin {
- apply(compiler) {
- compiler.hooks.normalModuleFactory.tap("SideEffectsFlagPlugin", nmf => {
- nmf.hooks.module.tap("SideEffectsFlagPlugin", (module, data) => {
- const resolveData = data.resourceResolveData;
- if (
- resolveData &&
- resolveData.descriptionFileData &&
- resolveData.relativePath
- ) {
- const sideEffects = resolveData.descriptionFileData.sideEffects;
- const hasSideEffects = SideEffectsFlagPlugin.moduleHasSideEffects(
- resolveData.relativePath,
- sideEffects
- );
- if (!hasSideEffects) {
- module.factoryMeta.sideEffectFree = true;
- }
- }
-
- return module;
- });
- nmf.hooks.module.tap("SideEffectsFlagPlugin", (module, data) => {
- if (data.settings.sideEffects === false) {
- module.factoryMeta.sideEffectFree = true;
- } else if (data.settings.sideEffects === true) {
- module.factoryMeta.sideEffectFree = false;
- }
- });
- });
- compiler.hooks.compilation.tap("SideEffectsFlagPlugin", compilation => {
- compilation.hooks.optimizeDependencies.tap(
- "SideEffectsFlagPlugin",
- modules => {
- /** @type {Map<Module, ReexportInfo>} */
- const reexportMaps = new Map();
-
- // Capture reexports of sideEffectFree modules
- for (const module of modules) {
- /** @type {Dependency[]} */
- const removeDependencies = [];
- for (const dep of module.dependencies) {
- if (dep instanceof HarmonyImportSideEffectDependency) {
- if (dep.module && dep.module.factoryMeta.sideEffectFree) {
- removeDependencies.push(dep);
- }
- } else if (
- dep instanceof HarmonyExportImportedSpecifierDependency
- ) {
- if (module.factoryMeta.sideEffectFree) {
- const mode = dep.getMode(true);
- if (
- mode.type === "safe-reexport" ||
- mode.type === "checked-reexport" ||
- mode.type === "dynamic-reexport" ||
- mode.type === "reexport-non-harmony-default" ||
- mode.type === "reexport-non-harmony-default-strict" ||
- mode.type === "reexport-named-default"
- ) {
- let info = reexportMaps.get(module);
- if (!info) {
- reexportMaps.set(
- module,
- (info = {
- static: new Map(),
- dynamic: new Map()
- })
- );
- }
- const targetModule = dep._module;
- switch (mode.type) {
- case "safe-reexport":
- for (const [key, id] of mode.map) {
- if (id) {
- addStaticReexport(
- info,
- key,
- targetModule,
- id,
- false
- );
- }
- }
- break;
- case "checked-reexport":
- for (const [key, id] of mode.map) {
- if (id) {
- addStaticReexport(
- info,
- key,
- targetModule,
- id,
- true
- );
- }
- }
- break;
- case "dynamic-reexport":
- addDynamicReexport(info, targetModule, mode.ignored);
- break;
- case "reexport-non-harmony-default":
- case "reexport-non-harmony-default-strict":
- case "reexport-named-default":
- addStaticReexport(
- info,
- mode.name,
- targetModule,
- "default",
- false
- );
- break;
- }
- }
- }
- }
- }
- }
-
- // Flatten reexports
- for (const info of reexportMaps.values()) {
- const dynamicReexports = info.dynamic;
- info.dynamic = new Map();
- for (const reexport of dynamicReexports) {
- let [targetModule, ignored] = reexport;
- for (;;) {
- const innerInfo = reexportMaps.get(targetModule);
- if (!innerInfo) break;
-
- for (const [key, reexports] of innerInfo.static) {
- if (ignored.has(key)) continue;
- for (const { module, exportName, checked } of reexports) {
- addStaticReexport(info, key, module, exportName, checked);
- }
- }
-
- // Follow dynamic reexport if there is only one
- if (innerInfo.dynamic.size !== 1) {
-								// When there is more than one, we don't know which one
- break;
- }
-
- ignored = new Set(ignored);
- for (const [innerModule, innerIgnored] of innerInfo.dynamic) {
- for (const key of innerIgnored) {
- if (ignored.has(key)) continue;
-									// This reexport ends here
- addStaticReexport(info, key, targetModule, key, true);
- ignored.add(key);
- }
- targetModule = innerModule;
- }
- }
-
-							// Update reexport as all other cases have been handled
- addDynamicReexport(info, targetModule, ignored);
- }
- }
-
- for (const info of reexportMaps.values()) {
- const staticReexports = info.static;
- info.static = new Map();
- for (const [key, reexports] of staticReexports) {
- for (let mapping of reexports) {
- for (;;) {
- const innerInfo = reexportMaps.get(mapping.module);
- if (!innerInfo) break;
-
- const newMapping = getMappingFromInfo(
- innerInfo,
- mapping.exportName
- );
- if (!newMapping) break;
- mapping = newMapping;
- }
- addStaticReexport(
- info,
- key,
- mapping.module,
- mapping.exportName,
- mapping.checked
- );
- }
- }
- }
-
- // Update imports along the reexports from sideEffectFree modules
- for (const pair of reexportMaps) {
- const module = pair[0];
- const info = pair[1];
- let newReasons = undefined;
- for (let i = 0; i < module.reasons.length; i++) {
- const reason = module.reasons[i];
- const dep = reason.dependency;
- if (
- (dep instanceof HarmonyExportImportedSpecifierDependency ||
- (dep instanceof HarmonyImportSpecifierDependency &&
- !dep.namespaceObjectAsContext)) &&
- dep._id
- ) {
- const mapping = getMappingFromInfo(info, dep._id);
- if (mapping) {
- dep.redirectedModule = mapping.module;
- dep.redirectedId = mapping.exportName;
- mapping.module.addReason(
- reason.module,
- dep,
- reason.explanation
- ? reason.explanation +
- " (skipped side-effect-free modules)"
- : "(skipped side-effect-free modules)"
- );
-									// remove the current reason by not adding it to the newReasons array
- // lazily create the newReasons array
- if (newReasons === undefined) {
- newReasons = i === 0 ? [] : module.reasons.slice(0, i);
- }
- continue;
- }
- }
- if (newReasons !== undefined) newReasons.push(reason);
- }
- if (newReasons !== undefined) {
- module.reasons = newReasons;
- }
- }
- }
- );
- });
- }
-
- static moduleHasSideEffects(moduleName, flagValue) {
- switch (typeof flagValue) {
- case "undefined":
- return true;
- case "boolean":
- return flagValue;
- case "string":
- if (process.platform === "win32") {
- flagValue = flagValue.replace(/\\/g, "/");
- }
- return mm.isMatch(moduleName, flagValue, {
- matchBase: true
- });
- case "object":
- return flagValue.some(glob =>
- SideEffectsFlagPlugin.moduleHasSideEffects(moduleName, glob)
- );
- }
- }
-}
-module.exports = SideEffectsFlagPlugin;
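The flag values handled by `moduleHasSideEffects` above come from a package's `sideEffects` field in package.json (a boolean, a glob string, or an array of globs matched with micromatch). A small sketch against a hypothetical package, calling the static helper directly:

// assumption: a library whose package.json contains
//   "sideEffects": ["./src/polyfills.js", "*.css"]
const SideEffectsFlagPlugin = require("webpack/lib/optimize/SideEffectsFlagPlugin");

const globs = ["./src/polyfills.js", "*.css"];
console.log(SideEffectsFlagPlugin.moduleHasSideEffects("./src/polyfills.js", globs)); // true, listed explicitly
console.log(SideEffectsFlagPlugin.moduleHasSideEffects("./src/math.js", globs));      // false, so it can be flagged side-effect free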
diff --git a/node_modules/webpack/lib/optimize/SplitChunksPlugin.js b/node_modules/webpack/lib/optimize/SplitChunksPlugin.js
deleted file mode 100644
index 439e407..0000000
--- a/node_modules/webpack/lib/optimize/SplitChunksPlugin.js
+++ /dev/null
@@ -1,968 +0,0 @@
-/*
- MIT License http://www.opensource.org/licenses/mit-license.php
- Author Tobias Koppers @sokra
-*/
-"use strict";
-
-const crypto = require("crypto");
-const SortableSet = require("../util/SortableSet");
-const GraphHelpers = require("../GraphHelpers");
-const { isSubset } = require("../util/SetHelpers");
-const deterministicGrouping = require("../util/deterministicGrouping");
-const MinMaxSizeWarning = require("./MinMaxSizeWarning");
-const contextify = require("../util/identifier").contextify;
-
-/** @typedef {import("../Compiler")} Compiler */
-/** @typedef {import("../Chunk")} Chunk */
-/** @typedef {import("../Module")} Module */
-/** @typedef {import("../util/deterministicGrouping").Options<Module>} DeterministicGroupingOptionsForModule */
-/** @typedef {import("../util/deterministicGrouping").GroupedItems<Module>} DeterministicGroupingGroupedItemsForModule */
-
-const deterministicGroupingForModules = /** @type {function(DeterministicGroupingOptionsForModule): DeterministicGroupingGroupedItemsForModule[]} */ (deterministicGrouping);
-
-const hashFilename = name => {
- return crypto
- .createHash("md4")
- .update(name)
- .digest("hex")
- .slice(0, 8);
-};
-
-const sortByIdentifier = (a, b) => {
- if (a.identifier() > b.identifier()) return 1;
- if (a.identifier() < b.identifier()) return -1;
- return 0;
-};
-
-const getRequests = chunk => {
- let requests = 0;
- for (const chunkGroup of chunk.groupsIterable) {
- requests = Math.max(requests, chunkGroup.chunks.length);
- }
- return requests;
-};
-
-const getModulesSize = modules => {
- let sum = 0;
- for (const m of modules) {
- sum += m.size();
- }
- return sum;
-};
-
-/**
- * @template T
- * @param {Set<T>} a set
- * @param {Set<T>} b other set
- * @returns {boolean} true if at least one item of a is in b
- */
-const isOverlap = (a, b) => {
- for (const item of a) {
- if (b.has(item)) return true;
- }
- return false;
-};
-
-const compareEntries = (a, b) => {
- // 1. by priority
- const diffPriority = a.cacheGroup.priority - b.cacheGroup.priority;
- if (diffPriority) return diffPriority;
- // 2. by number of chunks
- const diffCount = a.chunks.size - b.chunks.size;
- if (diffCount) return diffCount;
- // 3. by size reduction
- const aSizeReduce = a.size * (a.chunks.size - 1);
- const bSizeReduce = b.size * (b.chunks.size - 1);
- const diffSizeReduce = aSizeReduce - bSizeReduce;
- if (diffSizeReduce) return diffSizeReduce;
- // 4. by cache group index
- const indexDiff = a.cacheGroupIndex - b.cacheGroupIndex;
- if (indexDiff) return indexDiff;
- // 5. by number of modules (to be able to compare by identifier)
- const modulesA = a.modules;
- const modulesB = b.modules;
- const diff = modulesA.size - modulesB.size;
- if (diff) return diff;
- // 6. by module identifiers
- modulesA.sort();
- modulesB.sort();
- const aI = modulesA[Symbol.iterator]();
- const bI = modulesB[Symbol.iterator]();
- // eslint-disable-next-line no-constant-condition
- while (true) {
- const aItem = aI.next();
- const bItem = bI.next();
- if (aItem.done) return 0;
- const aModuleIdentifier = aItem.value.identifier();
- const bModuleIdentifier = bItem.value.identifier();
- if (aModuleIdentifier > bModuleIdentifier) return -1;
- if (aModuleIdentifier < bModuleIdentifier) return 1;
- }
-};
-
-const compareNumbers = (a, b) => a - b;
-
-const INITIAL_CHUNK_FILTER = chunk => chunk.canBeInitial();
-const ASYNC_CHUNK_FILTER = chunk => !chunk.canBeInitial();
-const ALL_CHUNK_FILTER = chunk => true;
-
-module.exports = class SplitChunksPlugin {
- constructor(options) {
- this.options = SplitChunksPlugin.normalizeOptions(options);
- }
-
- static normalizeOptions(options = {}) {
- return {
- chunksFilter: SplitChunksPlugin.normalizeChunksFilter(
- options.chunks || "all"
- ),
- minSize: options.minSize || 0,
- enforceSizeThreshold: options.enforceSizeThreshold || 0,
- maxSize: options.maxSize || 0,
- minChunks: options.minChunks || 1,
- maxAsyncRequests: options.maxAsyncRequests || 1,
- maxInitialRequests: options.maxInitialRequests || 1,
- hidePathInfo: options.hidePathInfo || false,
- filename: options.filename || undefined,
- getCacheGroups: SplitChunksPlugin.normalizeCacheGroups({
- cacheGroups: options.cacheGroups,
- name: options.name,
- automaticNameDelimiter: options.automaticNameDelimiter,
- automaticNameMaxLength: options.automaticNameMaxLength
- }),
- automaticNameDelimiter: options.automaticNameDelimiter,
- automaticNameMaxLength: options.automaticNameMaxLength || 109,
- fallbackCacheGroup: SplitChunksPlugin.normalizeFallbackCacheGroup(
- options.fallbackCacheGroup || {},
- options
- )
- };
- }
-
- static normalizeName({
- name,
- automaticNameDelimiter,
- automaticNamePrefix,
- automaticNameMaxLength
- }) {
- if (name === true) {
- /** @type {WeakMap<Chunk[], Record<string, string>>} */
- const cache = new WeakMap();
- const fn = (module, chunks, cacheGroup) => {
- let cacheEntry = cache.get(chunks);
- if (cacheEntry === undefined) {
- cacheEntry = {};
- cache.set(chunks, cacheEntry);
- } else if (cacheGroup in cacheEntry) {
- return cacheEntry[cacheGroup];
- }
- const names = chunks.map(c => c.name);
- if (!names.every(Boolean)) {
- cacheEntry[cacheGroup] = undefined;
- return;
- }
- names.sort();
- const prefix =
- typeof automaticNamePrefix === "string"
- ? automaticNamePrefix
- : cacheGroup;
- const namePrefix = prefix ? prefix + automaticNameDelimiter : "";
- let name = namePrefix + names.join(automaticNameDelimiter);
-				// Filenames and paths can't be too long, otherwise an
-				// ENAMETOOLONG error is raised. If the generated name is too
-				// long, it is truncated and a hash is appended. The limit has
-				// been set to 109 to prevent `[name].[chunkhash].[ext]` from
-				// generating a 256+ character string.
- if (name.length > automaticNameMaxLength) {
- const hashedFilename = hashFilename(name);
- const sliceLength =
- automaticNameMaxLength -
- (automaticNameDelimiter.length + hashedFilename.length);
- name =
- name.slice(0, sliceLength) +
- automaticNameDelimiter +
- hashedFilename;
- }
- cacheEntry[cacheGroup] = name;
- return name;
- };
- return fn;
- }
- if (typeof name === "string") {
- const fn = () => {
- return name;
- };
- return fn;
- }
- if (typeof name === "function") return name;
- }
-
- static normalizeChunksFilter(chunks) {
- if (chunks === "initial") {
- return INITIAL_CHUNK_FILTER;
- }
- if (chunks === "async") {
- return ASYNC_CHUNK_FILTER;
- }
- if (chunks === "all") {
- return ALL_CHUNK_FILTER;
- }
- if (typeof chunks === "function") return chunks;
- }
-
- static normalizeFallbackCacheGroup(
- {
- minSize = undefined,
- maxSize = undefined,
- automaticNameDelimiter = undefined
- },
- {
- minSize: defaultMinSize = undefined,
- maxSize: defaultMaxSize = undefined,
- automaticNameDelimiter: defaultAutomaticNameDelimiter = undefined
- }
- ) {
- return {
- minSize: typeof minSize === "number" ? minSize : defaultMinSize || 0,
- maxSize: typeof maxSize === "number" ? maxSize : defaultMaxSize || 0,
- automaticNameDelimiter:
- automaticNameDelimiter || defaultAutomaticNameDelimiter || "~"
- };
- }
-
- static normalizeCacheGroups({
- cacheGroups,
- name,
- automaticNameDelimiter,
- automaticNameMaxLength
- }) {
- if (typeof cacheGroups === "function") {
- // TODO webpack 5 remove this
- if (cacheGroups.length !== 1) {
- return module => cacheGroups(module, module.getChunks());
- }
- return cacheGroups;
- }
- if (cacheGroups && typeof cacheGroups === "object") {
- const fn = module => {
- let results;
- for (const key of Object.keys(cacheGroups)) {
- let option = cacheGroups[key];
- if (option === false) continue;
- if (option instanceof RegExp || typeof option === "string") {
- option = {
- test: option
- };
- }
- if (typeof option === "function") {
- let result = option(module);
- if (result) {
- if (results === undefined) results = [];
- for (const r of Array.isArray(result) ? result : [result]) {
- const result = Object.assign({ key }, r);
- if (result.name) result.getName = () => result.name;
- if (result.chunks) {
- result.chunksFilter = SplitChunksPlugin.normalizeChunksFilter(
- result.chunks
- );
- }
- results.push(result);
- }
- }
- } else if (SplitChunksPlugin.checkTest(option.test, module)) {
- if (results === undefined) results = [];
- results.push({
- key: key,
- priority: option.priority,
- getName:
- SplitChunksPlugin.normalizeName({
- name: option.name || name,
- automaticNameDelimiter:
- typeof option.automaticNameDelimiter === "string"
- ? option.automaticNameDelimiter
- : automaticNameDelimiter,
- automaticNamePrefix: option.automaticNamePrefix,
- automaticNameMaxLength:
- option.automaticNameMaxLength || automaticNameMaxLength
- }) || (() => {}),
- chunksFilter: SplitChunksPlugin.normalizeChunksFilter(
- option.chunks
- ),
- enforce: option.enforce,
- minSize: option.minSize,
- enforceSizeThreshold: option.enforceSizeThreshold,
- maxSize: option.maxSize,
- minChunks: option.minChunks,
- maxAsyncRequests: option.maxAsyncRequests,
- maxInitialRequests: option.maxInitialRequests,
- filename: option.filename,
- reuseExistingChunk: option.reuseExistingChunk
- });
- }
- }
- return results;
- };
- return fn;
- }
- const fn = () => {};
- return fn;
- }
-
- static checkTest(test, module) {
- if (test === undefined) return true;
- if (typeof test === "function") {
- if (test.length !== 1) {
- return test(module, module.getChunks());
- }
- return test(module);
- }
- if (typeof test === "boolean") return test;
- if (typeof test === "string") {
- if (
- module.nameForCondition &&
- module.nameForCondition().startsWith(test)
- ) {
- return true;
- }
- for (const chunk of module.chunksIterable) {
- if (chunk.name && chunk.name.startsWith(test)) {
- return true;
- }
- }
- return false;
- }
- if (test instanceof RegExp) {
- if (module.nameForCondition && test.test(module.nameForCondition())) {
- return true;
- }
- for (const chunk of module.chunksIterable) {
- if (chunk.name && test.test(chunk.name)) {
- return true;
- }
- }
- return false;
- }
- return false;
- }
-
- /**
- * @param {Compiler} compiler webpack compiler
- * @returns {void}
- */
- apply(compiler) {
- compiler.hooks.thisCompilation.tap("SplitChunksPlugin", compilation => {
- let alreadyOptimized = false;
- compilation.hooks.unseal.tap("SplitChunksPlugin", () => {
- alreadyOptimized = false;
- });
- compilation.hooks.optimizeChunksAdvanced.tap(
- "SplitChunksPlugin",
- chunks => {
- if (alreadyOptimized) return;
- alreadyOptimized = true;
- // Give each selected chunk an index (to create strings from chunks)
- const indexMap = new Map();
- let index = 1;
- for (const chunk of chunks) {
- indexMap.set(chunk, index++);
- }
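-					// Build a stable string key for a collection of chunks from their assigned indexes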
- const getKey = chunks => {
- return Array.from(chunks, c => indexMap.get(c))
- .sort(compareNumbers)
- .join();
- };
- /** @type {Map<string, Set<Chunk>>} */
- const chunkSetsInGraph = new Map();
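-					// Record each distinct set of chunks that at least one module occurs in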
- for (const module of compilation.modules) {
- const chunksKey = getKey(module.chunksIterable);
- if (!chunkSetsInGraph.has(chunksKey)) {
- chunkSetsInGraph.set(chunksKey, new Set(module.chunksIterable));
- }
- }
-
-					// Group these sets of chunks by count
-					// to allow checking fewer sets via isSubset
-					// (only smaller sets can be subsets)
- /** @type {Map<number, Array<Set<Chunk>>>} */
- const chunkSetsByCount = new Map();
- for (const chunksSet of chunkSetsInGraph.values()) {
- const count = chunksSet.size;
- let array = chunkSetsByCount.get(count);
- if (array === undefined) {
- array = [];
- chunkSetsByCount.set(count, array);
- }
- array.push(chunksSet);
- }
-
- // Create a list of possible combinations
- const combinationsCache = new Map(); // Map<string, Set<Chunk>[]>
-
- const getCombinations = key => {
- const chunksSet = chunkSetsInGraph.get(key);
-						const array = [chunksSet];
- if (chunksSet.size > 1) {
- for (const [count, setArray] of chunkSetsByCount) {
-								// "equal" is not needed because they would have been merged in the first step
- if (count < chunksSet.size) {
- for (const set of setArray) {
- if (isSubset(chunksSet, set)) {
- array.push(set);
- }
- }
- }
- }
- }
- return array;
- };
-
- /**
- * @typedef {Object} SelectedChunksResult
- * @property {Chunk[]} chunks the list of chunks
- * @property {string} key a key of the list
- */
-
- /**
- * @typedef {function(Chunk): boolean} ChunkFilterFunction
- */
-
- /** @type {WeakMap<Set<Chunk>, WeakMap<ChunkFilterFunction, SelectedChunksResult>>} */
- const selectedChunksCacheByChunksSet = new WeakMap();
-
- /**
-					 * Get the list and key by applying the filter function to the list of chunks.
-					 * The result is cached for performance reasons.
- * @param {Set<Chunk>} chunks list of chunks
- * @param {ChunkFilterFunction} chunkFilter filter function for chunks
- * @returns {SelectedChunksResult} list and key
- */
- const getSelectedChunks = (chunks, chunkFilter) => {
- let entry = selectedChunksCacheByChunksSet.get(chunks);
- if (entry === undefined) {
- entry = new WeakMap();
- selectedChunksCacheByChunksSet.set(chunks, entry);
- }
- /** @type {SelectedChunksResult} */
- let entry2 = entry.get(chunkFilter);
- if (entry2 === undefined) {
- /** @type {Chunk[]} */
- const selectedChunks = [];
- for (const chunk of chunks) {
- if (chunkFilter(chunk)) selectedChunks.push(chunk);
- }
- entry2 = {
- chunks: selectedChunks,
- key: getKey(selectedChunks)
- };
- entry.set(chunkFilter, entry2);
- }
- return entry2;
- };
-
- /**
- * @typedef {Object} ChunksInfoItem
- * @property {SortableSet} modules
- * @property {TODO} cacheGroup
- * @property {number} cacheGroupIndex
- * @property {string} name
- * @property {number} size
- * @property {Set<Chunk>} chunks
- * @property {Set<Chunk>} reuseableChunks
- * @property {Set<string>} chunksKeys
- */
-
- // Map a list of chunks to a list of modules
- // For the key the chunk "index" is used, the value is a SortableSet of modules
- /** @type {Map<string, ChunksInfoItem>} */
- const chunksInfoMap = new Map();
-
- /**
- * @param {TODO} cacheGroup the current cache group
-					 * @param {number} cacheGroupIndex the index of the cache group for ordering
- * @param {Chunk[]} selectedChunks chunks selected for this module
- * @param {string} selectedChunksKey a key of selectedChunks
- * @param {Module} module the current module
- * @returns {void}
- */
- const addModuleToChunksInfoMap = (
- cacheGroup,
- cacheGroupIndex,
- selectedChunks,
- selectedChunksKey,
- module
- ) => {
-						// Skip if the minimum number of chunks is not reached
- if (selectedChunks.length < cacheGroup.minChunks) return;
- // Determine name for split chunk
- const name = cacheGroup.getName(
- module,
- selectedChunks,
- cacheGroup.key
- );
- // Create key for maps
- // When it has a name we use the name as key
-						// Otherwise we create the key from chunks and cache group key
- // This automatically merges equal names
- const key =
- cacheGroup.key +
- (name ? ` name:${name}` : ` chunks:${selectedChunksKey}`);
- // Add module to maps
- let info = chunksInfoMap.get(key);
- if (info === undefined) {
- chunksInfoMap.set(
- key,
- (info = {
- modules: new SortableSet(undefined, sortByIdentifier),
- cacheGroup,
- cacheGroupIndex,
- name,
- size: 0,
- chunks: new Set(),
- reuseableChunks: new Set(),
- chunksKeys: new Set()
- })
- );
- }
- info.modules.add(module);
- info.size += module.size();
- if (!info.chunksKeys.has(selectedChunksKey)) {
- info.chunksKeys.add(selectedChunksKey);
- for (const chunk of selectedChunks) {
- info.chunks.add(chunk);
- }
- }
- };
-
- // Walk through all modules
- for (const module of compilation.modules) {
-							// Get the cache groups this module belongs to
- let cacheGroups = this.options.getCacheGroups(module);
- if (!Array.isArray(cacheGroups) || cacheGroups.length === 0) {
- continue;
- }
-
- // Prepare some values
- const chunksKey = getKey(module.chunksIterable);
- let combs = combinationsCache.get(chunksKey);
- if (combs === undefined) {
- combs = getCombinations(chunksKey);
- combinationsCache.set(chunksKey, combs);
- }
-
- let cacheGroupIndex = 0;
- for (const cacheGroupSource of cacheGroups) {
- const minSize =
- cacheGroupSource.minSize !== undefined
- ? cacheGroupSource.minSize
- : cacheGroupSource.enforce
- ? 0
- : this.options.minSize;
- const enforceSizeThreshold =
- cacheGroupSource.enforceSizeThreshold !== undefined
- ? cacheGroupSource.enforceSizeThreshold
- : cacheGroupSource.enforce
- ? 0
- : this.options.enforceSizeThreshold;
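-							// Resolve effective options for this cache group: explicit values win,
-							// `enforce` relaxes the size, chunk-count and request limits,
-							// otherwise the global defaults apply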
- const cacheGroup = {
- key: cacheGroupSource.key,
- priority: cacheGroupSource.priority || 0,
- chunksFilter:
- cacheGroupSource.chunksFilter || this.options.chunksFilter,
- minSize,
- minSizeForMaxSize:
- cacheGroupSource.minSize !== undefined
- ? cacheGroupSource.minSize
- : this.options.minSize,
- enforceSizeThreshold,
- maxSize:
- cacheGroupSource.maxSize !== undefined
- ? cacheGroupSource.maxSize
- : cacheGroupSource.enforce
- ? 0
- : this.options.maxSize,
- minChunks:
- cacheGroupSource.minChunks !== undefined
- ? cacheGroupSource.minChunks
- : cacheGroupSource.enforce
- ? 1
- : this.options.minChunks,
- maxAsyncRequests:
- cacheGroupSource.maxAsyncRequests !== undefined
- ? cacheGroupSource.maxAsyncRequests
- : cacheGroupSource.enforce
- ? Infinity
- : this.options.maxAsyncRequests,
- maxInitialRequests:
- cacheGroupSource.maxInitialRequests !== undefined
- ? cacheGroupSource.maxInitialRequests
- : cacheGroupSource.enforce
- ? Infinity
- : this.options.maxInitialRequests,
- getName:
- cacheGroupSource.getName !== undefined
- ? cacheGroupSource.getName
- : this.options.getName,
- filename:
- cacheGroupSource.filename !== undefined
- ? cacheGroupSource.filename
- : this.options.filename,
- automaticNameDelimiter:
- cacheGroupSource.automaticNameDelimiter !== undefined
- ? cacheGroupSource.automaticNameDelimiter
- : this.options.automaticNameDelimiter,
- reuseExistingChunk: cacheGroupSource.reuseExistingChunk,
- _validateSize: minSize > 0,
- _conditionalEnforce: enforceSizeThreshold > 0
- };
-							// For each combination of chunks
- for (const chunkCombination of combs) {
-								// Skip if the minimum number of chunks is not reached
- if (chunkCombination.size < cacheGroup.minChunks) continue;
- // Select chunks by configuration
- const {
- chunks: selectedChunks,
- key: selectedChunksKey
- } = getSelectedChunks(
- chunkCombination,
- cacheGroup.chunksFilter
- );
-
- addModuleToChunksInfoMap(
- cacheGroup,
- cacheGroupIndex,
- selectedChunks,
- selectedChunksKey,
- module
- );
- }
- cacheGroupIndex++;
- }
- }
-
-					// Filter out items where size < minSize
- for (const pair of chunksInfoMap) {
- const info = pair[1];
- if (
- info.cacheGroup._validateSize &&
- info.size < info.cacheGroup.minSize
- ) {
- chunksInfoMap.delete(pair[0]);
- }
- }
-
- /** @type {Map<Chunk, {minSize: number, maxSize: number, automaticNameDelimiter: string, keys: string[]}>} */
- const maxSizeQueueMap = new Map();
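-					// Size constraints recorded per newly created chunk,
-					// consumed by the maxSize enforcement pass further below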
-
- while (chunksInfoMap.size > 0) {
- // Find best matching entry
- let bestEntryKey;
- let bestEntry;
- for (const pair of chunksInfoMap) {
- const key = pair[0];
- const info = pair[1];
- if (bestEntry === undefined) {
- bestEntry = info;
- bestEntryKey = key;
- } else if (compareEntries(bestEntry, info) < 0) {
- bestEntry = info;
- bestEntryKey = key;
- }
- }
-
- const item = bestEntry;
- chunksInfoMap.delete(bestEntryKey);
-
- let chunkName = item.name;
- // Variable for the new chunk (lazy created)
- /** @type {Chunk} */
- let newChunk;
-						// Check whether an existing chunk can be reused instead of creating a new one
- let isReused = false;
- if (item.cacheGroup.reuseExistingChunk) {
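-						// A chunk qualifies for reuse when it contains exactly the selected
-						// modules and has no entry module; shorter (then lexicographically
-						// smaller) names are preferred among candidates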
- outer: for (const chunk of item.chunks) {
- if (chunk.getNumberOfModules() !== item.modules.size) continue;
- if (chunk.hasEntryModule()) continue;
- for (const module of item.modules) {
- if (!chunk.containsModule(module)) continue outer;
- }
- if (!newChunk || !newChunk.name) {
- newChunk = chunk;
- } else if (
- chunk.name &&
- chunk.name.length < newChunk.name.length
- ) {
- newChunk = chunk;
- } else if (
- chunk.name &&
- chunk.name.length === newChunk.name.length &&
- chunk.name < newChunk.name
- ) {
- newChunk = chunk;
- }
- chunkName = undefined;
- isReused = true;
- }
- }
-					// Determine the chunks the modules would be extracted from
-
- const selectedChunks = Array.from(item.chunks).filter(chunk => {
-						// skip chunks that refer to the split chunk itself (by name or identity)
- return (
- (!chunkName || chunk.name !== chunkName) && chunk !== newChunk
- );
- });
-
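-					// Groups at or above enforceSizeThreshold bypass the
-					// maxInitialRequests / maxAsyncRequests checks below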
- const enforced =
- item.cacheGroup._conditionalEnforce &&
- item.size >= item.cacheGroup.enforceSizeThreshold;
-
- // Skip when no chunk selected
- if (selectedChunks.length === 0) continue;
-
- const usedChunks = new Set(selectedChunks);
-
- // Check if maxRequests condition can be fulfilled
- if (
- !enforced &&
- (Number.isFinite(item.cacheGroup.maxInitialRequests) ||
- Number.isFinite(item.cacheGroup.maxAsyncRequests))
- ) {
- for (const chunk of usedChunks) {
- // respect max requests
- const maxRequests = chunk.isOnlyInitial()
- ? item.cacheGroup.maxInitialRequests
- : chunk.canBeInitial()
- ? Math.min(
- item.cacheGroup.maxInitialRequests,
- item.cacheGroup.maxAsyncRequests
- )
- : item.cacheGroup.maxAsyncRequests;
- if (
- isFinite(maxRequests) &&
- getRequests(chunk) >= maxRequests
- ) {
- usedChunks.delete(chunk);
- }
- }
- }
-
- outer: for (const chunk of usedChunks) {
- for (const module of item.modules) {
- if (chunk.containsModule(module)) continue outer;
- }
- usedChunks.delete(chunk);
- }
-
- // Were some (invalid) chunks removed from usedChunks?
-					// => re-add all modules to the queue, as things could have changed
- if (usedChunks.size < selectedChunks.length) {
- if (usedChunks.size >= item.cacheGroup.minChunks) {
- const chunksArr = Array.from(usedChunks);
- for (const module of item.modules) {
- addModuleToChunksInfoMap(
- item.cacheGroup,
- item.cacheGroupIndex,
- chunksArr,
- getKey(usedChunks),
- module
- );
- }
- }
- continue;
- }
-
- // Create the new chunk if not reusing one
- if (!isReused) {
- newChunk = compilation.addChunk(chunkName);
- }
- // Walk through all chunks
- for (const chunk of usedChunks) {
-						// Add graph connections for the split chunk
- chunk.split(newChunk);
- }
-
- // Add a note to the chunk
- newChunk.chunkReason = isReused
- ? "reused as split chunk"
- : "split chunk";
- if (item.cacheGroup.key) {
- newChunk.chunkReason += ` (cache group: ${item.cacheGroup.key})`;
- }
- if (chunkName) {
- newChunk.chunkReason += ` (name: ${chunkName})`;
- // If the chosen name is already an entry point we remove the entry point
- const entrypoint = compilation.entrypoints.get(chunkName);
- if (entrypoint) {
- compilation.entrypoints.delete(chunkName);
- entrypoint.remove();
- newChunk.entryModule = undefined;
- }
- }
- if (item.cacheGroup.filename) {
- if (!newChunk.isOnlyInitial()) {
- throw new Error(
- "SplitChunksPlugin: You are trying to set a filename for a chunk which is (also) loaded on demand. " +
- "The runtime can only handle loading of chunks which match the chunkFilename schema. " +
- "Using a custom filename would fail at runtime. " +
- `(cache group: ${item.cacheGroup.key})`
- );
- }
- newChunk.filenameTemplate = item.cacheGroup.filename;
- }
- if (!isReused) {
- // Add all modules to the new chunk
- for (const module of item.modules) {
- if (typeof module.chunkCondition === "function") {
- if (!module.chunkCondition(newChunk)) continue;
- }
- // Add module to new chunk
- GraphHelpers.connectChunkAndModule(newChunk, module);
- // Remove module from used chunks
- for (const chunk of usedChunks) {
- chunk.removeModule(module);
- module.rewriteChunkInReasons(chunk, [newChunk]);
- }
- }
- } else {
- // Remove all modules from used chunks
- for (const module of item.modules) {
- for (const chunk of usedChunks) {
- chunk.removeModule(module);
- module.rewriteChunkInReasons(chunk, [newChunk]);
- }
- }
- }
-
- if (item.cacheGroup.maxSize > 0) {
- const oldMaxSizeSettings = maxSizeQueueMap.get(newChunk);
- maxSizeQueueMap.set(newChunk, {
- minSize: Math.max(
- oldMaxSizeSettings ? oldMaxSizeSettings.minSize : 0,
- item.cacheGroup.minSizeForMaxSize
- ),
- maxSize: Math.min(
- oldMaxSizeSettings ? oldMaxSizeSettings.maxSize : Infinity,
- item.cacheGroup.maxSize
- ),
- automaticNameDelimiter: item.cacheGroup.automaticNameDelimiter,
- keys: oldMaxSizeSettings
- ? oldMaxSizeSettings.keys.concat(item.cacheGroup.key)
- : [item.cacheGroup.key]
- });
- }
-
- // remove all modules from other entries and update size
- for (const [key, info] of chunksInfoMap) {
- if (isOverlap(info.chunks, usedChunks)) {
- // update modules and total size
- // may remove it from the map when < minSize
- const oldSize = info.modules.size;
- for (const module of item.modules) {
- info.modules.delete(module);
- }
- if (info.modules.size !== oldSize) {
- if (info.modules.size === 0) {
- chunksInfoMap.delete(key);
- continue;
- }
- info.size = getModulesSize(info.modules);
- if (
- info.cacheGroup._validateSize &&
- info.size < info.cacheGroup.minSize
- ) {
- chunksInfoMap.delete(key);
- }
- if (info.modules.size === 0) {
- chunksInfoMap.delete(key);
- }
- }
- }
- }
- }
-
- const incorrectMinMaxSizeSet = new Set();
-
- // Make sure that maxSize is fulfilled
- for (const chunk of compilation.chunks.slice()) {
- const { minSize, maxSize, automaticNameDelimiter, keys } =
- maxSizeQueueMap.get(chunk) || this.options.fallbackCacheGroup;
- if (!maxSize) continue;
- if (minSize > maxSize) {
- const warningKey = `${keys && keys.join()} ${minSize} ${maxSize}`;
- if (!incorrectMinMaxSizeSet.has(warningKey)) {
- incorrectMinMaxSizeSet.add(warningKey);
- compilation.warnings.push(
- new MinMaxSizeWarning(keys, minSize, maxSize)
- );
- }
- }
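-						// Partition the chunk's modules into groups that respect minSize/maxSize
-						// where possible, keyed deterministically from contextified module names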
- const results = deterministicGroupingForModules({
- maxSize: Math.max(minSize, maxSize),
- minSize,
- items: chunk.modulesIterable,
- getKey(module) {
- const ident = contextify(
- compilation.options.context,
- module.identifier()
- );
- const name = module.nameForCondition
- ? contextify(
- compilation.options.context,
- module.nameForCondition()
- )
- : ident.replace(/^.*!|\?[^?!]*$/g, "");
- const fullKey =
- name + automaticNameDelimiter + hashFilename(ident);
- return fullKey.replace(/[\\/?]/g, "_");
- },
- getSize(module) {
- return module.size();
- }
- });
- results.sort((a, b) => {
- if (a.key < b.key) return -1;
- if (a.key > b.key) return 1;
- return 0;
- });
- for (let i = 0; i < results.length; i++) {
- const group = results[i];
- const key = this.options.hidePathInfo
- ? hashFilename(group.key)
- : group.key;
- let name = chunk.name
- ? chunk.name + automaticNameDelimiter + key
- : null;
- if (name && name.length > 100) {
- name =
- name.slice(0, 100) +
- automaticNameDelimiter +
- hashFilename(name);
- }
- let newPart;
- if (i !== results.length - 1) {
- newPart = compilation.addChunk(name);
- chunk.split(newPart);
- newPart.chunkReason = chunk.chunkReason;
- // Add all modules to the new chunk
- for (const module of group.items) {
- if (typeof module.chunkCondition === "function") {
- if (!module.chunkCondition(newPart)) continue;
- }
- // Add module to new chunk
- GraphHelpers.connectChunkAndModule(newPart, module);
- // Remove module from used chunks
- chunk.removeModule(module);
- module.rewriteChunkInReasons(chunk, [newPart]);
- }
- } else {
- // change the chunk to be a part
- newPart = chunk;
- chunk.name = name;
- }
- }
- }
- }
- );
- });
- }
-};