path: root/node_modules/webpack/lib/optimize/MergeDuplicateChunksPlugin.js
/*
	MIT License http://www.opensource.org/licenses/mit-license.php
	Author Tobias Koppers @sokra
*/
"use strict";

class MergeDuplicateChunksPlugin {
	apply(compiler) {
		compiler.hooks.compilation.tap(
			"MergeDuplicateChunksPlugin",
			compilation => {
				compilation.hooks.optimizeChunksBasic.tap(
					"MergeDuplicateChunksPlugin",
					chunks => {
						// remember already tested chunks for performance
						const notDuplicates = new Set();

						// for each chunk
						for (const chunk of chunks) {
							// track a Set of all chunks that could be duplicates of the current chunk
							let possibleDuplicates;
							for (const module of chunk.modulesIterable) {
								if (possibleDuplicates === undefined) {
									// when possibleDuplicates is not yet set,
									// create a new Set from chunks of the current module
									// including only chunks with the same number of modules
									for (const dup of module.chunksIterable) {
										if (
											dup !== chunk &&
											chunk.getNumberOfModules() === dup.getNumberOfModules() &&
											!notDuplicates.has(dup)
										) {
											// delay allocating the new Set until here to reduce memory pressure
											if (possibleDuplicates === undefined) {
												possibleDuplicates = new Set();
											}
											possibleDuplicates.add(dup);
										}
									}
									// when no candidate chunk was found we can stop here
									if (possibleDuplicates === undefined) break;
								} else {
									// validate existing possible duplicates
									for (const dup of possibleDuplicates) {
										// remove possible duplicate when module is not contained
										if (!dup.containsModule(module)) {
											possibleDuplicates.delete(dup);
										}
									}
									// when all candidates have been removed we can stop here
									if (possibleDuplicates.size === 0) break;
								}
							}

							// when we found duplicates
							if (
								possibleDuplicates !== undefined &&
								possibleDuplicates.size > 0
							) {
								for (const otherChunk of possibleDuplicates) {
									if (otherChunk.hasRuntime() !== chunk.hasRuntime()) continue;
									// merge them
									if (chunk.integrate(otherChunk, "duplicate")) {
										chunks.splice(chunks.indexOf(otherChunk), 1);
									}
								}
							}

							// don't check already processed chunks twice
							notDuplicates.add(chunk);
						}
					}
				);
			}
		);
	}
}
module.exports = MergeDuplicateChunksPlugin;
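
// Usage sketch (illustrative, not part of the original source): in webpack 4
// this plugin is normally applied automatically when
// `optimization.mergeDuplicateChunks` is enabled (the default), so manual
// registration is rarely needed. Registering it by hand would look roughly
// like the snippet below; the entry file paths are hypothetical placeholders.
//
// // webpack.config.js
// const MergeDuplicateChunksPlugin = require("webpack/lib/optimize/MergeDuplicateChunksPlugin");
//
// module.exports = {
// 	mode: "none",
// 	entry: {
// 		a: "./src/a.js",
// 		b: "./src/b.js"
// 	},
// 	plugins: [new MergeDuplicateChunksPlugin()]
// };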