// AggressiveSplittingPlugin.js
  1. /*
  2. MIT License http://www.opensource.org/licenses/mit-license.php
  3. Author Tobias Koppers @sokra
  4. */
  5. "use strict";
  6. const { STAGE_ADVANCED } = require("../OptimizationStages");
  7. const { intersect } = require("../util/SetHelpers");
  8. const {
  9. compareChunks,
  10. compareModulesByIdentifier
  11. } = require("../util/comparators");
  12. const createSchemaValidation = require("../util/create-schema-validation");
  13. const identifierUtils = require("../util/identifier");
  14. /** @typedef {import("../../declarations/plugins/optimize/AggressiveSplittingPlugin").AggressiveSplittingPluginOptions} AggressiveSplittingPluginOptions */
  15. /** @typedef {import("../Chunk")} Chunk */
  16. /** @typedef {import("../ChunkGraph")} ChunkGraph */
  17. /** @typedef {import("../Compiler")} Compiler */
  18. /** @typedef {import("../Module")} Module */
  19. const validate = createSchemaValidation(
  20. require("../../schemas/plugins/optimize/AggressiveSplittingPlugin.check"),
  21. () =>
  22. require("../../schemas/plugins/optimize/AggressiveSplittingPlugin.json"),
  23. {
  24. name: "Aggressive Splitting Plugin",
  25. baseDataPath: "options"
  26. }
  27. );
  28. /**
  29. * @param {ChunkGraph} chunkGraph the chunk graph
  30. * @param {Chunk} oldChunk the old chunk
  31. * @param {Chunk} newChunk the new chunk
  32. * @returns {(module: Module) => void} function to move module between chunks
  33. */
  34. const moveModuleBetween = (chunkGraph, oldChunk, newChunk) => module => {
  35. chunkGraph.disconnectChunkAndModule(oldChunk, module);
  36. chunkGraph.connectChunkAndModule(newChunk, module);
  37. };
  38. /**
  39. * @param {ChunkGraph} chunkGraph the chunk graph
  40. * @param {Chunk} chunk the chunk
  41. * @returns {(module: Module) => boolean} filter for entry module
  42. */
  43. const isNotAEntryModule = (chunkGraph, chunk) => module =>
  44. !chunkGraph.isEntryModuleInChunk(module, chunk);
/** @typedef {{ id?: NonNullable<Chunk["id"]>, hash?: NonNullable<Chunk["hash"]>, modules: Module[], size: number }} SplitData */

// Chunks that were produced (or matched) by an applied split; queried via
// AggressiveSplittingPlugin.wasChunkRecorded (e.g. for stats flags).
// WeakSet so entries are collectable once a chunk is no longer referenced.
/** @type {WeakSet<Chunk>} */
const recordedChunks = new WeakSet();

// Identifier used when tapping compiler/compilation hooks.
const PLUGIN_NAME = "AggressiveSplittingPlugin";
  49. class AggressiveSplittingPlugin {
  50. /**
  51. * @param {AggressiveSplittingPluginOptions=} options options object
  52. */
  53. constructor(options = {}) {
  54. validate(options);
  55. this.options = options;
  56. if (typeof this.options.minSize !== "number") {
  57. this.options.minSize = 30 * 1024;
  58. }
  59. if (typeof this.options.maxSize !== "number") {
  60. this.options.maxSize = 50 * 1024;
  61. }
  62. if (typeof this.options.chunkOverhead !== "number") {
  63. this.options.chunkOverhead = 0;
  64. }
  65. if (typeof this.options.entryChunkMultiplicator !== "number") {
  66. this.options.entryChunkMultiplicator = 1;
  67. }
  68. }
  69. /**
  70. * @param {Chunk} chunk the chunk to test
  71. * @returns {boolean} true if the chunk was recorded
  72. */
  73. static wasChunkRecorded(chunk) {
  74. return recordedChunks.has(chunk);
  75. }
  76. /**
  77. * Apply the plugin
  78. * @param {Compiler} compiler the compiler instance
  79. * @returns {void}
  80. */
  81. apply(compiler) {
  82. compiler.hooks.thisCompilation.tap(PLUGIN_NAME, compilation => {
  83. let needAdditionalSeal = false;
  84. /** @type {SplitData[]} */
  85. let newSplits;
  86. /** @type {Set<Chunk>} */
  87. let fromAggressiveSplittingSet;
  88. /** @type {Map<Chunk, SplitData>} */
  89. let chunkSplitDataMap;
  90. compilation.hooks.optimize.tap(PLUGIN_NAME, () => {
  91. newSplits = [];
  92. fromAggressiveSplittingSet = new Set();
  93. chunkSplitDataMap = new Map();
  94. });
  95. compilation.hooks.optimizeChunks.tap(
  96. {
  97. name: PLUGIN_NAME,
  98. stage: STAGE_ADVANCED
  99. },
  100. chunks => {
  101. const chunkGraph = compilation.chunkGraph;
  102. // Precompute stuff
  103. const nameToModuleMap = new Map();
  104. const moduleToNameMap = new Map();
  105. const makePathsRelative =
  106. identifierUtils.makePathsRelative.bindContextCache(
  107. compiler.context,
  108. compiler.root
  109. );
  110. for (const m of compilation.modules) {
  111. const name = makePathsRelative(m.identifier());
  112. nameToModuleMap.set(name, m);
  113. moduleToNameMap.set(m, name);
  114. }
  115. // Check used chunk ids
  116. const usedIds = new Set();
  117. for (const chunk of chunks) {
  118. usedIds.add(chunk.id);
  119. }
  120. const recordedSplits =
  121. (compilation.records && compilation.records.aggressiveSplits) || [];
  122. const usedSplits = newSplits
  123. ? [...recordedSplits, ...newSplits]
  124. : recordedSplits;
  125. const minSize = /** @type {number} */ (this.options.minSize);
  126. const maxSize = /** @type {number} */ (this.options.maxSize);
  127. /**
  128. * @param {SplitData} splitData split data
  129. * @returns {boolean} true when applied, otherwise false
  130. */
  131. const applySplit = splitData => {
  132. // Cannot split if id is already taken
  133. if (splitData.id !== undefined && usedIds.has(splitData.id)) {
  134. return false;
  135. }
  136. // Get module objects from names
  137. const selectedModules = splitData.modules.map(name =>
  138. nameToModuleMap.get(name)
  139. );
  140. // Does the modules exist at all?
  141. if (!selectedModules.every(Boolean)) return false;
  142. // Check if size matches (faster than waiting for hash)
  143. let size = 0;
  144. for (const m of selectedModules) size += m.size();
  145. if (size !== splitData.size) return false;
  146. // get chunks with all modules
  147. const selectedChunks = intersect(
  148. selectedModules.map(
  149. m => new Set(chunkGraph.getModuleChunksIterable(m))
  150. )
  151. );
  152. // No relevant chunks found
  153. if (selectedChunks.size === 0) return false;
  154. // The found chunk is already the split or similar
  155. if (
  156. selectedChunks.size === 1 &&
  157. chunkGraph.getNumberOfChunkModules([...selectedChunks][0]) ===
  158. selectedModules.length
  159. ) {
  160. const chunk = [...selectedChunks][0];
  161. if (fromAggressiveSplittingSet.has(chunk)) return false;
  162. fromAggressiveSplittingSet.add(chunk);
  163. chunkSplitDataMap.set(chunk, splitData);
  164. return true;
  165. }
  166. // split the chunk into two parts
  167. const newChunk = compilation.addChunk();
  168. newChunk.chunkReason = "aggressive splitted";
  169. for (const chunk of selectedChunks) {
  170. for (const module of selectedModules) {
  171. moveModuleBetween(chunkGraph, chunk, newChunk)(module);
  172. }
  173. chunk.split(newChunk);
  174. chunk.name = null;
  175. }
  176. fromAggressiveSplittingSet.add(newChunk);
  177. chunkSplitDataMap.set(newChunk, splitData);
  178. if (splitData.id !== null && splitData.id !== undefined) {
  179. newChunk.id = splitData.id;
  180. newChunk.ids = [splitData.id];
  181. }
  182. return true;
  183. };
  184. // try to restore to recorded splitting
  185. let changed = false;
  186. for (let j = 0; j < usedSplits.length; j++) {
  187. const splitData = usedSplits[j];
  188. if (applySplit(splitData)) changed = true;
  189. }
  190. // for any chunk which isn't splitted yet, split it and create a new entry
  191. // start with the biggest chunk
  192. const cmpFn = compareChunks(chunkGraph);
  193. const sortedChunks = [...chunks].sort((a, b) => {
  194. const diff1 =
  195. chunkGraph.getChunkModulesSize(b) -
  196. chunkGraph.getChunkModulesSize(a);
  197. if (diff1) return diff1;
  198. const diff2 =
  199. chunkGraph.getNumberOfChunkModules(a) -
  200. chunkGraph.getNumberOfChunkModules(b);
  201. if (diff2) return diff2;
  202. return cmpFn(a, b);
  203. });
  204. for (const chunk of sortedChunks) {
  205. if (fromAggressiveSplittingSet.has(chunk)) continue;
  206. const size = chunkGraph.getChunkModulesSize(chunk);
  207. if (
  208. size > maxSize &&
  209. chunkGraph.getNumberOfChunkModules(chunk) > 1
  210. ) {
  211. const modules = chunkGraph
  212. .getOrderedChunkModules(chunk, compareModulesByIdentifier)
  213. .filter(isNotAEntryModule(chunkGraph, chunk));
  214. const selectedModules = [];
  215. let selectedModulesSize = 0;
  216. for (let k = 0; k < modules.length; k++) {
  217. const module = modules[k];
  218. const newSize = selectedModulesSize + module.size();
  219. if (newSize > maxSize && selectedModulesSize >= minSize) {
  220. break;
  221. }
  222. selectedModulesSize = newSize;
  223. selectedModules.push(module);
  224. }
  225. if (selectedModules.length === 0) continue;
  226. /** @type {SplitData} */
  227. const splitData = {
  228. modules: selectedModules
  229. .map(m => moduleToNameMap.get(m))
  230. .sort(),
  231. size: selectedModulesSize
  232. };
  233. if (applySplit(splitData)) {
  234. newSplits = [...(newSplits || []), splitData];
  235. changed = true;
  236. }
  237. }
  238. }
  239. if (changed) return true;
  240. }
  241. );
  242. compilation.hooks.recordHash.tap(PLUGIN_NAME, records => {
  243. // 4. save made splittings to records
  244. const allSplits = new Set();
  245. /** @type {Set<SplitData>} */
  246. const invalidSplits = new Set();
  247. // Check if some splittings are invalid
  248. // We remove invalid splittings and try again
  249. for (const chunk of compilation.chunks) {
  250. const splitData = chunkSplitDataMap.get(chunk);
  251. if (
  252. splitData !== undefined &&
  253. splitData.hash &&
  254. chunk.hash !== splitData.hash
  255. ) {
  256. // Split was successful, but hash doesn't equal
  257. // We can throw away the split since it's useless now
  258. invalidSplits.add(splitData);
  259. }
  260. }
  261. if (invalidSplits.size > 0) {
  262. records.aggressiveSplits =
  263. /** @type {SplitData[]} */
  264. (records.aggressiveSplits).filter(
  265. splitData => !invalidSplits.has(splitData)
  266. );
  267. needAdditionalSeal = true;
  268. } else {
  269. // set hash and id values on all (new) splittings
  270. for (const chunk of compilation.chunks) {
  271. const splitData = chunkSplitDataMap.get(chunk);
  272. if (splitData !== undefined) {
  273. splitData.hash =
  274. /** @type {NonNullable<Chunk["hash"]>} */
  275. (chunk.hash);
  276. splitData.id =
  277. /** @type {NonNullable<Chunk["id"]>} */
  278. (chunk.id);
  279. allSplits.add(splitData);
  280. // set flag for stats
  281. recordedChunks.add(chunk);
  282. }
  283. }
  284. // Also add all unused historical splits (after the used ones)
  285. // They can still be used in some future compilation
  286. const recordedSplits =
  287. compilation.records && compilation.records.aggressiveSplits;
  288. if (recordedSplits) {
  289. for (const splitData of recordedSplits) {
  290. if (!invalidSplits.has(splitData)) allSplits.add(splitData);
  291. }
  292. }
  293. // record all splits
  294. records.aggressiveSplits = [...allSplits];
  295. needAdditionalSeal = false;
  296. }
  297. });
  298. compilation.hooks.needAdditionalSeal.tap(PLUGIN_NAME, () => {
  299. if (needAdditionalSeal) {
  300. needAdditionalSeal = false;
  301. return true;
  302. }
  303. });
  304. });
  305. }
  306. }
  307. module.exports = AggressiveSplittingPlugin;