Last commit July 5th

This commit is contained in:
2024-07-05 13:46:23 +02:00
parent dad0d86e8c
commit b0e4dfbb76
24982 changed files with 2621219 additions and 413 deletions

View File

@@ -0,0 +1,10 @@
'use strict';
var camelcase = require('camelcase');
var getFileExtension = require('./getFileExtension');
module.exports = function getAssetKind(options, asset) {
var ext = getFileExtension(asset);
return camelcase(ext);
};

View File

@@ -0,0 +1,38 @@
'use strict';
// Babel spread helper: return a dense shallow copy of an array, or materialize
// any other iterable / array-like into a fresh array.
function _toConsumableArray(arr) {
  return Array.from(arr);
}
/**
 * Collects a child chunk's assets (and, when enabled, its auxiliary assets),
 * tagging each asset object in place with the given loading behavior.
 *
 * NOTE: mutates the asset objects by setting `loadingBehavior` on them.
 *
 * @param {Object} options - honors options.includeAuxiliaryAssets
 * @param {Object} chunk - child chunk with optional `assets` / `auxiliaryAssets` arrays
 * @param {String} loadingBehavior - 'preload' or 'prefetch'
 * @return {Array} tagged assets
 */
var getAssetsFromChildChunk = function getAssetsFromChildChunk(options, chunk, loadingBehavior) {
  var collected = [];
  var tag = function (asset) {
    asset.loadingBehavior = loadingBehavior;
    collected.push(asset);
  };
  (chunk.assets || []).forEach(tag);
  if (options.includeAuxiliaryAssets) {
    (chunk.auxiliaryAssets || []).forEach(tag);
  }
  return collected;
};
module.exports = function getDynamicImportedChildAssets(options, children) {
var loadingBehaviors = ['preload', 'prefetch'];
var assets = [];
loadingBehaviors.forEach(function (loadingBehavior) {
if (children[loadingBehavior]) {
children[loadingBehavior].forEach(function (childChunk) {
assets = [].concat(_toConsumableArray(assets), _toConsumableArray(getAssetsFromChildChunk(options, childChunk, loadingBehavior)));
});
}
});
return assets;
};

View File

@@ -0,0 +1,8 @@
'use strict';
module.exports = function getFileExtension(asset) {
var extRegex = /\.([0-9a-z]+)(?=[?#])|(\.)(?:[\w]+)$/i;
var ext = asset.match(extRegex);
return ext ? ext[0].slice(1) : '';
};

View File

@@ -0,0 +1,11 @@
'use strict';
var pathTemplate = require('./pathTemplate');
module.exports = function isHMRUpdate(options, asset) {
if (asset.includes('.hot-update.')) return true;
var hotUpdateChunkFilename = options.output.hotUpdateChunkFilename;
var hotUpdateTemplate = pathTemplate(hotUpdateChunkFilename);
return hotUpdateTemplate.matches(asset);
};

View File

@@ -0,0 +1,9 @@
'use strict';
var pathTemplate = require('./pathTemplate');
module.exports = function isSourceMap(options, asset) {
var sourceMapFilename = options.output.sourceMapFilename;
var sourcemapTemplate = pathTemplate(sourceMapFilename);
return sourcemapTemplate.matches(asset);
};

View File

@@ -0,0 +1,93 @@
'use strict';
// Babel spread helper: return a dense shallow copy of an array, or materialize
// any other iterable / array-like into a fresh array.
function _toConsumableArray(arr) {
  return Array.from(arr);
}
// Babel helper: set obj[key] = value. Uses Object.defineProperty (with an
// enumerable, writable data descriptor) when the key already exists on obj,
// plain assignment otherwise. Returns obj for chaining.
function _defineProperty(obj, key, value) {
  if (key in obj) {
    Object.defineProperty(obj, key, {
      value: value,
      enumerable: true,
      configurable: true,
      writable: true
    });
    return obj;
  }
  obj[key] = value;
  return obj;
}
var path = require('path');
var fs = require('fs');
var _ = require('lodash');
var error = require('../utils/error');
/**
 * Splits an assets map into single-key objects and orders them so that the
 * entry keyed 'manifest' (if any) comes first; all other entries keep their
 * relative order.
 *
 * @param {Object} assets - manifest entries keyed by chunk name
 * @return {Array} array of { key: value } singletons, manifest first
 */
function sortAssets(assets) {
  var singletons = Object.keys(assets).map(function (key) {
    // Build { key: value } without any helper; a fresh literal never has the
    // key already, so plain assignment is all that is needed.
    var entry = {};
    entry[key] = assets[key];
    return entry;
  });
  return singletons.sort(function (left, right) {
    if (left.manifest) {
      return -1;
    }
    if (right.manifest) {
      return 1;
    }
    return 0;
  });
}
/**
 * Re-merges the sorted singleton objects into one assets map when
 * options.manifestFirst is set (so the 'manifest' key is inserted first),
 * otherwise returns the map untouched.
 *
 * @param {Object} assets - manifest entries keyed by chunk name
 * @param {Object} options - reads options.manifestFirst
 * @return {Object}
 */
function orderAssets(assets, options) {
  if (!options.manifestFirst) {
    return assets;
  }
  // sortAssets already returns a fresh array, so it can feed apply() directly.
  return Object.assign.apply(Object, [{}].concat(sortAssets(assets)));
}
module.exports = function (options) {
var update = options.update;
var firstRun = true;
options.processOutput = options.processOutput || function (assets) {
return JSON.stringify(assets, null, options.prettyPrint ? 2 : null);
};
return function writeOutput(fileStream, newAssets, next) {
// if options.update is false and we're on the first pass of a (possibly) multicompiler
var overwrite = !update && firstRun;
var localFs = options.keepInMemory ? fileStream : fs;
function mkdirCallback(err) {
if (err) handleMkdirError(err);
var outputPath = options.keepInMemory ? localFs.join(options.path, options.filename) : path.join(options.path, options.filename);
localFs.readFile(outputPath, 'utf8', function (err, data) {
// if file does not exist, just write data to it
if (err && err.code !== 'ENOENT') {
return next(error('Could not read output file ' + outputPath, err));
}
// if options.update is false and we're on first run, so start with empty data
data = overwrite ? '{}' : data || '{}';
var oldAssets = void 0;
try {
oldAssets = JSON.parse(data);
} catch (err) {
oldAssets = {};
}
var assets = orderAssets(_.merge({}, oldAssets, newAssets), options);
var output = options.processOutput(assets);
if (output !== data) {
localFs.writeFile(outputPath, output, function (err) {
if (err) {
return next(error('Unable to write to ' + outputPath, err));
}
firstRun = false;
next();
});
} else {
next();
}
});
}
function handleMkdirError(err) {
return next(error('Could not create output folder ' + options.path, err));
}
if (options.keepInMemory) {
localFs.mkdirp(options.path, mkdirCallback);
} else {
fs.mkdir(options.path, { recursive: true }, mkdirCallback);
}
};
};

View File

@@ -0,0 +1,34 @@
"use strict";
/**
 * Takes in a processor function and returns a writer function that serializes
 * calls to it: only one write is in flight at a time; later writes are queued
 * and executed in FIFO order as each one completes.
 *
 * @param {Function} processor - (fs, data, callback) worker performing one write
 *
 * @return {Function} queuedWriter - (fs, data, callback) enqueues one write job
 */
module.exports = function createQueuedWriter(processor) {
  // Pending jobs; queue[0] is always the job currently being processed.
  var queue = [];
  var iterator = function iterator(callback) {
    return function (err) {
      // The head job just finished: drop it and report its result to the
      // caller that enqueued it.
      queue.shift();
      callback(err);
      // Then kick off the next queued job, if any.
      var next = queue[0];
      if (next) {
        processor(next.fs, next.data, iterator(next.callback));
      }
    };
  };
  return function queuedWriter(fs, data, callback) {
    var empty = !queue.length;
    queue.push({ fs: fs, data: data, callback: callback });
    if (empty) {
      // Queue was idle: start processing immediately. Otherwise the running
      // job's completion handler will pick this entry up in turn.
      processor(fs, data, iterator(callback));
    }
  };
};

View File

@@ -0,0 +1,168 @@
'use strict';
var escapeRegExp = require('escape-string-regexp');
// Matches a verbatim substitution token at the start of the string, e.g. "[name]".
var SIMPLE_PLACEHOLDER_RX = /^\[(id|name|file|query|filebase)]/i;
// Matches "[hash]", "[fullhash]" or "[chunkhash]", with an optional ":<width>" truncation.
var HASH_PLACEHOLDER_RX = /^\[((?:full)?(?:chunk)?hash)(?::(\d+))?]/i;
// Template string -> PathTemplate; repeated lookups reuse the parsed template.
var templateCache = Object.create(null);
module.exports = function createTemplate(str) {
if (!templateCache[str]) {
templateCache[str] = new PathTemplate(str);
}
return templateCache[str];
};
function PathTemplate(template) {
this.template = template;
this.fields = parseTemplate(template);
this.matcher = createTemplateMatcher(this.fields);
}
PathTemplate.prototype = {
constructor: PathTemplate,
/**
* Returns whether the given path matches this template.
*
* @param String data
*/
matches: function matches(path) {
return this.matcher.test(path);
},
/**
* Applies data to this template and outputs a filename.
*
* @param Object data
*/
resolve: function resolve(data) {
return this.fields.reduce(function (output, field) {
var replacement = '';
var placeholder = field.placeholder;
var width = field.width;
if (field.prefix) {
output += field.prefix;
}
if (placeholder) {
replacement = data[placeholder] || '';
if (width && (placeholder === 'hash' || placeholder === 'fullhash' || placeholder === 'chunkhash')) {
replacement = replacement.slice(0, width);
}
output += replacement;
}
return output;
}, '');
}
/**
* Loop over the template string and return an array of objects in the form:
* {
* prefix: 'literal text',
* placeholder: 'replacement field name'
* [, width: maximum hash length for hash & chunkhash placeholders]
* }
*
* The values in the object conceptually represent a span of literal text followed by a single replacement field.
* If there is no literal text (which can happen if two replacement fields occur consecutively),
* then prefix will be an empty string.
* If there is no replacement field, then the value of placeholder will be null.
* If the value of placeholder is either 'hash', 'fullhash', or 'chunkhash', then width will be a positive integer.
* Otherwise it will be left undefined.
*/
};function parseTemplate(str) {
var fields = [];
var char = '';
var pos = 0;
var prefix = '';
var match = null;
var input = void 0;
while (true) {
// eslint-disable-line no-constant-condition
char = str[pos];
if (!char) {
fields.push({
prefix: prefix,
placeholder: null
});
break;
} else if (char === '[') {
input = str.slice(pos);
match = SIMPLE_PLACEHOLDER_RX.exec(input);
if (match) {
fields.push({
prefix: prefix,
placeholder: match[1].toLowerCase()
});
pos += match[0].length;
prefix = '';
continue;
}
match = HASH_PLACEHOLDER_RX.exec(input);
if (match) {
fields.push({
prefix: prefix,
placeholder: match[1].toLowerCase(),
width: parseInt(match[2] || 0, 10)
});
pos += match[0].length;
prefix = '';
continue;
}
}
prefix += char;
pos++;
}
return fields;
}
/**
 * Returns a RegExp which, given the replacement fields returned by parseTemplate(),
 * can match a file path against a path template.
 */
function createTemplateMatcher(fields) {
  // No fields: same always-matching empty pattern the original construction produced.
  if (fields.length === 0) {
    return new RegExp('');
  }
  var pieces = fields.map(function (field) {
    var piece = field.prefix ? '(' + escapeRegExp(field.prefix) + ')' : '';
    switch (field.placeholder) {
      case 'hash':
      case 'fullhash':
      case 'chunkhash':
        // Hex digest, optionally capped at the template's declared width.
        piece += '[0-9a-fA-F]' + (field.width ? '{1,' + field.width + '}' : '+');
        break;
      case 'id':
      case 'name':
      case 'file':
      case 'filebase':
        piece += '.+?';
        break;
      case 'query':
        piece += '(?:\\?.+?)?';
        break;
    }
    return piece;
  });
  return new RegExp('^' + pieces.join('') + '$');
}

View File

@@ -0,0 +1,9 @@
'use strict';
var _ = require('lodash');
module.exports = function pluginError(message, previousError) {
var err = new Error('[AssetsWebpackPlugin] ' + message);
return previousError ? _.assign(err, previousError) : err;
};