You can not select more than 25 topics
Topics must start with a letter or number, can include dashes ('-') and can be up to 35 characters long.
1119 lines
34 KiB
1119 lines
34 KiB
3 years ago
|
/*
|
||
|
MIT License http://www.opensource.org/licenses/mit-license.php
|
||
|
Author Tobias Koppers @sokra
|
||
|
*/
|
||
|
|
||
|
"use strict";
|
||
|
|
||
|
const EventEmitter = require("events");
|
||
|
const { extname, basename } = require("path");
|
||
|
const { URL } = require("url");
|
||
|
const { createGunzip, createBrotliDecompress, createInflate } = require("zlib");
|
||
|
const NormalModule = require("../NormalModule");
|
||
|
const createSchemaValidation = require("../util/create-schema-validation");
|
||
|
const createHash = require("../util/createHash");
|
||
|
const { mkdirp, dirname, join } = require("../util/fs");
|
||
|
const memoize = require("../util/memoize");
|
||
|
|
||
|
/** @typedef {import("../../declarations/plugins/schemes/HttpUriPlugin").HttpUriPluginOptions} HttpUriPluginOptions */
|
||
|
/** @typedef {import("../Compiler")} Compiler */
|
||
|
|
||
|
// Lazily require node's http/https modules (memoized) so they are only
// loaded when an http(s):// resource is actually fetched.
const getHttp = memoize(() => require("http"));

const getHttps = memoize(() => require("https"));
|
||
|
/**
 * Wraps an http/https module so that requests optionally go through an
 * HTTP proxy (via a CONNECT tunnel).
 * @param {typeof import("http") | typeof import("https")} request http or https module
 * @param {string | undefined} proxy proxy server url (or falsy for a direct request)
 * @returns {(url: URL, options: object, callback: Function) => EventEmitter} fetch function; "error" is emitted on the returned emitter
 */
const proxyFetch = (request, proxy) => (url, options, callback) => {
	const eventEmitter = new EventEmitter();
	// Issue the actual GET; when a tunnel socket is provided, reuse it.
	const doRequest = socket =>
		request
			.get(url, { ...options, ...(socket && { socket }) }, callback)
			.on("error", eventEmitter.emit.bind(eventEmitter, "error"));

	if (proxy) {
		const { hostname: host, port } = new URL(proxy);

		// Establish a CONNECT tunnel through the proxy first.
		// The tunnel itself always speaks plain http to the proxy.
		getHttp()
			.request({
				host, // IP address of proxy server
				port, // port of proxy server
				method: "CONNECT",
				path: url.host
			})
			.on("connect", (res, socket) => {
				if (res.statusCode === 200) {
					// connected to proxy server
					doRequest(socket);
				} else {
					// Previously a non-200 CONNECT response was silently ignored,
					// which left the fetch hanging forever. Report it as an error.
					socket.destroy();
					eventEmitter.emit(
						"error",
						new Error(
							`Failed to connect to proxy server "${proxy}": CONNECT request returned status code ${res.statusCode}`
						)
					);
				}
			})
			.on("error", err => {
				eventEmitter.emit(
					"error",
					new Error(
						`Failed to connect to proxy server "${proxy}": ${err.message}`
					)
				);
			})
			.end();
	} else {
		doRequest();
	}

	return eventEmitter;
};
|
||
|
|
||
|
// Module-level queue serializing lockfile writes across compilations:
// while a write is running, later writes are queued here and run one by one.
/** @type {(() => void)[] | undefined} */
let inProgressWrite = undefined;
|
||
|
|
||
|
// Options validator generated from the plugin's JSON schema;
// throws a descriptive error when the options object is invalid.
const validate = createSchemaValidation(
	require("../../schemas/plugins/schemes/HttpUriPlugin.check.js"),
	() => require("../../schemas/plugins/schemes/HttpUriPlugin.json"),
	{
		name: "Http Uri Plugin",
		baseDataPath: "options"
	}
);
|
||
|
|
||
|
/**
 * Turns an arbitrary string into a filesystem-safe path segment:
 * non-alphanumeric characters are stripped from both ends, and every
 * remaining run of disallowed characters collapses to one underscore.
 * @param {string} str input string
 * @returns {string} sanitized string
 */
const toSafePath = str => {
	const trimmed = str.replace(/^[^a-zA-Z0-9]+|[^a-zA-Z0-9]+$/g, "");
	return trimmed.replace(/[^a-zA-Z0-9._-]+/g, "_");
};
|
||
|
|
||
|
/**
 * Computes a subresource-integrity style digest ("sha512-<base64>")
 * over the given content.
 * @param {Buffer | string} content content to hash
 * @returns {string} integrity string
 */
const computeIntegrity = content => {
	const hasher = createHash("sha512");
	hasher.update(content);
	return `sha512-${hasher.digest("base64")}`;
};
|
||
|
|
||
|
/**
 * Checks content against an expected integrity value.
 * The special value "ignore" always passes.
 * @param {Buffer | string} content content to check
 * @param {string} integrity expected integrity string
 * @returns {boolean} true when the content matches (or checking is disabled)
 */
const verifyIntegrity = (content, integrity) =>
	integrity === "ignore" || computeIntegrity(content) === integrity;
|
||
|
|
||
|
/**
 * Parses a comma-separated list of `key=value` and bare-flag items
 * (e.g. a Cache-Control header). Bare flags map to themselves.
 * @param {string} str input
 * @returns {Record<string, string>} parsed
 */
const parseKeyValuePairs = str => {
	/** @type {Record<string, string>} */
	const result = {};
	for (const part of str.split(",")) {
		const eq = part.indexOf("=");
		if (eq < 0) {
			// Flag without a value, e.g. "no-cache"; skip empty items.
			const flag = part.trim();
			if (flag) result[flag] = flag;
		} else {
			result[part.slice(0, eq).trim()] = part.slice(eq + 1).trim();
		}
	}
	return result;
};
|
||
|
|
||
|
/**
 * Derives caching behavior from a Cache-Control header value.
 * @param {string | undefined} cacheControl the Cache-Control header
 * @param {number} requestTime timestamp (ms) when the request was made
 * @returns {{ storeLock: boolean, storeCache: boolean, validUntil: number }} parsed policy
 */
const parseCacheControl = (cacheControl, requestTime) => {
	// storeCache: when false the resource is not stored in cache
	// storeLock: when false the resource is not stored in lockfile cache
	// validUntil: resource is only revalidated after that timestamp and when upgrade is chosen
	let storeCache = true;
	let storeLock = true;
	let validUntil = 0;
	if (cacheControl) {
		const directives = parseKeyValuePairs(cacheControl);
		if (directives["no-cache"]) {
			storeCache = false;
			storeLock = false;
		}
		const maxAge = directives["max-age"];
		if (maxAge && !Number.isNaN(+maxAge)) {
			validUntil = requestTime + +maxAge * 1000;
		}
		if (directives["must-revalidate"]) validUntil = 0;
	}
	return { storeLock, storeCache, validUntil };
};
|
||
|
|
||
|
/**
 * @typedef {Object} LockfileEntry
 * @property {string} resolved
 * @property {string} integrity
 * @property {string} contentType
 */

/**
 * Compares two lockfile entries field by field.
 * @param {LockfileEntry} a first entry
 * @param {LockfileEntry} b second entry
 * @returns {boolean} true when all three fields match
 */
const areLockfileEntriesEqual = (a, b) =>
	a.resolved === b.resolved &&
	a.integrity === b.integrity &&
	a.contentType === b.contentType;
|
||
|
|
||
|
/**
 * Renders a lockfile entry for use in log/error messages.
 * @param {LockfileEntry} entry the entry
 * @returns {string} human-readable representation
 */
const entryToString = entry =>
	`resolved: ${entry.resolved}, integrity: ${entry.integrity}, contentType: ${entry.contentType}`;
|
||
|
|
||
|
/**
 * In-memory representation of the webpack.lock file: a versioned map
 * from url to either a lockfile entry or one of the string markers
 * "ignore" / "no-cache".
 */
class Lockfile {
	constructor() {
		// Serialized format version; only version 1 is supported.
		this.version = 1;
		/** @type {Map<string, LockfileEntry | "ignore" | "no-cache">} */
		this.entries = new Map();
	}

	/**
	 * Parses serialized lockfile content.
	 * @param {string} content serialized lockfile (JSON)
	 * @returns {Lockfile} parsed lockfile
	 * @throws {Error} on an unsupported version
	 */
	static parse(content) {
		// TODO handle merge conflicts
		const data = JSON.parse(content);
		if (data.version !== 1)
			throw new Error(`Unsupported lockfile version ${data.version}`);
		const lockfile = new Lockfile();
		for (const [key, value] of Object.entries(data)) {
			if (key === "version") continue;
			// String markers are stored as-is; entry objects get their url
			// as the default "resolved" value (omitted when serialized).
			lockfile.entries.set(
				key,
				typeof value === "string" ? value : { resolved: key, ...value }
			);
		}
		return lockfile;
	}

	/**
	 * Serializes the lockfile deterministically (entries sorted by url,
	 * "version" last) so diffs stay minimal.
	 * @returns {string} serialized lockfile (JSON)
	 */
	toString() {
		const sortedEntries = [...this.entries].sort(([a], [b]) =>
			a < b ? -1 : 1
		);
		const lines = sortedEntries.map(([key, entry]) => {
			if (typeof entry === "string") {
				return ` ${JSON.stringify(key)}: ${JSON.stringify(entry)},\n`;
			}
			// "resolved" is only written when it differs from the key.
			const resolvedPart =
				entry.resolved !== key
					? `"resolved": ${JSON.stringify(entry.resolved)}, `
					: "";
			return ` ${JSON.stringify(key)}: { ${resolvedPart}"integrity": ${JSON.stringify(
				entry.integrity
			)}, "contentType": ${JSON.stringify(entry.contentType)} },\n`;
		});
		return `{\n${lines.join("")} "version": ${this.version}\n}\n`;
	}
}
|
||
|
|
||
|
/**
 * Memoizes an async operation that takes no arguments. The wrapped
 * function runs at most once; all callers receive the same error or
 * result. Callers arriving while the first run is still in flight are
 * queued and answered when it settles.
 * @template R
 * @param {function(function(Error=, R=): void): void} fn function
 * @returns {function(function((Error | null)=, R=): void): void} cached function
 */
const cachedWithoutKey = fn => {
	let started = false;
	/** @type {Error | undefined} */
	let failure = undefined;
	/** @type {R | undefined} */
	let value = undefined;
	/** @type {(function(Error=, R=): void)[] | undefined} */
	let waiting = undefined;
	return callback => {
		if (started) {
			// Settled: answer immediately from the cached outcome.
			if (value !== undefined) return callback(null, value);
			if (failure !== undefined) return callback(failure);
			// Still in flight: queue the callback.
			if (waiting === undefined) waiting = [callback];
			else waiting.push(callback);
			return;
		}
		started = true;
		fn((err, result) => {
			if (err) failure = err;
			else value = result;
			const queued = waiting;
			waiting = undefined;
			// The initiating caller is answered first, then the queue.
			callback(err, result);
			if (queued !== undefined) for (const cb of queued) cb(err, result);
		});
	};
};
|
||
|
|
||
|
/**
 * Memoizes an async operation keyed by its single argument. The normal
 * path answers from the cache; `.force` bypasses non-forced cache
 * entries, re-runs (with `forceFn`) and overwrites the cache entry.
 * Callers arriving while a request is in flight are queued on its entry.
 *
 * Refactored: the entry-answering and request-starting logic, previously
 * duplicated between the normal and the force path, is shared.
 * @template T
 * @template R
 * @param {function(T, function(Error=, R=): void): void} fn function
 * @param {function(T, function(Error=, R=): void): void=} forceFn function for the second try
 * @returns {(function(T, function((Error | null)=, R=): void): void) & { force: function(T, function((Error | null)=, R=): void): void }} cached function
 */
const cachedWithKey = (fn, forceFn = fn) => {
	/** @typedef {{ result?: R, error?: Error, callbacks?: (function((Error | null)=, R=): void)[], force?: true }} CacheEntry */
	/** @type {Map<T, CacheEntry>} */
	const cache = new Map();
	/**
	 * Answers a callback from an existing entry: settled entries answer
	 * immediately, in-flight entries queue the callback.
	 * @param {CacheEntry} cacheEntry the entry
	 * @param {function((Error | null)=, R=): void} callback callback
	 * @returns {void}
	 */
	const answerFromEntry = (cacheEntry, callback) => {
		if (cacheEntry.result !== undefined)
			return callback(null, cacheEntry.result);
		if (cacheEntry.error !== undefined) return callback(cacheEntry.error);
		if (cacheEntry.callbacks === undefined) cacheEntry.callbacks = [callback];
		else cacheEntry.callbacks.push(callback);
	};
	/**
	 * Creates a (possibly forced) cache entry, runs the request and
	 * dispatches the outcome to the initiating caller and all queued
	 * callbacks.
	 * @param {T} arg cache key
	 * @param {function(T, function(Error=, R=): void): void} requestFn function to run
	 * @param {boolean} force whether the new entry is marked as forced
	 * @param {function((Error | null)=, R=): void} callback callback
	 * @returns {void}
	 */
	const startRequest = (arg, requestFn, force, callback) => {
		/** @type {CacheEntry} */
		const newCacheEntry = force
			? { result: undefined, error: undefined, callbacks: undefined, force: true }
			: { result: undefined, error: undefined, callbacks: undefined };
		cache.set(arg, newCacheEntry);
		requestFn(arg, (err, result) => {
			if (err) newCacheEntry.error = err;
			else newCacheEntry.result = result;
			const callbacks = newCacheEntry.callbacks;
			newCacheEntry.callbacks = undefined;
			callback(err, result);
			if (callbacks !== undefined) for (const cb of callbacks) cb(err, result);
		});
	};
	const resultFn = (arg, callback) => {
		const cacheEntry = cache.get(arg);
		if (cacheEntry !== undefined) return answerFromEntry(cacheEntry, callback);
		startRequest(arg, fn, false, callback);
	};
	resultFn.force = (arg, callback) => {
		const cacheEntry = cache.get(arg);
		// Only entries produced by a forced request satisfy a forced lookup.
		if (cacheEntry !== undefined && cacheEntry.force)
			return answerFromEntry(cacheEntry, callback);
		startRequest(arg, forceFn, true, callback);
	};
	return resultFn;
};
|
||
|
|
||
|
/**
 * Plugin that resolves http(s):// imports: results are pinned in a
 * lockfile (with integrity hashes), optionally mirrored into a lockfile
 * cache directory, and revalidated/upgraded according to the options.
 */
class HttpUriPlugin {
	/**
	 * @param {HttpUriPluginOptions} options options
	 */
	constructor(options) {
		validate(options);
		this._lockfileLocation = options.lockfileLocation;
		this._cacheLocation = options.cacheLocation;
		this._upgrade = options.upgrade;
		this._frozen = options.frozen;
		this._allowedUris = options.allowedUris;
		this._proxy = options.proxy;
	}

	/**
	 * Apply the plugin
	 * @param {Compiler} compiler the compiler instance
	 * @returns {void}
	 */
	apply(compiler) {
		// Proxy from options, falling back to the conventional env variables.
		const proxy =
			this._proxy || process.env["http_proxy"] || process.env["HTTP_PROXY"];
		const schemes = [
			{
				scheme: "http",
				fetch: proxyFetch(getHttp(), proxy)
			},
			{
				scheme: "https",
				fetch: proxyFetch(getHttps(), proxy)
			}
		];
		// Caches the parsed lockfile together with a filesystem snapshot,
		// reused across compilations while the snapshot stays valid.
		let lockfileCache;
		compiler.hooks.compilation.tap(
			"HttpUriPlugin",
			(compilation, { normalModuleFactory }) => {
				const intermediateFs = compiler.intermediateFileSystem;
				const fs = compilation.inputFileSystem;
				const cache = compilation.getCache("webpack.HttpUriPlugin");
				const logger = compilation.getLogger("webpack.HttpUriPlugin");
				// Default lockfile path is derived from the compiler context/name.
				const lockfileLocation =
					this._lockfileLocation ||
					join(
						intermediateFs,
						compiler.context,
						compiler.name
							? `${toSafePath(compiler.name)}.webpack.lock`
							: "webpack.lock"
					);
				// cacheLocation may be explicitly false to disable the content cache.
				const cacheLocation =
					this._cacheLocation !== undefined
						? this._cacheLocation
						: lockfileLocation + ".data";
				const upgrade = this._upgrade || false;
				const frozen = this._frozen || false;
				const hashFunction = "sha512";
				const hashDigest = "hex";
				const hashDigestLength = 20;
				const allowedUris = this._allowedUris;

				// The EOL warning is only emitted once per compilation.
				let warnedAboutEol = false;

				const cacheKeyCache = new Map();
				/**
				 * Memoized wrapper around _getCacheKey.
				 * @param {string} url the url
				 * @returns {string} the key
				 */
				const getCacheKey = url => {
					const cachedResult = cacheKeyCache.get(url);
					if (cachedResult !== undefined) return cachedResult;
					const result = _getCacheKey(url);
					cacheKeyCache.set(url, result);
					return result;
				};

				/**
				 * Maps a url to a readable, collision-resistant relative path
				 * inside the lockfile cache: sanitized origin folder, sanitized
				 * (truncated) name+query, a short hash of the full url, and the
				 * original extension.
				 * @param {string} url the url
				 * @returns {string} the key
				 */
				const _getCacheKey = url => {
					const parsedUrl = new URL(url);
					const folder = toSafePath(parsedUrl.origin);
					const name = toSafePath(parsedUrl.pathname);
					const query = toSafePath(parsedUrl.search);
					let ext = extname(name);
					// Overly long "extensions" are most likely not real extensions.
					if (ext.length > 20) ext = "";
					const basename = ext ? name.slice(0, -ext.length) : name;
					const hash = createHash(hashFunction);
					hash.update(url);
					const digest = hash.digest(hashDigest).slice(0, hashDigestLength);
					return `${folder.slice(-50)}/${`${basename}${
						query ? `_${query}` : ""
					}`.slice(0, 150)}_${digest}${ext}`;
				};

				// Reads and parses the lockfile once per compilation; reuses the
				// previous compilation's result while its snapshot is still valid.
				const getLockfile = cachedWithoutKey(
					/**
					 * @param {function((Error | null)=, Lockfile=): void} callback callback
					 * @returns {void}
					 */
					callback => {
						const readLockfile = () => {
							intermediateFs.readFile(lockfileLocation, (err, buffer) => {
								if (err && err.code !== "ENOENT") {
									compilation.missingDependencies.add(lockfileLocation);
									return callback(err);
								}
								compilation.fileDependencies.add(lockfileLocation);
								// Snapshot the lockfile (as file or missing dependency)
								// so the cached parse can be invalidated later.
								compilation.fileSystemInfo.createSnapshot(
									compiler.fsStartTime,
									buffer ? [lockfileLocation] : [],
									[],
									buffer ? [] : [lockfileLocation],
									{ timestamp: true },
									(err, snapshot) => {
										if (err) return callback(err);
										const lockfile = buffer
											? Lockfile.parse(buffer.toString("utf-8"))
											: new Lockfile();
										lockfileCache = {
											lockfile,
											snapshot
										};
										callback(null, lockfile);
									}
								);
							});
						};
						if (lockfileCache) {
							compilation.fileSystemInfo.checkSnapshotValid(
								lockfileCache.snapshot,
								(err, valid) => {
									if (err) return callback(err);
									if (!valid) return readLockfile();
									callback(null, lockfileCache.lockfile);
								}
							);
						} else {
							readLockfile();
						}
					}
				);

				/** @type {Map<string, LockfileEntry | "ignore" | "no-cache"> | undefined} */
				let lockfileUpdates = undefined;
				// Records an entry change both in the in-memory lockfile and in the
				// pending-updates map (written out in finishModules), and logs a
				// message describing what changed.
				const storeLockEntry = (lockfile, url, entry) => {
					const oldEntry = lockfile.entries.get(url);
					if (lockfileUpdates === undefined) lockfileUpdates = new Map();
					lockfileUpdates.set(url, entry);
					lockfile.entries.set(url, entry);
					if (!oldEntry) {
						logger.log(`${url} added to lockfile`);
					} else if (typeof oldEntry === "string") {
						if (typeof entry === "string") {
							logger.log(`${url} updated in lockfile: ${oldEntry} -> ${entry}`);
						} else {
							logger.log(
								`${url} updated in lockfile: ${oldEntry} -> ${entry.resolved}`
							);
						}
					} else if (typeof entry === "string") {
						logger.log(
							`${url} updated in lockfile: ${oldEntry.resolved} -> ${entry}`
						);
					} else if (oldEntry.resolved !== entry.resolved) {
						logger.log(
							`${url} updated in lockfile: ${oldEntry.resolved} -> ${entry.resolved}`
						);
					} else if (oldEntry.integrity !== entry.integrity) {
						logger.log(`${url} updated in lockfile: content changed`);
					} else if (oldEntry.contentType !== entry.contentType) {
						logger.log(
							`${url} updated in lockfile: ${oldEntry.contentType} -> ${entry.contentType}`
						);
					} else {
						logger.log(`${url} updated in lockfile`);
					}
				};

				// Stores a fetch result: lockfile entry (or "no-cache" marker) plus,
				// when a cache location is configured, the content on disk.
				const storeResult = (lockfile, url, result, callback) => {
					if (result.storeLock) {
						storeLockEntry(lockfile, url, result.entry);
						if (!cacheLocation || !result.content)
							return callback(null, result);
						const key = getCacheKey(result.entry.resolved);
						const filePath = join(intermediateFs, cacheLocation, key);
						mkdirp(intermediateFs, dirname(intermediateFs, filePath), err => {
							if (err) return callback(err);
							intermediateFs.writeFile(filePath, result.content, err => {
								if (err) return callback(err);
								callback(null, result);
							});
						});
					} else {
						storeLockEntry(lockfile, url, "no-cache");
						callback(null, result);
					}
				};

				for (const { scheme, fetch } of schemes) {
					/**
					 * Resolves final content for an url: follows redirects
					 * (recursively) and force-refetches when a non-fresh result
					 * fails the expected integrity check.
					 * @param {string} url URL
					 * @param {string} integrity integrity
					 * @param {function((Error | null)=, { entry: LockfileEntry, content: Buffer, storeLock: boolean }=): void} callback callback
					 */
					const resolveContent = (url, integrity, callback) => {
						const handleResult = (err, result) => {
							if (err) return callback(err);
							if ("location" in result) {
								// Redirect: resolve the target; a redirect marked
								// no-cache taints the whole chain's storeLock.
								return resolveContent(
									result.location,
									integrity,
									(err, innerResult) => {
										if (err) return callback(err);
										callback(null, {
											entry: innerResult.entry,
											content: innerResult.content,
											storeLock: innerResult.storeLock && result.storeLock
										});
									}
								);
							} else {
								if (
									!result.fresh &&
									integrity &&
									result.entry.integrity !== integrity &&
									!verifyIntegrity(result.content, integrity)
								) {
									// Cached content no longer matches: bypass the cache.
									return fetchContent.force(url, handleResult);
								}
								return callback(null, {
									entry: result.entry,
									content: result.content,
									storeLock: result.storeLock
								});
							}
						};
						fetchContent(url, handleResult);
					};

					/** @typedef {{ storeCache: boolean, storeLock: boolean, validUntil: number, etag: string | undefined, fresh: boolean }} FetchResultMeta */
					/** @typedef {FetchResultMeta & { location: string }} RedirectFetchResult */
					/** @typedef {FetchResultMeta & { entry: LockfileEntry, content: Buffer }} ContentFetchResult */
					/** @typedef {RedirectFetchResult | ContentFetchResult} FetchResult */

					/**
					 * Performs the actual (conditional) HTTP GET, handling 304,
					 * redirects, content decoding and persistent-cache storage.
					 * @param {string} url URL
					 * @param {FetchResult | RedirectFetchResult} cachedResult result from cache
					 * @param {function((Error | null)=, FetchResult=): void} callback callback
					 * @returns {void}
					 */
					const fetchContentRaw = (url, cachedResult, callback) => {
						const requestTime = Date.now();
						fetch(
							new URL(url),
							{
								headers: {
									"accept-encoding": "gzip, deflate, br",
									"user-agent": "webpack",
									// Conditional request when an etag is known.
									"if-none-match": cachedResult
										? cachedResult.etag || null
										: null
								}
							},
							res => {
								const etag = res.headers["etag"];
								const location = res.headers["location"];
								const cacheControl = res.headers["cache-control"];
								const { storeLock, storeCache, validUntil } = parseCacheControl(
									cacheControl,
									requestTime
								);
								/**
								 * Completes the fetch: logs, attaches the cache metadata
								 * and (when allowed) stores the result in the persistent
								 * cache with fresh:false before answering with fresh:true.
								 * @param {Partial<Pick<FetchResultMeta, "fresh">> & (Pick<RedirectFetchResult, "location"> | Pick<ContentFetchResult, "content" | "entry">)} partialResult result
								 * @returns {void}
								 */
								const finishWith = partialResult => {
									if ("location" in partialResult) {
										logger.debug(
											`GET ${url} [${res.statusCode}] -> ${partialResult.location}`
										);
									} else {
										logger.debug(
											`GET ${url} [${res.statusCode}] ${Math.ceil(
												partialResult.content.length / 1024
											)} kB${!storeLock ? " no-cache" : ""}`
										);
									}
									const result = {
										...partialResult,
										fresh: true,
										storeLock,
										storeCache,
										validUntil,
										etag
									};
									if (!storeCache) {
										logger.log(
											`${url} can't be stored in cache, due to Cache-Control header: ${cacheControl}`
										);
										return callback(null, result);
									}
									cache.store(
										url,
										null,
										{
											...result,
											fresh: false
										},
										err => {
											if (err) {
												// Cache store failures are non-fatal.
												logger.warn(
													`${url} can't be stored in cache: ${err.message}`
												);
												logger.debug(err.stack);
											}
											callback(null, result);
										}
									);
								};
								if (res.statusCode === 304) {
									// Not modified: refresh metadata when it changed,
									// otherwise just mark the cached result fresh.
									if (
										cachedResult.validUntil < validUntil ||
										cachedResult.storeLock !== storeLock ||
										cachedResult.storeCache !== storeCache ||
										cachedResult.etag !== etag
									) {
										return finishWith(cachedResult);
									} else {
										logger.debug(`GET ${url} [${res.statusCode}] (unchanged)`);
										return callback(null, {
											...cachedResult,
											fresh: true
										});
									}
								}
								if (
									location &&
									res.statusCode >= 301 &&
									res.statusCode <= 308
								) {
									// Redirect result (relative locations resolved to absolute).
									const result = {
										location: new URL(location, url).href
									};
									if (
										!cachedResult ||
										!("location" in cachedResult) ||
										cachedResult.location !== result.location ||
										cachedResult.validUntil < validUntil ||
										cachedResult.storeLock !== storeLock ||
										cachedResult.storeCache !== storeCache ||
										cachedResult.etag !== etag
									) {
										return finishWith(result);
									} else {
										logger.debug(`GET ${url} [${res.statusCode}] (unchanged)`);
										return callback(null, {
											...result,
											fresh: true,
											storeLock,
											storeCache,
											validUntil,
											etag
										});
									}
								}
								const contentType = res.headers["content-type"] || "";
								const bufferArr = [];

								// Transparently decode the negotiated content encoding.
								const contentEncoding = res.headers["content-encoding"];
								let stream = res;
								if (contentEncoding === "gzip") {
									stream = stream.pipe(createGunzip());
								} else if (contentEncoding === "br") {
									stream = stream.pipe(createBrotliDecompress());
								} else if (contentEncoding === "deflate") {
									stream = stream.pipe(createInflate());
								}

								stream.on("data", chunk => {
									bufferArr.push(chunk);
								});

								stream.on("end", () => {
									if (!res.complete) {
										logger.log(`GET ${url} [${res.statusCode}] (terminated)`);
										return callback(new Error(`${url} request was terminated`));
									}

									const content = Buffer.concat(bufferArr);

									if (res.statusCode !== 200) {
										logger.log(`GET ${url} [${res.statusCode}]`);
										return callback(
											new Error(
												`${url} request status code = ${
													res.statusCode
												}\n${content.toString("utf-8")}`
											)
										);
									}

									const integrity = computeIntegrity(content);
									const entry = { resolved: url, integrity, contentType };

									finishWith({
										entry,
										content
									});
								});
							}
						).on("error", err => {
							logger.log(`GET ${url} (error)`);
							err.message += `\nwhile fetching ${url}`;
							callback(err);
						});
					};

					// Per-url memoized fetch; the force path skips the persistent
					// cache lookup entirely.
					const fetchContent = cachedWithKey(
						/**
						 * @param {string} url URL
						 * @param {function((Error | null)=, { validUntil: number, etag?: string, entry: LockfileEntry, content: Buffer, fresh: boolean } | { validUntil: number, etag?: string, location: string, fresh: boolean }=): void} callback callback
						 * @returns {void}
						 */ (url, callback) => {
							cache.get(url, null, (err, cachedResult) => {
								if (err) return callback(err);
								if (cachedResult) {
									// Within the freshness window no request is made at all.
									const isValid = cachedResult.validUntil >= Date.now();
									if (isValid) return callback(null, cachedResult);
								}
								fetchContentRaw(url, cachedResult, callback);
							});
						},
						(url, callback) => fetchContentRaw(url, undefined, callback)
					);

					// Checks the uri against allowedUris entries, which may be
					// string prefixes, predicate functions or regular expressions.
					const isAllowed = uri => {
						for (const allowed of allowedUris) {
							if (typeof allowed === "string") {
								if (uri.startsWith(allowed)) return true;
							} else if (typeof allowed === "function") {
								if (allowed(uri)) return true;
							} else {
								if (allowed.test(uri)) return true;
							}
						}
						return false;
					};

					// Central entry point: resolves a url to { entry, content },
					// enforcing the allowedUris policy and the lockfile's
					// frozen/upgrade semantics.
					const getInfo = cachedWithKey(
						/**
						 * @param {string} url the url
						 * @param {function((Error | null)=, { entry: LockfileEntry, content: Buffer }=): void} callback callback
						 * @returns {void}
						 */
						(url, callback) => {
							if (!isAllowed(url)) {
								return callback(
									new Error(
										`${url} doesn't match the allowedUris policy. These URIs are allowed:\n${allowedUris
											.map(uri => ` - ${uri}`)
											.join("\n")}`
									)
								);
							}
							getLockfile((err, lockfile) => {
								if (err) return callback(err);
								const entryOrString = lockfile.entries.get(url);
								if (!entryOrString) {
									// Unknown url: forbidden when frozen, otherwise fetch
									// and record a new lockfile entry.
									if (frozen) {
										return callback(
											new Error(
												`${url} has no lockfile entry and lockfile is frozen`
											)
										);
									}
									resolveContent(url, null, (err, result) => {
										if (err) return callback(err);
										storeResult(lockfile, url, result, callback);
									});
									return;
								}
								if (typeof entryOrString === "string") {
									// "ignore" / "no-cache" marker: always re-fetch; a now
									// cacheable response requires an (allowed) upgrade.
									const entryTag = entryOrString;
									resolveContent(url, null, (err, result) => {
										if (err) return callback(err);
										if (!result.storeLock || entryTag === "ignore")
											return callback(null, result);
										if (frozen) {
											return callback(
												new Error(
													`${url} used to have ${entryTag} lockfile entry and has content now, but lockfile is frozen`
												)
											);
										}
										if (!upgrade) {
											return callback(
												new Error(
													`${url} used to have ${entryTag} lockfile entry and has content now.
This should be reflected in the lockfile, so this lockfile entry must be upgraded, but upgrading is not enabled.
Remove this line from the lockfile to force upgrading.`
												)
											);
										}
										storeResult(lockfile, url, result, callback);
									});
									return;
								}
								let entry = entryOrString;
								// Fetches (or revalidates) the content for a full entry;
								// lockedContent, when given, is the fallback from the
								// lockfile cache for failed upgrade requests.
								const doFetch = lockedContent => {
									resolveContent(url, entry.integrity, (err, result) => {
										if (err) {
											if (lockedContent) {
												logger.warn(
													`Upgrade request to ${url} failed: ${err.message}`
												);
												logger.debug(err.stack);
												return callback(null, {
													entry,
													content: lockedContent
												});
											}
											return callback(err);
										}
										if (!result.storeLock) {
											// When the lockfile entry should be no-cache
											// we need to update the lockfile
											if (frozen) {
												return callback(
													new Error(
														`${url} has a lockfile entry and is no-cache now, but lockfile is frozen\nLockfile: ${entryToString(
															entry
														)}`
													)
												);
											}
											storeResult(lockfile, url, result, callback);
											return;
										}
										if (!areLockfileEntriesEqual(result.entry, entry)) {
											// When the lockfile entry is outdated
											// we need to update the lockfile
											if (frozen) {
												return callback(
													new Error(
														`${url} has an outdated lockfile entry, but lockfile is frozen\nLockfile: ${entryToString(
															entry
														)}\nExpected: ${entryToString(result.entry)}`
													)
												);
											}
											storeResult(lockfile, url, result, callback);
											return;
										}
										if (!lockedContent && cacheLocation) {
											// When the lockfile cache content is missing
											// we need to update the lockfile
											if (frozen) {
												return callback(
													new Error(
														`${url} is missing content in the lockfile cache, but lockfile is frozen\nLockfile: ${entryToString(
															entry
														)}`
													)
												);
											}
											storeResult(lockfile, url, result, callback);
											return;
										}
										return callback(null, result);
									});
								};
								if (cacheLocation) {
									// When there is a lockfile cache
									// we read the content from there
									const key = getCacheKey(entry.resolved);
									const filePath = join(intermediateFs, cacheLocation, key);
									fs.readFile(filePath, (err, result) => {
										const content = /** @type {Buffer} */ (result);
										if (err) {
											if (err.code === "ENOENT") return doFetch();
											return callback(err);
										}
										// NOTE(review): the `result` parameter here is unused;
										// the function closes over `content` from the outer
										// scope instead — confirm this is intentional.
										const continueWithCachedContent = result => {
											if (!upgrade) {
												// When not in upgrade mode, we accept the result from the lockfile cache
												return callback(null, { entry, content });
											}
											return doFetch(content);
										};
										if (!verifyIntegrity(content, entry.integrity)) {
											// Integrity mismatch: first check whether a CRLF->LF
											// normalization (e.g. by git on Windows) explains it.
											let contentWithChangedEol;
											let isEolChanged = false;
											try {
												contentWithChangedEol = Buffer.from(
													content.toString("utf-8").replace(/\r\n/g, "\n")
												);
												isEolChanged = verifyIntegrity(
													contentWithChangedEol,
													entry.integrity
												);
											} catch (e) {
												// ignore
											}
											if (isEolChanged) {
												if (!warnedAboutEol) {
													const explainer = `Incorrect end of line sequence was detected in the lockfile cache.
The lockfile cache is protected by integrity checks, so any external modification will lead to a corrupted lockfile cache.
When using git make sure to configure .gitattributes correctly for the lockfile cache:
**/*webpack.lock.data/** -text
This will avoid that the end of line sequence is changed by git on Windows.`;
													if (frozen) {
														logger.error(explainer);
													} else {
														logger.warn(explainer);
														logger.info(
															"Lockfile cache will be automatically fixed now, but when lockfile is frozen this would result in an error."
														);
													}
													warnedAboutEol = true;
												}
												if (!frozen) {
													// "fix" the end of line sequence of the lockfile content
													logger.log(
														`${filePath} fixed end of line sequence (\\r\\n instead of \\n).`
													);
													intermediateFs.writeFile(
														filePath,
														contentWithChangedEol,
														err => {
															if (err) return callback(err);
															continueWithCachedContent(contentWithChangedEol);
														}
													);
													return;
												}
											}
											if (frozen) {
												return callback(
													new Error(
														`${
															entry.resolved
														} integrity mismatch, expected content with integrity ${
															entry.integrity
														} but got ${computeIntegrity(content)}.
Lockfile corrupted (${
															isEolChanged
																? "end of line sequence was unexpectedly changed"
																: "incorrectly merged? changed by other tools?"
														}).
Run build with un-frozen lockfile to automatically fix lockfile.`
													)
												);
											} else {
												// "fix" the lockfile entry to the correct integrity
												// the content has priority over the integrity value
												entry = {
													...entry,
													integrity: computeIntegrity(content)
												};
												storeLockEntry(lockfile, url, entry);
											}
										}
										continueWithCachedContent(result);
									});
								} else {
									doFetch();
								}
							});
						}
					);

					// Fills the resolve result's resourceData from a resolved url.
					const respondWithUrlModule = (url, resourceData, callback) => {
						getInfo(url.href, (err, result) => {
							if (err) return callback(err);
							resourceData.resource = url.href;
							resourceData.path = url.origin + url.pathname;
							resourceData.query = url.search;
							resourceData.fragment = url.hash;
							// Context is the resolved url's directory without the trailing slash.
							resourceData.context = new URL(
								".",
								result.entry.resolved
							).href.slice(0, -1);
							resourceData.data.mimetype = result.entry.contentType;
							callback(null, true);
						});
					};
					normalModuleFactory.hooks.resolveForScheme
						.for(scheme)
						.tapAsync(
							"HttpUriPlugin",
							(resourceData, resolveData, callback) => {
								respondWithUrlModule(
									new URL(resourceData.resource),
									resourceData,
									callback
								);
							}
						);
					normalModuleFactory.hooks.resolveInScheme
						.for(scheme)
						.tapAsync("HttpUriPlugin", (resourceData, data, callback) => {
							// Only handle relative urls (./xxx, ../xxx, /xxx, //xxx)
							if (
								data.dependencyType !== "url" &&
								!/^\.{0,2}\//.test(resourceData.resource)
							) {
								return callback();
							}
							respondWithUrlModule(
								new URL(resourceData.resource, data.context + "/"),
								resourceData,
								callback
							);
						});
					const hooks = NormalModule.getCompilationHooks(compilation);
					hooks.readResourceForScheme
						.for(scheme)
						.tapAsync("HttpUriPlugin", (resource, module, callback) => {
							return getInfo(resource, (err, result) => {
								if (err) return callback(err);
								module.buildInfo.resourceIntegrity = result.entry.integrity;
								callback(null, result.content);
							});
						});
					hooks.needBuild.tapAsync(
						"HttpUriPlugin",
						(module, context, callback) => {
							if (
								module.resource &&
								module.resource.startsWith(`${scheme}://`)
							) {
								// Rebuild when the resolved integrity changed since the
								// module was last built.
								getInfo(module.resource, (err, result) => {
									if (err) return callback(err);
									if (
										result.entry.integrity !==
										module.buildInfo.resourceIntegrity
									) {
										return callback(null, true);
									}
									callback();
								});
							} else {
								return callback();
							}
						}
					);
				}
				// After all modules are built, merge the collected lockfile
				// updates into the on-disk lockfile (write to a temp file, then
				// rename atomically); concurrent writers are serialized via
				// the module-level inProgressWrite queue.
				compilation.hooks.finishModules.tapAsync(
					"HttpUriPlugin",
					(modules, callback) => {
						if (!lockfileUpdates) return callback();
						const ext = extname(lockfileLocation);
						const tempFile = join(
							intermediateFs,
							dirname(intermediateFs, lockfileLocation),
							`.${basename(lockfileLocation, ext)}.${
								(Math.random() * 10000) | 0
							}${ext}`
						);

						const writeDone = () => {
							const nextOperation = inProgressWrite.shift();
							if (nextOperation) {
								nextOperation();
							} else {
								inProgressWrite = undefined;
							}
						};
						const runWrite = () => {
							// Re-read the lockfile so updates from other concurrent
							// compilations are preserved.
							intermediateFs.readFile(lockfileLocation, (err, buffer) => {
								if (err && err.code !== "ENOENT") {
									writeDone();
									return callback(err);
								}
								const lockfile = buffer
									? Lockfile.parse(buffer.toString("utf-8"))
									: new Lockfile();
								for (const [key, value] of lockfileUpdates) {
									lockfile.entries.set(key, value);
								}
								intermediateFs.writeFile(tempFile, lockfile.toString(), err => {
									if (err) {
										writeDone();
										return intermediateFs.unlink(tempFile, () => callback(err));
									}
									intermediateFs.rename(tempFile, lockfileLocation, err => {
										if (err) {
											writeDone();
											return intermediateFs.unlink(tempFile, () =>
												callback(err)
											);
										}
										writeDone();
										callback();
									});
								});
							});
						};
						if (inProgressWrite) {
							inProgressWrite.push(runWrite);
						} else {
							inProgressWrite = [];
							runWrite();
						}
					}
				);
			}
		);
	}
}
|
||
|
|
||
|
// Public export of the plugin class.
module.exports = HttpUriPlugin;
|