Skip to content

Commit

Permalink
Merge pull request #6839 from webpack/feature/contenthash
Browse files Browse the repository at this point in the history
add [contenthash] support
  • Loading branch information
sokra committed Mar 26, 2018
2 parents 4861d2c + b018bc7 commit 41a0482
Show file tree
Hide file tree
Showing 13 changed files with 309 additions and 36 deletions.
8 changes: 8 additions & 0 deletions lib/Chunk.js
Expand Up @@ -55,6 +55,7 @@ class Chunk {
this.files = [];
this.rendered = false;
this.hash = undefined;
this.contentHash = Object.create(null);
this.renderedHash = undefined;
this.chunkReason = undefined;
this.extraAsync = false;
Expand Down Expand Up @@ -340,15 +341,22 @@ class Chunk {

getChunkMaps(realHash) {
const chunkHashMap = Object.create(null);
const chunkContentHashMap = Object.create(null);
const chunkNameMap = Object.create(null);

for (const chunk of this.getAllAsyncChunks()) {
chunkHashMap[chunk.id] = realHash ? chunk.hash : chunk.renderedHash;
for (const key of Object.keys(chunk.contentHash)) {
if (!chunkContentHashMap[key])
chunkContentHashMap[key] = Object.create(null);
chunkContentHashMap[key][chunk.id] = chunk.contentHash[key];
}
if (chunk.name) chunkNameMap[chunk.id] = chunk.name;
}

return {
hash: chunkHashMap,
contentHash: chunkContentHashMap,
name: chunkNameMap
};
}
Expand Down
11 changes: 6 additions & 5 deletions lib/Compilation.js
Expand Up @@ -146,6 +146,7 @@ class Compilation extends Tapable {
recordChunks: new SyncHook(["chunks", "records"]),

beforeHash: new SyncHook([]),
contentHash: new SyncHook(["chunk"]),
afterHash: new SyncHook([]),

recordHash: new SyncHook(["records"]),
Expand Down Expand Up @@ -1680,15 +1681,15 @@ class Compilation extends Tapable {
const chunkHash = createHash(hashFunction);
if (outputOptions.hashSalt) chunkHash.update(outputOptions.hashSalt);
chunk.updateHash(chunkHash);
if (chunk.hasRuntime()) {
this.mainTemplate.updateHashForChunk(chunkHash, chunk);
} else {
this.chunkTemplate.updateHashForChunk(chunkHash, chunk);
}
const template = chunk.hasRuntime()
? this.mainTemplate
: this.chunkTemplate;
template.updateHashForChunk(chunkHash, chunk);
this.hooks.chunkHash.call(chunk, chunkHash);
chunk.hash = chunkHash.digest(hashDigest);
hash.update(chunk.hash);
chunk.renderedHash = chunk.hash.substr(0, hashDigestLength);
this.hooks.contentHash.call(chunk);
}
this.fullHash = hash.digest(hashDigest);
this.hash = this.fullHash.substr(0, hashDigestLength);
Expand Down
28 changes: 27 additions & 1 deletion lib/JavascriptModulesPlugin.js
Expand Up @@ -8,6 +8,7 @@ const Parser = require("./Parser");
const Template = require("./Template");
const { ConcatSource } = require("webpack-sources");
const JavascriptGenerator = require("./JavascriptGenerator");
const createHash = require("./util/createHash");

class JavascriptModulesPlugin {
apply(compiler) {
Expand Down Expand Up @@ -72,6 +73,7 @@ class JavascriptModulesPlugin {
filenameTemplate,
pathOptions: {
noChunkHash: !useChunkHash,
contentHashType: "javascript",
chunk
},
identifier: `chunk${chunk.id}`,
Expand Down Expand Up @@ -115,7 +117,8 @@ class JavascriptModulesPlugin {
),
filenameTemplate,
pathOptions: {
chunk
chunk,
contentHashType: "javascript"
},
identifier: `chunk${chunk.id}`,
hash: chunk.hash
Expand All @@ -124,6 +127,29 @@ class JavascriptModulesPlugin {
return result;
}
);
// Compute the chunk's "javascript" content hash: seeded by the optional salt,
// then the runtime/chunk template, then the hash of each module that provides
// a source() function (i.e. modules that render JavaScript output).
compilation.hooks.contentHash.tap("JavascriptModulesPlugin", chunk => {
	const { hashSalt, hashDigest, hashDigestLength, hashFunction } =
		compilation.outputOptions;
	const hasher = createHash(hashFunction);
	if (hashSalt) hasher.update(hashSalt);
	// Entry chunks hash the main template; all others the chunk template.
	const template = chunk.hasRuntime()
		? compilation.mainTemplate
		: compilation.chunkTemplate;
	template.updateHashForChunk(hasher, chunk);
	for (const module of chunk.modulesIterable) {
		if (typeof module.source === "function") {
			hasher.update(module.hash);
		}
	}
	const fullDigest = hasher.digest(hashDigest);
	chunk.contentHash.javascript = fullDigest.substr(0, hashDigestLength);
});
}
);
}
Expand Down
13 changes: 4 additions & 9 deletions lib/SourceMapDevToolPlugin.js
Expand Up @@ -249,16 +249,11 @@ class SourceMapDevToolPlugin {
? path.relative(options.fileContext, filename)
: filename,
query,
basename: basename(filename)
basename: basename(filename),
contentHash: createHash("md4")
.update(sourceMapString)
.digest("hex")
});
if (sourceMapFile.includes("[contenthash]")) {
sourceMapFile = sourceMapFile.replace(
/\[contenthash\]/g,
createHash("md4")
.update(sourceMapString)
.digest("hex")
);
}
const sourceMapUrl = options.publicPath
? options.publicPath + sourceMapFile.replace(/\\/g, "/")
: path
Expand Down
33 changes: 31 additions & 2 deletions lib/TemplatedPathPlugin.js
Expand Up @@ -7,6 +7,7 @@
const REGEXP_HASH = /\[hash(?::(\d+))?\]/gi,
REGEXP_CHUNKHASH = /\[chunkhash(?::(\d+))?\]/gi,
REGEXP_MODULEHASH = /\[modulehash(?::(\d+))?\]/gi,
REGEXP_CONTENTHASH = /\[contenthash(?::(\d+))?\]/gi,
REGEXP_NAME = /\[name\]/gi,
REGEXP_ID = /\[id\]/gi,
REGEXP_MODULEID = /\[moduleid\]/gi,
Expand All @@ -18,6 +19,7 @@ const REGEXP_HASH = /\[hash(?::(\d+))?\]/gi,
// We use a normal RegExp instead of .test
const REGEXP_HASH_FOR_TEST = new RegExp(REGEXP_HASH.source, "i"),
REGEXP_CHUNKHASH_FOR_TEST = new RegExp(REGEXP_CHUNKHASH.source, "i"),
REGEXP_CONTENTHASH_FOR_TEST = new RegExp(REGEXP_CONTENTHASH.source, "i"),
REGEXP_NAME_FOR_TEST = new RegExp(REGEXP_NAME.source, "i");

const withHashLength = (replacer, handlerFn) => {
Expand Down Expand Up @@ -55,6 +57,15 @@ const replacePathVariables = (path, data) => {
const chunkName = chunk && (chunk.name || chunk.id);
const chunkHash = chunk && (chunk.renderedHash || chunk.hash);
const chunkHashWithLength = chunk && chunk.hashWithLength;
const contentHashType = data.contentHashType;
const contentHash =
(chunk && chunk.contentHash && chunk.contentHash[contentHashType]) ||
data.contentHash;
const contentHashWithLength =
(chunk &&
chunk.contentHashWithLength &&
chunk.contentHashWithLength[contentHashType]) ||
data.contentHashWithLength;
const module = data.module;
const moduleId = module && module.id;
const moduleHash = module && (module.renderedHash || module.hash);
Expand All @@ -64,9 +75,15 @@ const replacePathVariables = (path, data) => {
path = path(data);
}

if (data.noChunkHash && REGEXP_CHUNKHASH_FOR_TEST.test(path)) {
if (
data.noChunkHash &&
(REGEXP_CHUNKHASH_FOR_TEST.test(path) ||
REGEXP_CONTENTHASH_FOR_TEST.test(path))
) {
throw new Error(
`Cannot use [chunkhash] for chunk in '${path}' (use [hash] instead)`
`Cannot use [chunkhash] or [contenthash] for chunk in '${
path
}' (use [hash] instead)`
);
}

Expand All @@ -80,6 +97,10 @@ const replacePathVariables = (path, data) => {
REGEXP_CHUNKHASH,
withHashLength(getReplacer(chunkHash), chunkHashWithLength)
)
.replace(
REGEXP_CONTENTHASH,
withHashLength(getReplacer(contentHash), contentHashWithLength)
)
.replace(
REGEXP_MODULEHASH,
withHashLength(getReplacer(moduleHash), moduleHashWithLength)
Expand Down Expand Up @@ -115,6 +136,7 @@ class TemplatedPathPlugin {
if (
REGEXP_HASH_FOR_TEST.test(publicPath) ||
REGEXP_CHUNKHASH_FOR_TEST.test(publicPath) ||
REGEXP_CONTENTHASH_FOR_TEST.test(publicPath) ||
REGEXP_NAME_FOR_TEST.test(publicPath)
)
return true;
Expand All @@ -132,6 +154,13 @@ class TemplatedPathPlugin {
outputOptions.chunkFilename || outputOptions.filename;
if (REGEXP_CHUNKHASH_FOR_TEST.test(chunkFilename))
hash.update(JSON.stringify(chunk.getChunkMaps(true).hash));
if (REGEXP_CONTENTHASH_FOR_TEST.test(chunkFilename)) {
hash.update(
JSON.stringify(
chunk.getChunkMaps(true).contentHash.javascript || {}
)
);
}
if (REGEXP_NAME_FOR_TEST.test(chunkFilename))
hash.update(JSON.stringify(chunk.getChunkMaps(true).name));
}
Expand Down
49 changes: 47 additions & 2 deletions lib/node/NodeMainTemplatePlugin.js
Expand Up @@ -125,10 +125,33 @@ module.exports = class NodeMainTemplatePlugin {
shortChunkHashMap
)}[chunkId] + "`;
},
contentHash: {
javascript: `" + ${JSON.stringify(
chunkMaps.contentHash.javascript
)}[chunkId] + "`
},
contentHashWithLength: {
javascript: length => {
const shortContentHashMap = {};
const contentHash =
chunkMaps.contentHash.javascript;
for (const chunkId of Object.keys(contentHash)) {
if (typeof contentHash[chunkId] === "string") {
shortContentHashMap[chunkId] = contentHash[
chunkId
].substr(0, length);
}
}
return `" + ${JSON.stringify(
shortContentHashMap
)}[chunkId] + "`;
}
},
name: `" + (${JSON.stringify(
chunkMaps.name
)}[chunkId]||chunkId) + "`
}
},
contentHashType: "javascript"
}
) +
";",
Expand Down Expand Up @@ -187,10 +210,32 @@ module.exports = class NodeMainTemplatePlugin {
shortChunkHashMap
)}[chunkId] + "`;
},
contentHash: {
javascript: `" + ${JSON.stringify(
chunkMaps.contentHash.javascript
)}[chunkId] + "`
},
contentHashWithLength: {
javascript: length => {
const shortContentHashMap = {};
const contentHash = chunkMaps.contentHash.javascript;
for (const chunkId of Object.keys(contentHash)) {
if (typeof contentHash[chunkId] === "string") {
shortContentHashMap[chunkId] = contentHash[
chunkId
].substr(0, length);
}
}
return `" + ${JSON.stringify(
shortContentHashMap
)}[chunkId] + "`;
}
},
name: `" + (${JSON.stringify(
chunkMaps.name
)}[chunkId]||chunkId) + "`
}
},
contentHashType: "javascript"
}
);
return Template.asString([
Expand Down
26 changes: 24 additions & 2 deletions lib/web/JsonpMainTemplatePlugin.js
Expand Up @@ -88,8 +88,30 @@ class JsonpMainTemplatePlugin {
},
name: `" + (${JSON.stringify(
chunkMaps.name
)}[chunkId]||chunkId) + "`
}
)}[chunkId]||chunkId) + "`,
contentHash: {
javascript: `" + ${JSON.stringify(
chunkMaps.contentHash.javascript
)}[chunkId] + "`
},
contentHashWithLength: {
javascript: length => {
const shortContentHashMap = {};
const contentHash = chunkMaps.contentHash.javascript;
for (const chunkId of Object.keys(contentHash)) {
if (typeof contentHash[chunkId] === "string") {
shortContentHashMap[chunkId] = contentHash[
chunkId
].substr(0, length);
}
}
return `" + ${JSON.stringify(
shortContentHashMap
)}[chunkId] + "`;
}
}
},
contentHashType: "javascript"
}
);
return Template.asString([
Expand Down
3 changes: 3 additions & 0 deletions lib/webpack.js
Expand Up @@ -153,6 +153,9 @@ exportPlugins((exports.node = {}), {
exportPlugins((exports.debug = {}), {
ProfilingPlugin: () => require("./debug/ProfilingPlugin")
});
// Expose utility helpers under webpack.util; createHash is registered as a
// lazy factory (`() => require(...)`) so the module loads only on first use.
exportPlugins((exports.util = {}), {
createHash: () => require("./util/createHash")
});

const defineMissingPluginError = (namespace, pluginName, errorMessage) => {
Object.defineProperty(namespace, pluginName, {
Expand Down
27 changes: 25 additions & 2 deletions lib/webworker/WebWorkerMainTemplatePlugin.js
Expand Up @@ -35,6 +35,7 @@ class WebWorkerMainTemplatePlugin {
"WebWorkerMainTemplatePlugin",
(_, chunk, hash) => {
const chunkFilename = mainTemplate.outputOptions.chunkFilename;
const chunkMaps = chunk.getChunkMaps();
return Template.asString([
"promises.push(Promise.resolve().then(function() {",
Template.indent([
Expand All @@ -50,8 +51,30 @@ class WebWorkerMainTemplatePlugin {
length
)} + "`,
chunk: {
id: '" + chunkId + "'
}
id: '" + chunkId + "',
contentHash: {
javascript: `" + ${JSON.stringify(
chunkMaps.contentHash.javascript
)}[chunkId] + "`
},
contentHashWithLength: {
javascript: length => {
const shortContentHashMap = {};
const contentHash = chunkMaps.contentHash.javascript;
for (const chunkId of Object.keys(contentHash)) {
if (typeof contentHash[chunkId] === "string") {
shortContentHashMap[chunkId] = contentHash[
chunkId
].substr(0, length);
}
}
return `" + ${JSON.stringify(
shortContentHashMap
)}[chunkId] + "`;
}
}
},
contentHashType: "javascript"
}) +
");"
]),
Expand Down
1 change: 1 addition & 0 deletions test/ConfigTestCases.test.js
Expand Up @@ -226,6 +226,7 @@ describe("ConfigTestCases", () => {
);
if (exportedTests < filesCount)
return done(new Error("No tests exported by test case"));
if (testConfig.afterExecute) testConfig.afterExecute();
process.nextTick(done);
});
});
Expand Down
5 changes: 5 additions & 0 deletions test/configCases/hash-length/output-filename/no-async.js
@@ -0,0 +1,5 @@
// Smoke test: only needs to compile and run. NAME is not defined in this
// file — presumably provided globally by the test harness; verify in config.
it("should compile and run the test " + NAME, function() {});

// Never-true guard (Math.random() yields [0, 1)): the require is presumably
// still seen by webpack's static analysis, so "./chunk" is emitted as an
// async chunk without ever being loaded at runtime — TODO confirm intent.
if (Math.random() < -1) {
	require(["./chunk"], function() {});
}

0 comments on commit 41a0482

Please sign in to comment.