Make compressing transforms cache configurable (#574)
* Make compressing cache transforms configurable

* Add note about cacheCompression in the README
thymikee authored and loganfsmyth committed Sep 1, 2018
1 parent dbcc2b5 commit 67a5f40
Showing 4 changed files with 61 additions and 12 deletions.
README.md (4 changes: 3 additions & 1 deletion)
@@ -1,4 +1,4 @@
-> This readme is for babel-loader v8 + Babel v7
+> This readme is for babel-loader v8 + Babel v7
 > Check the [7.x branch](https://github.com/babel/babel-loader/tree/7.x) for docs with Babel v6
 
 [![NPM Status](https://img.shields.io/npm/v/babel-loader.svg?style=flat)](https://www.npmjs.com/package/babel-loader)
@@ -88,6 +88,8 @@ This loader also supports the following loader-specific option:
 
 * `cacheIdentifier`: Default is a string composed of the babel-core version, the babel-loader version, the contents of the .babelrc file if it exists, and the value of the `BABEL_ENV` environment variable, with a fallback to `NODE_ENV`. This can be set to a custom value to force cache busting if the identifier changes.
 
+* `cacheCompression`: Default `true`. When `true`, each Babel transform output is compressed with gzip. If you want to opt out of cache compression, set it to `false` -- your project may benefit from this if it transpiles thousands of files. (See the configuration sketch below.)
+
 __Note:__ The `sourceMap` option is ignored; instead, source maps are automatically enabled when webpack is configured to use them (via the `devtool` config option).
 
 ## Troubleshooting
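For readers wiring this up, here is a minimal webpack configuration sketch using the new option. It mirrors the test added in test/cache.test.js below; the preset choice and rule layout are illustrative, not mandated by this commit:

```js
// webpack.config.js -- minimal sketch; the preset and test regex mirror the
// test added in this commit and are illustrative, not required.
module.exports = {
  module: {
    rules: [
      {
        test: /\.jsx?$/,
        exclude: /node_modules/,
        loader: "babel-loader",
        options: {
          cacheDirectory: true,
          // Opt out of gzip-compressing cache entries; may help projects
          // that transpile thousands of files.
          cacheCompression: false,
          presets: ["@babel/preset-env"],
        },
      },
    ],
  },
};
```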
src/cache.js (29 changes: 18 additions & 11 deletions)
@@ -32,11 +32,11 @@ const mkdirp = promisify(mkdirpOrig);
  * @async
  * @params {String} filename
  */
-const read = async function(filename) {
-  const data = await readFile(filename);
-  const content = await gunzip(data);
+const read = async function(filename, compress) {
+  const data = await readFile(filename + (compress ? ".gz" : ""));
+  const content = compress ? await gunzip(data) : data;
 
-  return JSON.parse(content);
+  return JSON.parse(content.toString());
 };
 
 /**
@@ -46,11 +46,11 @@ const read = async function(filename) {
  * @params {String} filename
  * @params {String} result
  */
-const write = async function(filename, result) {
+const write = async function(filename, compress, result) {
   const content = JSON.stringify(result);
 
-  const data = await gzip(content);
-  return await writeFile(filename, data);
+  const data = compress ? await gzip(content) : content;
+  return await writeFile(filename + (compress ? ".gz" : ""), data);
 };
 
 /**
@@ -68,7 +68,7 @@ const filename = function(source, identifier, options) {

   hash.update(contents);
 
-  return hash.digest("hex") + ".json.gz";
+  return hash.digest("hex") + ".json";
 };
 
 /**
@@ -78,14 +78,20 @@ const filename = function(source, identifier, options) {
  * @params {Object} params
  */
 const handleCache = async function(directory, params) {
-  const { source, options = {}, cacheIdentifier, cacheDirectory } = params;
+  const {
+    source,
+    options = {},
+    cacheIdentifier,
+    cacheDirectory,
+    cacheCompression,
+  } = params;
 
   const file = path.join(directory, filename(source, cacheIdentifier, options));
 
   try {
     // No errors mean that the file was previously cached
     // we just need to return it
-    return await read(file);
+    return await read(file, cacheCompression);
   } catch (err) {}
 
   const fallback =
@@ -107,7 +113,7 @@ const handleCache = async function(directory, params) {
   const result = await transform(source, options);
 
   try {
-    await write(file, result);
+    await write(file, cacheCompression, result);
   } catch (err) {
     if (fallback) {
       // Fallback to tmpdir if node_modules folder not writable
@@ -138,6 +144,7 @@ const handleCache = async function(directory, params) {
  * cache({
  *   directory: '.tmp/cache',
  *   identifier: 'babel-loader-cachefile',
+ *   cacheCompression: false,
  *   source: *source code from file*,
  *   options: {
  *     experimental: true,
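To summarize the mechanics added above, here is a self-contained sketch of the compress-aware cache filename and read/write round trip. It follows the diff's logic, but the helper names (`cacheFilename`, `save`, `load`) and the sha1 choice are assumptions for illustration; the exact shape of the hashed input is not shown in this diff:

```js
// cache-sketch.js -- illustrative only; helper names and the sha1/JSON
// details are assumptions, not taken verbatim from babel-loader.
const crypto = require("crypto");
const zlib = require("zlib");
const { promisify } = require("util");
const fs = require("fs");

const gzip = promisify(zlib.gzip);
const gunzip = promisify(zlib.gunzip);
const readFile = promisify(fs.readFile);
const writeFile = promisify(fs.writeFile);

// The cache key is a hex digest of the inputs; ".gz" is appended only
// when compression is on, so compressed and plain entries never collide.
function cacheFilename(source, identifier, options) {
  const hash = crypto.createHash("sha1"); // assumption: any stable hash works
  hash.update(JSON.stringify({ source, options, identifier }));
  return hash.digest("hex") + ".json";
}

async function save(filename, compress, result) {
  const content = JSON.stringify(result);
  const data = compress ? await gzip(content) : content;
  await writeFile(filename + (compress ? ".gz" : ""), data);
}

async function load(filename, compress) {
  const data = await readFile(filename + (compress ? ".gz" : ""));
  const content = compress ? await gunzip(data) : data;
  // gunzip returns a Buffer, hence the toString() before JSON.parse
  return JSON.parse(content.toString());
}
```

A consequence of the `.json` vs `.json.gz` naming is that toggling `cacheCompression` simply produces clean cache misses and re-transforms, rather than attempts to parse entries written in the other format.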
src/index.js (3 changes: 3 additions & 0 deletions)
@@ -94,6 +94,7 @@ async function loader(source, inputSourceMap, overrides) {
   // Remove loader related options
   delete programmaticOptions.cacheDirectory;
   delete programmaticOptions.cacheIdentifier;
+  delete programmaticOptions.cacheCompression;
   delete programmaticOptions.metadataSubscribers;
 
   if (!babel.loadPartialConfig) {
@@ -121,6 +122,7 @@ async function loader(source, inputSourceMap, overrides) {
"@babel/core": transform.version,
"@babel/loader": pkg.version,
}),
cacheCompression = true,
metadataSubscribers = [],
} = loaderOptions;

@@ -132,6 +134,7 @@ async function loader(source, inputSourceMap, overrides) {
       transform,
       cacheDirectory,
       cacheIdentifier,
+      cacheCompression,
     });
   } else {
     result = await transform(source, options);
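Putting the src/index.js pieces together, a condensed sketch of how the option flows from the loader configuration into the cache call (unrelated code elided; the `cacheDirectory = null` default and the `if (cacheDirectory)` guard are assumptions inferred from the surrounding context):

```js
// Condensed flow sketch, not the full loader; names come from the diff.
const {
  cacheDirectory = null, // assumed default; enables caching when truthy
  cacheIdentifier = JSON.stringify({
    options,
    "@babel/core": transform.version,
    "@babel/loader": pkg.version,
  }),
  cacheCompression = true, // new: gzip cache entries unless disabled
  metadataSubscribers = [],
} = loaderOptions;

let result;
if (cacheDirectory) {
  result = await cache({
    source,
    options,
    transform,
    cacheDirectory,
    cacheIdentifier,
    cacheCompression, // forwarded so src/cache.js can skip gzip
  });
} else {
  result = await transform(source, options);
}
```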
test/cache.test.js (37 changes: 37 additions & 0 deletions)
@@ -119,6 +119,43 @@ test.cb.serial(
   },
 );
 
+test.cb.serial(
+  "should output non-compressed files to standard cache dir when cacheCompression is set to false",
+  t => {
+    const config = Object.assign({}, globalConfig, {
+      output: {
+        path: t.context.directory,
+      },
+      module: {
+        rules: [
+          {
+            test: /\.jsx?/,
+            loader: babelLoader,
+            exclude: /node_modules/,
+            options: {
+              cacheDirectory: true,
+              cacheCompression: false,
+              presets: ["@babel/preset-env"],
+            },
+          },
+        ],
+      },
+    });
+
+    webpack(config, err => {
+      t.is(err, null);
+
+      fs.readdir(defaultCacheDir, (err, files) => {
+        files = files.filter(file => /\b[0-9a-f]{5,40}\b/.test(file));
+
+        t.is(err, null);
+        t.true(files.length > 0);
+        t.end();
+      });
+    });
+  },
+);
+
 test.cb.serial(
   "should output files to standard cache dir if set to true in query",
   t => {
