Skip to content

Commit

Permalink
feat(normalization): archive
Browse files Browse the repository at this point in the history
Also, normalization functions can now be async
  • Loading branch information
gr2m committed Feb 24, 2018
1 parent af653a2 commit 6fe9ca6
Show file tree
Hide file tree
Showing 4 changed files with 79 additions and 6 deletions.
4 changes: 2 additions & 2 deletions bin/record.js
Original file line number Diff line number Diff line change
Expand Up @@ -66,10 +66,10 @@ scenarios.reduce(async (promise, scenarioPath) => {
ids: {}
}

const newNormalizedFixtures = newRawFixtures
const newNormalizedFixtures = await Promise.all(newRawFixtures
.map(cloneDeep)
.filter(hasntIgnoreHeader)
.map(normalize.bind(null, scenarioState))
.map(normalize.bind(null, scenarioState)))

const fixturesDiffs = diff(newNormalizedFixtures, oldNormalizedFixtures)
if (!fixturesDiffs) {
Expand Down
62 changes: 62 additions & 0 deletions lib/normalize/archive.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,62 @@
module.exports = normalizeArchive

const zlib = require('zlib')

const intoStream = require('into-stream')
const getStream = require('get-stream')
const tar = require('tar-stream')
const gunzip = require('gunzip-maybe')

const temporaryRepository = require('../temporary-repository')

async function normalizeArchive (scenarioState, response, fixture) {
  // The response is a gzipped tarball of the repository folder (README.md
  // inside). The folder name differs on every recording because it is derived
  // from the temporary repository name, so we have to gunzip/untar the
  // response, normalize folder name, sha and mtime, then retar/gzip it again
  // so the recorded fixture is stable across runs.
  fixture.headers['content-disposition'] = fixture.headers['content-disposition']
    // normalize folder name in file name
    .replace(temporaryRepository.regex, '$1')
    // zerofy sha
    .replace(/archive-\w{7}/, 'archive-0000000')

  const extract = tar.extract()
  const pack = tar.pack()
  const readStream = intoStream(Buffer.from(response, 'hex'))

  extract.on('entry', function (header, stream, callback) {
    header.name = header.name
      // normalize folder name in path
      .replace(temporaryRepository.regex, '$1')
      // zerofy sha in path
      .replace(/-(\w){7}\//, '-0000000/')

    // normalize mtime — presumably tar-stream only calls .getTime() on it,
    // so a stub with that one method suffices (TODO confirm against tar-stream)
    header.mtime = {
      getTime: () => 1507651200000
    }

    // write the new entry to the pack stream
    stream.pipe(pack.entry(header, callback))
  })

  extract.on('finish', function () {
    // all entries done - lets finalize it
    pack.finalize()
  })

  // pipe the old tarball to the extractor
  readStream.pipe(gunzip()).pipe(extract)

  // pipe the new tarball into a gzip stream
  const writeStream = pack.pipe(zlib.createGzip())

  // Previously errors were swallowed with `.catch(console.log)`, which left
  // `result` undefined and crashed on the next line with a confusing
  // TypeError. Let the rejection propagate so callers see the real error.
  const result = await getStream.buffer(writeStream)
  fixture.response = result.toString('hex')

  // normalize gzip header across operating systems / extra flags
  // see http://www.zlib.org/rfc-gzip.html#header-trailer
  const normalizedHeader = '1f8b0800000000000003'
  fixture.response = normalizedHeader + fixture.response.slice(normalizedHeader.length)
}
12 changes: 8 additions & 4 deletions lib/normalize/index.js
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@ const headers = require('../headers')
const setIfExists = require('../set-if-exists')
const toEntityName = require('../to-entity-name')

function normalize (scenarioState, fixture) {
async function normalize (scenarioState, fixture) {
// fixture.rawHeaders is an array in the form of ['key1', 'value1', 'key2', 'value2']
// But the order of these can change, e.g. between local tests and CI on Travis.
// That’s why we turn them into an object before storing the fixtures and turn
Expand Down Expand Up @@ -65,12 +65,12 @@ function normalize (scenarioState, fixture) {
}

const responses = Array.isArray(fixture.response) ? fixture.response : [fixture.response]
responses.forEach(response => {
await Promise.all(responses.map(async (response) => {
const entityName = toEntityName(response, fixture)
if (entityName) {
require(`./${entityName}`)(scenarioState, response, fixture)
await require(`./${entityName}`)(scenarioState, response, fixture)
}
})
}))

// remove headers added by proxy
// see https://github.com/octokit/fixtures/pull/20#issuecomment-331558385
Expand All @@ -81,6 +81,10 @@ function normalize (scenarioState, fixture) {
fixture.headers['content-length'] = String(calculateBodyLength(fixture.response))
}

if (fixture.responseIsBinary) {
fixture.headers['content-length'] = Buffer.from(fixture.response, 'hex').length
}

// remove `Transfer-Encoding: chunked` headers:
// https://github.com/octokit/fixtures/issues/97
if (fixture.headers['transfer-encoding'] === 'chunked') {
Expand Down
7 changes: 7 additions & 0 deletions lib/to-entity-name.js
Original file line number Diff line number Diff line change
@@ -1,9 +1,15 @@
module.exports = toEntityName

function toEntityName (object, fixture) {
// object is binary response, so we check for it above the object check
if (/\/legacy\.(tar\.gz|zip)\/master$/.test(fixture.path)) {
return 'archive'
}

if (typeof object !== 'object') {
return
}

if (object.type === 'Organization') {
return 'organization'
}
Expand Down Expand Up @@ -52,6 +58,7 @@ function toEntityName (object, fixture) {
if (/^\/search\/issues\?/.test(fixture.path)) {
return 'search-issues'
}

if ('errors' in object) {
return 'error'
}
Expand Down

0 comments on commit 6fe9ca6

Please sign in to comment.