Skip to content

Commit

Permalink
feat(backend-bitbucket): Add Git-LFS support (#3118)
Browse files Browse the repository at this point in the history
  • Loading branch information
erezrokah committed Jan 21, 2020
1 parent 0755f90 commit a48c02d
Show file tree
Hide file tree
Showing 36 changed files with 15,343 additions and 5,081 deletions.

Large diffs are not rendered by default.

Large diffs are not rendered by default.

Large diffs are not rendered by default.

Large diffs are not rendered by default.

Large diffs are not rendered by default.

1,036 changes: 622 additions & 414 deletions cypress/fixtures/BitBucket Backend Editorial Workflow__can update an entry.json

Large diffs are not rendered by default.

Large diffs are not rendered by default.

Large diffs are not rendered by default.

Large diffs are not rendered by default.

Large diffs are not rendered by default.

Large diffs are not rendered by default.

Large diffs are not rendered by default.

Large diffs are not rendered by default.

Large diffs are not rendered by default.

Large diffs are not rendered by default.

Large diffs are not rendered by default.

Large diffs are not rendered by default.

Large diffs are not rendered by default.

Large diffs are not rendered by default.

Large diffs are not rendered by default.

Large diffs are not rendered by default.

Large diffs are not rendered by default.

Large diffs are not rendered by default.

Large diffs are not rendered by default.

Large diffs are not rendered by default.

Large diffs are not rendered by default.

Original file line number Diff line number Diff line change
@@ -0,0 +1,28 @@
import fixture from './common/media_library';
import { entry1 } from './common/entries';
import * as specUtils from './common/spec_utils';

// Backend under test, with Git-LFS ("large media") enabled for this suite.
const backend = 'bitbucket';
const lfs = true;

describe('BitBucket Backend Media Library - Large Media', () => {
  // Shared state object the spec utilities populate (e.g. the logged-in user).
  const taskResult = { data: {} };

  // Standard backend lifecycle wiring; `before` requests LFS support.
  before(() => {
    specUtils.before(taskResult, { lfs }, backend);
  });
  beforeEach(() => {
    specUtils.beforeEach(taskResult, backend);
  });
  afterEach(() => {
    specUtils.afterEach(taskResult, backend);
  });
  after(() => {
    specUtils.after(taskResult, backend);
  });

  // Run the shared media-library fixture against a single entry,
  // resolving the user lazily since `before` fills it in at runtime.
  fixture({ entries: [entry1], getUser: () => taskResult.data.user });
});
32 changes: 27 additions & 5 deletions cypress/plugins/bitbucket.js
Original file line number Diff line number Diff line change
Expand Up @@ -82,7 +82,7 @@ function del(token, path) {
});
}

async function prepareTestGitLabRepo() {
async function prepareTestBitBucketRepo({ lfs }) {
const { owner, repo, token } = await getEnvs();

// postfix a random string to avoid collisions
Expand Down Expand Up @@ -113,6 +113,15 @@ async function prepareTestGitLabRepo() {
);
await git.push(['-u', 'origin', 'master']);

if (lfs) {
console.log(`Enabling LFS for repo ${owner}/${repo}`);
await git.addConfig('commit.gpgsign', 'false');
await git.raw(['lfs', 'track', '*.png', '*.jpg']);
await git.add('.gitattributes');
await git.commit('chore: track images files under LFS');
await git.push('origin', 'master');
}

return { owner, repo: testRepoName, tempDir };
}

Expand Down Expand Up @@ -162,12 +171,13 @@ async function resetRepositories({ owner, repo, tempDir }) {
}

async function setupBitBucket(options) {
const { lfs = false, ...rest } = options;
if (process.env.RECORD_FIXTURES) {
console.log('Running tests in "record" mode - live data with be used!');
const [user, repoData] = await Promise.all([getUser(), prepareTestGitLabRepo()]);
const [user, repoData] = await Promise.all([getUser(), prepareTestBitBucketRepo({ lfs })]);

await updateConfig(config => {
merge(config, options, {
merge(config, rest, {
backend: {
repo: `${repoData.owner}/${repoData.repo}`,
},
Expand All @@ -179,7 +189,7 @@ async function setupBitBucket(options) {
console.log('Running tests in "playback" mode - local data with be used');

await updateConfig(config => {
merge(config, options, {
merge(config, rest, {
backend: {
repo: `${BITBUCKET_REPO_OWNER_SANITIZED_VALUE}/${BITBUCKET_REPO_NAME_SANITIZED_VALUE}`,
},
Expand Down Expand Up @@ -225,7 +235,9 @@ const sanitizeString = (str, { owner, repo, token, ownerName }) => {
.replace(
new RegExp('https://secure.gravatar.+?/u/.+?v=\\d', 'g'),
`${FAKE_OWNER_USER.links.avatar.href}`,
);
)
.replace(new RegExp(/\?token=.+?&/g), 'token=fakeToken&')
.replace(new RegExp(/&client=.+?&/g), 'client=fakeClient&');

if (ownerName) {
replaced = replaced.replace(
Expand Down Expand Up @@ -254,6 +266,16 @@ const transformRecordedData = (expectation, toSanitize) => {
}
} else if (httpRequest.body && httpRequest.body.type === 'STRING' && httpRequest.body.string) {
body = httpRequest.body.string;
} else if (
httpRequest.body &&
httpRequest.body.type === 'BINARY' &&
httpRequest.body.base64Bytes
) {
body = {
encoding: 'base64',
content: httpRequest.body.base64Bytes,
contentType: httpRequest.body.contentType,
};
}
return body;
};
Expand Down
38 changes: 31 additions & 7 deletions cypress/support/commands.js
Original file line number Diff line number Diff line change
Expand Up @@ -34,7 +34,25 @@ const matchRoute = (route, fetchArgs) => {
const options = fetchArgs[1];

const method = options && options.method ? options.method : 'GET';
const body = options && options.body;
let body = options && options.body;
let routeBody = route.body;

let bodyMatch = false;
if (routeBody?.encoding === 'base64' && ['File', 'Blob'].includes(body?.constructor.name)) {
const blob = new Blob([Buffer.from(routeBody.content, 'base64')], {
type: routeBody.contentType,
});
// size matching is good enough
bodyMatch = blob.size === body.size;
} else if (routeBody && body?.constructor.name === 'FormData') {
bodyMatch = Array.from(body.entries()).some(([key, value]) => {
const val = typeof value === 'string' ? value : '';
const match = routeBody.includes(key) && routeBody.includes(val);
return match;
});
} else {
bodyMatch = body === routeBody;
}

// use pattern matching for the timestamp parameter
const urlRegex = escapeRegExp(decodeURIComponent(route.url)).replace(
Expand All @@ -43,19 +61,23 @@ const matchRoute = (route, fetchArgs) => {
);

return (
method === route.method &&
body === route.body &&
decodeURIComponent(url).match(new RegExp(`${urlRegex}`))
method === route.method && bodyMatch && decodeURIComponent(url).match(new RegExp(`${urlRegex}`))
);
};

const stubFetch = (win, routes) => {
const fetch = win.fetch;
cy.stub(win, 'fetch').callsFake((...args) => {
const routeIndex = routes.findIndex(r => matchRoute(r, args));
let routeIndex = routes.findIndex(r => matchRoute(r, args));
if (routeIndex >= 0) {
const route = routes.splice(routeIndex, 1)[0];
console.log(`matched ${args[0]} to ${route.url} ${route.method} ${route.status}`);
let route = routes.splice(routeIndex, 1)[0];
const message = `matched ${args[0]} to ${route.url} ${route.method} ${route.status}`;
console.log(message);
if (route.status === 302) {
console.log(`resolving redirect to ${route.headers.Location}`);
routeIndex = routes.findIndex(r => matchRoute(r, [route.headers.Location]));
route = routes.splice(routeIndex, 1)[0];
}

let blob;
if (route.response && route.response.encoding === 'base64') {
Expand All @@ -76,6 +98,8 @@ const stubFetch = (win, routes) => {
} else if (
args[0].includes('api.github.com') ||
args[0].includes('api.bitbucket.org') ||
args[0].includes('bitbucket.org') ||
args[0].includes('api.media.atlassian.com') ||
args[0].includes('gitlab.com') ||
args[0].includes('netlify.com') ||
args[0].includes('s3.amazonaws.com')
Expand Down
2 changes: 2 additions & 0 deletions cypress/utils/mock-server.js
Original file line number Diff line number Diff line change
Expand Up @@ -30,6 +30,8 @@ const retrieveRecordedExpectations = async () => {
Host.includes('api.github.com') ||
(Host.includes('gitlab.com') && httpRequest.path.includes('api/v4')) ||
Host.includes('api.bitbucket.org') ||
(Host.includes('bitbucket.org') && httpRequest.path.includes('info/lfs')) ||
Host.includes('api.media.atlassian.com') ||
Host.some(host => host.includes('netlify.com')) ||
Host.some(host => host.includes('s3.amazonaws.com'))
);
Expand Down
100 changes: 100 additions & 0 deletions packages/netlify-cms-backend-bitbucket/src/git-lfs-client.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,100 @@
import minimatch from 'minimatch';
import { ApiRequest, PointerFile } from 'netlify-cms-lib-util';

type MakeAuthorizedRequest = (req: ApiRequest) => Promise<Response>;

/** A single Git-LFS batch-API action (an upload or verify endpoint). */
interface LfsBatchAction {
  href: string;
  header?: { [key: string]: string };
  expires_in?: number;
  expires_at?: string;
}

/** An object referenced in a Git-LFS batch request/response. */
interface LfsBatchObject {
  oid: string;
  size: number;
}

/** Batch response object the server wants uploaded (no actions = already stored). */
interface LfsBatchObjectUpload extends LfsBatchObject {
  actions?: {
    upload: LfsBatchAction;
    verify?: LfsBatchAction;
  };
}

/** Batch response object the server rejected. */
interface LfsBatchObjectError extends LfsBatchObject {
  error: {
    code: number;
    message: string;
  };
}

/** Response shape of the Git-LFS batch API ("upload" operation). */
interface LfsBatchUploadResponse {
  transfer?: string;
  objects: (LfsBatchObjectUpload | LfsBatchObjectError)[];
}

/**
 * Minimal Git-LFS client implementing the "basic" transfer adapter of the
 * Git-LFS batch API: request upload actions, PUT the blob, then verify.
 */
export class GitLfsClient {
  // Content negotiation headers required by the Git-LFS batch API.
  private static defaultContentHeaders = {
    Accept: 'application/vnd.git-lfs+json',
    ['Content-Type']: 'application/vnd.git-lfs+json',
  };

  /**
   * @param enabled whether Git-LFS is enabled for the backing repository
   * @param rootURL root of the repository's LFS endpoint (".../info/lfs")
   * @param patterns glob patterns (from .gitattributes) of LFS-tracked paths
   * @param makeAuthorizedRequest performs an authenticated request to the backend
   */
  constructor(
    public enabled: boolean,
    public rootURL: string,
    public patterns: string[],
    private makeAuthorizedRequest: MakeAuthorizedRequest,
  ) {}

  /** Returns true when `path` matches one of the LFS-tracked glob patterns. */
  matchPath(path: string) {
    return this.patterns.some(pattern => minimatch(path, pattern, { matchBase: true }));
  }

  /**
   * Uploads `resource` through the batch API and returns the pointer's sha.
   * Objects the server reports with no actions (already present) are skipped.
   */
  async uploadResource(pointer: PointerFile, resource: Blob): Promise<string> {
    const requests = await this.getResourceUploadRequests([pointer]);
    for (const request of requests) {
      const { actions } = request;
      // No actions means the server already has this object; nothing to do.
      if (!actions) {
        continue;
      }
      await this.doUpload(actions.upload, resource);
      if (actions.verify) {
        await this.doVerify(actions.verify, request);
      }
    }
    return pointer.sha;
  }

  /** PUTs the raw blob to the server-provided upload URL with its headers. */
  private async doUpload(upload: LfsBatchAction, resource: Blob) {
    await fetch(decodeURI(upload.href), {
      method: 'PUT',
      body: resource,
      headers: upload.header,
    });
  }

  /**
   * POSTs the LFS "verify" action so the server confirms the upload.
   * Awaited (the original fired the request without awaiting it, so
   * verification failures could never propagate to the caller).
   */
  private async doVerify(verify: LfsBatchAction, object: LfsBatchObject) {
    await this.makeAuthorizedRequest({
      url: decodeURI(verify.href),
      method: 'POST',
      headers: { ...GitLfsClient.defaultContentHeaders, ...verify.header },
      body: JSON.stringify({ oid: object.oid, size: object.size }),
    });
  }

  /**
   * Runs a batch "upload" request for `objects` and returns only the entries
   * the server accepted; per-object errors are logged and dropped.
   */
  private async getResourceUploadRequests(objects: PointerFile[]): Promise<LfsBatchObjectUpload[]> {
    const response = await this.makeAuthorizedRequest({
      url: `${this.rootURL}/objects/batch`,
      method: 'POST',
      headers: GitLfsClient.defaultContentHeaders,
      body: JSON.stringify({
        operation: 'upload',
        transfers: ['basic'],
        objects: objects.map(({ sha, ...rest }) => ({ ...rest, oid: sha })),
      }),
    });
    // Type predicate replaces the callers' need for non-null assertions.
    return ((await response.json()) as LfsBatchUploadResponse).objects.filter(
      (object): object is LfsBatchObjectUpload => {
        if ('error' in object) {
          console.error(object.error);
          return false;
        }
        return Boolean(object.actions);
      },
    );
  }
}

0 comments on commit a48c02d

Please sign in to comment.