Update tar to 3.0.1, handle stream errors in schema import (#1278)
Fixes #1252 Closes #1279
This commit is contained in:
Родитель
6e880bd930
Коммит
02d2a70771
|
@ -39,7 +39,6 @@
|
|||
"comment-json": "1.1.3",
|
||||
"coveralls": "2.13.1",
|
||||
"deepmerge": "1.3.2",
|
||||
"fstream": "1.0.11",
|
||||
"gfm.css": "1.1.1",
|
||||
"grunt": "1.0.1",
|
||||
"grunt-contrib-clean": "1.1.0",
|
||||
|
@ -66,7 +65,7 @@
|
|||
"sinon": "2.3.1",
|
||||
"webpack": "2.6.1",
|
||||
"webpack-dev-server": "2.4.5",
|
||||
"tar": "2.2.1"
|
||||
"tar": "3.0.1"
|
||||
},
|
||||
"dependencies": {
|
||||
"ajv": "5.0.1",
|
||||
|
|
|
@ -1,6 +1,5 @@
|
|||
import fs from 'fs';
|
||||
import path from 'path';
|
||||
import zlib from 'zlib';
|
||||
|
||||
import commentJson from 'comment-json';
|
||||
import merge from 'deepmerge';
|
||||
|
@ -504,26 +503,69 @@ inner.isBrowserSchema = (path) => {
|
|||
return schemaRegexes.some((re) => re.test(path));
|
||||
};
|
||||
|
||||
export function fetchSchemas({ inputPath, outputPath, version }) {
|
||||
return new Promise((resolve) => {
|
||||
let tarball;
|
||||
/**
 * Strip a null byte if it's the last character in a string.
 *
 * The tar library (v3+) no longer strips trailing NUL padding from entry
 * paths, so entry paths may arrive with a trailing '\u0000'.
 *
 * @param {string} str - The string to clean.
 * @returns {string} `str` without its trailing null byte, or `str`
 *   unchanged when it does not end with one.
 **/
export function stripTrailingNullByte(str) {
  // endsWith() checks the LAST character; the previous indexOf() check
  // matched the FIRST null byte, which missed strings that contain an
  // interior null byte in addition to the trailing one.
  if (str.endsWith('\u0000')) {
    return str.slice(0, -1);
  }
  return str;
}
|
||||
|
||||
function getTarballPath({ inputPath, version }) {
|
||||
return new Promise((resolve, reject) => {
|
||||
if (inputPath) {
|
||||
tarball = fs.createReadStream(inputPath);
|
||||
resolve(inputPath);
|
||||
} else if (version) {
|
||||
tarball = request.get(downloadUrl(version));
|
||||
const url = downloadUrl(version);
|
||||
const tmpPath = path.join('tmp', path.basename(url));
|
||||
request
|
||||
.get(url)
|
||||
.on('error', (error) => {
|
||||
reject(error);
|
||||
})
|
||||
.pipe(fs.createWriteStream(tmpPath))
|
||||
.on('error', (error) => {
|
||||
reject(error);
|
||||
})
|
||||
.on('close', () => {
|
||||
resolve(tmpPath);
|
||||
});
|
||||
} else {
|
||||
reject(new Error('inputPath or version is required'));
|
||||
}
|
||||
tarball
|
||||
.pipe(zlib.createGunzip())
|
||||
// eslint-disable-next-line new-cap
|
||||
.pipe(tar.Parse())
|
||||
.on('entry', (entry) => {
|
||||
if (inner.isBrowserSchema(entry.path)) {
|
||||
const filePath = path.join(outputPath, path.basename(entry.path));
|
||||
entry.pipe(fs.createWriteStream(filePath));
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
export function fetchSchemas({ inputPath, outputPath, version }) {
|
||||
return new Promise((resolve, reject) => {
|
||||
getTarballPath({ inputPath, version })
|
||||
.then((tarballPath) => {
|
||||
const tarball = fs.createReadStream(tarballPath);
|
||||
tarball
|
||||
.pipe(new tar.Parse())
|
||||
.on('entry', (entry) => {
|
||||
// The updated tar library doesn't always strip null bytes from the
|
||||
// end of strings anymore so we get to do that here.
|
||||
const entryPath = stripTrailingNullByte(entry.path);
|
||||
if (inner.isBrowserSchema(entryPath)) {
|
||||
const filePath = path.join(outputPath, path.basename(entryPath));
|
||||
entry.pipe(fs.createWriteStream(filePath));
|
||||
} else {
|
||||
entry.resume();
|
||||
}
|
||||
})
|
||||
.on('error', (error) => {
|
||||
reject(error);
|
||||
})
|
||||
.on('end', () => {
|
||||
fs.unlinkSync(tarballPath);
|
||||
resolve();
|
||||
});
|
||||
})
|
||||
.on('end', () => {
|
||||
resolve();
|
||||
.catch((error) => {
|
||||
reject(error);
|
||||
});
|
||||
});
|
||||
}
|
||||
|
|
|
@ -1,7 +1,6 @@
|
|||
import fs from 'fs';
|
||||
import fstream from 'fstream';
|
||||
import path from 'path';
|
||||
import zlib from 'zlib';
|
||||
import stream from 'stream';
|
||||
|
||||
import request from 'request';
|
||||
import tar from 'tar';
|
||||
|
@ -21,8 +20,12 @@ import {
|
|||
rewriteKey,
|
||||
rewriteOptionalToRequired,
|
||||
rewriteValue,
|
||||
stripTrailingNullByte,
|
||||
} from 'schema/firefox-schemas-import';
|
||||
|
||||
// Get a reference to unlinkSync so it won't get stubbed later.
|
||||
const { unlinkSync } = fs;
|
||||
|
||||
describe('firefox schema import', () => {
|
||||
let sandbox;
|
||||
|
||||
|
@ -32,7 +35,7 @@ describe('firefox schema import', () => {
|
|||
|
||||
function removeDir(dirPath) {
|
||||
fs.readdirSync(dirPath).forEach(
|
||||
(file) => fs.unlinkSync(path.join(dirPath, file)));
|
||||
(file) => unlinkSync(path.join(dirPath, file)));
|
||||
fs.rmdirSync(dirPath);
|
||||
}
|
||||
|
||||
|
@ -1030,23 +1033,32 @@ describe('firefox schema import', () => {
|
|||
|
||||
describe('fetchSchemas', () => {
|
||||
const outputPath = 'tests/schema/imported';
|
||||
const expectedTarballPath = 'tmp/FIREFOX_AURORA_54_BASE.tar.gz';
|
||||
|
||||
beforeEach(() => {
|
||||
assert.notOk(
|
||||
fs.existsSync(expectedTarballPath),
|
||||
`Tar file already exists at ${expectedTarballPath}`);
|
||||
createDir(outputPath);
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
assert.notOk(
|
||||
fs.existsSync(expectedTarballPath),
|
||||
`Tar file was not cleaned up ${expectedTarballPath}`);
|
||||
removeDir(outputPath);
|
||||
});
|
||||
|
||||
it('rejects if there is no inputPath or version', () => {
|
||||
return fetchSchemas({}).then(
|
||||
() => assert.ok(false, 'expected rejection'),
|
||||
(err) => assert.equal(err.message, 'inputPath or version is required'));
|
||||
});
|
||||
|
||||
it('downloads the firefox source and extracts the schemas', () => {
|
||||
// eslint-disable-next-line new-cap
|
||||
const packer = tar.Pack({ noProprietary: true });
|
||||
const schemaPath = 'tests/schema/firefox';
|
||||
// eslint-disable-next-line new-cap
|
||||
const tarball = fstream.Reader({ path: schemaPath, type: 'Directory' })
|
||||
.pipe(packer)
|
||||
.pipe(zlib.createGzip());
|
||||
const cwd = 'tests/schema';
|
||||
const schemaPath = 'firefox';
|
||||
const tarball = tar.create({ cwd, gzip: true }, [schemaPath]);
|
||||
sandbox
|
||||
.stub(inner, 'isBrowserSchema')
|
||||
.withArgs('firefox/cookies.json')
|
||||
|
@ -1065,13 +1077,9 @@ describe('firefox schema import', () => {
|
|||
});
|
||||
|
||||
it('extracts the schemas from a local file', () => {
|
||||
// eslint-disable-next-line new-cap
|
||||
const packer = tar.Pack({ noProprietary: true });
|
||||
const schemaPath = 'tests/schema/firefox';
|
||||
// eslint-disable-next-line new-cap
|
||||
const tarball = fstream.Reader({ path: schemaPath, type: 'Directory' })
|
||||
.pipe(packer)
|
||||
.pipe(zlib.createGzip());
|
||||
const cwd = 'tests/schema';
|
||||
const schemaPath = 'firefox';
|
||||
const tarball = tar.create({ cwd, gzip: true }, [schemaPath]);
|
||||
sandbox
|
||||
.stub(inner, 'isBrowserSchema')
|
||||
.withArgs('firefox/cookies.json')
|
||||
|
@ -1082,12 +1090,91 @@ describe('firefox schema import', () => {
|
|||
.stub(fs, 'createReadStream')
|
||||
.withArgs('mozilla-central.tgz')
|
||||
.returns(tarball);
|
||||
sandbox
|
||||
.stub(fs, 'unlinkSync')
|
||||
.withArgs('mozilla-central.tgz')
|
||||
.returns(undefined);
|
||||
assert.deepEqual(fs.readdirSync(outputPath), []);
|
||||
return fetchSchemas({ inputPath: 'mozilla-central.tgz', outputPath })
|
||||
.then(() => {
|
||||
assert.deepEqual(fs.readdirSync(outputPath), ['manifest.json']);
|
||||
});
|
||||
});
|
||||
|
||||
it('handles errors when parsing the tarball', () => {
|
||||
const cwd = 'tests/schema';
|
||||
const schemaPath = 'firefox';
|
||||
const tarball = tar.create({ cwd, gzip: true }, [schemaPath]);
|
||||
sandbox
|
||||
.stub(fs, 'createReadStream')
|
||||
.withArgs('mozilla-central.tgz')
|
||||
.returns(tarball);
|
||||
const extractedStream = new stream.Duplex({
|
||||
read() {
|
||||
this.emit('error', new Error('stream error'));
|
||||
},
|
||||
});
|
||||
sandbox
|
||||
.stub(tar, 'Parse')
|
||||
.returns(extractedStream);
|
||||
assert.deepEqual(fs.readdirSync(outputPath), []);
|
||||
return fetchSchemas({ inputPath: 'mozilla-central.tgz', outputPath })
|
||||
.then(() => {
|
||||
assert.notOk(true, 'unexpected success');
|
||||
}, () => {
|
||||
assert.ok(true, 'error was propagated');
|
||||
});
|
||||
});
|
||||
|
||||
it('handles errors when downloading', () => {
|
||||
const mockStream = new stream.Readable({
|
||||
read() {
|
||||
this.emit('error', new Error('stream error'));
|
||||
},
|
||||
});
|
||||
sandbox
|
||||
.stub(request, 'get')
|
||||
.withArgs('https://hg.mozilla.org/mozilla-central/archive/FIREFOX_AURORA_54_BASE.tar.gz')
|
||||
.returns(mockStream);
|
||||
assert.deepEqual(fs.readdirSync(outputPath), []);
|
||||
return fetchSchemas({ version: 54, outputPath })
|
||||
.then(() => {
|
||||
assert.notOk(true, 'unexpected success');
|
||||
}, () => {
|
||||
// Manually remove the tar file since it doesn't get cleaned up.
|
||||
fs.unlinkSync('tmp/FIREFOX_AURORA_54_BASE.tar.gz');
|
||||
assert.ok(true, 'error was propagated');
|
||||
});
|
||||
});
|
||||
|
||||
it('handles errors when writing the download', () => {
|
||||
const cwd = 'tests/schema';
|
||||
const schemaPath = 'firefox';
|
||||
const tarball = tar.create({ cwd, gzip: true }, [schemaPath]);
|
||||
sandbox
|
||||
.stub(request, 'get')
|
||||
.withArgs('https://hg.mozilla.org/mozilla-central/archive/FIREFOX_AURORA_54_BASE.tar.gz')
|
||||
.returns(tarball);
|
||||
const mockStream = new stream.Duplex({
|
||||
read() {
|
||||
this.emit('error', new Error('stream error'));
|
||||
},
|
||||
write() {
|
||||
this.emit('error', new Error('stream error'));
|
||||
},
|
||||
});
|
||||
sandbox
|
||||
.stub(fs, 'createWriteStream')
|
||||
.withArgs('tmp/FIREFOX_AURORA_54_BASE.tar.gz')
|
||||
.returns(mockStream);
|
||||
assert.deepEqual(fs.readdirSync(outputPath), []);
|
||||
return fetchSchemas({ version: 54, outputPath })
|
||||
.then(() => {
|
||||
assert.notOk(true, 'unexpected success');
|
||||
}, () => {
|
||||
assert.ok(true, 'error was propagated');
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('isBrowserSchema', () => {
|
||||
|
@ -1289,4 +1376,26 @@ describe('firefox schema import', () => {
|
|||
assert.deepEqual(filterSchemas(schemas), schemas);
|
||||
});
|
||||
});
|
||||
|
||||
describe('stripTrailingNullByte', () => {
|
||||
it('strips a trailing null byte if present at the end', () => {
|
||||
const str = 'foo\u0000';
|
||||
assert.equal(stripTrailingNullByte(str), 'foo');
|
||||
});
|
||||
|
||||
it('returns the string unchanged if not present', () => {
|
||||
const str = 'bar';
|
||||
assert.strictEqual(stripTrailingNullByte(str), str);
|
||||
});
|
||||
|
||||
it('returns the string unchanged if not at the end', () => {
|
||||
const str = 'b\u0000az';
|
||||
assert.strictEqual(stripTrailingNullByte(str), str);
|
||||
});
|
||||
|
||||
it('handles empty strings', () => {
|
||||
const str = '';
|
||||
assert.strictEqual(stripTrailingNullByte(str), str);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
|
Загрузка…
Ссылка в новой задаче