Merge pull request #1 from TheNewSource/add-tests-and-ci

Add tests and ci
This commit is contained in:
David Thetford 2018-01-17 14:56:22 -06:00 коммит произвёл GitHub
Родитель 6f2b4f58dd b73619ce87
Коммит a351d36511
Не найден ключ, соответствующий данной подписи
Идентификатор ключа GPG: 4AEE18F83AFDEB23
10 изменённых файлов: 1827 добавлений и 12823 удалений

19
.travis.yml Normal file
Просмотреть файл

@ -0,0 +1,19 @@
# Travis CI configuration for lightcrawler.
# Uses the Trusty image on container-based (sudo-less) infrastructure.
dist: trusty
sudo: false
language: node_js
# Suppress all build-result emails.
notifications:
email:
on_success: never
on_failure: never
# Only build pushes to master (PR builds are unaffected).
branches:
only:
- master
node_js:
- 8
# Chrome (beta channel) is required so Lighthouse can drive a real browser.
addons:
chrome: beta

Просмотреть файл

@ -1,13 +1,21 @@
# lightcrawler
Crawl a website and run it through Google lighthouse
Crawl a website and run it through [Google Lighthouse](https://github.com/GoogleChrome/lighthouse).
## Installing
```bash
npm install --save-dev lightcrawler
```
## Running
```bash
lightcrawler --url https://atom.io/ --config lightcrawler-config.json
```
where `lightcrawler-config.json` looks something like this:
```json
{
"extends": "lighthouse:default",
@ -38,4 +46,18 @@ where `lightcrawler-config.json` looks something like this:
}
```
### Running on CI
You can set this up on Travis with the following `.travis.yml` config:
```yml
dist: trusty
addons:
chrome: beta
```
See https://docs.travis-ci.com/user/gui-and-headless-browsers/#Using-the-Chrome-addon-in-the-headless-mode
for more details.
Enjoy!

Просмотреть файл

@ -4,7 +4,8 @@ const Crawler = require('simplecrawler')
const path = require('path')
const queue = require('async/queue')
const fs = require('fs')
const colors = require('colors')
require('colors')
const stats = {
pageCount: 0,
@ -34,11 +35,11 @@ module.exports = (options) => {
return links
}
let totalErrorCount = 0
let lighthouseFailed = false
const lighthouseQueue = queue((url, callback) => {
runLighthouse(url, configPath, (errorCount) => {
totalErrorCount += errorCount
runLighthouse(url, configPath, (error) => {
if (error != null) lighthouseFailed = true
callback()
})
}, config.settings.crawler.maxChromeInstances)
@ -49,7 +50,7 @@ module.exports = (options) => {
crawler.once('complete', () => {
lighthouseQueue.drain = () => {
printStats()
if (totalErrorCount > 0) {
if (lighthouseFailed) {
process.exit(1)
}
}
@ -89,7 +90,7 @@ function runLighthouse (url, configPath, callback) {
report = JSON.parse(output)
} catch (parseError) {
console.error(`Parsing JSON report output failed: ${output}`)
callback(1)
callback(parseError)
return
}
@ -100,8 +101,8 @@ function runLighthouse (url, configPath, callback) {
stats.passedAuditsCount++
} else {
if (!displayedCategory) {
console.log();
console.log(category.name.bold.underline);
console.log()
console.log(category.name.bold.underline)
displayedCategory = true
}
errorCount++
@ -146,28 +147,32 @@ function runLighthouse (url, configPath, callback) {
})
})
callback(errorCount)
if (errorCount > 0) {
callback(new Error(`Lighthouse violations: ${errorCount}`))
} else {
callback()
}
})
}
// Print a summary of the crawl to stdout: page counts, timing, and
// pass/violation totals accumulated in the module-level `stats` object.
// Relies on the `colors` package having extended String.prototype
// (.bold/.underline/.green/.red).
function printStats () {
  console.log()
  console.log()
  console.log('Lighthouse Summary'.bold.underline)
  console.log(` Total Pages Scanned: ${stats.pageCount}`)
  console.log(` Total Auditing Time: ${new Date() - stats.startTime} ms`)
  // Sum per-page audit durations so we can report the mean audit time.
  const totalTime = Object.keys(stats.auditTimesByPageUrl).reduce((sum, url) => {
    const {endTime, startTime} = stats.auditTimesByPageUrl[url]
    return (endTime - startTime) + sum
  }, 0)
  console.log(` Average Page Audit Time: ${Math.round(totalTime / stats.pageCount)} ms`)
  console.log(` Total Audits Passed: ${stats.passedAuditsCount}`, '\u2713'.green)
  if (Object.keys(stats.violationCounts).length === 0) {
    console.log(` Total Violations: None! \\o/ 🎉`)
  } else {
    console.log(` Total Violations:`)
    Object.keys(stats.violationCounts).forEach(category => {
      console.log(` ${category}: ${stats.violationCounts[category]}`, '\u2717'.red)
    })
  }
}

1859
package-lock.json сгенерированный

Разница между файлами не показана из-за своего большого размера Загрузить разницу

Просмотреть файл

@ -1,10 +1,11 @@
{
"name": "lightcrawler",
"version": "0.0.9",
"description": "",
"description": "Crawl sites and run pages through Google Lighthouse",
"repository": "https://github.com/github/lighthouse",
"main": "index.js",
"scripts": {
"test": "echo \"Error: no test specified\" && exit 1"
"test": "mocha test && standard --env mocha"
},
"bin": {
"lightcrawler": "./cli.js"
@ -19,5 +20,15 @@
"lighthouse": "^2.1.0",
"simplecrawler": "^1.1.3",
"yargs": "^8.0.2"
}
},
"devDependencies": {
"express": "^4.15.3",
"get-port": "^3.1.0",
"mocha": "^3.4.2",
"standard": "^10.0.2"
},
"files": [
"cli.js",
"index.js"
]
}

12568
report.json

Различия файлов скрыты, потому что одна или несколько строк слишком длинны

85
test/cli-test.js Normal file
Просмотреть файл

@ -0,0 +1,85 @@
const assert = require('assert')
const ChildProcess = require('child_process')
const path = require('path')
const express = require('express')
const getPort = require('get-port')
describe('lightcrawler CLI', function () {
  // Lighthouse launches a real Chrome instance, so allow a generous timeout.
  this.timeout(60000)
  let server = null
  let pagesURL = null
  // Serve the HTML fixtures from a local express server on a free port
  // so the CLI has a real URL to crawl.
  beforeEach(async function () {
    const port = await getPort(4000)
    const app = express()
    pagesURL = `http://localhost:${port}/pages`
    app.use('/pages', express.static(path.join(__dirname, 'fixtures', 'pages')))
    return new Promise((resolve) => {
      server = app.listen(port, 'localhost', resolve)
    })
  })
  afterEach(function (done) {
    server.close(function () {
      done()
    })
  })
  describe('when the page has violations', function () {
    it('reports failures and exits non-zero', async function () {
      const {code, stdout, stderr} = await runLighthouse({
        config: path.join(__dirname, 'fixtures', 'config.json'),
        url: `${pagesURL}/document-write.html`
      })
      // strictEqual avoids the coercing comparison of the legacy assert.equal.
      assert.strictEqual(code, 1)
      assert.strictEqual(stderr, '')
      assert.strictEqual(stdout.includes('no-document-write'), true, stdout)
      assert.strictEqual(stdout.includes('Best Practices: 1'), true, stdout)
    })
  })
  describe('when the page has no violations', function () {
    it('exits with zero', async function () {
      const {code, stdout, stderr} = await runLighthouse({
        config: path.join(__dirname, 'fixtures', 'config.json'),
        url: `${pagesURL}/empty.html`
      })
      assert.strictEqual(code, 0)
      assert.strictEqual(stderr, '')
      assert.strictEqual(stdout.includes('Total Violations: None'), true, stdout)
    })
  })
})
// Run the lightcrawler CLI as a child process against `url` with the given
// `config` file. Resolves with {code, stdout, stderr} (output trimmed) once
// the process exits; rejects if the process fails to spawn.
function runLighthouse ({config, url}) {
  return new Promise((resolve, reject) => {
    const cliPath = path.join(__dirname, '..', 'cli.js')
    const child = ChildProcess.spawn(process.execPath, [
      cliPath,
      '--config',
      config,
      '--url',
      url
    ])

    let stdout = ''
    let stderr = ''
    child.stdout.on('data', (chunk) => { stdout += chunk })
    child.stderr.on('data', (chunk) => { stderr += chunk })

    child.on('error', reject)
    child.on('close', (code) => {
      resolve({code, stdout: stdout.trim(), stderr: stderr.trim()})
    })
  })
}

12
test/fixtures/config.json поставляемый Normal file
Просмотреть файл

@ -0,0 +1,12 @@
{
"extends": "lighthouse:default",
"settings": {
"crawler": {
"maxDepth": 1,
"maxChromeInstances": 1
},
"onlyAudits": [
"no-document-write"
]
}
}

12
test/fixtures/pages/document-write.html поставляемый Normal file
Просмотреть файл

@ -0,0 +1,12 @@
<!-- Test fixture: this page calls document.write, which the cli-test
     expects to trigger a Lighthouse "no-document-write" violation. -->
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8">
<title></title>
</head>
<body>
<script>
document.write('hello world')
</script>
</body>
</html>

9
test/fixtures/pages/empty.html поставляемый Normal file
Просмотреть файл

@ -0,0 +1,9 @@
<!-- Test fixture: an empty page with no script, expected by cli-test
     to produce zero Lighthouse violations. -->
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8">
<title></title>
</head>
<body>
</body>
</html>