Mirror of https://github.com/github/docs.git
Fix robots.txt from being self-repeating (#16705)
* Fix robots.txt from being self-repeating * Add a test to make sure robots.txt doesn't have duplicate lines
This commit is contained in:
Родитель
bf99faa184
Коммит
e3cfacd8df
|
@ -4,6 +4,28 @@ const { deprecated } = require('../lib/enterprise-server-releases.js')
|
|||
|
||||
// Base robots.txt payload: allow every crawler by default, then append
// Disallow rules for content that should not be indexed.
let defaultResponse = 'User-agent: *'

// Disallow crawling of WIP localized content
for (const language of Object.values(languages)) {
  if (language.wip) {
    defaultResponse = defaultResponse + `\nDisallow: /${language.code}\nDisallow: /${language.code}/*\n`
  }
}

// Disallow crawling of WIP products
for (const product of Object.values(products)) {
  if (product.wip) {
    defaultResponse = defaultResponse + `\nDisallow: /*${product.href}\nDisallow: /*/enterprise/*/user${product.href}`
  }
}

// Disallow crawling of deprecated enterprise versions
for (const version of deprecated) {
  defaultResponse = defaultResponse + `\nDisallow: /*/enterprise-server@${version}/*` + `\nDisallow: /*/enterprise/${version}/*`
}

// Response body served when crawling must be blocked entirely.
const disallowAll = `User-agent: *
Disallow: /`
|
||||
|
||||
|
@ -21,27 +43,5 @@ module.exports = function (req, res, next) {
|
|||
return res.send(disallowAll)
|
||||
}
|
||||
|
||||
// Disallow crawling of WIP localized content
|
||||
Object.values(languages)
|
||||
.filter(language => language.wip)
|
||||
.forEach(language => {
|
||||
defaultResponse = defaultResponse.concat(`\nDisallow: /${language.code}\nDisallow: /${language.code}/*\n`)
|
||||
})
|
||||
|
||||
// Disallow crawling of WIP products
|
||||
Object.values(products)
|
||||
.filter(product => product.wip)
|
||||
.forEach(product => {
|
||||
defaultResponse = defaultResponse.concat(`\nDisallow: /*${product.href}\nDisallow: /*/enterprise/*/user${product.href}`)
|
||||
})
|
||||
|
||||
// Disallow crawling of Deprecated enterprise versions
|
||||
deprecated
|
||||
.forEach(version => {
|
||||
defaultResponse = defaultResponse
|
||||
.concat(`\nDisallow: /*/enterprise-server@${version}/*`)
|
||||
.concat(`\nDisallow: /*/enterprise/${version}/*`)
|
||||
})
|
||||
|
||||
return res.send(defaultResponse)
|
||||
}
|
||||
|
|
|
@ -110,4 +110,13 @@ describe('robots.txt', () => {
|
|||
})
|
||||
})
|
||||
})
|
||||
|
||||
// Guards against the robots.txt middleware repeating Disallow rules
// (the regression fixed in #16705): every non-blank line must be unique.
it('does not have duplicate lines', () => {
  const seen = new Set()
  res.text
    .split('\n')
    .filter(line => !/^\s*$/.test(line))
    .forEach(line => {
      expect(seen.has(line)).toBe(false)
      seen.add(line)
    })
})
|
||||
})
|
||||
|
|
Loading…
Reference in a new issue