Mirror of https://github.com/github/docs.git
repo sync
This commit is contained in:
Commit
82b490ce59
@ -1,6 +1,3 @@
ALGOLIA_API_KEY=
ALGOLIA_APPLICATION_ID=
ALLOW_TRANSLATION_COMMITS=
EARLY_ACCESS_HOSTNAME=
EARLY_ACCESS_SHARED_SECRET=
GITHUB_TOKEN=
@ -7,7 +7,7 @@ labels:
|
|||
assignees: ''
|
||||
---
|
||||
<!--
|
||||
HUBBERS BEWARE! THE GITHUB/DOCS REPO IS PUBLIC TO THE ENTIRE INTERNET. OPEN AN ISSUE IN GITHUB/DOCS-CONTENT https://github.com/github/docs-content/issues/new/choose INSTEAD.
|
||||
HUBBERS BEWARE! THE GITHUB/DOCS REPO IS PUBLIC TO THE ENTIRE INTERNET. OPEN AN ISSUE IN GITHUB/DOCS-CONTENT INSTEAD.
|
||||
-->
|
||||
|
||||
<!--
|
||||
|
|
|
@ -7,7 +7,7 @@ assignees: ''
|
|||
---
|
||||
|
||||
<!--
|
||||
HUBBERS BEWARE! THE GITHUB/DOCS REPO IS PUBLIC TO THE ENTIRE INTERNET. OPEN AN ISSUE IN GITHUB/DOCS-CONTENT https://github.com/github/docs-content/issues/new/choose INSTEAD.
|
||||
HUBBERS BEWARE! THE GITHUB/DOCS REPO IS PUBLIC TO THE ENTIRE INTERNET. OPEN AN ISSUE IN GITHUB/DOCS-CONTENT INSTEAD.
|
||||
-->
|
||||
|
||||
<!--
|
||||
|
|
|
@ -0,0 +1,39 @@
name: Close unwanted pull requests
on:
pull_request:
paths:
- '.github/workflows/**'
- '.github/CODEOWNERS'
- 'translations/**'
- 'assets/fonts/**'
- 'data/graphql/**'
- 'lib/graphql/**'
- 'lib/redirects/**'
- 'lib/webhooks/**'
jobs:
close_unwanted_pull_requests:
if: github.repository == 'github/docs' && github.event.pull_request.user.login != 'Octomerger'
runs-on: ubuntu-latest
steps:
- uses: actions/github-script@626af12fe9a53dc2972b48385e7fe7dec79145c9
with:
script: |
await github.issues.createComment({
...context.repo,
issue_number: context.payload.pull_request.number,
body:
`Thanks for contributing! We do not accept community changes to these files at this time.
- '.github/workflows/**'
- '.github/CODEOWNERS'
- 'translations/**'
- 'assets/fonts/**'
- 'data/graphql/**'
- 'lib/graphql/**'
- 'lib/redirects/**'
- 'lib/webhooks/**'`
})
await github.issues.update({
...context.repo,
issue_number: context.payload.pull_request.number,
state: 'closed'
})
@ -37,4 +37,4 @@ jobs:
|
|||
if: failure()
|
||||
env:
|
||||
SLACK_WEBHOOK: ${{ secrets.DOCS_ALERTS_SLACK_WEBHOOK }}
|
||||
SLACK_MESSAGE: The last Algolia workflow run for ${{github.repository}} failed. See https://github.com/github/docs-internal/actions?query=workflow%3AAlgolia
|
||||
SLACK_MESSAGE: The last Algolia workflow run for ${{github.repository}} failed. Search actions for `workflow:Algolia`
|
||||
|
|
|
@ -27,7 +27,7 @@ jobs:
|
|||
with:
|
||||
cancel_others: 'false'
|
||||
github_token: ${{ github.token }}
|
||||
paths: '[".github/workflows/test.yml",".node-version", ".npmrc", "app.json", "content/**", "data/**","lib/**", "Dockerfile", "feature-flags.json", "Gemfile", "Gemfile.lock", "middleware/**", "node_modules/**","package.json", "package-lock.json", "server.js", "tests/**", "translations/**", "Procfile", "webpack.config.js"]'
|
||||
paths: '[".github/workflows/test.yml", ".node-version", ".npmrc", "app.json", "content/**", "data/**","lib/**", "Dockerfile", "feature-flags.json", "Gemfile", "Gemfile.lock", "middleware/**", "node_modules/**","package.json", "package-lock.json", "server.js", "tests/**", "translations/**", "Procfile", "webpack.config.js"]'
|
||||
|
||||
test:
|
||||
needs: see_if_should_skip
|
||||
|
@ -44,6 +44,9 @@ jobs:
|
|||
- if: ${{ needs.see_if_should_skip.outputs.should_skip != 'true' }}
|
||||
name: Check out repo
|
||||
uses: actions/checkout@5a4ac9002d0be2fb38bd78e4b4dbde5606d7042f
|
||||
with:
|
||||
# Enables cloning the Early Access repo later with the relevant PAT
|
||||
persist-credentials: 'false'
|
||||
|
||||
- if: ${{ needs.see_if_should_skip.outputs.should_skip != 'true' }}
|
||||
name: Setup node
|
||||
|
@ -70,8 +73,15 @@ jobs:
|
|||
name: Install dependencies
|
||||
run: npm ci
|
||||
|
||||
- if: ${{ needs.see_if_should_skip.outputs.should_skip != 'true' }}
|
||||
name: Run build script
|
||||
- name: Clone early access
|
||||
if: ${{ needs.see_if_should_skip.outputs.should_skip != 'true' && github.repository == 'github/docs-internal' }}
|
||||
run: npm run heroku-postbuild
|
||||
env:
|
||||
DOCUBOT_REPO_PAT: ${{ secrets.DOCUBOT_REPO_PAT }}
|
||||
GIT_BRANCH: ${{ github.ref }}
|
||||
|
||||
- name: Run build script
|
||||
if: ${{ needs.see_if_should_skip.outputs.should_skip != 'true' && github.repository != 'github/docs-internal' }}
|
||||
run: npm run build
|
||||
|
||||
- if: ${{ needs.see_if_should_skip.outputs.should_skip != 'true' }}
|
||||
|
@ -79,10 +89,3 @@ jobs:
|
|||
run: npx jest tests/${{ matrix.test-group }}/
|
||||
env:
|
||||
NODE_OPTIONS: '--max_old_space_size=4096'
|
||||
|
||||
- name: Send Slack notification if workflow fails
|
||||
uses: rtCamp/action-slack-notify@e17352feaf9aee300bf0ebc1dfbf467d80438815
|
||||
if: failure() && github.ref == 'early-access'
|
||||
env:
|
||||
SLACK_WEBHOOK: ${{ secrets.DOCS_ALERTS_SLACK_WEBHOOK }}
|
||||
SLACK_MESSAGE: 'Tests are failing on the `early-access` branch. https://github.com/github/docs-internal/tree/early-access'
|
||||
|
|
|
@ -1,9 +1,17 @@
.algolia-cache
.DS_Store
.env
node_modules
/node_modules/
npm-debug.log
coverage
coverage/
.linkinator
broken_links.md
/assets/images/early-access
/content/early-access
/data/early-access
dist

# blc: broken link checker
blc_output.log
blc_output_internal.log
/dist/
broken_links.md
|
||||
|
|
|
@ -1,16 +0,0 @@
## Importing Aftermarket Octicons

#### Background
Some octicons are missing from the project's current version of the `octicons` gem. Because this project is being replaced soon and updating `octicons` would require [significant changes](https://github.com/github/docs-internal/issues/6250#issuecomment-339730405), new octicons should be added manually as needed, via the following process:

#### How to add

1. Locate the missing octicon `.svg` in [primer/octicons](https://github.com/primer/octicons/tree/master/lib/svg) and download it to your local `app/assets/images/octicons` folder
1. Add a line to `app/assets/stylesheets/shared/_octicons.scss` for the new octicon, like so:
```css
.octicon-<YOUR OCTICON NAME>:before {
  content: url("#{$new-octicons-path}/<YOUR OCTICON FILENAME>.svg")
}
```

You may now use the new octicon in your content as normal! :tada:
|
|
@ -46,16 +46,16 @@ typing `git remote -v`:
|
|||
```shell
|
||||
$ git remote -v
|
||||
# View existing remotes
|
||||
> origin https://github.com/github/reactivecocoa.git (fetch)
|
||||
> origin https://github.com/github/reactivecocoa.git (push)
|
||||
> origin https://github.com/ghost/reactivecocoa.git (fetch)
|
||||
> origin https://github.com/ghost/reactivecocoa.git (push)
|
||||
|
||||
$ git remote set-url origin https://github.com/github/ReactiveCocoa.git
|
||||
$ git remote set-url origin https://github.com/ghost/ReactiveCocoa.git
|
||||
# Change the 'origin' remote's URL
|
||||
|
||||
$ git remote -v
|
||||
# Verify new remote URL
|
||||
> origin https://github.com/github/ReactiveCocoa.git (fetch)
|
||||
> origin https://github.com/github/ReactiveCocoa.git (push)
|
||||
> origin https://github.com/ghost/ReactiveCocoa.git (fetch)
|
||||
> origin https://github.com/ghost/ReactiveCocoa.git (push)
|
||||
```
|
||||
|
||||
Alternatively, you can change the URL through our
|
||||
|
|
|
@ -298,7 +298,7 @@ You can quickly analyze small portions of a monorepo when you modify code in spe
|
|||
|
||||
If your workflow for {% data variables.product.prodname_code_scanning %} accesses a private repository, other than the repository that contains the workflow, you'll need to configure Git to authenticate with a personal access token. Define the secret in the runner environment by using `jobs.<job_id>.steps.env` in your workflow before any {% data variables.product.prodname_codeql %} actions. For more information, see "[Creating a personal access token for the command line](/github/authenticating-to-github/creating-a-personal-access-token-for-the-command-line)" and "[Creating and storing encrypted secrets](/actions/configuring-and-managing-workflows/creating-and-storing-encrypted-secrets)."
|
||||
|
||||
For example, the following configuration has Git replace the full URLs to the `github/foo`, `github/bar`, and `github/baz` repositories on {% data variables.product.prodname_dotcom_the_website %} with URLs that include the personal access token that you store in the `ACCESS_TOKEN` environment variable.
|
||||
For example, the following configuration has Git replace the full URLs to the `ghost/foo`, `ghost/bar`, and `ghost/baz` repositories on {% data variables.product.prodname_dotcom_the_website %} with URLs that include the personal access token that you store in the `ACCESS_TOKEN` environment variable.
|
||||
|
||||
{% raw %}
|
||||
```yaml
|
||||
|
@ -307,9 +307,9 @@ steps:
|
|||
env:
|
||||
TOKEN: ${{ secrets.ACCESS_TOKEN }}
|
||||
run: |
|
||||
git config --global url."https://${TOKEN}@github.com/github/foo".insteadOf "https://github.com/github/foo"
|
||||
git config --global url."https://${TOKEN}@github.com/github/bar".insteadOf "https://github.com/github/bar"
|
||||
git config --global url."https://${TOKEN}@github.com/github/baz".insteadOf "https://github.com/github/baz"
|
||||
git config --global url."https://${TOKEN}@github.com/ghost/foo".insteadOf "https://github.com/ghost/foo"
|
||||
git config --global url."https://${TOKEN}@github.com/ghost/bar".insteadOf "https://github.com/ghost/bar"
|
||||
git config --global url."https://${TOKEN}@github.com/ghost/baz".insteadOf "https://github.com/ghost/baz"
|
||||
```
|
||||
{% endraw %}
|
||||
|
||||
|
|
|
@ -109,7 +109,7 @@ A location within a programming artifact, such as a file in the repository or a
|
|||
|
||||
| Name | Description |
|
||||
|----|----|
|
||||
| `artifactLocation.uri` | **Required.** A URI indicating the location of an artifact, usually a file either in the repository or generated during a build. If the URI is relative, it should be relative to the root of the {% data variables.product.prodname_dotcom %} repository being analyzed. For example, main.js or src/script.js are relative to the root of the repository. If the URI is absolute, {% data variables.product.prodname_code_scanning %} can use the URI to check out the artifact and match up files in the repository. For example, `https://github.com/github/example/blob/00/src/promiseUtils.js`.
| `artifactLocation.uri` | **Required.** A URI indicating the location of an artifact, usually a file either in the repository or generated during a build. If the URI is relative, it should be relative to the root of the {% data variables.product.prodname_dotcom %} repository being analyzed. For example, main.js or src/script.js are relative to the root of the repository. If the URI is absolute, {% data variables.product.prodname_code_scanning %} can use the URI to check out the artifact and match up files in the repository. For example, `https://github.com/ghost/example/blob/00/src/promiseUtils.js`.
|
||||
| `region.startLine` | **Required.** The line number of the first character in the region.
|
||||
| `region.startColumn` | **Required.** The column number of the first character in the region.
|
||||
| `region.endLine` | **Required.** The line number of the last character in the region.
|
||||
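For illustration only, a minimal location object using the properties above might look like the following sketch; the file path and the line and column values are hypothetical:

```javascript
// Sketch of a SARIF physicalLocation entry; values are placeholders.
const physicalLocation = {
  artifactLocation: {
    uri: 'src/promiseUtils.js' // relative to the repository root
  },
  region: {
    startLine: 4,    // first line of the flagged region
    startColumn: 10, // first column of the flagged region
    endLine: 4       // last line of the flagged region
  }
}
```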
|
|
|
@ -27,7 +27,7 @@ If you want to explore repositories about a certain topic, find projects to cont

The `is:featured` search qualifier will narrow search results to the topics with the most repositories on {% data variables.product.product_name %}. These topics are also featured at https://github.com/topics/.

The `is:curated` search qualifier will narrow search results to topics that community members have added extra information to. For more information, see the explore repository at https://github.com/github/explore.
The `is:curated` search qualifier will narrow search results to topics that community members have added extra information to. For more information, see the [explore repository](https://github.com/github/explore).

You can filter topics based on when they were created using the date parameter and `created:`, or based on how many repositories are associated with a topic using `repositories:n` (for example, `repositories:>5000`). Both of these qualifiers can use the [greater than and less than range qualifiers](/articles/understanding-the-search-syntax).
|
||||
|
||||
|
|
|
@ -2,7 +2,6 @@
|
|||
title: API previews
|
||||
intro: You can use API previews to try out new features and provide feedback before these features become official.
|
||||
redirect_from:
|
||||
- /early-access/
|
||||
- /v3/previews
|
||||
versions:
|
||||
free-pro-team: '*'
|
||||
|
@ -60,7 +59,7 @@ Create, list, update, and delete environments for pre-receive hooks.
|
|||
{% if enterpriseServerVersions contains currentVersion and currentVersion ver_lt "enterprise-server@2.22" %}
|
||||
### Integrations
|
||||
|
||||
Manage [integrations](/early-access/integrations/) through the API.
|
||||
Manage [integrations](/v3/integrations) through the API.
|
||||
|
||||
**Custom media type:** `machine-man-preview`
|
||||
**Announced:** [2016-09-14](https://developer.github.com/changes/2016-09-14-Integrations-Early-Access/)
|
||||
|
|
crowdin.yml
|
@ -3,17 +3,19 @@ files:
|
|||
translation: /translations/%locale%/%original_path%/%original_file_name%
|
||||
ignore:
|
||||
- '/content/README.md'
|
||||
- '/content/early-access'
|
||||
- source: /data/**/*.yml
|
||||
translation: /translations/%locale%/%original_path%/%original_file_name%
|
||||
- source: /data/**/*.md
|
||||
translation: /translations/%locale%/%original_path%/%original_file_name%
|
||||
ignore:
|
||||
- 'data/README.md'
|
||||
- 'data/reusables/README.md'
|
||||
- 'data/variables/product.yml'
|
||||
- 'data/variables/README.md'
|
||||
- 'data/graphql'
|
||||
- 'data/products.yml'
|
||||
- '/data/README.md'
|
||||
- '/data/reusables/README.md'
|
||||
- '/data/variables/product.yml'
|
||||
- '/data/variables/README.md'
|
||||
- '/data/early-access'
|
||||
- '/data/graphql'
|
||||
- '/data/products.yml'
|
||||
|
||||
# These end up as env vars used by the GitHub Actions workflow
|
||||
project_id_env: CROWDIN_PROJECT_ID
|
||||
|
|
|
@ -5,14 +5,14 @@ by [automation](../script/graphql/README.md). These files **should not** be edit
|
|||
|
||||
Dotcom source files:
|
||||
```
|
||||
https://github.com/github/github/tree/master/config/schema.docs.graphql
|
||||
https://github.com/github/github/tree/master/config/graphql_previews.yml
|
||||
https://github.com/github/github/tree/master/config/graphql_upcoming_changes.yml
|
||||
config/schema.docs.graphql
|
||||
config/graphql_previews.yml
|
||||
graphql_upcoming_changes.yml
|
||||
```
|
||||
|
||||
Enterprise source files:
|
||||
```
|
||||
https://github.com/github/github/tree/enterprise-VERSION-release/config/schema.docs-enterprise.graphql
|
||||
https://github.com/github/github/tree/enterprise-VERSION-release/config/graphql_previews.enterprise.yml
|
||||
https://github.com/github/github/tree/enterprise-VERSION-release/config/graphql_upcoming_changes.public-enterprise.yml
|
||||
enterprise-VERSION-release/config/schema.docs-enterprise.graphql
|
||||
enterprise-VERSION-release/config/graphql_previews.enterprise.yml
|
||||
enterprise-VERSION-release/config/graphql_upcoming_changes.public-enterprise.yml
|
||||
```
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
1. From the application server, navigate to the latest release of {% data variables.product.prodname_insights %} on the [Releases page](https://github.com/github/insights-releases/releases/latest) for `github/insights-releases`.
|
||||
1. From the application server, navigate to the latest release of {% data variables.product.prodname_insights %} on the Releases page for `github/insights-releases`.
|
||||
2. To download the latest release, under "Assets", click `insights-VERSION.tar.gz`.
|
||||
![Installation asset](/assets/images/help/insights/installation-tgz.png)
|
||||
3. Unzip the directory.
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
# Use this variable wherever backticks are necessary: https://github.com/github/docs-internal/pull/1176#discussion-diff-19853931
|
||||
# Use this variable wherever backticks are necessary
|
||||
backticks: >-
|
||||
{% if currentVersion == "free-pro-team@latest" %}github.com{% else %}[hostname]{% endif %}
|
||||
|
||||
|
|
|
@ -67,7 +67,7 @@
|
|||
|
||||
<div class="d-block border-top border-gray-light mt-4 markdown-body">
|
||||
{% include helpfulness %}
|
||||
{% include contribution %}
|
||||
{% unless page.hidden %}{% include contribution %}{% endunless %}
|
||||
</div>
|
||||
</article>
|
||||
</main>
|
||||
|
|
|
@ -1,6 +1,10 @@
|
|||
<nav class="breadcrumbs f5" aria-label="Breadcrumb">
|
||||
{% for breadcrumb in breadcrumbs %}
|
||||
{% if page.hidden %}
|
||||
<span class="d-inline-block">{{breadcrumb[1].title}}</span>
|
||||
{% else %}
|
||||
<a title="{{ breadcrumb[0]}}: {{breadcrumb[1].title}}" href="/{{currentLanguage}}{{breadcrumb[1].href}}" class="d-inline-block {% if breadcrumb[1].href == currentPathWithoutLanguage %}text-gray-light{% endif %}">
|
||||
{{breadcrumb[1].title}}</a>
|
||||
{% endif %}
|
||||
{% endfor %}
|
||||
</nav>
|
||||
|
|
|
@ -1,5 +1,7 @@
|
|||
<div class="border-bottom border-gray-light no-print">
|
||||
{% unless error == '404' %}
|
||||
{% include header-notification %}
|
||||
{% endunless %}
|
||||
|
||||
<header class="container-xl px-3 px-md-6 pt-3 pb-2 position-relative d-flex flex-justify-between width-full {% if error == '404' %} d-md-none {% endif %}">
|
||||
|
||||
|
|
|
@ -9,7 +9,9 @@
|
|||
{% assign product = siteTree[currentLanguage][currentVersion].products[currentProduct] %}
|
||||
{% include all-products-link %}
|
||||
<li title="{{product.title}}" class="sidebar-product mb-2">
|
||||
{% unless page.hidden %}
|
||||
<a href="/{{currentLanguage}}{{product.href}}" class="pl-4 pr-5 pb-1 f4">{{ product.title }}</a>
|
||||
{% endunless %}
|
||||
</li>
|
||||
<ul class="sidebar-categories list-style-none">
|
||||
{% for category in product.categories %}
|
||||
|
|
|
@ -2,5 +2,4 @@
|
|||
export default function () {
|
||||
// TODO support "Run in Explorer" links in GraphQL guides
|
||||
// will need to handle query params separately from search queries
|
||||
// see JS block at https://github.com/github/internal-developer.github.com/blob/master/assets/javascripts/documentation.js#L230
|
||||
}
|
||||
|
|
|
@ -3,8 +3,10 @@ const loadPages = require('../pages')
|
|||
module.exports = async function findIndexablePages () {
|
||||
const allPages = await loadPages()
|
||||
const indexablePages = allPages
|
||||
// exclude pages that are part of WIP products
|
||||
.filter(page => !page.parentProduct || !page.parentProduct.wip)
|
||||
// exclude hidden pages
|
||||
.filter(page => !page.hidden)
|
||||
// exclude pages that are part of WIP or hidden products
|
||||
.filter(page => !page.parentProduct || !page.parentProduct.wip || page.parentProduct.hidden)
|
||||
// exclude index homepages
|
||||
.filter(page => !page.relativePath.endsWith('index.md'))
|
||||
|
||||
|
|
|
@ -14,14 +14,32 @@ const productsYml = yaml.load(fs.readFileSync(productsFile, 'utf8'))
|
|||
const sortedProductIds = productsYml.productsInOrder
|
||||
|
||||
const contentProductIds = fs.readdirSync(contentDir, { withFileTypes: true })
|
||||
.map(entry => {
|
||||
// `fs.readdir` provides file entries based on `fs.lstat`, which doesn't
|
||||
// resolve symbolic links to their target file/directory. We need to take
|
||||
// an extra step here to resolve the Early Access symlinked directory.
|
||||
const { name } = entry
|
||||
if (entry.isSymbolicLink()) {
|
||||
entry = fs.statSync(path.join(contentDir, entry.name))
|
||||
entry.name = name
|
||||
}
|
||||
return entry
|
||||
})
|
||||
.filter(entry => entry.isDirectory())
|
||||
.map(entry => entry.name)
|
||||
|
||||
assert(difference(sortedProductIds, contentProductIds).length === 0)
|
||||
assert(difference(contentProductIds, sortedProductIds).length === 0)
|
||||
// require the content/<subdir> list to match the list in data/products.yml,
|
||||
// with the exception of content/early-access, which lives in a separate private repo
|
||||
const publicContentProductIds = contentProductIds.filter(id => id !== 'early-access')
|
||||
assert(difference(sortedProductIds, publicContentProductIds).length === 0)
|
||||
assert(difference(publicContentProductIds, sortedProductIds).length === 0)
|
||||
|
||||
const internalProducts = {}
|
||||
|
||||
// add optional early access content dir to sorted products list if present
|
||||
const earlyAccessId = contentProductIds.find(id => id === 'early-access')
|
||||
if (earlyAccessId) sortedProductIds.push(earlyAccessId)
|
||||
|
||||
sortedProductIds.forEach(productId => {
|
||||
const relPath = productId
|
||||
const dir = slash(path.join('content', relPath))
|
||||
|
@ -36,7 +54,8 @@ sortedProductIds.forEach(productId => {
|
|||
href,
|
||||
dir,
|
||||
toc,
|
||||
wip: data.wip || false
|
||||
wip: data.wip || false,
|
||||
hidden: data.hidden || false
|
||||
}
|
||||
|
||||
internalProducts[productId].versions = applicableVersions
|
||||
|
|
|
@ -6,7 +6,7 @@ const s3ConfigPath = path.join(homedir, '.s3cfg')
|
|||
|
||||
// check for config files
|
||||
if (!(fs.existsSync(awsCredsPath) || fs.existsSync(s3ConfigPath))) {
|
||||
console.error('You need to set up awssume and s3cmd. Follow the steps at https://github.com/github/product-documentation/blob/master/doc-team-workflows/workflow-information-for-all-writers/setting-up-awssume-and-s3cmd.md')
|
||||
console.error('You need to set up awssume and s3cmd. Follow the steps at docs-content/doc-team-workflows/workflow-information-for-all-writers/setting-up-awssume-and-s3cmd.md')
|
||||
process.exit(1)
|
||||
}
|
||||
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
const versionSatisfiesRange = require('./version-satisfies-range')
|
||||
|
||||
// GHES Release Lifecycle Dates:
|
||||
// https://github.com/github/enterprise-releases/blob/master/docs/supported-versions.md#release-lifecycle-dates
|
||||
// enterprise-releases/docs/supported-versions.md#release-lifecycle-dates
|
||||
const dates = require('../lib/enterprise-dates.json')
|
||||
|
||||
const supported = [
|
||||
|
|
|
@ -1,33 +0,0 @@
|
|||
// This module loads an array of Early Access page paths from EARLY_ACCESS_HOSTNAME
|
||||
//
|
||||
// See also middleware/early-access-proxy.js which fetches Early Access docs from the obscured remote host
|
||||
|
||||
require('dotenv').config()
|
||||
|
||||
const got = require('got')
|
||||
const isURL = require('is-url')
|
||||
|
||||
module.exports = async function fetchEarlyAccessPaths () {
|
||||
let url
|
||||
if (process.env.NODE_ENV === 'test') return []
|
||||
|
||||
if (!isURL(process.env.EARLY_ACCESS_HOSTNAME)) {
|
||||
console.log('EARLY_ACCESS_HOSTNAME is not defined; skipping fetching early access paths')
|
||||
return []
|
||||
}
|
||||
|
||||
try {
|
||||
url = `${process.env.EARLY_ACCESS_HOSTNAME}/early-access-paths.json`
|
||||
const { body } = await got(url, {
|
||||
json: true,
|
||||
timeout: 3000,
|
||||
headers: {
|
||||
'early-access-shared-secret': process.env.EARLY_ACCESS_SHARED_SECRET
|
||||
}
|
||||
})
|
||||
return body
|
||||
} catch (err) {
|
||||
console.error('Unable to fetch early-access-paths.json from', url, err)
|
||||
return []
|
||||
}
|
||||
}
|
|
@ -38,11 +38,9 @@ const schema = {
|
|||
mapTopic: {
|
||||
type: 'boolean'
|
||||
},
|
||||
// The `hidden` frontmatter property is no longer used, but leaving it here
|
||||
// with an enum of `[false]` will help us catch any possible regressions.
|
||||
// allow hidden articles under `early-access`
|
||||
hidden: {
|
||||
type: 'boolean',
|
||||
enum: [false]
|
||||
type: 'boolean'
|
||||
},
|
||||
layout: {
|
||||
type: ['string', 'boolean'],
|
||||
|
@ -109,7 +107,7 @@ function frontmatter (markdown, opts = {}) {
|
|||
const defaults = {
|
||||
schema,
|
||||
validateKeyNames: true,
|
||||
validateKeyOrder: false // TODO: enable this once we've sorted all the keys. See https://github.com/github/docs-internal/issues/9658
|
||||
validateKeyOrder: false // TODO: enable this once we've sorted all the keys. See issue 9658
|
||||
}
|
||||
|
||||
return parse(markdown, Object.assign({}, defaults, opts))
|
||||
|
|
|
@ -33,7 +33,7 @@ class Page {
|
|||
this.fullPath = slash(path.join(this.basePath, this.relativePath))
|
||||
this.raw = fs.readFileSync(this.fullPath, 'utf8')
|
||||
|
||||
// TODO remove this when https://github.com/github/crowdin-support/issues/66 has been resolved
|
||||
// TODO remove this when crowdin-support issue 66 has been resolved
|
||||
if (this.languageCode !== 'en' && this.raw.includes(': verdadero')) {
|
||||
this.raw = this.raw.replace(': verdadero', ': true')
|
||||
}
|
||||
|
|
|
@ -25,7 +25,7 @@ module.exports = {
|
|||
searchPath: /\/search(?:\/)?(\?)/,
|
||||
ymd: /^\d{4}-\d{2}-\d{2}$/,
|
||||
hasLiquid: /[{{][{%]/,
|
||||
dataReference: /{% ?data\s(?:reusables|variables|ui)\..*?%}/gm,
|
||||
dataReference: /{% ?data\s(?:early-access\.)?(?:reusables|variables|ui)\..*?%}/gm,
|
||||
imagePath: /\/?assets\/images\/.*?\.(png|svg|gif|pdf|ico|jpg|jpeg)/gi,
|
||||
homepagePath: /^\/\w{2}$/, // /en, /ja, /cn
|
||||
multipleSlashes: /^\/{2,}/,
|
||||
|
|
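A minimal sketch of what the updated `dataReference` pattern matches; the reusable name below is hypothetical:

```javascript
// Illustrative only: the added `(?:early-access\.)?` group lets the pattern
// match data references namespaced under early-access as well as regular ones.
const dataReference = /{% ?data\s(?:early-access\.)?(?:reusables|variables|ui)\..*?%}/gm

const sample = '{% data early-access.reusables.example.intro %} and {% data variables.product.product_name %}'
console.log(sample.match(dataReference))
// => [ '{% data early-access.reusables.example.intro %}',
//      '{% data variables.product.product_name %}' ]
```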
|
@ -1,15 +1,14 @@
|
|||
const statsd = require('./statsd')
|
||||
const fetchEarlyAccessPaths = require('./fetch-early-access-paths')
|
||||
const loadPages = require('./pages')
|
||||
const loadRedirects = require('./redirects/precompile')
|
||||
const loadSiteData = require('./site-data')
|
||||
const loadSiteTree = require('./site-tree')
|
||||
|
||||
// For local caching
|
||||
let pages, site, redirects, siteTree, earlyAccessPaths
|
||||
let pages, site, redirects, siteTree
|
||||
|
||||
function isFullyWarmed () {
|
||||
return Boolean(pages && site && earlyAccessPaths && redirects && siteTree)
|
||||
return Boolean(pages && site && redirects && siteTree)
|
||||
}
|
||||
|
||||
function getWarmedCache () {
|
||||
|
@ -17,8 +16,7 @@ function getWarmedCache () {
|
|||
pages,
|
||||
site,
|
||||
redirects,
|
||||
siteTree,
|
||||
earlyAccessPaths
|
||||
siteTree
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -29,12 +27,11 @@ async function warmServer () {
|
|||
console.log('Priming context information...')
|
||||
}
|
||||
|
||||
if (!pages || !site || !earlyAccessPaths) {
|
||||
if (!pages || !site) {
|
||||
// Promise.all is used to load multiple things in parallel
|
||||
[pages, site, earlyAccessPaths] = await Promise.all([
|
||||
[pages, site] = await Promise.all([
|
||||
pages || loadPages(),
|
||||
site || loadSiteData(),
|
||||
earlyAccessPaths || fetchEarlyAccessPaths()
|
||||
site || loadSiteData()
|
||||
])
|
||||
}
|
||||
|
||||
|
|
|
@ -5,7 +5,7 @@ const got = require('got')
|
|||
|
||||
// This module handles requests for the CSS and JS assets for
|
||||
// deprecated GitHub Enterprise versions by routing them to static content in
|
||||
// https://github.com/github/help-docs-archived-enterprise-versions
|
||||
// help-docs-archived-enterprise-versions
|
||||
//
|
||||
// See also ./archived-enterprise-versions.js for non-CSS/JS paths
|
||||
|
||||
|
|
|
@ -8,8 +8,7 @@ const got = require('got')
|
|||
const findPage = require('../lib/find-page')
|
||||
|
||||
// This module handles requests for deprecated GitHub Enterprise versions
|
||||
// by routing them to static content in
|
||||
// https://github.com/github/help-docs-archived-enterprise-versions
|
||||
// by routing them to static content in help-docs-archived-enterprise-versions
|
||||
|
||||
module.exports = async (req, res, next) => {
|
||||
const { isArchived, requestedVersion } = isArchivedVersion(req)
|
||||
|
|
|
@ -2,7 +2,7 @@ const languages = require('../lib/languages')
|
|||
const enterpriseServerReleases = require('../lib/enterprise-server-releases')
|
||||
const allVersions = require('../lib/all-versions')
|
||||
const allProducts = require('../lib/all-products')
|
||||
const activeProducts = Object.values(allProducts).filter(product => !product.wip)
|
||||
const activeProducts = Object.values(allProducts).filter(product => !product.wip && !product.hidden)
|
||||
const { getVersionStringFromPath, getProductStringFromPath, getPathWithoutLanguage } = require('../lib/path-utils')
|
||||
const productNames = require('../lib/product-names')
|
||||
const warmServer = require('../lib/warm-server')
|
||||
|
@ -12,7 +12,7 @@ const featureFlags = Object.keys(require('../feature-flags'))
|
|||
// Note that additional middleware in middleware/index.js adds to this context object
|
||||
module.exports = async function contextualize (req, res, next) {
|
||||
// Ensure that we load some data only once on first request
|
||||
const { site, redirects, pages, siteTree, earlyAccessPaths } = await warmServer()
|
||||
const { site, redirects, pages, siteTree } = await warmServer()
|
||||
req.context = {}
|
||||
|
||||
// make feature flag environment variables accessible in layouts
|
||||
|
@ -33,7 +33,6 @@ module.exports = async function contextualize (req, res, next) {
|
|||
req.context.currentPath = req.path
|
||||
req.context.query = req.query
|
||||
req.context.languages = languages
|
||||
req.context.earlyAccessPaths = earlyAccessPaths
|
||||
req.context.productNames = productNames
|
||||
req.context.enterpriseServerReleases = enterpriseServerReleases
|
||||
req.context.enterpriseServerVersions = Object.keys(allVersions).filter(version => version.startsWith('enterprise-server@'))
|
||||
|
|
|
@ -0,0 +1,27 @@
|
|||
module.exports = function earlyAccessContext (req, res, next) {
|
||||
if (process.env.NODE_ENV === 'production') {
|
||||
return next(404)
|
||||
}
|
||||
|
||||
// Get a list of all hidden pages per version
|
||||
const earlyAccessPageLinks = req.context.pages
|
||||
.filter(page => page.hidden)
|
||||
// Do not include early access landing page
|
||||
.filter(page => page.relativePath !== 'early-access/index.md')
|
||||
// Create Markdown links
|
||||
.map(page => {
|
||||
return page.permalinks.map(permalink => `- [${permalink.title}](${permalink.href})`)
|
||||
})
|
||||
.flat()
|
||||
// Get links for the current version
|
||||
.filter(link => link.includes(req.context.currentVersion))
|
||||
.sort()
|
||||
|
||||
// Add to the rendering context
|
||||
// This is only used in the separate EA repo on local development
|
||||
req.context.earlyAccessPageLinks = earlyAccessPageLinks.length
|
||||
? earlyAccessPageLinks.join('\n')
|
||||
: '_None for this version!_'
|
||||
|
||||
return next()
|
||||
}
|
|
@ -1,33 +0,0 @@
|
|||
const { chain } = require('lodash')
|
||||
let paths
|
||||
|
||||
// This middleware finds all pages with `hidden: true` frontmatter
|
||||
// and responds with a JSON array of all requests paths (and redirects) that lead to those pages.
|
||||
|
||||
// Requesting this path from EARLY_ACCESS_HOSTNAME will respond with an array of Early Access paths.
|
||||
// Requesting this path from docs.github.com (production) will respond with an empty array (no Early Access paths).
|
||||
|
||||
module.exports = async (req, res, next) => {
|
||||
if (req.path !== '/early-access-paths.json') return next()
|
||||
|
||||
if (
|
||||
!req.headers ||
|
||||
!req.headers['early-access-shared-secret'] ||
|
||||
req.headers['early-access-shared-secret'] !== process.env.EARLY_ACCESS_SHARED_SECRET
|
||||
) {
|
||||
return res.status(401).send({ error: '401 Unauthorized' })
|
||||
}
|
||||
|
||||
paths = paths || chain(req.context.pages)
|
||||
.filter(page => page.hidden && page.languageCode === 'en')
|
||||
.map(page => {
|
||||
const permalinks = page.permalinks.map(permalink => permalink.href)
|
||||
const redirects = Object.keys(page.redirects)
|
||||
return permalinks.concat(redirects)
|
||||
})
|
||||
.flatten()
|
||||
.uniq()
|
||||
.value()
|
||||
|
||||
return res.json(paths)
|
||||
}
|
|
@ -1,25 +0,0 @@
|
|||
// This module serves requests to Early Access content from a hidden proxy host (EARLY_ACCESS_HOSTNAME).
|
||||
// Paths to this content are fetched in the warmServer module at startup.
|
||||
|
||||
const got = require('got')
|
||||
const isURL = require('is-url')
|
||||
|
||||
module.exports = async (req, res, next) => {
|
||||
if (
|
||||
isURL(process.env.EARLY_ACCESS_HOSTNAME) &&
|
||||
req.context &&
|
||||
req.context.earlyAccessPaths &&
|
||||
req.context.earlyAccessPaths.includes(req.path)
|
||||
) {
|
||||
try {
|
||||
const proxyURL = `${process.env.EARLY_ACCESS_HOSTNAME}${req.path}`
|
||||
const proxiedRes = await got(proxyURL)
|
||||
res.set('content-type', proxiedRes.headers['content-type'])
|
||||
res.send(proxiedRes.body)
|
||||
} catch (err) {
|
||||
next()
|
||||
}
|
||||
} else {
|
||||
next()
|
||||
}
|
||||
}
|
|
@ -62,8 +62,7 @@ module.exports = function (app) {
|
|||
app.use('/csrf', require('./csrf-route'))
|
||||
app.use(require('./archived-enterprise-versions'))
|
||||
app.use(require('./robots'))
|
||||
app.use(require('./early-access-paths'))
|
||||
app.use(require('./early-access-proxy'))
|
||||
app.use(/(\/.*)?\/early-access$/, require('./contextualizers/early-access-links'))
|
||||
app.use(require('./categories-for-support-team'))
|
||||
app.use(require('./loaderio-verification'))
|
||||
app.get('/_500', asyncMiddleware(require('./trigger-error')))
|
||||
|
|
|
@ -1,5 +1,4 @@
|
|||
const { get } = require('lodash')
|
||||
const env = require('lil-env-thing')
|
||||
const { liquid } = require('../lib/render-content')
|
||||
const patterns = require('../lib/patterns')
|
||||
const layouts = require('../lib/layouts')
|
||||
|
@ -66,7 +65,7 @@ module.exports = async function renderPage (req, res, next) {
|
|||
}
|
||||
|
||||
// `?json` query param for debugging request context
|
||||
if ('json' in req.query && !env.production) {
|
||||
if ('json' in req.query && process.env.NODE_ENV !== 'production') {
|
||||
if (req.query.json.length > 1) {
|
||||
// deep reference: ?json=page.permalinks
|
||||
return res.json(get(context, req.query.json))
|
||||
|
|
|
@ -13,9 +13,12 @@ Object.values(languages)
|
|||
|
||||
// Disallow crawling of WIP products
|
||||
Object.values(products)
|
||||
.filter(product => product.wip)
|
||||
.filter(product => product.wip || product.hidden)
|
||||
.forEach(product => {
|
||||
defaultResponse = defaultResponse.concat(`\nDisallow: /*${product.href}\nDisallow: /*/enterprise/*/user${product.href}`)
|
||||
defaultResponse = defaultResponse.concat(`\nDisallow: /*${product.href}`)
|
||||
product.versions.forEach(version => {
|
||||
defaultResponse = defaultResponse.concat(`\nDisallow: /*${version}/${product.id}`)
|
||||
})
|
||||
})
|
||||
|
||||
// Disallow crawling of Deprecated enterprise versions
|
||||
|
|
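A sketch of the `Disallow` entries the updated loop would append for a hypothetical hidden product; the id, href, and version values are placeholders:

```javascript
// Illustrative only: mirrors the loop above for a single hypothetical hidden product.
const product = { id: 'example', href: '/example', versions: ['free-pro-team@latest'] }

let defaultResponse = ''
defaultResponse = defaultResponse.concat(`\nDisallow: /*${product.href}`)
product.versions.forEach(version => {
  defaultResponse = defaultResponse.concat(`\nDisallow: /*${version}/${product.id}`)
})

console.log(defaultResponse)
// Disallow: /*/example
// Disallow: /*free-pro-team@latest/example
```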
|
@ -8,7 +8,7 @@ ownership:
|
|||
team: github/docs-engineering
|
||||
maintainer: zeke
|
||||
exec_sponsor: danaiszuul
|
||||
product_manager: jwargo
|
||||
product_manager: simpsoka
|
||||
mention: github/docs-engineering
|
||||
qos: critical
|
||||
dependencies: []
|
||||
|
|
|
@ -17412,11 +17412,6 @@
|
|||
"type-check": "~0.3.2"
|
||||
}
|
||||
},
|
||||
"lil-env-thing": {
|
||||
"version": "1.0.0",
|
||||
"resolved": "https://registry.npmjs.org/lil-env-thing/-/lil-env-thing-1.0.0.tgz",
|
||||
"integrity": "sha1-etQmBiG/M1rR6HE1d5s15vFmxns="
|
||||
},
|
||||
"lines-and-columns": {
|
||||
"version": "1.1.6",
|
||||
"resolved": "https://registry.npmjs.org/lines-and-columns/-/lines-and-columns-1.1.6.tgz",
|
||||
|
@ -17898,11 +17893,11 @@
|
|||
"integrity": "sha512-GSmOT2EbHrINBf9SR7CDELwlJ8AENk3Qn7OikK4nFYAu3Ote2+JYNVvkpAEQm3/TLNEJFD/xZJjzyxg3KBWOzw=="
|
||||
},
|
||||
"resolve": {
|
||||
"version": "1.18.1",
|
||||
"resolved": "https://registry.npmjs.org/resolve/-/resolve-1.18.1.tgz",
|
||||
"integrity": "sha512-lDfCPaMKfOJXjy0dPayzPdF1phampNWr3qFCjAu+rw/qbQmr5jWH5xN2hwh9QKfw9E5v4hwV7A+jrCmL8yjjqA==",
|
||||
"version": "1.19.0",
|
||||
"resolved": "https://registry.npmjs.org/resolve/-/resolve-1.19.0.tgz",
|
||||
"integrity": "sha512-rArEXAgsBG4UgRGcynxWIWKFvh/XZCcS8UJdHhwy91zwAvCZIbcs+vAbflgBnNjYMs/i/i+/Ux6IZhML1yPvxg==",
|
||||
"requires": {
|
||||
"is-core-module": "^2.0.0",
|
||||
"is-core-module": "^2.1.0",
|
||||
"path-parse": "^1.0.6"
|
||||
}
|
||||
},
|
||||
|
@ -17912,9 +17907,9 @@
|
|||
"integrity": "sha512-OrOb32TeeambH6UrhtShmF7CRDqhL6/5XpPNp2DuRH6+9QLw/orhp72j87v8Qa1ScDkvrrBNpZcDejAirJmfXQ=="
|
||||
},
|
||||
"type-fest": {
|
||||
"version": "0.18.0",
|
||||
"resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.18.0.tgz",
|
||||
"integrity": "sha512-fbDukFPnJBdn2eZ3RR+5mK2slHLFd6gYHY7jna1KWWy4Yr4XysHuCdXRzy+RiG/HwG4WJat00vdC2UHky5eKiQ=="
|
||||
"version": "0.18.1",
|
||||
"resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.18.1.tgz",
|
||||
"integrity": "sha512-OIAYXk8+ISY+qTOwkHtKqzAuxchoMiD9Udx+FSGQDuiRR+PJKJHc2NJAXlbhkGwTt/4/nKZxELY1w3ReWOL8mw=="
|
||||
},
|
||||
"yallist": {
|
||||
"version": "4.0.0",
|
||||
|
|
|
@ -61,7 +61,6 @@
|
|||
"is-url": "^1.2.4",
|
||||
"js-cookie": "^2.2.1",
|
||||
"js-yaml": "^3.14.0",
|
||||
"lil-env-thing": "^1.0.0",
|
||||
"linkinator": "^2.2.2",
|
||||
"liquid": "^5.1.0",
|
||||
"lodash": "^4.17.19",
|
||||
|
@ -169,7 +168,8 @@
|
|||
"check-deps": "node script/check-deps.js",
|
||||
"prevent-pushes-to-main": "node script/prevent-pushes-to-main.js",
|
||||
"pa11y-ci": "pa11y-ci",
|
||||
"pa11y-test": "start-server-and-test browser-test-server 4001 pa11y-ci"
|
||||
"pa11y-test": "start-server-and-test browser-test-server 4001 pa11y-ci",
|
||||
"heroku-postbuild": "node script/early-access/clone-for-build.js && npm run build"
|
||||
},
|
||||
"engines": {
|
||||
"node": "12 - 14"
|
||||
|
|
|
@ -73,7 +73,7 @@ This script is run automatically when you run the server locally. It checks whet
|
|||
|
||||
### [`check-s3-images.js`](check-s3-images.js)
|
||||
|
||||
Run this script in your branch to check whether any images referenced in content are not in an expected S3 bucket. You will need to authenticate to S3 via `awssume` to use this script. Instructions for the one-time setup are [here](https://github.com/github/product-documentation/blob/master/doc-team-workflows/workflow-information-for-all-writers/setting-up-awssume-and-s3cmd.md).
|
||||
Run this script in your branch to check whether any images referenced in content are not in an expected S3 bucket. You will need to authenticate to S3 via `awssume` to use this script.
|
||||
|
||||
---
|
||||
|
||||
|
@ -304,14 +304,14 @@ This script is run as a git precommit hook (installed by husky after npm install
|
|||
|
||||
### [`purge-fastly`](purge-fastly)
|
||||
|
||||
Run this script to manually purge the [Fastly cache](https://github.com/github/docs-internal#fastly-cdn). Note this script requires a `FASTLY_SERVICE_ID` and `FASTLY_TOKEN` in your `.env` file.
|
||||
Run this script to manually purge the Fastly cache. Note this script requires a `FASTLY_SERVICE_ID` and `FASTLY_TOKEN` in your `.env` file.
|
||||
|
||||
---
|
||||
|
||||
|
||||
### [`purge-fastly-by-url.js`](purge-fastly-by-url.js)
|
||||
|
||||
Run this script to manually purge the [Fastly cache](https://github.com/github/docs-internal#fastly-cdn) for all language variants of a single URL or for a batch of URLs in a file. This script does not require authentication.
|
||||
Run this script to manually purge the Fastly cache for all language variants of a single URL or for a batch of URLs in a file. This script does not require authentication.
|
||||
|
||||
---
|
||||
|
||||
|
@ -362,11 +362,11 @@ Examples:
|
|||
|
||||
reset a single translated file using a relative path: $ script/reset-translated-file.js translations/es-XL/content/actions/index.md
|
||||
|
||||
reset a single translated file using a full path: $ script/reset-translated-file.js /Users/z/git/github/docs-internal/translations/es-XL/content/actions/index.md
|
||||
reset a single translated file using a full path: $ script/reset-translated-file.js /Users/z/git/github/docs/translations/es-XL/content/actions/index.md
|
||||
|
||||
reset all language variants of a single English file (using a relative path): $ script/reset-translated-file.js content/actions/index.md $ script/reset-translated-file.js data/ui.yml
|
||||
|
||||
reset all language variants of a single English file (using a full path): $ script/reset-translated-file.js /Users/z/git/github/docs-internal/content/desktop/index.md $ script/reset-translated-file.js /Users/z/git/github/docs-internal/data/ui.yml
|
||||
reset all language variants of a single English file (using a full path): $ script/reset-translated-file.js /Users/z/git/github/docs/content/desktop/index.md $ script/reset-translated-file.js /Users/z/git/github/docs/data/ui.yml
|
||||
|
||||
---
|
||||
|
||||
|
@ -422,7 +422,7 @@ Starts the local development server with all of the available languages enabled.
|
|||
|
||||
### [`standardize-frontmatter-order.js`](standardize-frontmatter-order.js)
|
||||
|
||||
Run this script to standardize frontmatter fields in all content files, per the order decided in https://github.com/github/docs-internal/issues/9658#issuecomment-485536265.
|
||||
Run this script to standardize frontmatter fields in all content files.
|
||||
|
||||
---
|
||||
|
||||
|
@ -443,7 +443,7 @@ List all the TODOs in our JavaScript files and stylesheets.
|
|||
|
||||
### [`update-enterprise-dates.js`](update-enterprise-dates.js)
|
||||
|
||||
Run this script during Enterprise releases and deprecations. It uses the GitHub API to get dates from [`enterprise-releases`](https://github.com/github/enterprise-releases/blob/master/releases.json) and updates `lib/enterprise-dates.json`. The help site uses this JSON to display dates at the top of some Enterprise versions.
|
||||
Run this script during Enterprise releases and deprecations. It uses the GitHub API to get dates from `enterprise-releases` and updates `lib/enterprise-dates.json`. The help site uses this JSON to display dates at the top of some Enterprise versions.
|
||||
|
||||
This script requires that you have a GitHub Personal Access Token in a `.env` file. If you don't have a token, get one [here](https://github.com/settings/tokens/new?scopes=repo&description=docs-dev). If you don't have an `.env` file in your docs checkout, run this command in Terminal:
|
||||
|
||||
|
@ -465,7 +465,7 @@ This script crawls the script directory, hooks on special comment markers in eac
|
|||
|
||||
### [`update-s3cmd-config.js`](update-s3cmd-config.js)
|
||||
|
||||
This script is used by other scripts to update temporary AWS credentials and authenticate to S3. See docs at [Setting up awssume and S3cmd](https://github.com/github/product-documentation/tree/master/doc-team-workflows/workflow-information-for-all-writers/setting-up-awssume-and-s3cmd.md).
|
||||
This script is used by other scripts to update temporary AWS credentials and authenticate to S3.
|
||||
|
||||
---
|
||||
|
||||
|
|
|
@ -22,7 +22,7 @@ const versionsToCheck = Object.keys(allVersions)
|
|||
//
|
||||
// Run this script in your branch to check whether any images referenced in content are
|
||||
// not in an expected S3 bucket. You will need to authenticate to S3 via `awssume` to use this script.
|
||||
// Instructions for the one-time setup are [here](https://github.com/github/product-documentation/blob/master/doc-team-workflows/workflow-information-for-all-writers/setting-up-awssume-and-s3cmd.md).
|
||||
// Instructions for the one-time setup are at docs-content/doc-team-workflows/workflow-information-for-all-writers/setting-up-awssume-and-s3cmd.md
|
||||
//
|
||||
// [end-readme]
|
||||
|
||||
|
|
|
@ -0,0 +1,130 @@
|
|||
#!/usr/bin/env node
|
||||
|
||||
// [start-readme]
|
||||
//
|
||||
// This script is run as a postbuild script during staging and deployments on Heroku. It clones a branch
|
||||
// in the early-access repo that matches the current branch in the docs repo; if one can't be found, it
|
||||
// clones the `main` branch.
|
||||
//
|
||||
// [end-readme]
|
||||
|
||||
require('dotenv').config()
|
||||
const {
|
||||
DOCUBOT_REPO_PAT,
|
||||
HEROKU_PRODUCTION_APP,
|
||||
GIT_BRANCH // Set by the deployer with the name of the docs-internal branch
|
||||
} = process.env
|
||||
|
||||
// Exit if PAT is not found
|
||||
if (!DOCUBOT_REPO_PAT) {
|
||||
console.log('Skipping early access, not authorized')
|
||||
process.exit(0)
|
||||
}
|
||||
|
||||
const { execSync } = require('child_process')
|
||||
const rimraf = require('rimraf').sync
|
||||
const fs = require('fs')
|
||||
const path = require('path')
|
||||
const os = require('os')
|
||||
const EA_PRODUCTION_BRANCH = 'main'
|
||||
|
||||
// If a branch name is not provided in the environment, attempt to get
|
||||
// the local branch name; or default to 'main'
|
||||
let currentBranch = (GIT_BRANCH || '').replace(/^refs\/heads\//, '')
|
||||
if (!currentBranch) {
|
||||
try {
|
||||
currentBranch = execSync('git branch --show-current').toString()
|
||||
} catch (err) {
|
||||
// Ignore but log
|
||||
console.warn('Error checking for local branch:', err.message)
|
||||
}
|
||||
}
|
||||
if (!currentBranch) {
|
||||
currentBranch = EA_PRODUCTION_BRANCH
|
||||
}
|
||||
|
||||
// Early Access details
|
||||
const earlyAccessOwner = 'github'
|
||||
const earlyAccessRepoName = 'docs-early-access'
|
||||
const earlyAccessDirName = 'early-access'
|
||||
const earlyAccessFullRepo = `https://${DOCUBOT_REPO_PAT}@github.com/${earlyAccessOwner}/${earlyAccessRepoName}`
|
||||
|
||||
const earlyAccessCloningParentDir = os.tmpdir()
|
||||
const earlyAccessCloningDir = path.join(earlyAccessCloningParentDir, earlyAccessRepoName)
|
||||
|
||||
const destinationDirNames = ['content', 'data', 'assets/images']
|
||||
const destinationDirsMap = destinationDirNames
|
||||
.reduce(
|
||||
(map, dirName) => {
|
||||
map[dirName] = path.join(process.cwd(), dirName, earlyAccessDirName)
|
||||
return map
|
||||
},
|
||||
{}
|
||||
)
|
||||
|
||||
// Production vs. staging environment
|
||||
// TODO test that this works as expected
|
||||
const environment = HEROKU_PRODUCTION_APP ? 'production' : 'staging'
|
||||
|
||||
// Early access branch to clone
|
||||
let earlyAccessBranch = HEROKU_PRODUCTION_APP ? EA_PRODUCTION_BRANCH : currentBranch
|
||||
|
||||
// Confirm that the branch exists in the remote
|
||||
let branchExists = execSync(`git ls-remote --heads ${earlyAccessFullRepo} ${earlyAccessBranch}`).toString()
|
||||
|
||||
// If the branch did NOT exist, try checking for the default branch instead
|
||||
if (!branchExists && earlyAccessBranch !== EA_PRODUCTION_BRANCH) {
|
||||
console.warn(`The branch '${earlyAccessBranch}' was not found in ${earlyAccessOwner}/${earlyAccessRepoName}!`)
|
||||
console.warn(`Attempting the default branch ${EA_PRODUCTION_BRANCH} instead...`)
|
||||
|
||||
earlyAccessBranch = EA_PRODUCTION_BRANCH
|
||||
branchExists = execSync(`git ls-remote --heads ${earlyAccessFullRepo} ${earlyAccessBranch}`).toString()
|
||||
}
|
||||
|
||||
// If no suitable branch was found, bail out now
|
||||
if (!branchExists) {
|
||||
console.error(`The branch '${earlyAccessBranch}' was not found in ${earlyAccessOwner}/${earlyAccessRepoName}!`)
|
||||
console.error('Exiting!')
|
||||
process.exit(1)
|
||||
}
|
||||
|
||||
// Remove any previously cloned copies of the early access repo
|
||||
rimraf(earlyAccessCloningDir)
|
||||
|
||||
// Clone the repo
|
||||
console.log(`Setting up: ${earlyAccessCloningDir}`)
|
||||
execSync(
|
||||
`git clone --single-branch --branch ${earlyAccessBranch} ${earlyAccessFullRepo} ${earlyAccessRepoName}`,
|
||||
{
|
||||
cwd: earlyAccessCloningParentDir
|
||||
}
|
||||
)
|
||||
console.log(`Using early-access ${environment} branch: '${earlyAccessBranch}'`)
|
||||
|
||||
// Remove all existing early access directories from this repo
|
||||
destinationDirNames.forEach(key => rimraf(destinationDirsMap[key]))
|
||||
|
||||
// Move the latest early access source directories into this repo
|
||||
destinationDirNames.forEach((dirName) => {
|
||||
const sourceDir = path.join(earlyAccessCloningDir, dirName)
|
||||
const destDir = destinationDirsMap[dirName]
|
||||
|
||||
// If the source directory doesn't exist, skip it
|
||||
if (!fs.existsSync(sourceDir)) {
|
||||
console.warn(`Early access directory '${dirName}' does not exist. Skipping...`)
|
||||
return
|
||||
}
|
||||
|
||||
// Move the directory from the cloned source to the destination
|
||||
fs.renameSync(sourceDir, destDir)
|
||||
|
||||
// Confirm the newly moved directory exists
|
||||
if (fs.existsSync(destDir)) {
|
||||
console.log(`Successfully moved early access directory '${dirName}' into this repo`)
|
||||
} else {
|
||||
throw new Error(`Failed to move early access directory '${dirName}'!`)
|
||||
}
|
||||
})
|
||||
|
||||
// Remove the source content again for good hygiene
|
||||
rimraf(earlyAccessCloningDir)
|
|
@ -0,0 +1,27 @@
|
|||
#!/usr/bin/env bash
|
||||
|
||||
# [start-readme]
|
||||
#
|
||||
# This script is run on a writer's machine to begin developing Early Access content locally.
|
||||
#
|
||||
# [end-readme]
|
||||
|
||||
# Go up a directory
|
||||
pushd .. > /dev/null
|
||||
|
||||
if [ -d "docs-early-access" ]; then
|
||||
echo "A 'docs-early-access' directory already exists! Try script/early-access/feature-branch.js."
|
||||
popd > /dev/null
|
||||
exit 0
|
||||
fi
|
||||
|
||||
# Clone the repo
|
||||
git clone git@github.com:github/docs-early-access.git
|
||||
|
||||
# Go back to the previous working directory
|
||||
popd > /dev/null
|
||||
|
||||
# Symlink the local docs-early-access repo into this repo
|
||||
node script/early-access/symlink-from-local-repo.js -p ../docs-early-access
|
||||
|
||||
echo -e '\nDone!'
|
|
@ -0,0 +1,106 @@
|
|||
#!/usr/bin/env node
|
||||
|
||||
// [start-readme]
|
||||
//
|
||||
// This script is run on a writer's machine while developing Early Access content locally.
|
||||
// You must pass the script the location of your local copy of
|
||||
// the `github/docs-early-access` git repo as the first argument.
|
||||
//
|
||||
// [end-readme]
|
||||
|
||||
const rimraf = require('rimraf').sync
|
||||
const fs = require('fs')
|
||||
const path = require('path')
|
||||
const program = require('commander')
|
||||
|
||||
// Early Access details
|
||||
const earlyAccessRepo = 'docs-early-access'
|
||||
const earlyAccessDirName = 'early-access'
|
||||
const earlyAccessRepoUrl = `https://github.com/github/${earlyAccessRepo}`
|
||||
|
||||
program
|
||||
.description(`Create or destroy symlinks to your local "${earlyAccessRepo}" repository.`)
|
||||
.option('-p, --path-to-early-access-repo <PATH>', `path to a local checkout of ${earlyAccessRepoUrl}`)
|
||||
.option('-u, --unlink', 'remove the symlinks')
|
||||
.parse(process.argv)
|
||||
|
||||
const { pathToEarlyAccessRepo, unlink } = program
|
||||
|
||||
if (!pathToEarlyAccessRepo && !unlink) {
|
||||
throw new Error('Must provide either `--path-to-early-access-repo <PATH>` or `--unlink`')
|
||||
}
|
||||
|
||||
let earlyAccessLocalRepoDir
|
||||
|
||||
// If creating symlinks, run some extra validation
|
||||
if (!unlink && pathToEarlyAccessRepo) {
|
||||
earlyAccessLocalRepoDir = path.resolve(process.cwd(), pathToEarlyAccessRepo)
|
||||
|
||||
let dirStats
|
||||
try {
|
||||
dirStats = fs.statSync(earlyAccessLocalRepoDir)
|
||||
} catch (err) {
|
||||
dirStats = null
|
||||
}
|
||||
|
||||
if (!dirStats) {
|
||||
throw new Error(`The local "${earlyAccessRepo}" repo directory does not exist: ${earlyAccessLocalRepoDir}`)
|
||||
}
|
||||
if (dirStats && !dirStats.isDirectory()) {
|
||||
throw new Error(`A non-directory entry exists at the local "${earlyAccessRepo}" repo directory location: ${earlyAccessLocalRepoDir}`)
|
||||
}
|
||||
}
|
||||
|
||||
const destinationDirNames = ['content', 'data', 'assets/images']
|
||||
const destinationDirsMap = destinationDirNames
|
||||
.reduce(
|
||||
(map, dirName) => {
|
||||
map[dirName] = path.join(process.cwd(), dirName, earlyAccessDirName)
|
||||
return map
|
||||
},
|
||||
{}
|
||||
)
|
||||
|
||||
// Remove all existing early access directories from this repo
|
||||
destinationDirNames.forEach((dirName) => {
|
||||
const destDir = destinationDirsMap[dirName]
|
||||
rimraf(destDir)
|
||||
console.log(`- Removed symlink for early access directory '${dirName}' from this repo`)
|
||||
})
|
||||
|
||||
// If removing symlinks, just stop here!
|
||||
if (unlink) {
|
||||
process.exit(0)
|
||||
}
|
||||
|
||||
//
|
||||
// Otherwise, keep going...
|
||||
//
|
||||
|
||||
// Move the latest early access source directories into this repo
|
||||
destinationDirNames.forEach((dirName) => {
|
||||
const sourceDir = path.join(earlyAccessLocalRepoDir, dirName)
|
||||
const destDir = destinationDirsMap[dirName]
|
||||
|
||||
// If the source directory doesn't exist, skip it
|
||||
if (!fs.existsSync(sourceDir)) {
|
||||
console.warn(`Early access directory '${dirName}' does not exist. Skipping...`)
|
||||
return
|
||||
}
|
||||
|
||||
// Create a symbolic link to the directory
|
||||
fs.symlinkSync(sourceDir, destDir, 'junction')
|
||||
|
||||
// Confirm the newly created symlink exists
|
||||
if (!fs.existsSync(destDir)) {
|
||||
throw new Error(`Failed to symlink early access directory '${dirName}'!`)
|
||||
}
|
||||
if (!fs.lstatSync(destDir).isSymbolicLink()) {
|
||||
throw new Error(`The early access directory '${dirName}' entry is not a symbolic link!`)
|
||||
}
|
||||
if (!fs.statSync(destDir).isDirectory()) {
|
||||
throw new Error(`The early access directory '${dirName}' entry's symbolic link does not refer to a directory!`)
|
||||
}
|
||||
|
||||
console.log(`+ Added symlink for early access directory '${dirName}' into this repo`)
|
||||
})
|
|
@ -0,0 +1,159 @@
|
|||
#!/usr/bin/env node
|
||||
|
||||
// [start-readme]
|
||||
//
|
||||
// This script is run on a writer's machine while developing Early Access content locally. It
|
||||
// updates the data and image paths to either include `early-access` or remove it.
|
||||
//
|
||||
// [end-readme]
|
||||
|
||||
const fs = require('fs')
|
||||
const path = require('path')
|
||||
const program = require('commander')
|
||||
const walk = require('walk-sync')
|
||||
const { escapeRegExp, last } = require('lodash')
|
||||
const yaml = require('js-yaml')
|
||||
const patterns = require('../../lib/patterns')
|
||||
const earlyAccessContent = path.posix.join(process.cwd(), 'content/early-access')
|
||||
const earlyAccessData = path.posix.join(process.cwd(), 'data/early-access')
|
||||
const earlyAccessImages = path.posix.join(process.cwd(), 'assets/images/early-access')
|
||||
|
||||
program
|
||||
.description('Update data and image paths.')
|
||||
.option('-p, --path-to-early-access-content-file <PATH>', 'Path to a specific content file. Defaults to all Early Access content files if not provided.')
|
||||
.option('-a, --add', 'Add "early-access" to data and image paths.')
|
||||
.option('-r, --remove', 'Remove "early-access" from data and image paths.')
|
||||
.parse(process.argv)
|
||||
|
||||
if (!(program.add || program.remove)) {
|
||||
console.error('Error! Must specify either `--add` or `--remove`.')
|
||||
process.exit(1)
|
||||
}
|
||||
|
||||
let earlyAccessContentAndDataFiles
|
||||
if (program.pathToEarlyAccessContentFile) {
|
||||
earlyAccessContentAndDataFiles = path.posix.join(process.cwd(), program.pathToEarlyAccessContentFile)
|
||||
|
||||
if (!fs.existsSync(earlyAccessContentAndDataFiles)) {
|
||||
console.error(`Error! ${program.pathToEarlyAccessContentFile} can't be found. Make sure the path starts with 'content/early-access'.`)
|
||||
process.exit(1)
|
||||
}
|
||||
earlyAccessContentAndDataFiles = [earlyAccessContentAndDataFiles]
|
||||
} else {
|
||||
// Gather the EA content and data files
|
||||
earlyAccessContentAndDataFiles = walk(earlyAccessContent, { includeBasePath: true, directories: false })
|
||||
.concat(walk(earlyAccessData, { includeBasePath: true, directories: false }))
|
||||
}
|
||||
|
||||
// Update the EA content and data files
|
||||
earlyAccessContentAndDataFiles
|
||||
.forEach(file => {
|
||||
const oldContents = fs.readFileSync(file, 'utf8')
|
||||
|
||||
// Get all the data references in each file that exist in data/early-access
|
||||
const dataRefs = (oldContents.match(patterns.dataReference) || [])
|
||||
.filter(dataRef => dataRef.includes('variables') ? checkVariable(dataRef) : checkReusable(dataRef))
|
||||
|
||||
// Get all the image references in each file that exist in assets/images/early-access
|
||||
const imageRefs = (oldContents.match(patterns.imagePath) || [])
|
||||
.filter(imageRef => checkImage(imageRef))
|
||||
|
||||
const replacements = {}
|
||||
|
||||
if (program.add) {
|
||||
dataRefs
|
||||
// Since we're adding early-access to the path, filter for those that do not already include it
|
||||
.filter(dataRef => !dataRef.includes('data early-access.'))
|
||||
// Add to the { oldRef: newRef } replacements object
|
||||
.forEach(dataRef => {
|
||||
replacements[dataRef] = dataRef.replace(/({% data )(.*)/, '$1early-access.$2')
|
||||
})
|
||||
|
||||
imageRefs
|
||||
// Since we're adding early-access to the path, filter for those that do not already include it
|
||||
.filter(imageRef => !imageRef.split('/').includes('early-access'))
|
||||
// Add to the { oldRef: newRef } replacements object
|
||||
.forEach(imageRef => {
|
||||
replacements[imageRef] = imageRef.replace('/assets/images/', '/assets/images/early-access/')
|
||||
})
|
||||
}
|
||||
|
||||
if (program.remove) {
|
||||
dataRefs
|
||||
// Since we're removing early-access from the path, filter for those that include it
|
||||
.filter(dataRef => dataRef.includes('{% data early-access.'))
|
||||
// Add to the { oldRef: newRef } replacements object
|
||||
.forEach(dataRef => {
|
||||
replacements[dataRef] = dataRef.replace('early-access.', '')
|
||||
})
|
||||
|
||||
imageRefs
|
||||
// Since we're removing early-access from the path, filter for those that include it
|
||||
.filter(imageRef => imageRef.split('/').includes('early-access'))
|
||||
// Add to the { oldRef: newRef } replacements object
|
||||
.forEach(imageRef => {
|
||||
replacements[imageRef] = imageRef.replace('/assets/images/early-access/', '/assets/images/')
|
||||
})
|
||||
}
|
||||
|
||||
// Return early if nothing to replace
|
||||
if (!Object.keys(replacements).length) {
|
||||
return
|
||||
}
|
||||
|
||||
// Make the replacement in the content
|
||||
let newContents = oldContents
|
||||
Object.entries(replacements).forEach(([oldRef, newRef]) => {
|
||||
newContents = newContents.replace(new RegExp(escapeRegExp(oldRef), 'g'), newRef)
|
||||
})
|
||||
|
||||
// Write the updated content
|
||||
fs.writeFileSync(file, newContents)
|
||||
})
|
||||
|
||||
console.log('Done! Run "git status" in your docs-early-access checkout to see the changes.\n')
|
||||
|
||||
function checkVariable (dataRef) {
|
||||
// Get the data filepath from the data reference,
|
||||
// where the data reference looks like: {% data variables.foo.bar %}
|
||||
// and the data filepath looks like: data/variables/foo.yml with key of 'bar'.
|
||||
const variablePathArray = dataRef.match(/{% data (.*?) %}/)[1].split('.')
|
||||
// If early access is part of the path, remove it (since the path below already includes it)
|
||||
.filter(n => n !== 'early-access')
|
||||
|
||||
// Given a string `variables.foo.bar` split into an array, we want the last segment 'bar', which is the variable key.
|
||||
// Then pop 'bar' off the array because it's not really part of the filepath.
|
||||
// The filepath we want is `variables/foo.yml`.
|
||||
const variableKey = last(variablePathArray); variablePathArray.pop()
|
||||
const variablePath = path.posix.join(earlyAccessData, `${variablePathArray.join('/')}.yml`)
|
||||
|
||||
// If the variable file doesn't exist in data/early-access, exclude it
|
||||
if (!fs.existsSync(variablePath)) return false
|
||||
|
||||
// If the variable file exists but doesn't have the referenced key, exclude it
|
||||
const variableFileContent = yaml.safeLoad(fs.readFileSync(variablePath, 'utf8'))
|
||||
return variableFileContent[variableKey]
|
||||
}
|
||||
|
||||
function checkReusable (dataRef) {
|
||||
// Get the data filepath from the data reference,
|
||||
// where the data reference looks like: {% data reusables.foo.bar %}
|
||||
// and the data filepath looks like: data/reusables/foo/bar.md.
|
||||
const reusablePath = dataRef.match(/{% data (.*?) %}/)[1].split('.')
|
||||
// If early access is part of the path, remove it (since the path below already includes it)
|
||||
.filter(n => n !== 'early-access')
|
||||
.join('/')
|
||||
|
||||
// If the reusable file doesn't exist in data/early-access, exclude it
|
||||
return fs.existsSync(`${path.posix.join(earlyAccessData, reusablePath)}.md`)
|
||||
}
|
||||
|
||||
function checkImage (imageRef) {
|
||||
const imagePath = imageRef
|
||||
.replace('/assets/images/', '')
|
||||
// If early access is part of the path, remove it (since the path below already includes it)
|
||||
.replace('early-access', '')
|
||||
|
||||
// If the image file doesn't exist in assets/images/early-access, exclude it
|
||||
return fs.existsSync(path.posix.join(earlyAccessImages, imagePath))
|
||||
}
|
|
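As a rough sketch of the replacement logic above (not part of this commit; the variable name and image path are hypothetical), adding 'early-access' to a data reference and an image path works like this:

// Hypothetical example of the { oldRef: newRef } replacement flow.
const { escapeRegExp } = require('lodash')

const before = 'See {% data variables.product.prodname %} and ![img](/assets/images/github/example.png)'

const replacements = {
  '{% data variables.product.prodname %}':
    '{% data variables.product.prodname %}'.replace(/({% data )(.*)/, '$1early-access.$2'),
  '/assets/images/github/example.png':
    '/assets/images/github/example.png'.replace('/assets/images/', '/assets/images/early-access/')
}

let after = before
Object.entries(replacements).forEach(([oldRef, newRef]) => {
  after = after.replace(new RegExp(escapeRegExp(oldRef), 'g'), newRef)
})

console.log(after)
// See {% data early-access.variables.product.prodname %} and ![img](/assets/images/early-access/github/example.png)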
@ -180,7 +180,6 @@ async function createRedirectPages (permalinks, pages, finalDirectory) {
|
|||
console.log('done creating redirect files!\n')
|
||||
}
|
||||
|
||||
// prior art: https://github.com/github/help-docs-archived-enterprise-versions/blob/master/2.12/user/leave-a-repo/index.html
|
||||
// redirect html files already exist in <=2.12 because these versions were deprecated on the old static site
|
||||
function getRedirectHtml (newPath) {
|
||||
return `<!DOCTYPE html>
|
||||
|
|
|
@ -9,4 +9,4 @@ These scripts update the [static JSON files](../../lib/graphql/static) used to
|
|||
render GraphQL docs. See the [`lib/graphql/README`](../../lib/graphql/README.md)
|
||||
for more info.
|
||||
|
||||
**Note**: The changelog script pulls content from [the internal-developer repo](https://github.com/github/internal-developer.github.com/tree/master/content/v4/changelog). It relies on [graphql-docs automation](https://github.com/github/graphql-docs/blob/master/lib/graphql_docs/update_internal_developer/change_log.rb) running daily to update the changelog files in internal-developer.
|
||||
**Note**: The changelog script pulls content from the internal-developer repo. It relies on graphql-docs automation running daily to update the changelog files in internal-developer.
|
||||
|
|
|
@ -127,7 +127,7 @@ function cleanPreviewTitle (title) {
|
|||
|
||||
/**
|
||||
* Turn the given title into an HTML-ready anchor.
|
||||
* (ported from https://github.com/github/graphql-docs/blob/master/lib/graphql_docs/update_internal_developer/change_log.rb#L281)
|
||||
* (ported from graphql-docs/lib/graphql_docs/update_internal_developer/change_log.rb#L281)
|
||||
* @param {string} [previewTitle]
|
||||
* @return {string}
|
||||
*/
|
||||
|
@ -155,7 +155,7 @@ function cleanMessagesFromChanges (changes) {
|
|||
* Split `changesToReport` into two parts,
|
||||
* one for changes in the main schema,
|
||||
* and another for changes that are under preview.
|
||||
* (Ported from https://github.com/github/graphql-docs/blob/7e6a5ccbf13cc7d875fee65527b25bc49e886b41/lib/graphql_docs/update_internal_developer/change_log.rb#L230)
|
||||
* (Ported from /graphql-docs/lib/graphql_docs/update_internal_developer/change_log.rb#L230)
|
||||
* @param {Array<object>} changesToReport
|
||||
* @param {object} previews
|
||||
* @return {object}
|
||||
|
@ -203,7 +203,7 @@ function segmentPreviewChanges (changesToReport, previews) {
|
|||
// Deprecations are covered by "upcoming changes."
|
||||
// By listing the changes explicitly here, we can make sure that,
|
||||
// if the library changes, we don't miss publishing anything that we mean to.
|
||||
// This was originally ported from https://github.com/github/graphql-docs/blob/7e6a5ccbf13cc7d875fee65527b25bc49e886b41/lib/graphql_docs/update_internal_developer/change_log.rb#L35-L103
|
||||
// This was originally ported from graphql-docs/lib/graphql_docs/update_internal_developer/change_log.rb#L35-L103
|
||||
const CHANGES_TO_REPORT = [
|
||||
ChangeType.FieldArgumentDefaultChanged,
|
||||
ChangeType.FieldArgumentTypeChanged,
|
||||
|
|
|
@ -8,7 +8,7 @@ if ARGV.empty?
|
|||
exit 1
|
||||
end
|
||||
|
||||
# borrowed from https://github.com/github/graphql-docs/blob/master/lib/graphql_docs/update_internal_developer/idl.rb
|
||||
# borrowed from graphql-docs/lib/graphql_docs/update_internal_developer/idl.rb
|
||||
class Printer < GraphQL::Language::DocumentFromSchemaDefinition
|
||||
def build_object_type_node(object_type)
|
||||
apply_directives_to_node(object_type, super)
|
||||
|
|
|
@ -2,7 +2,7 @@
|
|||
|
||||
# [start-readme]
|
||||
#
|
||||
# Run this script to manually purge the [Fastly cache](https://github.com/github/docs-internal#fastly-cdn).
|
||||
# Run this script to manually purge the Fastly cache.
|
||||
# Note this script requires a `FASTLY_SERVICE_ID` and `FASTLY_TOKEN` in your `.env` file.
|
||||
#
|
||||
# [end-readme]
|
||||
|
|
|
@ -9,7 +9,7 @@ const { getPathWithoutLanguage } = require('../lib/path-utils')
|
|||
|
||||
// [start-readme]
|
||||
//
|
||||
// Run this script to manually purge the [Fastly cache](https://github.com/github/docs-internal#fastly-cdn)
|
||||
// Run this script to manually purge the Fastly cache
|
||||
// for all language variants of a single URL or for a batch of URLs in a file. This script does
|
||||
// not require authentication.
|
||||
//
|
||||
|
|
|
@ -14,7 +14,15 @@ const contentFiles = walk(contentDir, { includeBasePath: true })
|
|||
// [start-readme]
|
||||
//
|
||||
// Run this script to standardize frontmatter fields in all content files,
|
||||
// per the order decided in https://github.com/github/docs-internal/issues/9658#issuecomment-485536265.
|
||||
// per the order:
|
||||
// - title
|
||||
// - intro
|
||||
// - product callout
|
||||
// - productVersion
|
||||
// - map topic status
|
||||
// - hidden status
|
||||
// - layout
|
||||
// - redirect
|
||||
//
|
||||
// [end-readme]
|
||||
|
||||
|
|
|
@ -9,7 +9,7 @@ const jsonFile = require(filename)
|
|||
// [start-readme]
|
||||
//
|
||||
// Run this script during Enterprise releases and deprecations.
|
||||
// It uses the GitHub API to get dates from [`enterprise-releases`](https://github.com/github/enterprise-releases/blob/master/releases.json) and updates `lib/enterprise-dates.json`.
|
||||
// It uses the GitHub API to get dates from enterprise-releases and updates `lib/enterprise-dates.json`.
|
||||
// The help site uses this JSON to display dates at the top of some Enterprise versions.
|
||||
//
|
||||
// This script requires that you have a GitHub Personal Access Token in a `.env` file.
|
||||
|
@ -26,8 +26,7 @@ const jsonFile = require(filename)
|
|||
|
||||
main()
|
||||
|
||||
// GHE Release Lifecycle Dates:
|
||||
// https://github.com/github/enterprise-releases/blob/master/releases.json
|
||||
// GHE Release Lifecycle Dates
|
||||
async function main () {
|
||||
let raw
|
||||
try {
|
||||
|
@ -38,7 +37,7 @@ async function main () {
|
|||
}
|
||||
const json = prepareData(raw)
|
||||
if (json === prettify(jsonFile)) {
|
||||
console.log('This repo is already in sync with https://github.com/github/enterprise-releases/blob/master/releases.json!')
|
||||
console.log('This repo is already in sync with enterprise-releases!')
|
||||
} else {
|
||||
fs.writeFileSync(filename, json, 'utf8')
|
||||
console.log(`${filename} has been updated!`)
|
||||
|
|
|
@ -5,7 +5,6 @@ const authenticateToAWS = require('../lib/authenticate-to-aws.js')
|
|||
// [start-readme]
|
||||
//
|
||||
// This script is used by other scripts to update temporary AWS credentials and authenticate to S3.
|
||||
// See docs at [Setting up awssume and S3cmd](https://github.com/github/product-documentation/tree/master/doc-team-workflows/workflow-information-for-all-writers/setting-up-awssume-and-s3cmd.md).
|
||||
//
|
||||
// [end-readme]
|
||||
|
||||
|
|
|
@ -25,7 +25,7 @@ describe('category pages', () => {
|
|||
|
||||
const walkOptions = {
|
||||
globs: ['*/index.md', 'enterprise/*/index.md'],
|
||||
ignore: ['{rest,graphql,developers}/**', 'enterprise/index.md', '**/articles/**'],
|
||||
ignore: ['{rest,graphql,developers}/**', 'enterprise/index.md', '**/articles/**', 'early-access/**'],
|
||||
directories: false,
|
||||
includeBasePath: true
|
||||
}
|
||||
|
|
|
@ -1,9 +1,46 @@
|
|||
const config = require('../../lib/crowdin-config').read()
|
||||
const loadPages = require('../../lib/pages')
|
||||
const ignoredPagePaths = config.files[0].ignore
|
||||
const ignoredDataPaths = config.files[2].ignore
|
||||
|
||||
describe('crowdin.yml config file', () => {
|
||||
let pages
|
||||
beforeAll(async (done) => {
|
||||
pages = await loadPages()
|
||||
done()
|
||||
})
|
||||
|
||||
test('has expected file structure', async () => {
|
||||
expect(config.files.length).toBe(3)
|
||||
expect(config.files[0].source).toBe('/content/**/*.md')
|
||||
expect(config.files[0].ignore).toContain('/content/README.md')
|
||||
})
|
||||
|
||||
test('ignores all Early Access paths', async () => {
|
||||
expect(ignoredPagePaths).toContain('/content/early-access')
|
||||
expect(ignoredDataPaths).toContain('/data/early-access')
|
||||
})
|
||||
|
||||
test('ignores all hidden pages', async () => {
|
||||
const hiddenPages = pages
|
||||
.filter(page => page.hidden && page.languageCode === 'en')
|
||||
.map(page => `/content/${page.relativePath}`)
|
||||
const overlooked = hiddenPages.filter(page => !isIgnored(page, ignoredPagePaths))
|
||||
const message = `Found some hidden pages that are not yet excluded from localization.
|
||||
Please copy and paste the lines below into the \`ignore\` section of /crowdin.yml: \n\n"${overlooked.join('",\n"')}"`
|
||||
|
||||
// This may not be true anymore given the separation of Early Access docs
|
||||
// expect(hiddenPages.length).toBeGreaterThan(0)
|
||||
expect(ignoredPagePaths.length).toBeGreaterThan(0)
|
||||
expect(overlooked, message).toHaveLength(0)
|
||||
})
|
||||
})
|
||||
|
||||
// file is ignored if its exact filename is in the list,
|
||||
// or if it's within an ignored directory
|
||||
function isIgnored (filename, ignoredPagePaths) {
|
||||
return ignoredPagePaths.some(ignoredPath => {
|
||||
const isDirectory = !ignoredPath.endsWith('.md')
|
||||
return ignoredPath === filename || (isDirectory && filename.startsWith(ignoredPath))
|
||||
})
|
||||
}
|
||||
|
|
|
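A quick sketch of how the isIgnored helper above behaves (not part of this commit; the paths are hypothetical):

// Hypothetical paths; mirrors the isIgnored logic defined above.
function isIgnored (filename, ignoredPagePaths) {
  return ignoredPagePaths.some(ignoredPath => {
    const isDirectory = !ignoredPath.endsWith('.md')
    return ignoredPath === filename || (isDirectory && filename.startsWith(ignoredPath))
  })
}

const ignored = ['/content/early-access', '/content/README.md']
console.log(isIgnored('/content/early-access/secret-feature.md', ignored)) // true (inside an ignored directory)
console.log(isIgnored('/content/README.md', ignored))                      // true (exact filename match)
console.log(isIgnored('/content/actions/index.md', ignored))               // false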
@ -4,6 +4,7 @@ const fs = require('fs')
|
|||
const walk = require('walk-sync')
|
||||
const { zip } = require('lodash')
|
||||
const yaml = require('js-yaml')
|
||||
const frontmatter = require('../../lib/frontmatter')
|
||||
const languages = require('../../lib/languages')
|
||||
const { tags } = require('../../lib/liquid-tags/extended-markdown')
|
||||
const ghesReleaseNotesSchema = require('../../lib/release-notes-schema')
|
||||
|
@ -66,6 +67,18 @@ const languageLinkRegex = new RegExp(`(?=^|[^\\]]\\s*)\\[[^\\]]+\\](?::\\n?[ \\t
|
|||
// - [link text](/github/site-policy/enterprise/2.2/admin/blah)
|
||||
const versionLinkRegEx = /(?=^|[^\]]\s*)\[[^\]]+\](?::\n?[ \t]+|\s*\()(?:(?:https?:\/\/(?:help|docs|developer)\.github\.com)?\/enterprise\/\d+(\.\d+)+(?:\/[^)\s]*)?)(?:\)|\s+|$)/gm
|
||||
|
||||
// Things matched by this RegExp:
|
||||
// - [link text](/early-access/github/blah)
|
||||
// - [link text] (https://docs.github.com/early-access/github/blah)
|
||||
// - [link-definition-ref]: http://help.github.com/early-access/github/blah
|
||||
// - etc.
|
||||
//
|
||||
// Things intentionally NOT matched by this RegExp:
|
||||
// - [Node.js](https://nodejs.org/early-access/)
|
||||
// - etc.
|
||||
//
|
||||
const earlyAccessLinkRegex = /(?=^|[^\]]\s*)\[[^\]]+\](?::\n?[ \t]+|\s*\()(?:(?:https?:\/\/(?:help|docs|developer)\.github\.com)?\/early-access(?:\/[^)\s]*)?)(?:\)|\s+|$)/gm
|
||||
|
||||
// - [link text](https://docs.github.com/github/blah)
|
||||
// - [link text] (https://help.github.com/github/blah)
|
||||
// - [link-definition-ref]: http://developer.github.com/v3/
|
||||
|
@ -79,6 +92,33 @@ const versionLinkRegEx = /(?=^|[^\]]\s*)\[[^\]]+\](?::\n?[ \t]+|\s*\()(?:(?:http
|
|||
//
|
||||
const domainLinkRegex = /(?=^|[^\]]\s*)\[[^\]]+\](?::\n?[ \t]+|\s*\()(?:https?:)?\/\/(?:help|docs|developer)\.github\.com(?!\/changes\/)[^)\s]*(?:\)|\s+|$)/gm
|
||||
|
||||
// Things matched by this RegExp:
|
||||
// - ![image text](/assets/images/early-access/github/blah.gif)
|
||||
// - ![image text] (https://docs.github.com/assets/images/early-access/github/blah.gif)
|
||||
// - [image-definition-ref]: http://help.github.com/assets/images/early-access/github/blah.gif
|
||||
// - [link text](/assets/images/early-access/github/blah.gif)
|
||||
// - etc.
|
||||
//
|
||||
// Things intentionally NOT matched by this RegExp:
|
||||
// - [Node.js](https://nodejs.org/assets/images/early-access/blah.gif)
|
||||
// - etc.
|
||||
//
|
||||
const earlyAccessImageRegex = /(?=^|[^\]]\s*)\[[^\]]+\](?::\n?[ \t]+|\s*\()(?:(?:https?:\/\/(?:help|docs|developer)\.github\.com)?\/assets\/images\/early-access(?:\/[^)\s]*)?)(?:\)|\s+|$)/gm
|
||||
|
||||
// Things matched by this RegExp:
|
||||
// - ![image text](/assets/early-access/images/github/blah.gif)
|
||||
// - ![image text] (https://docs.github.com/images/early-access/github/blah.gif)
|
||||
// - [image-definition-ref]: http://help.github.com/assets/early-access/github/blah.gif
|
||||
// - [link text](/early-access/assets/images/github/blah.gif)
|
||||
// - [link text](/early-access/images/github/blah.gif)
|
||||
// - etc.
|
||||
//
|
||||
// Things intentionally NOT matched by this RegExp:
|
||||
// - [Node.js](https://nodejs.org/assets/early-access/images/blah.gif)
|
||||
// - etc.
|
||||
//
|
||||
const badEarlyAccessImageRegex = /(?=^|[^\]]\s*)\[[^\]]+\](?::\n?[ \t]+|\s*\()(?:(?:https?:\/\/(?:help|docs|developer)\.github\.com)?\/(?:(?:assets|images)\/early-access|early-access\/(?:assets|images))(?:\/[^)\s]*)?)(?:\)|\s+|$)/gm
|
||||
|
||||
// {{ site.data.example.pizza }}
|
||||
const oldVariableRegex = /{{\s*?site\.data\..*?}}/g
|
||||
|
||||
|
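To make the intent of the Early Access link pattern above concrete, here is a small check (not part of this commit; the sample strings are invented):

// Hypothetical sample strings run against earlyAccessLinkRegex as defined above.
const earlyAccessLinkRegex = /(?=^|[^\]]\s*)\[[^\]]+\](?::\n?[ \t]+|\s*\()(?:(?:https?:\/\/(?:help|docs|developer)\.github\.com)?\/early-access(?:\/[^)\s]*)?)(?:\)|\s+|$)/gm

const leaking = '[link text](/early-access/github/some-feature)'
const safe = '[Node.js](https://nodejs.org/early-access/)'

console.log((leaking.match(earlyAccessLinkRegex) || []).length) // 1 (flagged as a leaked Early Access link)
console.log((safe.match(earlyAccessLinkRegex) || []).length)    // 0 (non-GitHub domain is intentionally ignored)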
@ -98,6 +138,9 @@ const relativeArticleLinkErrorText = 'Found unexpected relative article links:'
|
|||
const languageLinkErrorText = 'Found article links with hard-coded language codes:'
|
||||
const versionLinkErrorText = 'Found article links with hard-coded version numbers:'
|
||||
const domainLinkErrorText = 'Found article links with hard-coded domain names:'
|
||||
const earlyAccessLinkErrorText = 'Found article links leaking Early Access docs:'
|
||||
const earlyAccessImageErrorText = 'Found article images/links leaking Early Access images:'
|
||||
const badEarlyAccessImageErrorText = 'Found article images/links leaking incorrect Early Access images:'
|
||||
const oldVariableErrorText = 'Found article uses old {{ site.data... }} syntax. Use {% data example.data.string %} instead!'
|
||||
const oldOcticonErrorText = 'Found octicon variables with the old {{ octicon-name }} syntax. Use {% octicon "name" %} instead!'
|
||||
const oldExtendedMarkdownErrorText = 'Found extended markdown tags with the old {{#note}} syntax. Use {% note %}/{% endnote %} instead!'
|
||||
|
@ -105,7 +148,7 @@ const oldExtendedMarkdownErrorText = 'Found extended markdown tags with the old
|
|||
describe('lint-files', () => {
|
||||
const mdWalkOptions = {
|
||||
globs: ['**/*.md'],
|
||||
ignore: ['**/README.md'],
|
||||
ignore: ['**/README.md', 'early-access'],
|
||||
directories: false,
|
||||
includeBasePath: true
|
||||
}
|
||||
|
@ -121,10 +164,19 @@ describe('lint-files', () => {
|
|||
describe.each([...contentMarkdownTuples, ...reusableMarkdownTuples])(
|
||||
'in "%s"',
|
||||
(markdownRelPath, markdownAbsPath) => {
|
||||
let content
|
||||
let content, isHidden, isEarlyAccess
|
||||
|
||||
beforeAll(async () => {
|
||||
content = await fs.promises.readFile(markdownAbsPath, 'utf8')
|
||||
const fileContents = await fs.promises.readFile(markdownAbsPath, 'utf8')
|
||||
const { data, content: bodyContent } = frontmatter(fileContents)
|
||||
|
||||
content = bodyContent
|
||||
isHidden = data.hidden === true
|
||||
isEarlyAccess = markdownRelPath.split('/').includes('early-access')
|
||||
})
|
||||
|
||||
test('hidden docs must be Early Access', async () => {
|
||||
expect(isHidden).toBe(isEarlyAccess)
|
||||
})
|
||||
|
||||
test('relative URLs must start with "/"', async () => {
|
||||
|
@ -206,6 +258,32 @@ describe('lint-files', () => {
|
|||
expect(matches.length, errorMessage).toBe(0)
|
||||
})
|
||||
|
||||
test('must not leak Early Access doc URLs', async () => {
|
||||
// Only execute for docs that are NOT Early Access
|
||||
if (!isEarlyAccess) {
|
||||
const matches = (content.match(earlyAccessLinkRegex) || [])
|
||||
const errorMessage = formatLinkError(earlyAccessLinkErrorText, matches)
|
||||
expect(matches.length, errorMessage).toBe(0)
|
||||
}
|
||||
})
|
||||
|
||||
test('must not leak Early Access image URLs', async () => {
|
||||
// Only execute for docs that are NOT Early Access
|
||||
if (!isEarlyAccess) {
|
||||
const matches = (content.match(earlyAccessImageRegex) || [])
|
||||
const errorMessage = formatLinkError(earlyAccessImageErrorText, matches)
|
||||
expect(matches.length, errorMessage).toBe(0)
|
||||
}
|
||||
})
|
||||
|
||||
test('must have correctly formatted Early Access image URLs', async () => {
|
||||
// Execute for ALL docs (not just Early Access) to ensure non-EA docs
|
||||
// are not leaking incorrectly formatted EA image URLs
|
||||
const matches = (content.match(badEarlyAccessImageRegex) || [])
|
||||
const errorMessage = formatLinkError(badEarlyAccessImageErrorText, matches)
|
||||
expect(matches.length, errorMessage).toBe(0)
|
||||
})
|
||||
|
||||
test('does not use old site.data variable syntax', async () => {
|
||||
const matches = (content.match(oldVariableRegex) || [])
|
||||
const matchesWithExample = matches.map(match => {
|
||||
|
@ -248,17 +326,19 @@ describe('lint-files', () => {
|
|||
}
|
||||
|
||||
const variableYamlAbsPaths = walk(variablesDir, yamlWalkOptions).sort()
|
||||
const variableYamlRelPaths = variableYamlAbsPaths.map(p => path.relative(rootDir, p))
|
||||
const variableYamlRelPaths = variableYamlAbsPaths.map(p => slash(path.relative(rootDir, p)))
|
||||
const variableYamlTuples = zip(variableYamlRelPaths, variableYamlAbsPaths)
|
||||
|
||||
describe.each(variableYamlTuples)(
|
||||
'in "%s"',
|
||||
(yamlRelPath, yamlAbsPath) => {
|
||||
let dictionary
|
||||
let dictionary, isEarlyAccess
|
||||
|
||||
beforeAll(async () => {
|
||||
const fileContents = await fs.promises.readFile(yamlAbsPath, 'utf8')
|
||||
dictionary = yaml.safeLoad(fileContents, { filename: yamlRelPath })
|
||||
|
||||
isEarlyAccess = yamlRelPath.split('/').includes('early-access')
|
||||
})
|
||||
|
||||
test('relative URLs must start with "/"', async () => {
|
||||
|
@ -321,6 +401,59 @@ describe('lint-files', () => {
|
|||
expect(matches.length, errorMessage).toBe(0)
|
||||
})
|
||||
|
||||
test('must not leak Early Access doc URLs', async () => {
|
||||
// Only execute for docs that are NOT Early Access
|
||||
if (!isEarlyAccess) {
|
||||
const matches = []
|
||||
|
||||
for (const [key, content] of Object.entries(dictionary)) {
|
||||
if (typeof content !== 'string') continue
|
||||
const valMatches = (content.match(earlyAccessLinkRegex) || [])
|
||||
if (valMatches.length > 0) {
|
||||
matches.push(...valMatches.map((match) => `Key "${key}": ${match}`))
|
||||
}
|
||||
}
|
||||
|
||||
const errorMessage = formatLinkError(earlyAccessLinkErrorText, matches)
|
||||
expect(matches.length, errorMessage).toBe(0)
|
||||
}
|
||||
})
|
||||
|
||||
test('must not leak Early Access image URLs', async () => {
|
||||
// Only execute for docs that are NOT Early Access
|
||||
if (!isEarlyAccess) {
|
||||
const matches = []
|
||||
|
||||
for (const [key, content] of Object.entries(dictionary)) {
|
||||
if (typeof content !== 'string') continue
|
||||
const valMatches = (content.match(earlyAccessImageRegex) || [])
|
||||
if (valMatches.length > 0) {
|
||||
matches.push(...valMatches.map((match) => `Key "${key}": ${match}`))
|
||||
}
|
||||
}
|
||||
|
||||
const errorMessage = formatLinkError(earlyAccessImageErrorText, matches)
|
||||
expect(matches.length, errorMessage).toBe(0)
|
||||
}
|
||||
})
|
||||
|
||||
test('must have correctly formatted Early Access image URLs', async () => {
|
||||
// Execute for ALL docs (not just Early Access) to ensure non-EA docs
|
||||
// are not leaking incorrectly formatted EA image URLs
|
||||
const matches = []
|
||||
|
||||
for (const [key, content] of Object.entries(dictionary)) {
|
||||
if (typeof content !== 'string') continue
|
||||
const valMatches = (content.match(badEarlyAccessImageRegex) || [])
|
||||
if (valMatches.length > 0) {
|
||||
matches.push(...valMatches.map((match) => `Key "${key}": ${match}`))
|
||||
}
|
||||
}
|
||||
|
||||
const errorMessage = formatLinkError(badEarlyAccessImageErrorText, matches)
|
||||
expect(matches.length, errorMessage).toBe(0)
|
||||
})
|
||||
|
||||
test('does not use old site.data variable syntax', async () => {
|
||||
const matches = []
|
||||
|
||||
|
|
|
@ -1,13 +1,14 @@
|
|||
const { isEqual, get, uniqBy } = require('lodash')
|
||||
const { isEqual, get, uniqWith } = require('lodash')
|
||||
const loadSiteData = require('../../lib/site-data')
|
||||
const loadPages = require('../../lib/pages')
|
||||
const getDataReferences = require('../../lib/get-liquid-data-references')
|
||||
const frontmatter = require('@github-docs/frontmatter')
|
||||
const fs = require('fs')
|
||||
const path = require('path')
|
||||
|
||||
describe('data references', () => {
|
||||
let data
|
||||
let pages
|
||||
let data, pages
|
||||
|
||||
beforeAll(async (done) => {
|
||||
data = await loadSiteData()
|
||||
pages = await loadPages()
|
||||
|
@ -20,15 +21,34 @@ describe('data references', () => {
|
|||
expect(pages.length).toBeGreaterThan(0)
|
||||
|
||||
pages.forEach(page => {
|
||||
const file = path.join('content', page.relativePath)
|
||||
const pageRefs = getDataReferences(page.markdown)
|
||||
pageRefs.forEach(key => {
|
||||
const value = get(data.en, key)
|
||||
const file = path.join('content', page.relativePath)
|
||||
if (typeof value !== 'string') errors.push({ key, value, file })
|
||||
})
|
||||
})
|
||||
|
||||
errors = uniqBy(errors, isEqual) // remove duplicates
|
||||
errors = uniqWith(errors, isEqual) // remove duplicates
|
||||
expect(errors.length, JSON.stringify(errors, null, 2)).toBe(0)
|
||||
})
|
||||
|
||||
test('every data reference found in metadata of English content files is defined and has a value', () => {
|
||||
let errors = []
|
||||
expect(pages.length).toBeGreaterThan(0)
|
||||
|
||||
pages.forEach(page => {
|
||||
const metadataFile = path.join('content', page.relativePath)
|
||||
const fileContents = fs.readFileSync(path.join(__dirname, '../..', metadataFile))
|
||||
const { data: metadata } = frontmatter(fileContents, { filepath: page.fullPath })
|
||||
const metadataRefs = getDataReferences(JSON.stringify(metadata))
|
||||
metadataRefs.forEach(key => {
|
||||
const value = get(data.en, key)
|
||||
if (typeof value !== 'string') errors.push({ key, value, metadataFile })
|
||||
})
|
||||
})
|
||||
|
||||
errors = uniqWith(errors, isEqual) // remove duplicates
|
||||
expect(errors.length, JSON.stringify(errors, null, 2)).toBe(0)
|
||||
})
|
||||
|
||||
|
@ -39,17 +59,18 @@ describe('data references', () => {
|
|||
expect(reusables.length).toBeGreaterThan(0)
|
||||
|
||||
reusables.forEach(reusablesPerFile => {
|
||||
let reusableFile = path.join(__dirname, '../../data/reusables/', getFilenameByValue(allReusables, reusablesPerFile))
|
||||
reusableFile = getFilepath(reusableFile)
|
||||
|
||||
const reusableRefs = getDataReferences(JSON.stringify(reusablesPerFile))
|
||||
|
||||
reusableRefs.forEach(key => {
|
||||
const value = get(data.en, key)
|
||||
let reusableFile = path.join(__dirname, '../../data/reusables/', getFilenameByValue(allReusables, reusablesPerFile))
|
||||
reusableFile = getFilepath(reusableFile)
|
||||
if (typeof value !== 'string') errors.push({ key, value, reusableFile })
|
||||
})
|
||||
})
|
||||
|
||||
errors = uniqBy(errors, isEqual) // remove duplicates
|
||||
errors = uniqWith(errors, isEqual) // remove duplicates
|
||||
expect(errors.length, JSON.stringify(errors, null, 2)).toBe(0)
|
||||
})
|
||||
|
||||
|
@ -60,17 +81,18 @@ describe('data references', () => {
|
|||
expect(variables.length).toBeGreaterThan(0)
|
||||
|
||||
variables.forEach(variablesPerFile => {
|
||||
let variableFile = path.join(__dirname, '../../data/variables/', getFilenameByValue(allVariables, variablesPerFile))
|
||||
variableFile = getFilepath(variableFile)
|
||||
|
||||
const variableRefs = getDataReferences(JSON.stringify(variablesPerFile))
|
||||
|
||||
variableRefs.forEach(key => {
|
||||
const value = get(data.en, key)
|
||||
let variableFile = path.join(__dirname, '../../data/variables/', getFilenameByValue(allVariables, variablesPerFile))
|
||||
variableFile = getFilepath(variableFile)
|
||||
if (typeof value !== 'string') errors.push({ key, value, variableFile })
|
||||
})
|
||||
})
|
||||
|
||||
errors = uniqBy(errors, isEqual) // remove duplicates
|
||||
errors = uniqWith(errors, isEqual) // remove duplicates
|
||||
expect(errors.length, JSON.stringify(errors, null, 2)).toBe(0)
|
||||
})
|
||||
})
|
||||
|
|
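The switch from uniqBy to uniqWith above matters because the two lodash helpers treat isEqual differently; a minimal sketch with invented data (not part of this commit):

// uniqBy passes each item through the iteratee and compares the returned keys,
// so uniqBy(errors, isEqual) calls isEqual(item) with one argument and every item
// maps to the same key. uniqWith compares items pairwise with isEqual instead.
const { uniqBy, uniqWith, isEqual } = require('lodash')

const errors = [
  { key: 'variables.foo', file: 'content/a.md' },
  { key: 'variables.foo', file: 'content/a.md' }, // exact duplicate
  { key: 'variables.bar', file: 'content/b.md' }
]

console.log(uniqWith(errors, isEqual).length) // 2 (only the exact duplicate is dropped)
console.log(uniqBy(errors, isEqual).length)   // 1 (all items collapse to one key)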
|
@ -0,0 +1,60 @@
|
|||
const walkSync = require('walk-sync')
|
||||
const fs = require('fs').promises
|
||||
|
||||
const REPO_REGEXP = /\/\/github\.com\/github\/(?!docs[/'"\n])([\w-.]+)/gi
|
||||
|
||||
// This is a list of known public repositories in the GitHub organization
|
||||
const ALLOW_LIST = new Set([
|
||||
'site-policy',
|
||||
'roadmap',
|
||||
'linguist',
|
||||
'super-linter',
|
||||
'backup-utils',
|
||||
'codeql-action-sync-tool',
|
||||
'codeql-action',
|
||||
'platform-samples',
|
||||
'github-services',
|
||||
'explore',
|
||||
'markup',
|
||||
'hubot',
|
||||
'VisualStudio',
|
||||
'codeql',
|
||||
'gitignore',
|
||||
'feedback',
|
||||
'semantic',
|
||||
'git-lfs',
|
||||
'git-sizer',
|
||||
'dmca',
|
||||
'gov-takedowns',
|
||||
'janky',
|
||||
'rest-api-description',
|
||||
'smimesign',
|
||||
'tweetsodium',
|
||||
'choosealicense.com'
|
||||
])
|
||||
|
||||
describe('check for repository references', () => {
|
||||
const filenames = walkSync(process.cwd(), {
|
||||
directories: false,
|
||||
ignore: [
|
||||
'.git',
|
||||
'dist',
|
||||
'node_modules',
|
||||
'translations',
|
||||
'content/early-access',
|
||||
'lib/rest/**/*.json',
|
||||
'lib/webhooks/**/*.json',
|
||||
'ownership.yaml',
|
||||
'docs/index.yaml',
|
||||
'lib/excluded-links.js'
|
||||
]
|
||||
})
|
||||
|
||||
test.each(filenames)('in file %s', async (filename) => {
|
||||
const file = await fs.readFile(filename, 'utf8')
|
||||
const matches = Array.from(file.matchAll(REPO_REGEXP))
|
||||
.map(([, repoName]) => repoName)
|
||||
.filter(repoName => !ALLOW_LIST.has(repoName))
|
||||
expect(matches).toHaveLength(0)
|
||||
})
|
||||
})
|
|
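A small sketch of what the REPO_REGEXP and allow list above catch (not part of this commit; the sample text is invented):

// Hypothetical file contents checked against REPO_REGEXP as defined above.
const REPO_REGEXP = /\/\/github\.com\/github\/(?!docs[/'"\n])([\w-.]+)/gi

const sample = [
  'See https://github.com/github/docs-internal/issues/123',
  'and https://github.com/github/linguist for details.',
  'Links to https://github.com/github/docs/pulls are allowed.'
].join('\n')

const repoNames = Array.from(sample.matchAll(REPO_REGEXP)).map(([, name]) => name)
console.log(repoNames) // [ 'docs-internal', 'linguist' ]
// 'linguist' is on the allow list; 'docs-internal' would make the test fail.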
@ -1,64 +0,0 @@
|
|||
const MockExpressResponse = require('mock-express-response')
|
||||
const middleware = require('../../middleware/early-access-paths')
|
||||
|
||||
describe('GET /early-access-paths.json', () => {
|
||||
beforeEach(() => {
|
||||
delete process.env['early-access-shared-secret']
|
||||
})
|
||||
|
||||
test('responds with 401 if shared secret is missing', async () => {
|
||||
const req = {
|
||||
path: '/early-access-paths.json',
|
||||
headers: {}
|
||||
}
|
||||
const res = new MockExpressResponse()
|
||||
const next = jest.fn()
|
||||
await middleware(req, res, next)
|
||||
|
||||
expect(res._getJSON()).toEqual({ error: '401 Unauthorized' })
|
||||
})
|
||||
|
||||
test('responds with an array of hidden paths', async () => {
|
||||
process.env.EARLY_ACCESS_SHARED_SECRET = 'bananas'
|
||||
|
||||
const req = {
|
||||
path: '/early-access-paths.json',
|
||||
headers: {
|
||||
'early-access-shared-secret': 'bananas'
|
||||
},
|
||||
context: {
|
||||
pages: [
|
||||
{
|
||||
hidden: true,
|
||||
languageCode: 'en',
|
||||
permalinks: [
|
||||
{ href: '/some-hidden-page' }
|
||||
],
|
||||
redirects: {
|
||||
'/old-hidden-page': '/new-hidden-page'
|
||||
}
|
||||
},
|
||||
{
|
||||
hidden: false,
|
||||
languageCode: 'en'
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
const res = new MockExpressResponse()
|
||||
const next = jest.fn()
|
||||
await middleware(req, res, next)
|
||||
|
||||
expect(res._getJSON()).toEqual(['/some-hidden-page', '/old-hidden-page'])
|
||||
})
|
||||
|
||||
test('ignores requests to other paths', async () => {
|
||||
const req = {
|
||||
path: '/not-early-access'
|
||||
}
|
||||
const res = new MockExpressResponse()
|
||||
const next = jest.fn()
|
||||
await middleware(req, res, next)
|
||||
expect(next).toHaveBeenCalled()
|
||||
})
|
||||
})
|
|
@ -1,80 +0,0 @@
|
|||
|
||||
const middleware = require('../../middleware/early-access-proxy')
|
||||
const nock = require('nock')
|
||||
const MockExpressResponse = require('mock-express-response')
|
||||
|
||||
describe('Early Access middleware', () => {
|
||||
const OLD_EARLY_ACCESS_HOSTNAME = process.env.EARLY_ACCESS_HOSTNAME
|
||||
|
||||
beforeAll(() => {
|
||||
process.env.EARLY_ACCESS_HOSTNAME = 'https://secret-website.com'
|
||||
})
|
||||
|
||||
afterAll(() => {
|
||||
process.env.EARLY_ACCESS_HOSTNAME = OLD_EARLY_ACCESS_HOSTNAME
|
||||
})
|
||||
|
||||
const baseReq = {
|
||||
context: {
|
||||
earlyAccessPaths: ['/alpha-product/foo', '/beta-product/bar', '/baz']
|
||||
}
|
||||
}
|
||||
|
||||
test('are proxied from an obscured host', async () => {
|
||||
const mock = nock('https://secret-website.com')
|
||||
.get('/alpha-product/foo')
|
||||
.reply(200, 'yay here is your proxied content', { 'content-type': 'text/html' })
|
||||
const req = { ...baseReq, path: '/alpha-product/foo' }
|
||||
const res = new MockExpressResponse()
|
||||
const next = jest.fn()
|
||||
await middleware(req, res, next)
|
||||
expect(mock.isDone()).toBe(true)
|
||||
expect(res._getString()).toBe('yay here is your proxied content')
|
||||
})
|
||||
|
||||
test('follows redirects', async () => {
|
||||
const mock = nock('https://secret-website.com')
|
||||
.get('/alpha-product/foo')
|
||||
.reply(301, undefined, { Location: 'https://secret-website.com/alpha-product/foo2' })
|
||||
.get('/alpha-product/foo2')
|
||||
.reply(200, 'yay you survived the redirect', { 'content-type': 'text/html' })
|
||||
const req = { ...baseReq, path: '/alpha-product/foo' }
|
||||
const res = new MockExpressResponse()
|
||||
const next = jest.fn()
|
||||
await middleware(req, res, next)
|
||||
expect(mock.isDone()).toBe(true)
|
||||
expect(res._getString()).toBe('yay you survived the redirect')
|
||||
})
|
||||
|
||||
test('calls next() if no redirect is found', async () => {
|
||||
const req = { ...baseReq, path: '/en' }
|
||||
const res = new MockExpressResponse()
|
||||
const next = jest.fn()
|
||||
await middleware(req, res, next)
|
||||
expect(next).toHaveBeenCalled()
|
||||
})
|
||||
|
||||
test('calls next() if proxy request responds with 404', async () => {
|
||||
const mock = nock('https://secret-website.com')
|
||||
.get('/beta-product/bar')
|
||||
.reply(404, 'no dice', { 'content-type': 'text/html' })
|
||||
const req = { ...baseReq, path: '/beta-product/bar' }
|
||||
const res = new MockExpressResponse()
|
||||
const next = jest.fn()
|
||||
await middleware(req, res, next)
|
||||
expect(mock.isDone()).toBe(true)
|
||||
expect(next).toHaveBeenCalled()
|
||||
})
|
||||
|
||||
test('calls next() if proxy request responds with 500', async () => {
|
||||
const mock = nock('https://secret-website.com')
|
||||
.get('/beta-product/bar')
|
||||
.reply(500, 'no dice', { 'content-type': 'text/html' })
|
||||
const req = { ...baseReq, path: '/beta-product/bar' }
|
||||
const res = new MockExpressResponse()
|
||||
const next = jest.fn()
|
||||
await middleware(req, res, next)
|
||||
expect(mock.isDone()).toBe(true)
|
||||
expect(next).toHaveBeenCalled()
|
||||
})
|
||||
})
|
|
@ -12,22 +12,22 @@ describe('robots.txt', () => {
|
|||
let res, robots
|
||||
beforeAll(async (done) => {
|
||||
res = await get('/robots.txt')
|
||||
robots = robotsParser('https://help.github.com/robots.txt', res.text)
|
||||
robots = robotsParser('https://docs.github.com/robots.txt', res.text)
|
||||
done()
|
||||
})
|
||||
|
||||
it('allows indexing of the homepage and English content', async () => {
|
||||
expect(robots.isAllowed('https://help.github.com/')).toBe(true)
|
||||
expect(robots.isAllowed('https://help.github.com/en')).toBe(true)
|
||||
expect(robots.isAllowed('https://help.github.com/en/articles/verifying-your-email-address')).toBe(true)
|
||||
expect(robots.isAllowed('https://docs.github.com/')).toBe(true)
|
||||
expect(robots.isAllowed('https://docs.github.com/en')).toBe(true)
|
||||
expect(robots.isAllowed('https://docs.github.com/en/articles/verifying-your-email-address')).toBe(true)
|
||||
})
|
||||
|
||||
it('allows indexing of generally available localized content', async () => {
|
||||
Object.values(languages)
|
||||
.filter(language => !language.wip)
|
||||
.forEach(language => {
|
||||
expect(robots.isAllowed(`https://help.github.com/${language.code}`)).toBe(true)
|
||||
expect(robots.isAllowed(`https://help.github.com/${language.code}/articles/verifying-your-email-address`)).toBe(true)
|
||||
expect(robots.isAllowed(`https://docs.github.com/${language.code}`)).toBe(true)
|
||||
expect(robots.isAllowed(`https://docs.github.com/${language.code}/articles/verifying-your-email-address`)).toBe(true)
|
||||
})
|
||||
})
|
||||
|
||||
|
@ -35,8 +35,8 @@ describe('robots.txt', () => {
|
|||
Object.values(languages)
|
||||
.filter(language => language.wip)
|
||||
.forEach(language => {
|
||||
expect(robots.isAllowed(`https://help.github.com/${language.code}`)).toBe(false)
|
||||
expect(robots.isAllowed(`https://help.github.com/${language.code}/articles/verifying-your-email-address`)).toBe(false)
|
||||
expect(robots.isAllowed(`https://docs.github.com/${language.code}`)).toBe(false)
|
||||
expect(robots.isAllowed(`https://docs.github.com/${language.code}/articles/verifying-your-email-address`)).toBe(false)
|
||||
})
|
||||
})
|
||||
|
||||
|
@ -61,18 +61,18 @@ describe('robots.txt', () => {
|
|||
const { href } = products[id]
|
||||
const blockedPaths = [
|
||||
// English
|
||||
`https://help.github.com/en${href}`,
|
||||
`https://help.github.com/en${href}/overview`,
|
||||
`https://help.github.com/en${href}/overview/intro`,
|
||||
`https://help.github.com/en/enterprise/${enterpriseServerReleases.latest}/user${href}`,
|
||||
`https://help.github.com/en/enterprise/${enterpriseServerReleases.oldestSupported}/user${href}`,
|
||||
`https://docs.github.com/en${href}`,
|
||||
`https://docs.github.com/en${href}/overview`,
|
||||
`https://docs.github.com/en${href}/overview/intro`,
|
||||
`https://docs.github.com/en/enterprise/${enterpriseServerReleases.latest}/user${href}`,
|
||||
`https://docs.github.com/en/enterprise/${enterpriseServerReleases.oldestSupported}/user${href}`,
|
||||
|
||||
// Japanese
|
||||
`https://help.github.com/ja${href}`,
|
||||
`https://help.github.com/ja${href}/overview`,
|
||||
`https://help.github.com/ja${href}/overview/intro`,
|
||||
`https://help.github.com/ja/enterprise/${enterpriseServerReleases.latest}/user${href}`,
|
||||
`https://help.github.com/ja/enterprise/${enterpriseServerReleases.oldestSupported}/user${href}`
|
||||
`https://docs.github.com/ja${href}`,
|
||||
`https://docs.github.com/ja${href}/overview`,
|
||||
`https://docs.github.com/ja${href}/overview/intro`,
|
||||
`https://docs.github.com/ja/enterprise/${enterpriseServerReleases.latest}/user${href}`,
|
||||
`https://docs.github.com/ja/enterprise/${enterpriseServerReleases.oldestSupported}/user${href}`
|
||||
]
|
||||
|
||||
blockedPaths.forEach(path => {
|
||||
|
@ -81,28 +81,52 @@ describe('robots.txt', () => {
|
|||
})
|
||||
})
|
||||
|
||||
it('disallows indexing of early access "hidden" products', async () => {
|
||||
const hiddenProductIds = Object.values(products)
|
||||
.filter(product => product.hidden)
|
||||
.map(product => product.id)
|
||||
|
||||
hiddenProductIds.forEach(id => {
|
||||
const { versions } = products[id]
|
||||
const blockedPaths = versions.map(version => {
|
||||
return [
|
||||
// English
|
||||
`https://docs.github.com/en/${version}/${id}`,
|
||||
`https://docs.github.com/en/${version}/${id}/some-early-access-article`,
|
||||
// Japanese
|
||||
`https://docs.github.com/ja/${version}/${id}`,
|
||||
`https://docs.github.com/ja/${version}/${id}/some-early-access-article`
|
||||
]
|
||||
}).flat()
|
||||
|
||||
blockedPaths.forEach(path => {
|
||||
expect(robots.isAllowed(path)).toBe(false)
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
it('allows indexing of non-WIP products', async () => {
|
||||
expect('actions' in products).toBe(true)
|
||||
expect(robots.isAllowed('https://help.github.com/en/actions')).toBe(true)
|
||||
expect(robots.isAllowed('https://help.github.com/en/actions/overview')).toBe(true)
|
||||
expect(robots.isAllowed('https://help.github.com/en/actions/overview/intro')).toBe(true)
|
||||
expect(robots.isAllowed(`https://help.github.com/en/enterprise/${enterpriseServerReleases.latest}/user/actions`)).toBe(true)
|
||||
expect(robots.isAllowed(`https://help.github.com/en/enterprise/${enterpriseServerReleases.oldestSupported}/user/actions`)).toBe(true)
|
||||
expect(robots.isAllowed('https://docs.github.com/en/actions')).toBe(true)
|
||||
expect(robots.isAllowed('https://docs.github.com/en/actions/overview')).toBe(true)
|
||||
expect(robots.isAllowed('https://docs.github.com/en/actions/overview/intro')).toBe(true)
|
||||
expect(robots.isAllowed(`https://docs.github.com/en/enterprise/${enterpriseServerReleases.latest}/user/actions`)).toBe(true)
|
||||
expect(robots.isAllowed(`https://docs.github.com/en/enterprise/${enterpriseServerReleases.oldestSupported}/user/actions`)).toBe(true)
|
||||
})
|
||||
|
||||
it('disallows indexing of deprecated enterprise releases', async () => {
|
||||
enterpriseServerReleases.deprecated.forEach(version => {
|
||||
const blockedPaths = [
|
||||
// English
|
||||
`https://help.github.com/en/enterprise-server@${version}/actions`,
|
||||
`https://help.github.com/en/enterprise/${version}/actions`,
|
||||
`https://help.github.com/en/enterprise-server@${version}/actions/overview`,
|
||||
`https://help.github.com/en/enterprise/${version}/actions/overview`,
|
||||
`https://docs.github.com/en/enterprise-server@${version}/actions`,
|
||||
`https://docs.github.com/en/enterprise/${version}/actions`,
|
||||
`https://docs.github.com/en/enterprise-server@${version}/actions/overview`,
|
||||
`https://docs.github.com/en/enterprise/${version}/actions/overview`,
|
||||
// Japanese
|
||||
`https://help.github.com/ja/enterprise-server@${version}/actions`,
|
||||
`https://help.github.com/ja/enterprise/${version}/actions`,
|
||||
`https://help.github.com/ja/enterprise-server@${version}/actions/overview`,
|
||||
`https://help.github.com/ja/enterprise/${version}/actions/overview`
|
||||
`https://docs.github.com/ja/enterprise-server@${version}/actions`,
|
||||
`https://docs.github.com/ja/enterprise/${version}/actions`,
|
||||
`https://docs.github.com/ja/enterprise-server@${version}/actions/overview`,
|
||||
`https://docs.github.com/ja/enterprise/${version}/actions/overview`
|
||||
]
|
||||
|
||||
blockedPaths.forEach(path => {
|
||||
|
|
|
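For context on the assertions above, assuming the robotsParser used in these tests is the npm robots-parser module, it builds a matcher from a robots.txt body; a minimal sketch with an invented robots.txt (not part of this commit):

// Hypothetical robots.txt body parsed the same way the tests above parse res.text.
const robotsParser = require('robots-parser')

const robotsTxt = [
  'User-agent: *',
  'Allow: /en',
  'Disallow: /early-access'
].join('\n')

const robots = robotsParser('https://docs.github.com/robots.txt', robotsTxt)
console.log(robots.isAllowed('https://docs.github.com/en/actions'))          // true
console.log(robots.isAllowed('https://docs.github.com/early-access/secret')) // false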
@ -3,6 +3,7 @@ const enterpriseServerReleases = require('../../lib/enterprise-server-releases')
|
|||
const { get, getDOM, head } = require('../helpers/supertest')
|
||||
const path = require('path')
|
||||
const nonEnterpriseDefaultVersion = require('../../lib/non-enterprise-default-version')
|
||||
const loadPages = require('../../lib/pages')
|
||||
|
||||
describe('server', () => {
|
||||
jest.setTimeout(60 * 1000)
|
||||
|
@ -90,7 +91,7 @@ describe('server', () => {
|
|||
expect($.res.statusCode).toBe(400)
|
||||
})
|
||||
|
||||
// see https://github.com/github/docs-internal/issues/12427
|
||||
// see issue 12427
|
||||
test('renders a 404 for leading slashes', async () => {
|
||||
let $ = await getDOM('//foo.com/enterprise')
|
||||
expect($('h1').text()).toBe('Ooops!')
|
||||
|
@ -130,7 +131,7 @@ describe('server', () => {
|
|||
expect($('div.permissions-statement').text()).toContain('GitHub Pages site')
|
||||
})
|
||||
|
||||
// see https://github.com/github/docs-internal/issues/9678
|
||||
// see issue 9678
|
||||
test('does not use cached intros in map topics', async () => {
|
||||
let $ = await getDOM('/en/github/importing-your-projects-to-github/importing-a-git-repository-using-the-command-line')
|
||||
const articleIntro = $('.lead-mktg').text()
|
||||
|
@ -355,6 +356,46 @@ describe('server', () => {
|
|||
})
|
||||
})
|
||||
|
||||
describe.skip('Early Access articles', () => {
|
||||
let hiddenPageHrefs, hiddenPages
|
||||
|
||||
beforeAll(async (done) => {
|
||||
const $ = await getDOM('/early-access')
|
||||
hiddenPageHrefs = $('#article-contents ul > li > a').map((i, el) => $(el).attr('href')).get()
|
||||
|
||||
const allPages = await loadPages()
|
||||
hiddenPages = allPages.filter(page => page.languageCode === 'en' && page.hidden)
|
||||
|
||||
done()
|
||||
})
|
||||
|
||||
test('exist in the set of English pages', async () => {
|
||||
expect(hiddenPages.length).toBeGreaterThan(0)
|
||||
})
|
||||
|
||||
test('are listed at /early-access', async () => {
|
||||
expect(hiddenPageHrefs.length).toBeGreaterThan(0)
|
||||
})
|
||||
|
||||
test('are not listed at /early-access in production', async () => {
|
||||
const oldNodeEnv = process.env.NODE_ENV
|
||||
process.env.NODE_ENV = 'production'
|
||||
const res = await get('/early-access', { followRedirects: true })
|
||||
process.env.NODE_ENV = oldNodeEnv
|
||||
expect(res.statusCode).toBe(404)
|
||||
})
|
||||
|
||||
test('have noindex meta tags', async () => {
|
||||
const $ = await getDOM(hiddenPageHrefs[0])
|
||||
expect($('meta[content="noindex"]').length).toBe(1)
|
||||
})
|
||||
|
||||
test('public articles do not have noindex meta tags', async () => {
|
||||
const $ = await getDOM('/en/articles/set-up-git')
|
||||
expect($('meta[content="noindex"]').length).toBe(0)
|
||||
})
|
||||
})
|
||||
|
||||
describe('redirects', () => {
|
||||
test('redirects old articles to their English URL', async () => {
|
||||
const res = await get('/articles/deleting-a-team')
|
||||
|
|
|
@ -36,4 +36,9 @@ describe('sidebar', () => {
|
|||
expect($('.sidebar .is-current-page').length).toBe(1)
|
||||
expect($('.sidebar .is-current-page a').attr('href')).toContain(url)
|
||||
})
|
||||
|
||||
test('does not display Early Access as a product', async () => {
|
||||
expect($homePage('.sidebar li.sidebar-product[title*="Early"]').length).toBe(0)
|
||||
expect($homePage('.sidebar li.sidebar-product[title*="early"]').length).toBe(0)
|
||||
})
|
||||
})
|
||||
|
|
|
@ -0,0 +1,23 @@
|
|||
const fs = require('fs')
|
||||
const path = require('path')
|
||||
|
||||
const { GITHUB_ACTIONS, GITHUB_REPOSITORY } = process.env
|
||||
const runningActionsOnInternalRepo = GITHUB_ACTIONS === 'true' && GITHUB_REPOSITORY === 'github/docs-internal'
|
||||
const testViaActionsOnly = runningActionsOnInternalRepo ? test : test.skip
|
||||
|
||||
describe('cloning early-access', () => {
|
||||
testViaActionsOnly('the content directory exists', async () => {
|
||||
const eaContentDir = path.join(process.cwd(), 'content/early-access')
|
||||
expect(fs.existsSync(eaContentDir)).toBe(true)
|
||||
})
|
||||
|
||||
testViaActionsOnly('the data directory exists', async () => {
|
||||
const eaContentDir = path.join(process.cwd(), 'data/early-access')
|
||||
expect(fs.existsSync(eaContentDir)).toBe(true)
|
||||
})
|
||||
|
||||
testViaActionsOnly('the assets/images directory exists', async () => {
|
||||
const eaContentDir = path.join(process.cwd(), 'assets/images/early-access')
|
||||
expect(fs.existsSync(eaContentDir)).toBe(true)
|
||||
})
|
||||
})
|
|
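The conditional-test pattern above (picking test vs. test.skip from the environment) generalizes to any environment-gated Jest check; a small sketch, not part of this commit:

// Hypothetical example of gating a Jest test on an environment variable.
const runningInActions = process.env.GITHUB_ACTIONS === 'true'
const testInActionsOnly = runningInActions ? test : test.skip

describe('environment-gated example', () => {
  testInActionsOnly('only runs on GitHub Actions', () => {
    expect(process.env.GITHUB_ACTIONS).toBe('true')
  })
})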
@ -2,7 +2,6 @@
|
|||
title: API previews
|
||||
intro: You can use API previews to try out new features and provide feedback before these features become official.
|
||||
redirect_from:
|
||||
- /early-access/
|
||||
- /v3/previews
|
||||
versions:
|
||||
free-pro-team: '*'
|
||||
|
|
|
@ -2,7 +2,6 @@
|
|||
title: Vistas previas de la API
|
||||
intro: Puedes utilizar las vistas previas de la API para probar características nuevas y proporcionar retroalimentación antes de que dichas características se hagan oficiales.
|
||||
redirect_from:
|
||||
- /early-access/
|
||||
- /v3/previews
|
||||
versions:
|
||||
free-pro-team: '*'
|
||||
|
|
|
@ -2,7 +2,6 @@
|
|||
title: API プレビュー
|
||||
intro: API プレビューを使用して新機能を試し、これらの機能が正式なものになる前にフィードバックを提供できます。
|
||||
redirect_from:
|
||||
- /early-access/
|
||||
- /v3/previews
|
||||
versions:
|
||||
free-pro-team: '*'
|
||||
|
|
|
@ -2,7 +2,6 @@
|
|||
title: API previews
|
||||
intro: You can use API previews to try out new features and provide feedback before these features become official.
|
||||
redirect_from:
|
||||
- /early-access/
|
||||
- /v3/previews
|
||||
versions:
|
||||
free-pro-team: '*'
|
||||
|
|
|
@ -2,7 +2,6 @@
|
|||
title: Pré-visualizações da API
|
||||
intro: Você pode usar pré-visualizações da API para testar novos recursos e fornecer feedback antes que estes recursos se tornem oficiais.
|
||||
redirect_from:
|
||||
- /early-access/
|
||||
- /v3/previews
|
||||
versions:
|
||||
free-pro-team: '*'
|
||||
|
|
|
@ -2,7 +2,6 @@
|
|||
title: API previews
|
||||
intro: You can use API previews to try out new features and provide feedback before these features become official.
|
||||
redirect_from:
|
||||
- /early-access/
|
||||
- /v3/previews
|
||||
versions:
|
||||
free-pro-team: '*'
|
||||
|
|
|
@ -2,7 +2,6 @@
|
|||
title: API 预览
|
||||
intro: 您可以使用 API 预览来试用新功能并在这些功能正式发布之前提供反馈。
|
||||
redirect_from:
|
||||
- /early-access/
|
||||
- /v3/previews
|
||||
versions:
|
||||
free-pro-team: '*'
|
||||
|
|