Merge branch 'main' into thejoebourneidentity-self-hosted-architectures

This commit is contained in:
Chiedo John 2021-02-01 10:09:21 -05:00 committed by GitHub
Parents: 13ed1531bc e12b83297c
Commit: 64d765f30a
No key found that matches this signature
GPG key ID: 4AEE18F83AFDEB23
173 changed files with 1432 additions and 551 deletions

.github/allowed-actions.js (vendored, 2 changed lines)

@ -12,7 +12,7 @@ module.exports = [
'actions/setup-ruby@5f29a1cd8dfebf420691c4c9a0e832e2fae5a526', //actions/setup-ruby@v1.1.2
'actions/stale@af4072615903a8b031f986d25b1ae3bf45ec44d4', //actions/stale@v3.0.13
'crowdin/github-action@fd9429dd63d6c0f8a8cb4b93ad8076990bd6e688',
'crykn/copy_folder_to_another_repo_action@abc264e1c16eb3d7b1f7763bfdb0e1699ad43120',
'crykn/copy_folder_to_another_repo_action@0282e8b9fef06de92ddcae9fe6cb44df6226646c',
'cschleiden/actions-linter@43fd4e08e52ed40c0e2782dc2425694388851576',
'dawidd6/action-delete-branch@47743101a121ad657031e6704086271ca81b1911',
'docker://chinthakagodawita/autoupdate-action:v1',

.github/workflows/link-check-dotcom.yml (vendored, new file, 57 changed lines)

@ -0,0 +1,57 @@
name: 'Link Checker: Dotcom'
on:
workflow_dispatch:
push:
jobs:
see_if_should_skip:
continue-on-error: true
runs-on: ubuntu-latest
# Map a step output to a job output
outputs:
should_skip: ${{ steps.skip_check.outputs.should_skip }}
steps:
- id: skip_check
uses: fkirc/skip-duplicate-actions@36feb0d8d062137530c2e00bd278d138fe191289
with:
cancel_others: 'false'
github_token: ${{ github.token }}
paths: '[".github/workflows/link-check-dotcom.yml", "assets/**", "content/**", "data/**", "includes/**", "javascripts/**", "jest-puppeteer.config.js", "jest.config.js", "layouts/**", "lib/**", "middleware/**", "package-lock.json", "package.json", "server.js", "translations/**", "webpack.config.js"]'
build:
needs: see_if_should_skip
runs-on: ${{ fromJSON('["ubuntu-latest", "self-hosted"]')[github.repository == 'github/docs-internal'] }}
steps:
# Each of these ifs needs to be repeated at each step to make sure the required check still runs
# Even if it doesn't do anything
- if: ${{ needs.see_if_should_skip.outputs.should_skip != 'true' }}
name: Checkout
uses: actions/checkout@5a4ac9002d0be2fb38bd78e4b4dbde5606d7042f
- if: ${{ needs.see_if_should_skip.outputs.should_skip != 'true' }}
name: Setup node
uses: actions/setup-node@c46424eee26de4078d34105d3de3cc4992202b1e
with:
node-version: 14.x
- if: ${{ needs.see_if_should_skip.outputs.should_skip != 'true' }}
name: Install
run: npm ci
## TODO
# - if: ${{ github.repository == 'github/docs-internal' && needs.see_if_should_skip.outputs.should_skip != 'true' }}
# name: Clone early access
# run: npm run heroku-postbuild
# env:
# DOCUBOT_REPO_PAT: ${{ secrets.DOCUBOT_REPO_PAT }}
# GIT_BRANCH: ${{ github.ref }}
- if: ${{ needs.see_if_should_skip.outputs.should_skip != 'true' }}
name: Build
run: npm run build
- if: ${{ needs.see_if_should_skip.outputs.should_skip != 'true' }}
name: 'Link check: Dotcom'
env:
DOCS_VERSION: 'dotcom'
run: npm run link-check
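
The `runs-on` line above relies on an array-indexing trick: `fromJSON` turns the string into a two-element array, and the boolean comparison `github.repository == 'github/docs-internal'` coerces to `0` or `1` when used as an index, so the public repository gets a GitHub-hosted runner while the internal repository gets a self-hosted one. A minimal sketch of that pattern, isolated from the rest of the workflow:

```yaml
jobs:
  build:
    # false -> index 0 -> ubuntu-latest; true -> index 1 -> self-hosted
    runs-on: ${{ fromJSON('["ubuntu-latest", "self-hosted"]')[github.repository == 'github/docs-internal'] }}
    steps:
      - run: echo "Runner OS is ${{ runner.os }}"
```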


@ -1,4 +1,4 @@
name: Link Checker
name: 'Link Checker: GitHub AE'
on:
workflow_dispatch:
@ -17,7 +17,7 @@ jobs:
with:
cancel_others: 'false'
github_token: ${{ github.token }}
paths: '[".github/workflows/link-check-test.yml", "assets/**", "content/**", "data/**", "includes/**", "javascripts/**", "jest-puppeteer.config.js", "jest.config.js", "layouts/**", "lib/**", "middleware/**", "package-lock.json", "package.json", "server.js", "translations/**", "webpack.config.js"]'
paths: '[".github/workflows/link-check-ghae.yml", "assets/**", "content/**", "data/**", "includes/**", "javascripts/**", "jest-puppeteer.config.js", "jest.config.js", "layouts/**", "lib/**", "middleware/**", "package-lock.json", "package.json", "server.js", "translations/**", "webpack.config.js"]'
build:
needs: see_if_should_skip
runs-on: ${{ fromJSON('["ubuntu-latest", "self-hosted"]')[github.repository == 'github/docs-internal'] }}
@ -51,5 +51,7 @@ jobs:
run: npm run build
- if: ${{ needs.see_if_should_skip.outputs.should_skip != 'true' }}
name: Link check
name: 'Link check: GitHub AE'
env:
DOCS_VERSION: 'github-ae'
run: npm run link-check

.github/workflows/link-check-ghes.yml (vendored, new file, 57 changed lines)

@ -0,0 +1,57 @@
name: 'Link Checker: Enterprise Server'
on:
workflow_dispatch:
push:
jobs:
see_if_should_skip:
continue-on-error: true
runs-on: ubuntu-latest
# Map a step output to a job output
outputs:
should_skip: ${{ steps.skip_check.outputs.should_skip }}
steps:
- id: skip_check
uses: fkirc/skip-duplicate-actions@36feb0d8d062137530c2e00bd278d138fe191289
with:
cancel_others: 'false'
github_token: ${{ github.token }}
paths: '[".github/workflows/link-check-ghes.yml", "assets/**", "content/**", "data/**", "includes/**", "javascripts/**", "jest-puppeteer.config.js", "jest.config.js", "layouts/**", "lib/**", "middleware/**", "package-lock.json", "package.json", "server.js", "translations/**", "webpack.config.js"]'
build:
needs: see_if_should_skip
runs-on: ${{ fromJSON('["ubuntu-latest", "self-hosted"]')[github.repository == 'github/docs-internal'] }}
steps:
# Each of these ifs needs to be repeated at each step to make sure the required check still runs
# Even if it doesn't do anything
- if: ${{ needs.see_if_should_skip.outputs.should_skip != 'true' }}
name: Checkout
uses: actions/checkout@5a4ac9002d0be2fb38bd78e4b4dbde5606d7042f
- if: ${{ needs.see_if_should_skip.outputs.should_skip != 'true' }}
name: Setup node
uses: actions/setup-node@c46424eee26de4078d34105d3de3cc4992202b1e
with:
node-version: 14.x
- if: ${{ needs.see_if_should_skip.outputs.should_skip != 'true' }}
name: Install
run: npm ci
## TODO
# - if: ${{ github.repository == 'github/docs-internal' && needs.see_if_should_skip.outputs.should_skip != 'true' }}
# name: Clone early access
# run: npm run heroku-postbuild
# env:
# DOCUBOT_REPO_PAT: ${{ secrets.DOCUBOT_REPO_PAT }}
# GIT_BRANCH: ${{ github.ref }}
- if: ${{ needs.see_if_should_skip.outputs.should_skip != 'true' }}
name: Build
run: npm run build
- if: ${{ needs.see_if_should_skip.outputs.should_skip != 'true' }}
name: 'Link check: Enterprise Server'
env:
DOCS_VERSION: 'enterprise-server'
run: npm run link-check

.github/workflows/site-policy-sync.yml (vendored, 5 changed lines)

@ -29,13 +29,14 @@ jobs:
# Pushes to other repo
- name: Push folder to another repository
uses: crykn/copy_folder_to_another_repo_action@abc264e1c16eb3d7b1f7763bfdb0e1699ad43120
uses: crykn/copy_folder_to_another_repo_action@0282e8b9fef06de92ddcae9fe6cb44df6226646c
env:
API_TOKEN_GITHUB: ${{ secrets.API_TOKEN_SITEPOLICY }}
with:
source_folder: 'content/github/site-policy'
destination_repo: 'github/site-policy'
destination_branch: 'repo-sync'
destination_branch: 'main'
destination_branch_create: 'repo-sync'
destination_folder: 'Policies'
user_email: 'pcihon@users.noreply.github.com'
user_name: 'pcihon'

.github/workflows/test-translations.yml (vendored, 4 changed lines)

@ -3,6 +3,7 @@
name: Node.js Tests - Translations
on:
workflow_dispatch:
schedule:
- cron: '10 20 * * *' # once a day at 20:10 UTC / 12:10 PST
@ -75,6 +76,9 @@ jobs:
- name: Install dependencies
run: npm ci
- name: Run build script
run: npm run build
- name: Run tests
run: npx jest tests/${{ matrix.test-group }}/
env:


@ -78,25 +78,23 @@ jobs:
with:
github-token: ${{secrets.GITHUB_TOKEN}}
script: |
const changedFiles = ${{steps.filter.outputs.notAllowed_files}}
const restFiles = ${{steps.filter.outputs.openapi_files}}
const translationFiles = ${{steps.filter.outputs.translation_files}}
const markdownFiles = changedFiles.map(file => `- \`${file}\`\n`).join('')
const badFilesArr = [
'translations/**',
'lib/rest/static/**',
'.github/workflows/**',
'.github/CODEOWNERS',
'translations/**',
'assets/fonts/**',
'data/graphql/**',
'lib/graphql/**',
'lib/redirects/**',
'lib/rest/**',
'lib/webhooks/**'
]
let reviewMessage = `👋 Hey there spelunker. It looks like you've modified some files that we can't accept as contributions.\n${markdownFiles}\n\nYou'll need to revert all of these ☝️ files using [GitHub Desktop](https://docs.github.com/en/free-pro-team@latest/desktop/contributing-and-collaborating-using-github-desktop/reverting-a-commit) or \`git checkout origin/main <file name>\`. Once you get those files reverted, we can continue with the review process. :octocat:`
const badFiles = badFilesArr.join('\n')
if (restFiles.length > 0) {
reviewMessage += "\n\nIt looks like you've modified the OpenAPI schema (`lib/rest/static/**`). While we aren't accepting changes to the schema directly, you can open an issue for any updates to the REST API docs. Head on over to the [`github/rest-api-description`](https://github.com/github/rest-api-description/issues/new?assignees=&labels=Inaccuracy&template=schema-inaccuracy.md&title=%5BSchema+Inaccuracy%5D+%3CDescribe+Problem%3E) repository to open an issue. ⚡"
}
if (translationFiles.length > 0) {
await github.issues.addLabels({
...context.repo,
issue_number: context.payload.number,
labels: ['localization']
})
reviewMessage += "\n\nIt looks like you've modified translated content. Unfortunately, we are not able to accept pull requests for translated content. Our translation process involves an integration with an external service at crowdin.com, where all translation activity happens. We hope to eventually open up the translation process to the open source community, but we're not there yet. See https://github.com/github/docs/blob/main/CONTRIBUTING.md#earth_asia-translations for more details."
}
let reviewMessage = `👋 Hey there spelunker. It looks like you've modified some files that we can't accept as contributions. The complete list of files we can't accept are:\n${badFiles}\n\nYou'll need to revert all of the files you changed in that list using [GitHub Desktop](https://docs.github.com/en/free-pro-team@latest/desktop/contributing-and-collaborating-using-github-desktop/reverting-a-commit) or \`git checkout origin/main <file name>\`. Once you get those files reverted, we can continue with the review process. :octocat:`
await github.pulls.createReview({
...context.repo,


@ -11,6 +11,9 @@ versions:
free-pro-team: '*'
enterprise-server: '>=2.22'
type: 'overview'
topics:
- 'Action development'
- 'Fundamentals'
---
{% data reusables.actions.enterprise-beta %}


@ -6,6 +6,8 @@ versions:
free-pro-team: '*'
enterprise-server: '>=2.22'
type: 'tutorial'
topics:
- 'Action development'
---
{% data reusables.actions.enterprise-beta %}
@ -32,7 +34,7 @@ Before you begin, you'll create a {% data variables.product.product_name %} repo
```
2. In the `hello-world-composite-run-steps-action` repository, create a new file called `goodbye.sh`, and add the following example code:
```bash
echo "Goodbye"
```
@ -65,12 +67,12 @@ Before you begin, you'll create a {% data variables.product.product_name %} repo
required: true
default: 'World'
outputs:
random-number:
random-number:
description: "Random number"
value: ${{ steps.random-number-generator.outputs.random-id }}
runs:
using: "composite"
steps:
steps:
- run: echo Hello ${{ inputs.who-to-greet }}.
shell: bash
- id: random-number-generator
@ -82,7 +84,7 @@ Before you begin, you'll create a {% data variables.product.product_name %} repo
{% endraw %}
This file defines the `who-to-greet` input, maps the randomly generated number to the `random-number` output variable, and runs the `goodbye.sh` script. It also tells the runner how to execute the composite run steps action.
For more information about managing outputs, see "[`outputs` for a composite run steps](/actions/creating-actions/metadata-syntax-for-github-actions#outputs-for-composite-run-steps-actions)".
For more information about managing outputs, see "[`outputs` for a composite run steps](/actions/creating-actions/metadata-syntax-for-github-actions#outputs-for-composite-run-steps-actions)".
For more information about how to use `github.action_path`, see "[`github context`](/actions/reference/context-and-expression-syntax-for-github-actions#github-context)".
@ -122,7 +124,7 @@ jobs:
uses: actions/hello-world-composite-run-steps-action@v1
with:
who-to-greet: 'Mona the Octocat'
- run: echo random-number ${{ steps.foo.outputs.random-number }}
- run: echo random-number ${{ steps.foo.outputs.random-number }}
shell: bash
```
{% endraw %}


@ -11,6 +11,9 @@ versions:
free-pro-team: '*'
enterprise-server: '>=2.22'
type: 'tutorial'
topics:
- 'Action development'
- 'Docker'
---
{% data reusables.actions.enterprise-beta %}
@ -93,7 +96,7 @@ This metadata defines one `who-to-greet` input and one `time` output parameter.
You can choose any base Docker image and, therefore, any language for your action. The following shell script example uses the `who-to-greet` input variable to print "Hello [who-to-greet]" in the log file.
Next, the script gets the current time and sets it as an output variable that actions running later in a job can use. In order for {% data variables.product.prodname_dotcom %} to recognize output variables, you must use a workflow command in a specific syntax: `echo "::set-output name=<output name>::<value>"`. For more information, see "[Workflow commands for {% data variables.product.prodname_actions %}](/actions/reference/workflow-commands-for-github-actions#setting-an-output-parameter)."
Next, the script gets the current time and sets it as an output variable that actions running later in a job can use. In order for {% data variables.product.prodname_dotcom %} to recognize output variables, you must use a workflow command in a specific syntax: `echo "::set-output name=<output name>::<value>"`. For more information, see "[Workflow commands for {% data variables.product.prodname_actions %}](/actions/reference/workflow-commands-for-github-actions#setting-an-output-parameter)."
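
As a sketch of how a workflow could read that output in a later step (the step id `hello` and the action reference are placeholders based on this tutorial's naming):

```yaml
jobs:
  hello_world_job:
    runs-on: ubuntu-latest
    steps:
      # The Docker action writes the "time" output via the ::set-output command
      - id: hello
        uses: actions/hello-world-docker-action@v1
        with:
          who-to-greet: 'Mona the Octocat'
      # Later steps read it from the steps context
      - run: echo "The time was ${{ steps.hello.outputs.time }}"
```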
1. Create a new `entrypoint.sh` file in the `hello-world-docker-action` directory.
@ -102,7 +105,7 @@ Next, the script gets the current time and sets it as an output variable that ac
**entrypoint.sh**
```shell{:copy}
#!/bin/sh -l
echo "Hello $1"
time=$(date)
echo "::set-output name=time::$time"


@ -11,6 +11,9 @@ versions:
free-pro-team: '*'
enterprise-server: '>=2.22'
type: 'tutorial'
topics:
- 'Action development'
- 'JavaScript'
---
{% data reusables.actions.enterprise-beta %}


@ -11,11 +11,14 @@ versions:
free-pro-team: '*'
enterprise-server: '>=2.22'
type: 'overview'
topics:
- 'CI'
- 'CD'
---
{% data reusables.actions.enterprise-beta %}
{% data reusables.actions.enterprise-github-hosted-runners %}
### About continuous integration
Continuous integration (CI) is a software practice that requires frequently committing code to a shared repository. Committing code more often detects errors sooner and reduces the amount of code a developer needs to debug when finding the source of an error. Frequent code updates also make it easier to merge changes from different members of a software development team. This is great for developers, who can spend more time writing code and less time debugging errors or resolving merge conflicts.


@ -9,6 +9,8 @@ versions:
free-pro-team: '*'
enterprise-server: '>=2.22'
type: 'overview'
topics:
- 'Packaging'
---
{% data reusables.actions.enterprise-beta %}


@ -9,6 +9,9 @@ versions:
free-pro-team: '*'
enterprise-server: '>=2.22'
type: 'overview'
topics:
- 'Containers'
- 'Docker'
---
{% data reusables.actions.enterprise-beta %}


@ -8,6 +8,10 @@ versions:
free-pro-team: '*'
enterprise-server: '>=2.22'
type: 'tutorial'
topics:
- 'CI'
- 'Java'
- 'Ant'
---
{% data reusables.actions.enterprise-beta %}


@ -8,6 +8,10 @@ versions:
free-pro-team: '*'
enterprise-server: '>=2.22'
type: 'tutorial'
topics:
- 'CI'
- 'Java'
- 'Gradle'
---
{% data reusables.actions.enterprise-beta %}


@ -8,6 +8,10 @@ versions:
free-pro-team: '*'
enterprise-server: '>=2.22'
type: 'tutorial'
topics:
- 'CI'
- 'Java'
- 'Maven'
---
{% data reusables.actions.enterprise-beta %}


@ -9,6 +9,10 @@ versions:
free-pro-team: '*'
enterprise-server: '>=2.22'
type: 'tutorial'
topics:
- 'CI'
- 'Node'
- 'JavaScript'
---
{% data reusables.actions.enterprise-beta %}


@ -8,6 +8,9 @@ versions:
authors:
- potatoqualitee
type: 'tutorial'
topics:
- 'CI'
- 'Powershell'
---
{% data reusables.actions.enterprise-beta %}


@ -8,6 +8,9 @@ versions:
free-pro-team: '*'
enterprise-server: '>=2.22'
type: 'tutorial'
topics:
- 'CI'
- 'Python'
---
{% data reusables.actions.enterprise-beta %}


@ -6,6 +6,9 @@ versions:
free-pro-team: '*'
enterprise-server: '>=2.22'
type: 'tutorial'
topics:
- 'CI'
- 'Ruby'
---
{% data reusables.actions.enterprise-beta %}
@ -46,10 +49,7 @@ jobs:
steps:
- uses: actions/checkout@v2
- name: Set up Ruby
# To automatically get bug fixes and new Ruby versions for ruby/setup-ruby,
# change this to (see https://github.com/ruby/setup-ruby#versioning):
# uses: ruby/setup-ruby@v1
uses: ruby/setup-ruby@ec106b438a1ff6ff109590de34ddc62c540232e0
uses: ruby/setup-ruby@v1
with:
ruby-version: 2.6
- name: Install dependencies
@ -63,7 +63,7 @@ jobs:
The easiest way to specify a Ruby version is by using the `ruby/setup-ruby` action provided by the Ruby organization on GitHub. The action adds any supported Ruby version to `PATH` for each job run in a workflow. For more information, see [`ruby/setup-ruby`](https://github.com/ruby/setup-ruby).
Using either Ruby's `ruby/setup-ruby` action or GitHub's `actions/setup-ruby` action is the recommended way of using Ruby with GitHub Actions because it ensures consistent behavior across different runners and different versions of Ruby.
Using Ruby's `ruby/setup-ruby` action is the recommended way of using Ruby with GitHub Actions because it ensures consistent behavior across different runners and different versions of Ruby.
The `setup-ruby` action takes a Ruby version as an input and configures that version on the runner.
@ -83,7 +83,7 @@ Alternatively, you can check a `.ruby-version` file into the root of your repos
### Testing with multiple versions of Ruby
You can add a matrix strategy to run your workflow with more than one version of Ruby. For example, you can test your code against the latest patch releases of versions 2.7, 2.6, and 2.5. The 'x' is a wildcard character that matches the latest patch release available for a version.
You can add a matrix strategy to run your workflow with more than one version of Ruby. For example, you can test your code against the latest patch releases of versions 2.7, 2.6, and 2.5. The 'x' is a wildcard character that matches the latest patch release available for a version.
{% raw %}
```yaml
@ -119,10 +119,7 @@ jobs:
steps:
- uses: actions/checkout@v2
- name: Set up Ruby ${{ matrix.ruby-version }}
# To automatically get bug fixes and new Ruby versions for ruby/setup-ruby,
# change this to (see https://github.com/ruby/setup-ruby#versioning):
# uses: ruby/setup-ruby@v1
uses: ruby/setup-ruby@ec106b438a1ff6ff109590de34ddc62c540232e0
uses: ruby/setup-ruby@v1
with:
ruby-version: ${{ matrix.ruby-version }}
- name: Install dependencies
@ -316,4 +313,3 @@ jobs:
GEM_HOST_API_KEY: "${{secrets.RUBYGEMS_AUTH_TOKEN}}"
```
{% endraw %}
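
For reference, the matrix strategy described above would be declared along these lines; the version strings and the `bundle exec rake` test command are illustrative, with `ruby/setup-ruby@v1` resolving each entry to its latest patch release:

```yaml
jobs:
  test:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        ruby-version: ['2.7', '2.6', '2.5']
    steps:
      - uses: actions/checkout@v2
      - uses: ruby/setup-ruby@v1
        with:
          ruby-version: ${{ matrix.ruby-version }}
      - run: bundle install
      - run: bundle exec rake   # illustrative test command
```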


@ -10,6 +10,8 @@ redirect_from:
versions:
free-pro-team: '*'
type: 'tutorial'
topics:
- 'Workflows'
---
### About caching workflow dependencies


@ -10,6 +10,9 @@ versions:
free-pro-team: '*'
enterprise-server: '>=2.22'
type: 'tutorial'
topics:
- 'Containers'
- 'Docker'
---
{% data reusables.actions.enterprise-beta %}


@ -10,6 +10,9 @@ versions:
free-pro-team: '*'
enterprise-server: '>=2.22'
type: 'tutorial'
topics:
- 'Containers'
- 'Docker'
---
{% data reusables.actions.enterprise-beta %}


@ -6,6 +6,10 @@ versions:
free-pro-team: '*'
enterprise-server: '>=2.22'
type: 'tutorial'
topics:
- 'CD'
- 'Containers'
- 'Amazon ECS'
---
{% data reusables.actions.enterprise-beta %}


@ -6,6 +6,10 @@ versions:
free-pro-team: '*'
enterprise-server: '>=2.22'
type: 'tutorial'
topics:
- 'CD'
- 'Containers'
- 'Azure App Service'
---
{% data reusables.actions.enterprise-beta %}


@ -6,6 +6,10 @@ versions:
free-pro-team: '*'
enterprise-server: '>=2.22'
type: 'tutorial'
topics:
- 'CD'
- 'Containers'
- 'Google Kubernetes Engine'
---
{% data reusables.actions.enterprise-beta %}


@ -25,6 +25,43 @@ learningTracks:
- hosting_your_own_runners
- create_actions
layout: product-sublanding
includeGuides:
- /actions/quickstart
- /actions/learn-github-actions/introduction-to-github-actions
- /actions/creating-actions/creating-a-docker-container-action
- /actions/guides/setting-up-continuous-integration-using-workflow-templates
- /actions/guides/building-and-testing-python
- /actions/guides/building-and-testing-nodejs
- /actions/guides/about-packaging-with-github-actions
- /actions/guides/publishing-docker-images
- /actions/guides/caching-dependencies-to-speed-up-workflows
- /actions/guides/about-continuous-integration
- /actions/guides/building-and-testing-powershell
- /actions/guides/building-and-testing-ruby
- /actions/guides/building-and-testing-java-with-maven
- /actions/guides/building-and-testing-java-with-gradle
- /actions/guides/building-and-testing-java-with-ant
- /actions/guides/publishing-nodejs-packages
- /actions/guides/publishing-java-packages-with-maven
- /actions/guides/publishing-java-packages-with-gradle
- /actions/guides/storing-workflow-data-as-artifacts
- /actions/guides/about-service-containers
- /actions/guides/creating-redis-service-containers
- /actions/guides/creating-postgresql-service-containers
- /actions/guides/deploying-to-amazon-elastic-container-service
- /actions/guides/deploying-to-azure-app-service
- /actions/guides/deploying-to-google-kubernetes-engine
- /actions/learn-github-actions/essential-features-of-github-actions
- /actions/learn-github-actions/security-hardening-for-github-actions
- /actions/creating-actions/about-actions
- /actions/creating-actions/creating-a-javascript-action
- /actions/creating-actions/creating-a-composite-run-steps-action
- /actions/learn-github-actions/migrating-from-azure-pipelines-to-github-actions
- /actions/learn-github-actions/migrating-from-circleci-to-github-actions
- /actions/learn-github-actions/migrating-from-gitlab-cicd-to-github-actions
- /actions/learn-github-actions/migrating-from-jenkins-to-github-actions
- /actions/learn-github-actions/migrating-from-travis-ci-to-github-actions
---
<!-- {% link_in_list /about-continuous-integration %} -->
<!-- {% link_in_list /setting-up-continuous-integration-using-workflow-templates %} -->


@ -8,6 +8,10 @@ versions:
free-pro-team: '*'
enterprise-server: '>=2.22'
type: 'tutorial'
topics:
- 'Packaging'
- 'Publishing'
- 'Docker'
---
{% data reusables.actions.enterprise-beta %}


@ -8,6 +8,11 @@ versions:
free-pro-team: '*'
enterprise-server: '>=2.22'
type: 'tutorial'
topics:
- 'Packaging'
- 'Publishing'
- 'Java'
- 'Gradle'
---
{% data reusables.actions.enterprise-beta %}
@ -148,7 +153,7 @@ jobs:
{% data reusables.github-actions.gradle-workflow-steps %}
1. Runs the `gradle publish` command to publish to {% data variables.product.prodname_registry %}. The `GITHUB_TOKEN` environment variable will be set with the content of the `GITHUB_TOKEN` secret.
For more information about using secrets in your workflow, see "[Creating and using encrypted secrets](/actions/automating-your-workflow-with-github-actions/creating-and-using-encrypted-secrets)."
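
As a hedged sketch, the publish step described in that list item would sit in the workflow roughly like this; the checkout and setup-java scaffolding and the step name are illustrative:

```yaml
jobs:
  publish:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - uses: actions/setup-java@v1
        with:
          java-version: 1.8
      - name: Publish package
        run: gradle publish
        env:
          # The automatically created GITHUB_TOKEN secret authenticates the
          # publish to GitHub Packages
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
```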
### Publishing packages to the Maven Central Repository and {% data variables.product.prodname_registry %}


@ -8,6 +8,11 @@ versions:
free-pro-team: '*'
enterprise-server: '>=2.22'
type: 'tutorial'
topics:
- 'Packaging'
- 'Publishing'
- 'Java'
- 'Maven'
---
{% data reusables.actions.enterprise-beta %}
@ -155,7 +160,7 @@ This workflow performs the following steps:
1. Checks out a copy of project's repository.
1. Sets up the Java JDK, and also automatically configures the Maven _settings.xml_ file to add authentication for the `github` Maven repository to use the `GITHUB_TOKEN` environment variable.
1. {% data reusables.github-actions.publish-to-packages-workflow-step %}
For more information about using secrets in your workflow, see "[Creating and using encrypted secrets](/actions/automating-your-workflow-with-github-actions/creating-and-using-encrypted-secrets)."
### Publishing packages to the Maven Central Repository and {% data variables.product.prodname_registry %}


@ -9,6 +9,11 @@ versions:
free-pro-team: '*'
enterprise-server: '>=2.22'
type: 'tutorial'
topics:
- 'Packaging'
- 'Publishing'
- 'Node'
- 'JavaScript'
---
{% data reusables.actions.enterprise-beta %}
@ -167,7 +172,7 @@ jobs:
node-version: '12.x'
registry-url: 'https://registry.npmjs.org'
# Defaults to the user or organization that owns the workflow file
scope: '@octocat'
scope: '@octocat'
- run: yarn
- run: yarn publish
env:


@ -12,6 +12,9 @@ versions:
free-pro-team: '*'
enterprise-server: '>=2.22'
type: 'tutorial'
topics:
- 'Workflows'
- 'CI'
---
{% data reusables.actions.enterprise-beta %}


@ -12,6 +12,8 @@ versions:
free-pro-team: '*'
enterprise-server: '>=2.22'
type: 'tutorial'
topics:
- 'Workflows'
---
{% data reusables.actions.enterprise-beta %}
@ -19,7 +21,7 @@ type: 'tutorial'
### About workflow artifacts
Artifacts allow you to persist data after a job has completed, and share that data with another job in the same workflow. An artifact is a file or collection of files produced during a workflow run. For example, you can use artifacts to save your build and test output after a workflow run has ended.
Artifacts allow you to persist data after a job has completed, and share that data with another job in the same workflow. An artifact is a file or collection of files produced during a workflow run. For example, you can use artifacts to save your build and test output after a workflow run has ended.
{% data reusables.github-actions.artifact-log-retention-statement %} The retention period for a pull request restarts each time someone pushes a new commit to the pull request.
@ -128,7 +130,7 @@ The `retention-days` value cannot exceed the retention limit set by the reposito
### Downloading or deleting artifacts
During a workflow run, you can use the [`download-artifact`](https://github.com/actions/download-artifact)action to download artifacts that were previously uploaded in the same workflow run.
During a workflow run, you can use the [`download-artifact`](https://github.com/actions/download-artifact)action to download artifacts that were previously uploaded in the same workflow run.
After a workflow run has been completed, you can download or delete artifacts on {% data variables.product.prodname_dotcom %} or using the REST API. For more information, see "[Downloading workflow artifacts](/actions/managing-workflow-runs/downloading-workflow-artifacts)," "[Removing workflow artifacts](/actions/managing-workflow-runs/removing-workflow-artifacts)," and the "[Artifacts REST API](/rest/reference/actions#artifacts)."
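
A minimal sketch of persisting data between two jobs with the upload and download actions mentioned above (the file name, artifact name, and `@v2` action versions are illustrative):

```yaml
jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      - run: echo "build output" > output.log      # illustrative build product
      - uses: actions/upload-artifact@v2
        with:
          name: build-output
          path: output.log
  report:
    needs: build                                   # wait for the uploading job
    runs-on: ubuntu-latest
    steps:
      - uses: actions/download-artifact@v2         # fetches what build uploaded
        with:
          name: build-output
      - run: cat output.log
```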


@ -6,6 +6,8 @@ versions:
free-pro-team: '*'
enterprise-server: '>=2.22'
type: 'overview'
topics:
- 'Fundamentals'
---
{% data reusables.actions.enterprise-beta %}
@ -43,7 +45,7 @@ jobs:
- run: npm install -g bats
```
For example, to run a script as an action, you can store the script in your repository and supply the path and shell type.
For example, to run a script as an action, you can store the script in your repository and supply the path and shell type.
```yaml
jobs:


@ -11,6 +11,8 @@ versions:
free-pro-team: '*'
enterprise-server: '>=2.22'
type: 'how_to'
topics:
- 'Fundamentals'
---
{% data reusables.actions.enterprise-beta %}


@ -10,6 +10,8 @@ versions:
free-pro-team: '*'
enterprise-server: '>=2.22'
type: 'overview'
topics:
- 'Fundamentals'
---
{% data reusables.actions.enterprise-beta %}
@ -39,7 +41,7 @@ An event is a specific activity that triggers a workflow. For example, activity
#### Jobs
A job is a set of steps that execute on the same runner. By default, a workflow with multiple jobs will run those jobs in parallel. You can also configure a workflow to run jobs sequentially. For example, a workflow can have two sequential jobs that build and test code, where the test job is dependent on the status of the build job. If the build job fails, the test job will not run.
A job is a set of steps that execute on the same runner. By default, a workflow with multiple jobs will run those jobs in parallel. You can also configure a workflow to run jobs sequentially. For example, a workflow can have two sequential jobs that build and test code, where the test job is dependent on the status of the build job. If the build job fails, the test job will not run.
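
A minimal sketch of the sequential build-and-test arrangement described above (the job contents are placeholders):

```yaml
jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      - run: echo "Building..."
  test:
    needs: build          # runs only after the build job succeeds
    runs-on: ubuntu-latest
    steps:
      - run: echo "Testing..."
```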
#### Steps
@ -180,7 +182,7 @@ To help you understand how YAML syntax is used to create a workflow file, this s
```
</td>
<td>
The <code>run</code> keyword tells the job to execute a command on the runner. In this case, you are using <code>npm</code> to install the <code>bats</code> software testing package.
The <code>run</code> keyword tells the job to execute a command on the runner. In this case, you are using <code>npm</code> to install the <code>bats</code> software testing package.
</td>
</tr>
<tr>


@ -6,6 +6,8 @@ versions:
free-pro-team: '*'
enterprise-server: '>=2.22'
type: 'how_to'
topics:
- 'Workflows'
---
{% data reusables.actions.enterprise-beta %}


@ -7,6 +7,11 @@ versions:
free-pro-team: '*'
enterprise-server: '>=2.22'
type: 'tutorial'
topics:
- 'Azure Pipelines'
- 'Migration'
- 'CI'
- 'CD'
---
{% data reusables.actions.enterprise-beta %}


@ -7,6 +7,11 @@ versions:
free-pro-team: '*'
enterprise-server: '>=2.22'
type: 'tutorial'
topics:
- 'CircleCI'
- 'Migration'
- 'CI'
- 'CD'
---
{% data reusables.actions.enterprise-beta %}


@ -5,6 +5,11 @@ versions:
free-pro-team: '*'
enterprise-server: '>=2.22'
type: 'tutorial'
topics:
- 'GitLab'
- 'Migration'
- 'CI'
- 'CD'
---
{% data reusables.actions.enterprise-beta %}


@ -7,6 +7,11 @@ versions:
free-pro-team: '*'
enterprise-server: '>=2.22'
type: 'tutorial'
topics:
- 'Jenkins'
- 'Migration'
- 'CI'
- 'CD'
---
{% data reusables.actions.enterprise-beta %}


@ -7,6 +7,11 @@ versions:
free-pro-team: '*'
enterprise-server: '>=2.22'
type: 'tutorial'
topics:
- 'Travis CI'
- 'Migration'
- 'CI'
- 'CD'
---
### Introduction


@ -9,6 +9,8 @@ versions:
free-pro-team: '*'
enterprise-server: '>=2.22'
type: 'overview'
topics:
- 'Security'
---
{% data reusables.actions.enterprise-beta %}
@ -22,7 +24,7 @@ This guide explains how to configure security hardening for certain {% data vari
Sensitive values should never be stored as plaintext in workflow files, but rather as secrets. [Secrets](/actions/configuring-and-managing-workflows/creating-and-storing-encrypted-secrets) can be configured at the organization{% if currentVersion == "free-pro-team@latest" or currentVersion ver_gt "enterprise-server@3.0" %}, repository, or environment{% else %} or repository{% endif %} level, and allow you to store sensitive information in {% data variables.product.product_name %}.
Secrets use [Libsodium sealed boxes](https://libsodium.gitbook.io/doc/public-key_cryptography/sealed_boxes), so that they are encrypted before reaching {% data variables.product.product_name %}. This occurs when the secret is submitted [using the UI](/actions/configuring-and-managing-workflows/creating-and-storing-encrypted-secrets#creating-encrypted-secrets-for-a-repository) or through the [REST API](/rest/reference/actions#secrets). This client-side encryption helps minimize the risks related to accidental logging (for example, exception logs and request logs, among others) within {% data variables.product.product_name %}'s infrastructure. Once the secret is uploaded, {% data variables.product.product_name %} is then able to decrypt it so that it can be injected into the workflow runtime.
Secrets use [Libsodium sealed boxes](https://libsodium.gitbook.io/doc/public-key_cryptography/sealed_boxes), so that they are encrypted before reaching {% data variables.product.product_name %}. This occurs when the secret is submitted [using the UI](/actions/configuring-and-managing-workflows/creating-and-storing-encrypted-secrets#creating-encrypted-secrets-for-a-repository) or through the [REST API](/rest/reference/actions#secrets). This client-side encryption helps minimize the risks related to accidental logging (for example, exception logs and request logs, among others) within {% data variables.product.product_name %}'s infrastructure. Once the secret is uploaded, {% data variables.product.product_name %} is then able to decrypt it so that it can be injected into the workflow runtime.
To help prevent accidental disclosure, {% data variables.product.product_name %} uses a mechanism that attempts to redact any secrets that appear in run logs. This redaction looks for exact matches of any configured secrets, as well as common encodings of the values, such as Base64. However, because there are multiple ways a secret value can be transformed, this redaction is not guaranteed. As a result, there are certain proactive steps and good practices you should follow to help ensure secrets are redacted, and to limit other risks associated with secrets:
@ -56,7 +58,7 @@ This means that a compromise of a single action within a workflow can be very si
{% if currentVersion ver_lt "enterprise-server@3.1" %}
{% warning %}
**Warning:** The short version of the commit SHA is insecure and should never be used for specifying an action's Git reference. Because of how repository networks work, any user can fork the repository and push a crafted commit to it that collides with the short SHA. This causes subsequent clones at that SHA to fail because it becomes an ambiguous commit. As a result, any workflows that use the shortened SHA will immediately fail.
{% endwarning %}
@ -104,7 +106,7 @@ As a result, self-hosted runners should almost [never be used for public reposit
When a self-hosted runner is defined at the organization or enterprise level, {% data variables.product.product_name %} can schedule workflows from multiple repositories onto the same runner. Consequently, a security compromise of these environments can result in a wide impact. To help reduce the scope of a compromise, you can create boundaries by organizing your self-hosted runners into separate groups. For more information, see "[Managing access to self-hosted runners using groups](/actions/hosting-your-own-runners/managing-access-to-self-hosted-runners-using-groups)."
You should also consider the environment of the self-hosted runner machines:
- What sensitive information resides on the machine configured as a self-hosted runner? For example, private SSH keys, API access tokens, among others.
- What sensitive information resides on the machine configured as a self-hosted runner? For example, private SSH keys, API access tokens, among others.
- Does the machine have network access to sensitive services? For example, Azure or AWS metadata services. The amount of sensitive information in this environment should be kept to a minimum, and you should always be mindful that any user capable of invoking workflows has access to this environment.
Some customers might attempt to partially mitigate these risks by implementing systems that automatically destroy the self-hosted runner after each job execution. However, this approach might not be as effective as intended, as there is no way to guarantee that a self-hosted runner only runs one job.
@ -116,7 +118,7 @@ You can use the audit log to monitor administrative tasks in an organization. Th
For example, you can use the audit log to track the `action:org.update_actions_secret` event, which tracks changes to organization secrets:
![Audit log entries](/assets/images/help/repository/audit-log-entries.png)
The following tables describe the {% data variables.product.prodname_actions %} events that you can find in the audit log. For more information on using the audit log, see
The following tables describe the {% data variables.product.prodname_actions %} events that you can find in the audit log. For more information on using the audit log, see
"[Reviewing the audit log for your organization](/github/setting-up-and-managing-organizations-and-teams/reviewing-the-audit-log-for-your-organization#searching-the-audit-log)."
#### Events for secret management
@ -134,14 +136,14 @@ The following tables describe the {% data variables.product.prodname_actions %}
|------------------|-------------------
| `action:org.register_self_hosted_runner` | Triggered when an organization owner [registers a new self-hosted runner](/actions/hosting-your-own-runners/adding-self-hosted-runners#adding-a-self-hosted-runner-to-an-organization).
| `action:org.remove_self_hosted_runner` | Triggered when an organization owner [removes a self-hosted runner](/actions/hosting-your-own-runners/removing-self-hosted-runners#removing-a-runner-from-an-organization).
| `action:repo.register_self_hosted_runner` | Triggered when a repository admin [registers a new self-hosted runner](/actions/hosting-your-own-runners/adding-self-hosted-runners#adding-a-self-hosted-runner-to-a-repository).
| `action:repo.remove_self_hosted_runner` | Triggered when a repository admin [removes a self-hosted runner](/actions/hosting-your-own-runners/removing-self-hosted-runners#removing-a-runner-from-a-repository).
| `action:repo.register_self_hosted_runner` | Triggered when a repository admin [registers a new self-hosted runner](/actions/hosting-your-own-runners/adding-self-hosted-runners#adding-a-self-hosted-runner-to-a-repository).
| `action:repo.remove_self_hosted_runner` | Triggered when a repository admin [removes a self-hosted runner](/actions/hosting-your-own-runners/removing-self-hosted-runners#removing-a-runner-from-a-repository).
#### Events for self-hosted runner groups
| Action | Description
|------------------|-------------------
| `action:org.runner_group_created` | Triggered when an organization admin [creates a self-hosted runner group](/actions/hosting-your-own-runners/managing-access-to-self-hosted-runners-using-groups#creating-a-self-hosted-runner-group-for-an-organization).
| `action:org.runner_group_removed` | Triggered when an organization admin removes a self-hosted runner group.
| `action:org.runner_group_renamed` | Triggered when an organization admin renames a self-hosted runner group.
| `action:org.runner_group_runners_added` | Triggered when an organization admin [adds a self-hosted runner to a group](/actions/hosting-your-own-runners/managing-access-to-self-hosted-runners-using-groups#moving-a-self-hosted-runner-to-a-group).
| `action:org.runner_group_runners_removed` | Triggered when an organization admin removes a self-hosted runner from a group.
| `action:org.runner_group_created` | Triggered when an organization admin [creates a self-hosted runner group](/actions/hosting-your-own-runners/managing-access-to-self-hosted-runners-using-groups#creating-a-self-hosted-runner-group-for-an-organization).
| `action:org.runner_group_removed` | Triggered when an organization admin removes a self-hosted runner group.
| `action:org.runner_group_renamed` | Triggered when an organization admin renames a self-hosted runner group.
| `action:org.runner_group_runners_added` | Triggered when an organization admin [adds a self-hosted runner to a group](/actions/hosting-your-own-runners/managing-access-to-self-hosted-runners-using-groups#moving-a-self-hosted-runner-to-a-group).
| `action:org.runner_group_runners_removed` | Triggered when an organization admin removes a self-hosted runner from a group.


@ -7,6 +7,9 @@ redirect_from:
versions:
free-pro-team: '*'
enterprise-server: '>=2.22'
type: 'quick_start'
topics:
- 'Fundamentals'
---
{% data reusables.actions.enterprise-beta %}


@ -268,11 +268,19 @@ Creates or updates an environment variable for any actions running next in a job
#### Example
```bash
echo "action_state=yellow" >> $GITHUB_ENV
{% raw %}
```
Running `$action_state` in a future step will now return `yellow`
steps:
- name: Set the value
id: step_one
run: |
echo "action_state=yellow" >> $GITHUB_ENV
- name: Use the value
id: step_two
run: |
echo "${{ env.action_state }}" # This will output 'yellow'
```
{% endraw %}
#### Multiline strings


@ -1199,7 +1199,7 @@ For more information about branch, tag, and path filter syntax, see "[`on.<push|
| `'**'` | Matches all branch and tag names. This is the default behavior when you don't use a `branches` or `tags` filter. | `all/the/branches`<br/><br/>`every/tag` |
| `'*feature'` | The `*` character is a special character in YAML. When you start a pattern with `*`, you must use quotes. | `mona-feature`<br/><br/>`feature`<br/><br/>`ver-10-feature` |
| `v2*` | Matches branch and tag names that start with `v2`. | `v2`<br/><br/>`v2.0`<br/><br/>`v2.9` |
| `v[12].[0-9]+.[0-9]+` | Matches all semantic versioning tags with major version 1 or 2 | `v1.10.1`<br/><br/>`v2.0.0` |
| `v[12].[0-9]+.[0-9]+` | Matches all semantic versioning branches and tags with major version 1 or 2 | `v1.10.1`<br/><br/>`v2.0.0` |
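
As used in a workflow trigger, the pattern in the last row above would look roughly like this (the choice of the `push` event is illustrative):

```yaml
on:
  push:
    tags:
      # Matches semantic version tags with major version 1 or 2,
      # for example v1.10.1 or v2.0.0
      - 'v[12].[0-9]+.[0-9]+'
```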
#### Patterns to match file paths


@ -86,7 +86,7 @@ Name | Description
Name | Description
---------------------:| -------------------------------------------------------
`repo.access` | The visibility of a repository changed to private{% if enterpriseServerVersions contains currentVersion %}, public,{% endif %} or internal.
`repo.archive` | A repository was archived. For more information, see "[Archiving a {% data variables.product.prodname_dotcom %} repository](/github/creating-cloning-and-archiving-repositories/archiving-a-github-repository)."
`repo.archived` | A repository was archived. For more information, see "[Archiving a {% data variables.product.prodname_dotcom %} repository](/github/creating-cloning-and-archiving-repositories/archiving-a-github-repository)."
`repo.add_member` | A collaborator was added to a repository.
`repo.config` | A site admin blocked force pushes. For more information, see [Blocking force pushes to a repository](/enterprise/{{ currentVersion }}/admin/guides/developer-workflow/blocking-force-pushes-to-a-repository/) to a repository.
`repo.create` | A repository was created.
@ -95,7 +95,7 @@ Name | Description
`repo.rename` | A repository was renamed.
`repo.transfer` | A user accepted a request to receive a transferred repository.
`repo.transfer_start` | A user sent a request to transfer a repository to another user or organization.
`repo.unarchive` | A repository was unarchived. For more information, see "[Archiving a {% data variables.product.prodname_dotcom %} repository](/github/creating-cloning-and-archiving-repositories/archiving-a-github-repository)."{% if enterpriseServerVersions contains currentVersion %}
`repo.unarchived` | A repository was unarchived. For more information, see "[Archiving a {% data variables.product.prodname_dotcom %} repository](/github/creating-cloning-and-archiving-repositories/archiving-a-github-repository)."{% if enterpriseServerVersions contains currentVersion %}
`repo.config.disable_anonymous_git_access`| Anonymous Git read access is disabled for a repository. For more information, see "[Enabling anonymous Git read access for a repository](/enterprise/{{ currentVersion }}/user/articles/enabling-anonymous-git-read-access-for-a-repository)."
`repo.config.enable_anonymous_git_access` | Anonymous Git read access is enabled for a repository. For more information, see "[Enabling anonymous Git read access for a repository](/enterprise/{{ currentVersion }}/user/articles/enabling-anonymous-git-read-access-for-a-repository)."
`repo.config.lock_anonymous_git_access` | A repository's anonymous Git read access setting is locked, preventing repository administrators from changing (enabling or disabling) this setting. For more information, see "[Preventing users from changing anonymous Git read access](/enterprise/{{ currentVersion }}/admin/guides/user-management/preventing-users-from-changing-anonymous-git-read-access)."


@ -15,9 +15,9 @@ versions:
{% data variables.product.prodname_dependabot %} takes the effort out of maintaining your dependencies. You can use it to ensure that your repository automatically keeps up with the latest releases of the packages and applications it depends on.
You enable {% data variables.product.prodname_dependabot_version_updates %} by checking a configuration file in to your repository. The configuration file specifies the location of the manifest, or other package definition files, stored in your repository. {% data variables.product.prodname_dependabot %} uses this information to check for outdated packages and applications. {% data variables.product.prodname_dependabot %} determines if there is a new version of a dependency by looking at the semantic versioning ([semver](https://semver.org/)) of the dependency to decide whether it should update to that version. For certain package managers, {% data variables.product.prodname_dependabot_version_updates %} also supports vendoring. Vendored (or cached) dependencies are dependencies that are checked in to a specific directory in a repository, rather than referenced in a manifest. Vendored dependencies are available at build time even if package servers are unavailable. {% data variables.product.prodname_dependabot_version_updates %} can be configured to check vendored dependencies for new versions and update them if necessary.
You enable {% data variables.product.prodname_dependabot_version_updates %} by checking a configuration file into your repository. The configuration file specifies the location of the manifest, or of other package definition files, stored in your repository. {% data variables.product.prodname_dependabot %} uses this information to check for outdated packages and applications. {% data variables.product.prodname_dependabot %} determines if there is a new version of a dependency by looking at the semantic versioning ([semver](https://semver.org/)) of the dependency to decide whether it should update to that version. For certain package managers, {% data variables.product.prodname_dependabot_version_updates %} also supports vendoring. Vendored (or cached) dependencies are dependencies that are checked in to a specific directory in a repository rather than referenced in a manifest. Vendored dependencies are available at build time even if package servers are unavailable. {% data variables.product.prodname_dependabot_version_updates %} can be configured to check vendored dependencies for new versions and update them if necessary.
When {% data variables.product.prodname_dependabot %} identifies an outdated dependency, it raises a pull request to update the manifest to the latest version of the dependency. For vendored dependencies, {% data variables.product.prodname_dependabot %} raises a pull request to directly replace the outdated dependency with the new version. You check that your tests pass, review the changelog and release notes included in the pull request summary, and then merge it. For more information, see "[Enabling and disabling version updates](/github/administering-a-repository/enabling-and-disabling-version-updates)."
When {% data variables.product.prodname_dependabot %} identifies an outdated dependency, it raises a pull request to update the manifest to the latest version of the dependency. For vendored dependencies, {% data variables.product.prodname_dependabot %} raises a pull request to replace the outdated dependency with the new version directly. You check that your tests pass, review the changelog and release notes included in the pull request summary, and then merge it. For more information, see "[Enabling and disabling version updates](/github/administering-a-repository/enabling-and-disabling-version-updates)."
If you enable security updates, {% data variables.product.prodname_dependabot %} also raises pull requests to update vulnerable dependencies. For more information, see "[About {% data variables.product.prodname_dependabot_security_updates %}](/github/managing-security-vulnerabilities/about-dependabot-security-updates)."
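
The configuration file referred to above lives at `.github/dependabot.yml`; a minimal sketch for a hypothetical npm project (the ecosystem, directory, and schedule are illustrative) might look like this:

```yaml
version: 2
updates:
  - package-ecosystem: "npm"   # package manager whose manifest to check
    directory: "/"             # location of package.json in the repository
    schedule:
      interval: "weekly"       # how often to check for new versions
```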


@ -23,7 +23,7 @@ You can choose the default branch for a repository. The default branch is the ba
{% endnote %}
{% endif %}
{% if currentVersion == "free-pro-team@latest" %}
{% if currentVersion == "free-pro-team@latest" or currentVersion ver_gt "enterprise-server@3.0" %}
You can also rename the default branch. For more information, see "[Renaming a branch](/github/administering-a-repository/renaming-a-branch)."


@ -4,6 +4,7 @@ intro: You can change the name of a branch in a repository.
permissions: People with write permissions to a repository can rename a branch in the repository. People with admin permissions can rename the default branch.
versions:
free-pro-team: '*'
enterprise-server: '>=3.1'
---
### About renaming branches
@ -32,5 +33,5 @@ From the local clone of the repository on a computer, run the following commands
```shell
$ git branch -m <em>OLD-BRANCH-NAME</em> <em>NEW-BRANCH-NAME</em>
$ git fetch origin
$ git branch -u origin/<em>NEW-DEFAULT-BRANCH-NAME</em> <em>NEW-DEFAULT-BRANCH-NAME</em>
$ git branch -u origin/<em>NEW-BRANCH-NAME</em> <em>NEW-BRANCH-NAME</em>
```


@ -25,15 +25,13 @@ Familiarize yourself with Git by visiting the [official Git project site](https:
{% endif %}
Become better acquainted with {% data variables.product.product_name %} through our [bootcamp](/categories/bootcamp/) articles. See our [{% data variables.product.prodname_dotcom %} flow](https://guides.github.com/introduction/flow) for a process introduction. Refer to our [overview guides](https://guides.github.com) to walk through basic concepts.
Become better acquainted with {% data variables.product.product_name %} through our [getting started](/categories/getting-started-with-github/) articles. See our [{% data variables.product.prodname_dotcom %} flow](https://guides.github.com/introduction/flow) for a process introduction. Refer to our [overview guides](https://guides.github.com) to walk through basic concepts.
{% data reusables.support.ask-and-answer-forum %}
#### Branches, forks, and pull requests
Learn about [Git branching](http://learngitbranching.js.org/) using an interactive tool. Read about [forks](/articles/about-forks) and [pull requests](/articles/using-pull-requests) as well as [how we use pull requests](https://github.com/blog/1124-how-we-use-pull-requests-to-build-github) at {% data variables.product.prodname_dotcom %}.
Access quick references about the [command line](https://hub.github.com) as well as {% data variables.product.prodname_dotcom %} [checklists, cheat sheets, and more](https://services.github.com/on-demand/resources).
Learn about [Git branching](http://learngitbranching.js.org/) using an interactive tool. Read about [forks](/articles/about-forks) and [pull requests](/articles/using-pull-requests) as well as [how we use pull requests](https://github.com/blog/1124-how-we-use-pull-requests-to-build-github) at {% data variables.product.prodname_dotcom %}. Access references about using {% data variables.product.prodname_dotcom %} from the [command line](https://cli.github.com/).
#### Tune in
@ -41,9 +39,9 @@ Our {% data variables.product.prodname_dotcom %} [YouTube Training and Guides ch
### Training
#### Free classes
#### Free courses
{% data variables.product.product_name %} offers a series of interactive, [on-demand training courses](https://services.github.com/on-demand/) including {% data variables.product.prodname_dotcom %} 101: [Introduction](https://services.github.com/on-demand/intro-to-github/), {% data variables.product.prodname_dotcom %} 102: [GitHub Desktop](https://services.github.com/on-demand/github-desktop), and {% data variables.product.prodname_dotcom %} 103: [Command Line](https://services.github.com/on-demand/github-cli).
{% data variables.product.product_name %} offers a series of interactive, [on-demand training courses](https://lab.github.com/) including [Introduction to {% data variables.product.prodname_dotcom %}](https://lab.github.com/githubtraining/introduction-to-github); courses on programming languages and tools such as HTML, Python, and NodeJS; and courses on {% data variables.product.product_name %} specific tools such as {% data variables.product.prodname_actions %}.
#### {% data variables.product.prodname_dotcom %}'s web-based educational programs


@ -11,7 +11,7 @@ versions:
To transfer an open issue to another repository, you must have write permissions on the repository the issue is in and the repository you're transferring the issue to. For more information, see "[Repository permission levels for an organization](/articles/repository-permission-levels-for-an-organization)."
You can only transfer issues between repositories owned by the same user or organization account.{% if currentVersion == "free-pro-team@latest" or enterpriseServerVersions contains currentVersion %}You can't transfer an issue from a private repository to a public repository.{% endif %}
You can only transfer issues between repositories owned by the same user or organization account. {% if currentVersion == "free-pro-team@latest" or enterpriseServerVersions contains currentVersion %}You can't transfer an issue from a private repository to a public repository.{% endif %}
When you transfer an issue, comments and assignees are retained. The issue's labels and milestones are not retained. This issue will stay on any user-owned or organization-wide project boards and be removed from any repository project boards. For more information, see "[About project boards](/articles/about-project-boards)."


@ -108,7 +108,7 @@ In addition to managing organization-level settings, organization owners have ad
| Make a repository a template (see "[Creating a template repository](/articles/creating-a-template-repository)") | | | | | **X** |
| Change a repository's settings | | | | | **X** |
| Manage team and collaborator access to the repository | | | | | **X** |
| Edit the repository's default branch | | | | | **X** |{% if currentVersion == "free-pro-team@latest" %}
| Edit the repository's default branch | | | | | **X** |{% if currentVersion == "free-pro-team@latest" or currentVersion ver_gt "enterprise-server@3.0" %}
| Rename the repository's default branch (see "[Renaming a branch](/github/administering-a-repository/renaming-a-branch)") | | | | | **X** |
| Rename a branch other than the repository's default branch (see "[Renaming a branch](/github/administering-a-repository/renaming-a-branch)") | | | **X** | **X** | **X** |{% endif %}
| Manage webhooks and deploy keys | | | | | **X** |{% if currentVersion == "free-pro-team@latest" %}


@ -140,7 +140,7 @@ For example, you can make a GraphQL request to see all the new organization memb
{% note %}
**Note**: The audit log REST API is available as a limited beta for users of {% data variables.product.prodname_ghe_cloud %} only. To join the beta, talk to your services or sales contact at {% data variables.product.company_short %}.
**Note:** The audit log REST API is available as a public beta for users of {% data variables.product.prodname_ghe_cloud %} only.
{% endnote %}
@ -249,7 +249,7 @@ An overview of some of the most common actions that are recorded as events in th
{% note %}
**Note:** To access Git events in the audit log, you must use the audit log REST API. This functionality is available as a limited beta for users of {% data variables.product.prodname_ghe_cloud %} only. To join the beta, talk to your services or sales contact at {% data variables.product.company_short %}.
**Note:** To access Git events in the audit log, you must use the audit log REST API. This functionality is available as a public beta for users of {% data variables.product.prodname_ghe_cloud %} only.
For more information about the audit log REST API, see "[Organizations](/rest/reference/orgs#get-the-audit-log-for-an-organization)" in the REST API documentation.


@ -29,7 +29,7 @@ The repository owner has full control of the repository. In addition to the acti
| :- | :- |
| {% if currentVersion == "free-pro-team@latest" %}Invite collaborators{% else %}Add collaborators{% endif %} | "[Inviting collaborators to a personal repository](/github/setting-up-and-managing-your-github-user-account/inviting-collaborators-to-a-personal-repository)" |
| Change the visibility of the repository | "[Setting repository visibility](/github/administering-a-repository/setting-repository-visibility)" |{% if currentVersion == "free-pro-team@latest" %}
| Limit interactions with the repository | "[Limiting interactions in your repository](/github/building-a-strong-community/limiting-interactions-in-your-repository)" |{% endif %}{% if currentVersion == "free-pro-team@latest" %}
| Limit interactions with the repository | "[Limiting interactions in your repository](/github/building-a-strong-community/limiting-interactions-in-your-repository)" |{% endif %}{% if currentVersion == "free-pro-team@latest" or currentVersion ver_gt "enterprise-server@3.0" %}
| Rename a branch, including the default branch | "[Renaming a branch](/github/administering-a-repository/renaming-a-branch)" |{% endif %}
| Merge a pull request on a protected branch, even if there are no approving reviews | "[About protected branches](/github/administering-a-repository/about-protected-branches)" |
| Delete the repository | "[Deleting a repository](/github/administering-a-repository/deleting-a-repository)" |
@ -62,7 +62,7 @@ Collaborators can also perform the following actions.
| Action | More information |
| :- | :- |
| Fork the repository | "[About forks](/github/collaborating-with-issues-and-pull-requests/about-forks)" |{% if currentVersion == "free-pro-team@latest" %}
| Fork the repository | "[About forks](/github/collaborating-with-issues-and-pull-requests/about-forks)" |{% if currentVersion == "free-pro-team@latest" or currentVersion ver_gt "enterprise-server@3.0" %}
| Rename a branch other than the default branch | "[Renaming a branch](/github/administering-a-repository/renaming-a-branch)" |{% endif %}
| Create, edit, and delete comments on commits, pull requests, and issues in the repository | <ul><li>"[About issues](/github/managing-your-work-on-github/about-issues)"</li><li>"[Commenting on a pull request](/github/collaborating-with-issues-and-pull-requests/commenting-on-a-pull-request)"</li><li>"[Managing disruptive comments](/github/building-a-strong-community/managing-disruptive-comments)"</li></ul> |
| Create, assign, close, and re-open issues in the repository | "[Managing your work with issues](/github/managing-your-work-on-github/managing-your-work-with-issues)" |
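When automating around these permission levels, one way to check what a collaborator can do is the repository permission endpoint. This is only a sketch under assumed names; the owner, repository, username, and `GITHUB_TOKEN` variable are placeholders.

```python
# Illustrative sketch: look up a collaborator's permission level with
# GET /repos/{owner}/{repo}/collaborators/{username}/permission.
# Owner, repository, username, and the token variable are placeholders.
import os
import requests

def get_permission(owner, repo, username):
    response = requests.get(
        f"https://api.github.com/repos/{owner}/{repo}/collaborators/{username}/permission",
        headers={
            "Authorization": f"token {os.environ['GITHUB_TOKEN']}",
            "Accept": "application/vnd.github.v3+json",
        },
    )
    response.raise_for_status()
    return response.json()["permission"]  # "admin", "write", "read", or "none"

# Example with placeholder names:
# print(get_permission("octo-org", "octo-repo", "octocat"))
```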

@ -11,7 +11,7 @@ versions:
free-pro-team: '*'
---
_These terms apply to Customers who licensed the Products prior to January 4, 2021._
_These terms apply to Customers who licensed the Products prior to January 4, 2021. Customers who purchase GitHub Products after that date are directed to https://www.github.com/enterprise-legal for current terms._
{% tip %}

@ -8,7 +8,7 @@ redirect_from:
- /github/site-policy/github-ae-data-protection-agreement
---
_These terms apply to Customers who licensed the Products prior to January 4, 2021._
_These terms apply to Customers who licensed the Products prior to January 4, 2021. Customers who purchase GitHub Products after that date are directed to https://www.github.com/enterprise-legal for current terms._
## INTRODUCTION

@ -1,26 +1,26 @@
---
title: GitHub AE Product Specific Terms
hidden: true
versions:
free-pro-team: '*'
redirect_from:
- /github/site-policy/ghem-supplemental-terms-for-microsoft-volume-licensing
- /github/site-policy/github-ae-product-specific-terms
---
_These terms apply to Customers who licensed the Products prior to January 4, 2021._
_These terms apply to Customers who licensed the Products prior to January 4, 2021. Customers who purchase GitHub Products after that date are directed to https://www.github.com/enterprise-legal for current terms._
The Agreement consists of these GitHub AE Product Specific Terms, the General Terms that Customer accepted, and any additional terms GitHub or its Affiliates present when an order is placed.
### 1. Accounts.
**Account Responsibility.** Customer controls and is responsible for End User accounts and Content.
**Account Security.** Customer is responsible for maintaining the security of its account login credentials.
**Use Policies.** Customer's End Users must comply with the Acceptable Use Policy.
**Suspension.** GitHub may suspend use of the Online Service during any period of Customer's material breach.
**Access.** GitHub does not access Customer Content unless required for support matters or security purposes.
@ -30,18 +30,18 @@ The Agreement consists of these GitHub AE Product Specific Terms, the General Te
**Ownership of Content.** Customer owns Content it creates and will fully comply with any third-party licenses relating to Content that Customer posts.
**License Grant to GitHub.** Unless Customer Content comes with a separate license granting GitHub the rights it needs to run the Online Service, Customer grants to GitHub the right to use Customer Content and make incidental copies as necessary to provide the Online Service or support, or for security reasons. In addition, GitHub may be compelled by law to disclose Customer Content.
### 3. Non-GitHub Products.
GitHub may make non-GitHub products available through the Online Service. If Customer uses any non-GitHub products with the Online Service, Customer may not do so in any way that would subject GitHub's intellectual property to obligations beyond those expressly included in the Agreement. GitHub assumes no responsibility or liability for any non-GitHub products. Customer's use of non-GitHub products is governed by the terms between Customer and the publisher of the non-GitHub products (if any).
### 4. Support and SLA.
The Online Service includes Support and the SLA.
### 5. Data Protection and Security.
The terms of the Data Protection Agreement apply to the Online Service.
### 6. Notices.
Notices to GitHub must be sent to: GitHub, Inc. Attn: Legal Dept., 88 Colin P. Kelly St, San Francisco, CA 94107 USA.
### 7. Definitions.
@ -51,7 +51,7 @@ Notices to GitHub must be sent to: GitHub, Inc. Attn: Legal Dept., 88 Colin P. K
“**Customer Content**” means Content that Customer creates, owns, or to which Customer holds the rights.
“**Data Protection Agreement**” means the GitHub AE Data Protection Agreement, available on https://docs.github.com/github/site-policy.
“**Digital Millennium Copyright Act Takedown Policy**” means GitHub's process for handling notices of copyright infringement, available on https://docs.github.com/github/site-policy.

@ -9,7 +9,7 @@ versions:
free-pro-team: '*'
---
_These terms apply to Customers who licensed the Products prior to January 4, 2021._
_These terms apply to Customers who licensed the Products prior to January 4, 2021. Customers who purchase GitHub Products after that date are directed to https://www.github.com/enterprise-legal for current terms._
PLEASE READ THIS ADDENDUM CAREFULLY AS IT AMENDS THE TERMS OF YOUR ENTERPRISE LICENSE AGREEMENT WITH US AND SPECIFICALLY GOVERNS YOUR USE OF GITHUB CONNECT (AS DEFINED BELOW), UNLESS GITHUB (“WE” OR “US”) HAS EXECUTED A SEPARATE WRITTEN ADDENDUM WITH YOU FOR THAT PURPOSE.

@ -7,11 +7,11 @@ versions:
free-pro-team: '*'
---
_These terms apply to Customers who licensed the Products prior to January 4, 2021._
_These terms apply to Customers who licensed the Products prior to January 4, 2021. Customers who purchase GitHub Products after that date are directed to https://www.github.com/enterprise-legal for current terms._
## INTRODUCTION
The parties agree that the GitHub Data Protection and Security Exhibit (together, the **"Data Protection Addendum"** or **“DPA”**) sets forth their obligations with respect to the processing and security of Customer Personal Data in connection with the GitHub Enterprise Cloud hosted service (the **“Service”**). GitHub makes the commitments in this DPA to all customers using the Service. Separate terms, including different privacy and security terms, govern Customer's use of non-GitHub products.
In the event of any conflict or inconsistency between the DPA and any other terms in Customer's agreements with GitHub, the DPA shall prevail. The provisions of the DPA supersede any conflicting provisions of the GitHub Privacy Statement that otherwise may apply to processing of Customer Personal Data as defined herein. For clarity, consistent with Clause 10 of the Standard Contractual Clauses in Attachment 1, the Standard Contractual Clauses prevail over any other terms in the DPA.
@ -25,7 +25,7 @@ In the event of any conflict or inconsistency between the DPA and any other term
a. The EU General Data Protection Regulation 2016/679 (**"GDPR"**), along with any implementing or corresponding equivalent national laws or regulations, once in effect and applicable; and
b. The California Consumer Privacy Act of 2018, Cal. Civ. Code §§1798.100 et seq. (**"CCPA"**); and
c. The UK Data Protection Act 2018 and implementation of GDPR contained therein.
1.2 "**Controller**," "**Data Subject**," "**Member State**," "**Personal Data**," "**Personal Data Breach**," "**Processing**," "**Processor**," and "**Supervisory Authority**" have the meanings given to them in the Applicable Data Protection Laws. In the event of a conflict, the meanings given in the GDPR will supersede.
@ -46,10 +46,10 @@ In the event of any conflict or inconsistency between the DPA and any other term
### 2. Status and Compliance.
#### 2.1 Data Processing.
GitHub acts as a Processor in regard to any Customer Personal Data it receives in connection with the Agreement, and GitHub will process Customer Personal Data only for Permitted Purposes in accordance with Customer's instructions as represented by the Agreement and other written communications. In the event that GitHub is unable to comply with Customer's instructions, such as due to conflicts with the Applicable Data Protection Laws, or where processing is required by the Applicable Data Protection Laws or other legal requirements, GitHub will notify Customer to the extent permissible. GitHub processes all Customer Personal Data in the United States or in the European Union; however, GitHub's subprocessors may process data outside of the United States or the European Union. Additionally, GitHub acts as a Processor for any Customer Repository Data.
#### 2.2 Data Controllers.
GitHub receives Customer Personal Data both from Customer and directly from Data Subjects who create End User accounts. Customer is a Controller only for the Customer Personal Data it transfers directly to GitHub.
#### 2.3 GitHub Compliance; Data Transfers.
@ -152,7 +152,7 @@ d. GitHub is no longer carrying on business, is dissolved, enters receivership,
e. Customer objects to a subprocessor pursuant to Section 6.5, and GitHub has not been able to provide an alternative solution within ninety days.
#### 7.3 Breach.
Failure to comply with the material provisions of this Addendum is considered a material breach under the Agreement.
#### 7.4 Failure to perform.
@ -179,137 +179,137 @@ c. provide Customer with reasonable assurance that GitHub has complied with its
Except as limited by the Applicable Data Protection Laws, any claims brought under this Addendum will be subject to the terms of the Agreement regarding Limitations of Liability.
## Attachment 1 – The Standard Contractual Clauses (Processors)
Execution of the applicable agreement by Customer includes execution of this Attachment 1 to the GitHub Data Protection Addendum, which is countersigned by GitHub, Inc.
In countries where regulatory approval is required for use of the Standard Contractual Clauses, the Standard Contractual Clauses cannot be relied upon under European Commission 2010/87/EU (of February 2010) to legitimize export of data from the country, unless Customer has the required regulatory approval.
For the purposes of Article 46(2) of the General Data Protection Regulation (EU 2016/679) for the transfer of personal data to processors established in third countries which do not ensure an adequate level of data protection, Customer (as data exporter) and GitHub (as data importer, whose signature appears below), each a “party,” together “the parties,” have agreed on the following Contractual Clauses (the “Clauses” or “Standard Contractual Clauses”) in order to adduce adequate safeguards with respect to the protection of privacy and fundamental rights and freedoms of individuals for the transfer by the data exporter to the data importer of the personal data specified in Appendix 1.
#### Clause 1: Definitions
(a) 'personal data', 'special categories of data', 'process/processing', 'controller', 'processor', 'data subject' and 'supervisory authority' shall have the same meaning as in the General Data Protection Regulation (EU 2016/679) on the protection of individuals with regard to the processing of personal data and on the free movement of such data;
(b) 'the data exporter' means the controller who transfers the personal data;
(c) 'the data importer' means the processor who agrees to receive from the data exporter personal data intended for processing on his behalf after the transfer in accordance with his instructions and the terms of the Clauses and who is not subject to a third country's system ensuring adequate protection within the meaning of Article 45(2) of the General Data Protection Regulation (EU 2016/679);
(d) 'the subprocessor' means any processor engaged by the data importer or by any other subprocessor of the data importer who agrees to receive from the data importer or from any other subprocessor of the data importer personal data exclusively intended for processing activities to be carried out on behalf of the data exporter after the transfer in accordance with his instructions, the terms of the Clauses and the terms of the written subcontract;
(e) 'the applicable data protection law' means the legislation protecting the fundamental rights and freedoms of individuals and, in particular, their right to privacy with respect to the processing of personal data applicable to a data controller in the Member State in which the data exporter is established;
(f) 'technical and organisational security measures' means those measures aimed at protecting personal data against accidental or unlawful destruction or accidental loss, alteration, unauthorised disclosure or access, in particular where the processing involves the transmission of data over a network, and against all other unlawful forms of processing.
#### Clause 2: Details of the transfer
The details of the transfer and in particular the special categories of personal data where applicable are specified in Appendix 1 below which forms an integral part of the Clauses.
#### Clause 3: Third-party beneficiary clause
1. The data subject can enforce against the data exporter this Clause, Clause 4(b) to (i), Clause 5(a) to (e), and (g) to (j), Clause 6(1) and (2), Clause 7, Clause 8(2), and Clauses 9 to 12 as third-party beneficiary.
2. The data subject can enforce against the data importer this Clause, Clause 5(a) to (e) and (g), Clause 6, Clause 7, Clause 8(2), and Clauses 9 to 12, in cases where the data exporter has factually disappeared or has ceased to exist in law unless any successor entity has assumed the entire legal obligations of the data exporter by contract or by operation of law, as a result of which it takes on the rights and obligations of the data exporter, in which case the data subject can enforce them against such entity.
3. The data subject can enforce against the subprocessor this Clause, Clause 5(a) to (e) and (g), Clause 6, Clause 7, Clause 8(2), and Clauses 9 to 12, in cases where both the data exporter and the data importer have factually disappeared or ceased to exist in law or have become insolvent, unless any successor entity has assumed the entire legal obligations of the data exporter by contract or by operation of law as a result of which it takes on the rights and obligations of the data exporter, in which case the data subject can enforce them against such entity. Such third-party liability of the subprocessor shall be limited to its own processing operations under the Clauses.
4. The parties do not object to a data subject being represented by an association or other body if the data subject so expressly wishes and if permitted by national law.
#### Clause 4: Obligations of the data exporter
The data exporter agrees and warrants:
(a) that the processing, including the transfer itself, of the personal data has been and will continue to be carried out in accordance with the relevant provisions of the applicable data protection law (and, where applicable, has been notified to the relevant authorities of the Member State where the data exporter is established) and does not violate the relevant provisions of that State;
(b) that it has instructed and throughout the duration of the personal data processing services will instruct the data importer to process the personal data transferred only on the data exporter's behalf and in accordance with the applicable data protection law and the Clauses;
(c) that the data importer will provide sufficient guarantees in respect of the technical and organisational security measures specified in Appendix 2 below;
(d) that after assessment of the requirements of the applicable data protection law, the security measures are appropriate to protect personal data against accidental or unlawful destruction or accidental loss, alteration, unauthorised disclosure or access, in particular where the processing involves the transmission of data over a network, and against all other unlawful forms of processing, and that these measures ensure a level of security appropriate to the risks presented by the processing and the nature of the data to be protected having regard to the state of the art and the cost of their implementation;
(e) that it will ensure compliance with the security measures;
(f) that, if the transfer involves special categories of data, the data subject has been informed or will be informed before, or as soon as possible after, the transfer that its data could be transmitted to a third country not providing adequate protection within the meaning of the General Data Protection Regulation (EU 2016/679);
(g) to forward any notification received from the data importer or any subprocessor pursuant to Clause 5(b) and Clause 8(3) to the data protection supervisory authority if the data exporter decides to continue the transfer or to lift the suspension;
(h) to make available to the data subjects upon request a copy of the Clauses, with the exception of Appendix 2, and a summary description of the security measures, as well as a copy of any contract for subprocessing services which has to be made in accordance with the Clauses, unless the Clauses or the contract contain commercial information, in which case it may remove such commercial information;
(i) that, in the event of subprocessing, the processing activity is carried out in accordance with Clause 11 by a subprocessor providing at least the same level of protection for the personal data and the rights of data subject as the data importer under the Clauses; and
(j) that it will ensure compliance with Clause 4(a) to (i).
#### Clause 5: Obligations of the data importer
The data importer agrees and warrants:
(a) to process the personal data only on behalf of the data exporter and in compliance with its instructions and the Clauses; if it cannot provide such compliance for whatever reasons, it agrees to inform promptly the data exporter of its inability to comply, in which case the data exporter is entitled to suspend the transfer of data and/or terminate the contract;
(b) that it has no reason to believe that the legislation applicable to it prevents it from fulfilling the instructions received from the data exporter and its obligations under the contract and that in the event of a change in this legislation which is likely to have a substantial adverse effect on the warranties and obligations provided by the Clauses, it will promptly notify the change to the data exporter as soon as it is aware, in which case the data exporter is entitled to suspend the transfer of data and/or terminate the contract;
(c) that it has implemented the technical and organisational security measures specified in Appendix 2 before processing the personal data transferred;
(d) that it will promptly notify the data exporter about:
(i) any legally binding request for disclosure of the personal data by a law enforcement authority unless otherwise prohibited, such as a prohibition under criminal law to preserve the confidentiality of a law enforcement investigation,
(ii) any accidental or unauthorised access, and
(iii) any request received directly from the data subjects without responding to that request, unless it has been otherwise authorised to do so;
(e) to deal promptly and properly with all inquiries from the data exporter relating to its processing of the personal data subject to the transfer and to abide by the advice of the supervisory authority with regard to the processing of the data transferred;
(f) at the request of the data exporter to submit its data processing facilities for audit of the processing activities covered by the Clauses which shall be carried out by the data exporter or an inspection body composed of independent members and in possession of the required professional qualifications bound by a duty of confidentiality, selected by the data exporter, where applicable, in agreement with the supervisory authority;
(g) to make available to the data subject upon request a copy of the Clauses, or any existing contract for subprocessing, unless the Clauses or contract contain commercial information, in which case it may remove such commercial information, with the exception of Appendix 2 which shall be replaced by a summary description of the security measures in those cases where the data subject is unable to obtain a copy from the data exporter;
(h) that, in the event of subprocessing, it has previously informed the data exporter and obtained its prior written consent;
(i) that the processing services by the subprocessor will be carried out in accordance with Clause 11; and
(j) to send promptly a copy of any subprocessor agreement it concludes under the Clauses to the data exporter.
#### Clause 6: Liability
1. The parties agree that any data subject who has suffered damage as a result of any breach of the obligations referred to in Clause 3 or in Clause 11 by any party or subprocessor is entitled to receive compensation from the data exporter for the damage suffered.
2. If a data subject is not able to bring a claim for compensation in accordance with paragraph 1 against the data exporter, arising out of a breach by the data importer or his subprocessor of any of their obligations referred to in Clause 3 or in Clause 11, because the data exporter has factually disappeared or ceased to exist in law or has become insolvent, the data importer agrees that the data subject may issue a claim against the data importer as if it were the data exporter, unless any successor entity has assumed the entire legal obligations of the data exporter by contract or by operation of law, in which case the data subject can enforce its rights against such entity.
The data importer may not rely on a breach by a subprocessor of its obligations in order to avoid its own liabilities.
3. If a data subject is not able to bring a claim against the data exporter or the data importer referred to in paragraphs 1 and 2, arising out of a breach by the subprocessor of any of their obligations referred to in Clause 3 or in Clause 11 because both the data exporter and the data importer have factually disappeared or ceased to exist in law or have become insolvent, the subprocessor agrees that the data subject may issue a claim against the data subprocessor with regard to its own processing operations under the Clauses as if it were the data exporter or the data importer, unless any successor entity has assumed the entire legal obligations of the data exporter or data importer by contract or by operation of law, in which case the data subject can enforce its rights against such entity. The liability of the subprocessor shall be limited to its own processing operations under the Clauses.
#### Clause 7: Mediation and jurisdiction
1. The data importer agrees that if the data subject invokes against it third-party beneficiary rights and/or claims compensation for damages under the Clauses, the data importer will accept the decision of the data subject:
(a) to refer the dispute to mediation, by an independent person or, where applicable, by the supervisory authority;
(b) to refer the dispute to the courts in the Member State in which the data exporter is established.
2. The parties agree that the choice made by the data subject will not prejudice its substantive or procedural rights to seek remedies in accordance with other provisions of national or international law.
#### Clause 8: Cooperation with supervisory authorities
1. The data exporter agrees to deposit a copy of this contract with the supervisory authority if it so requests or if such deposit is required under the applicable data protection law.
2. The parties agree that the supervisory authority has the right to conduct an audit of the data importer, and of any subprocessor, which has the same scope and is subject to the same conditions as would apply to an audit of the data exporter under the applicable data protection law.
3. The data importer shall promptly inform the data exporter about the existence of legislation applicable to it or any subprocessor preventing the conduct of an audit of the data importer, or any subprocessor, pursuant to paragraph 2. In such a case the data exporter shall be entitled to take the measures foreseen in Clause 5(b).
#### Clause 9: Governing Law.
The Clauses shall be governed by the law of the Member State in which the data exporter is established.
#### Clause 10: Variation of the contract
The parties undertake not to vary or modify the Clauses. This does not preclude the parties from adding clauses on business related issues where required as long as they do not contradict the Clause.
#### Clause 11: Subprocessing
1. The data importer shall not subcontract any of its processing operations performed on behalf of the data exporter under the Clauses without the prior written consent of the data exporter. Where the data importer subcontracts its obligations under the Clauses, with the consent of the data exporter, it shall do so only by way of a written agreement with the subprocessor which imposes the same obligations on the subprocessor as are imposed on the data importer under the Clauses. Where the subprocessor fails to fulfil its data protection obligations under such written agreement the data importer shall remain fully liable to the data exporter for the performance of the subprocessor's obligations under such agreement.
2. The prior written contract between the data importer and the subprocessor shall also provide for a third-party beneficiary clause as laid down in Clause 3 for cases where the data subject is not able to bring the claim for compensation referred to in paragraph 1 of Clause 6 against the data exporter or the data importer because they have factually disappeared or have ceased to exist in law or have become insolvent and no successor entity has assumed the entire legal obligations of the data exporter or data importer by contract or by operation of law. Such third-party liability of the subprocessor shall be limited to its own processing operations under the Clauses.
3. The provisions relating to data protection aspects for subprocessing of the contract referred to in paragraph 1 shall be governed by the law of the Member State in which the data exporter is established.
4. The data exporter shall keep a list of subprocessing agreements concluded under the Clauses and notified by the data importer pursuant to Clause 5 (j), which shall be updated at least once a year. The list shall be available to the data exporter's data protection supervisory authority.
#### Clause 12: Obligation after the termination of personal data processing services
1. The parties agree that on the termination of the provision of data processing services, the data importer and the subprocessor shall, at the choice of the data exporter, return all the personal data transferred and the copies thereof to the data exporter or shall destroy all the personal data and certify to the data exporter that it has done so, unless legislation imposed upon the data importer prevents it from returning or destroying all or part of the personal data transferred. In that case, the data importer warrants that it will guarantee the confidentiality of the personal data transferred and will not actively process the personal data transferred anymore.
2. The data importer and the subprocessor warrant that upon request of the data exporter and/or of the supervisory authority, it will submit its data processing facilities for an audit of the measures referred to in paragraph 1.
### Appendix 1 to the Standard Contractual Clauses
Data exporter: Customer is the data exporter.
**Data importer:** The data importer is GitHub, Inc., a global producer of software and services.
**Data subjects:** Data subjects include the data exporter's representatives and end-users including employees, contractors, collaborators, and customers of the data exporter. Data subjects may also include individuals attempting to communicate or transfer personal information to users of the services provided by data importer. GitHub acknowledges that, depending on Customer's use of the Service, Customer may elect to include personal data from any of the following types of data subjects in the Customer Personal Data:
@ -324,27 +324,27 @@ Data exporter: Customer is the data exporter.
**Categories of data:** The personal data transferred that is included in e-mail, documents and other data in an electronic form in the context of the Service. GitHub acknowledges that, depending on Customer's use of the Service, Customer may elect to include personal data from any of the following categories in the Customer Personal Data:
- Authentication data (for example, username, email, password);
- Contact information (for example, email);
- Unique identification numbers and signatures (IP addresses, unique identifier in tracking cookies or similar technology).
- Other unique identifying information. Data subjects may include more data such as real names, avatar images, and other personal information;
**Special categories of data (if appropriate):** The data importer does not intentionally collect or process any special categories of data in carrying out its services to the data exporter.
However, because the data importer provides storage services and does not control the categories of data it stores, the data exporter may choose to transfer special categories of data. Consequently, the data exporter is solely responsible for ensuring that it complies with all obligations imposed by applicable laws and regulations relating to the collection and processing of any special categories of data, including obtaining the explicit consent of the data subject prior to processing sensitive personal data.
**Processing operations:** The personal data transferred will be subject to the following basic processing activities:
GitHub uses personal data for the limited purposes set forth in the GitHub Privacy Statement, available at [https://docs.github.com/articles/github-privacy-statement](/articles/github-privacy-statement), and the “Data Processing” section of the DPA.
Subcontractors: In accordance with the DPA, the data importer may hire other companies to provide limited services on data importer's behalf, such as providing customer support. Any such subcontractors will be permitted to obtain Customer Personal Data only to deliver the services the data importer has retained them to provide, and they are prohibited from using Customer Personal Data for any other purpose.
### Appendix 2 to the Standard Contractual Clauses
Description of the technical and organizational security measures implemented by the data importer in accordance with Clauses 4(d) and 5(c):
**1. Personnel.** Data importer's personnel will not process Customer Personal Data without authorization. Personnel are obligated to maintain the confidentiality of any Customer Personal Data and this obligation continues even after their engagement ends.
**2. Data Privacy Contact.** The data privacy officer of the data importer can be reached at the following address:
GitHub, Inc.
Attn: Privacy
88 Colin P. Kelly Jr. Street
San Francisco, CA 94107 USA
**3. Technical and Organization Measures.** The data importer has implemented and will maintain appropriate technical and organizational measures, internal controls, and information security routines intended to protect Customer Personal Data, as defined in the GitHub Security Exhibit, against accidental loss, destruction, or alteration; unauthorized disclosure or access; or unlawful destruction as follows: The technical and organizational measures, internal controls, and information security routines set forth in the GitHub Security Exhibit are hereby incorporated into this Appendix 2 by this reference and are binding on the data importer as if they were set forth in this Appendix 2 in their entirety.
Signature of GitHub, Inc. appears below.
@ -380,7 +380,7 @@ c. GitHub will maintain standard security industry practices to include, but are
- Security and Privacy Awareness Training
#### 1.2 Security Incident Management.
Throughout the duration of the Agreement, and where applicable, GitHub will provide a Security incident management program as follows:
a. Security Availability and Escalation. GitHub will maintain appropriate security contact and escalation processes on a 24-hours-per-day, 7-days-per-week basis to ensure customers and employees can submit issues to the GitHub Security team.
@ -412,7 +412,7 @@ e. all card access and video systems will be tied in to generator or UPS backup
#### 2.1 Requests for Information.
Upon Customer's written request and no more than once annually, GitHub will respond to one request for information to assess security and compliance risk-related information. The response will be provided in writing within thirty days of receipt of the request, pending needed clarifications of any request.
#### 2.2 Response Contents.
GitHub will include in its annual response relevant audit reports for production datacenter, IaaS, PaaS or private hosting providers, as deemed relevant by GitHub, in its sole discretion and based on data and services rendered.
#### 2.3 GitHub Security Audit Report.

@ -8,7 +8,7 @@ versions:
free-pro-team: '*'
---
_These terms apply to Customers who licensed the Products prior to January 4, 2021._
_These terms apply to Customers who licensed the Products prior to January 4, 2021. Customers who purchase GitHub Products after that date are directed to https://www.github.com/enterprise-legal for current terms._
THANK YOU FOR CHOOSING GITHUB FOR YOUR COMPANY'S BUSINESS NEEDS. PLEASE READ THESE TERMS CAREFULLY AS IT GOVERNS YOUR EVALUATION USE OF THE SERVICE, UNLESS GITHUB HAS EXECUTED A SEPARATE WRITTEN AGREEMENT WITH YOU FOR THAT PURPOSE. BY CLICKING ON THE "I AGREE" OR SIMILAR BUTTON OR BY ACCESSING THE SERVICE ON A TRIAL BASIS, YOU ACCEPT ALL THE TERMS AND CONDITIONS OF THIS EVALUATION AGREEMENT. IF YOU ARE ENTERING INTO THIS EVALUATION AGREEMENT ON BEHALF OF A COMPANY OR OTHER LEGAL ENTITY (HEREINAFTER REFERRED TO AS "**CUSTOMER**"), CUSTOMER REPRESENTS THAT IT HAS THE LEGAL AUTHORITY TO BIND THE COMPANY OR OTHER LEGAL ENTITY TO THIS EVALUATION AGREEMENT. ALSO, BY ACCEPTING THESE TERMS, CUSTOMER AGREES THAT IT HAS READ GITHUB'S PRIVACY STATEMENT.

@ -8,7 +8,7 @@ versions:
free-pro-team: '*'
---
_These terms apply to Customers who licensed the Products prior to January 4, 2021._
_These terms apply to Customers who licensed the Products prior to January 4, 2021. Customers who purchase GitHub Products after that date are directed to https://www.github.com/enterprise-legal for current terms._
PLEASE READ THIS AGREEMENT CAREFULLY AS IT GOVERNS YOUR USE OF THE PRODUCTS (AS DEFINED BELOW), UNLESS WE HAVE EXECUTED A SEPARATE WRITTEN AGREEMENT WITH YOU FOR THAT PURPOSE.
@ -45,11 +45,11 @@ If Customer has purchased the Products from a GitHub Partner, the following prov
"**Fees**" means the fees Customer is required to pay GitHub to use the Products during the applicable Subscription Term or Professional Services, as such fees are reflected on an Order Form or SOW.
"**GitHub Insights**" or “**Insights**” means the Software feature which provides Customer with metrics, analytics, and recommendations relating to their use of the Software. GitHub Insights does not include legacy features of GitHub including Organization insights and repository insights.
"**GitHub Partner**" means a company authorized to resell GitHub Products under the terms and conditions of GitHub's Channel Partner Agreement.
"**Learning Lab for Enterprise Server**" means the Software feature that enables Users to learn about GitHub functionality, including associated Documentation.
"**License Effective Date**" means the effective date of each Order Form as stated therein.

@ -10,7 +10,7 @@ versions:
free-pro-team: '*'
---
_These terms apply to Customers who licensed the Products prior to January 4, 2021._
_These terms apply to Customers who licensed the Products prior to January 4, 2021. Customers who purchase GitHub Products after that date are directed to https://www.github.com/enterprise-legal for current terms._
**Short version:** GitHub guarantees a 99.9% quarterly uptime commitment for the applicable GitHub service (the “**Service Level**” or “**SLA**”). If GitHub does not meet the SLA, then Customer will be entitled to a service credit to Customer's account (“**Service Credits**”).
@ -18,12 +18,12 @@ For definitions of each Service feature (“**Service Feature**”) and to revie
## Uptime Guarantee
“**Uptime**” is the percentage of total possible minutes the applicable GitHub service was available in a given calendar quarter. GitHub commits to maintain at least 99.9% Uptime for the applicable GitHub service. The Uptime calculation for each Service Feature that may be included with the applicable GitHub service is described below (“**Uptime Calculation**”). If GitHub does not meet the SLA, Customer will be entitled to Service Credits based on the calculation below (“**Service Credits Calculation**”). Note, Downtime does not affect every customer at the same time or in the same way.
| **Service Feature** | **Uptime Calculation** | **Definitions** | **Service Credits Calculation** |
|---|---|---|---|
| **Issues**,<br>**Pull&nbsp;Requests**,<br>**Git&nbsp;Operations**,<br>**API&nbsp;Requests (for Service Features only)**,<br>**Webhooks**,<br>**Pages** | (total minutes in a calendar quarter - Downtime) / total minutes in a calendar quarter | “**Downtime**” is a period of time where either (a) the error rate exceeds five percent (5%) in a given minute for any Service Feature or (b) the Service was unavailable as determined by a combination of GitHub's internal and external monitoring systems. | A Service Credits claim may be based on either (not both) of the following calculations: <ul><li>10% of the amount Customer paid for a Service Feature in a calendar quarter where the Uptime for that Service Feature was less than or equal to 99.9%, but greater than 99.0%. <BR><BR>OR <BR><BR></li><li>25% of the amount Customer paid for a Service Feature in a calendar quarter where the Uptime of that Service Feature was less than 99.0%.</li></ul> | |
| **Actions** | (Total Triggered Executions – Unavailable Executions) / (Total Triggered Executions) x 100 | “**Total Triggered Executions**” is the total number of all Actions executions triggered by Customer in a calendar quarter. <br><br> “**Unavailable Executions**” is the total number of executions within Total Triggered Executions which failed to run in a calendar quarter. An execution failed to run when the Actions history log did not capture any output five (5) minutes after the trigger was successfully fired. | Same as above |
| **Packages** | Transfers Uptime = same as Actions <br> <br> Storage Uptime = 100% - Average Error Rate* <br> <br> *The Uptime Calculation excludes public usage and storage transactions that do not count toward either Total Storage Transactions or Failed Storage Transactions (including pre-authentication failures; authentication failures; attempted transactions for storage accounts over their prescribed quotas). | “**Error Rate**” is the total number of Failed Storage Transactions divided by the Total Storage Transactions during a set time interval (currently set at one hour). If the Total Storage Transactions in a given one-hour interval is zero, the error rate for that interval is 0%. <br><br> “**Average Error Rate**” is the sum of Error Rates for each hour in a calendar quarter divided by the total number of hours in a calendar quarter. | Same as above |
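For a rough sense of scale, a calendar quarter has about 131,000 minutes, so the 99.9% commitment allows for a little over two hours of Downtime per quarter. The sketch below, using hypothetical figures that are not part of the SLA itself, illustrates how the Uptime Calculation and Service Credits Calculation above fit together:

```javascript
// Illustrative only — example figures, not part of the SLA itself
const totalMinutes = 91 * 24 * 60   // minutes in an example calendar quarter (131,040)
const downtime = 150                // hypothetical minutes of Downtime

const uptime = ((totalMinutes - downtime) / totalMinutes) * 100
console.log(uptime.toFixed(3))      // 99.886 — below 99.9% but above 99.0%

// Per the Service Credits Calculation: <= 99.9% and > 99.0% earns a 10% credit,
// < 99.0% earns a 25% credit, on the amount paid for the affected Service Feature
const amountPaid = 1000             // hypothetical amount paid for the feature
const serviceCredit = uptime < 99.0
  ? amountPaid * 0.25
  : uptime <= 99.9 ? amountPaid * 0.10 : 0
console.log(serviceCredit)          // 100
```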
## Exclusions
@ -32,4 +32,4 @@ Excluded from the Uptime Calculation are Service Feature failures resulting from
## Service Credits Redemption
If GitHub does not meet this SLA, Customer may redeem Service Credits only upon written request to GitHub within thirty (30) days of the end of the calendar quarter. Written requests for Service Credits redemption and GitHub Enterprise Cloud custom monthly or quarterly reports should be sent to [GitHub Support](https://support.github.com/contact).
Service Credits may take the form of a refund or credit to Customers account, cannot be exchanged into a cash amount, are limited to a maximum of ninety (90) days of paid service per calendar quarter, require Customer to have paid any outstanding invoices, and expire upon termination of Customers agreement with GitHub. Service Credits are the sole and exclusive remedy for any failure by GitHub to meet any obligations in this SLA.

View file

@ -9,7 +9,7 @@ versions:
free-pro-team: '*'
---
_These terms apply to Customers who licensed the Products prior to January 4, 2021._
_These terms apply to Customers who licensed the Products prior to January 4, 2021. Customers who purchase GitHub Products after that date are directed to https://www.github.com/enterprise-legal for current terms._
BY CLICKING THE "I AGREE" OR SIMILAR BUTTON OR BY USING ANY OF THE PRODUCTS (DEFINED BELOW), CUSTOMER ACCEPTS THE TERMS AND CONDITIONS OF THIS AGREEMENT. IF CUSTOMER IS ENTERING INTO THIS AGREEMENT ON BEHALF OF A LEGAL ENTITY, CUSTOMER REPRESENTS THAT IT HAS THE LEGAL AUTHORITY TO BIND THE LEGAL ENTITY TO THIS AGREEMENT.
@ -212,7 +212,7 @@ This Section 2 details terms applicable to Customers use of the Software.
### 2.1 License Grant.
GitHub grants to Customer a non-exclusive, non-transferable, worldwide, royalty-free, limited-term license to install and use the Software for Customers internal business purposes during the applicable Subscription Term, in accordance with the Documentation, and only for the number of Subscription Licenses stated in Customers Order Form. The Software includes components licensed to GitHub by third parties, including software whose licenses require GitHub to make the source code for those components available. The source code for such components will be provided upon request. Without limiting the foregoing, this license permits Customer to download and run Microsoft SQL Server Standard Edition container image for Linux files (“**SQL Server Images**”), which may be used only with the Software as documented. Customers right to use the SQL Server Images ends when Customer no longer has rights to use the Software, and Customer must uninstall the SQL Server Images when its right to use them ends. Microsoft Corporation may disable SQL Server Images at any time.
### 2.2 License Restrictions.
@ -320,11 +320,11 @@ Customer is responsible for managing access to its Private Repositories, includi
GitHub considers Customer Content in Customers Private Repositories to be Customers Confidential Information. GitHub will protect and keep strictly confidential the Customer Content of Private Repositories in accordance with Section 1.4.
#### 3.4.3 Access.
GitHub personnel may only access Customers Private Repositories in the situations described in our [Privacy Statement](/github/site-policy/github-privacy-statement#repository-contents).
Customer may choose to enable additional access to its Private Repositories. For example, Customer may enable various GitHub services or features that require additional rights to Customer Content in Private Repositories. These rights may vary depending on the service or feature, but GitHub will continue to treat Customer Content in Customers Private Repositories as Customers Confidential Information. If those services or features require rights in addition to those it needs to provide the Service, GitHub will provide an explanation of those rights.
Additionally, we may be [compelled by law](/github/site-policy/github-privacy-statement#for-legal-disclosure) to disclose the contents of your private repositories.
GitHub will provide notice regarding our access to private repository content, unless [for legal disclosure](/github/site-policy/github-privacy-statement#for-legal-disclosure), to comply with our legal obligations, or where otherwise bound by requirements under law, for automated scanning, or if in response to a security threat or other risk to security.

View file

@ -9,7 +9,7 @@ versions:
free-pro-team: '*'
---
_These terms apply to Customers who licensed the Products prior to January 4, 2021._
_These terms apply to Customers who licensed the Products prior to January 4, 2021. Customers who purchase GitHub Products after that date are directed to https://www.github.com/enterprise-legal for current terms._
The following GitHub Supplemental Terms (including any applicable Order Forms) supplement Customer's Microsoft volume licensing agreement ("**Microsoft Customer Agreement**") and, together with the Microsoft Customer Agreement, govern Customer's use of the Products (as defined below). The Microsoft Customer Agreement is incorporated herein by this reference. Capitalized terms used but not defined in these supplemental terms have the meanings assigned to them in the Microsoft Customer Agreement.
@ -39,7 +39,7 @@ This Section 1 details terms applicable to Customers use of the Software.
### 1.1 License Grant.
GitHub grants to Customer a non-exclusive, non-transferable, worldwide, royalty-free, limited-term license to install and use the Software for Customers internal business purposes during the applicable Subscription Term, in accordance with the Documentation, and only for the number of Subscription Licenses stated in Customers Order Form. The Software includes components licensed to GitHub by third parties, including software whose licenses require GitHub to make the source code for those components available. The source code for such components will be provided upon request. Without limiting the foregoing, this license permits Customer to download and run Microsoft SQL Server Standard Edition container image for Linux files (“**SQL Server Images**”), which may be used only with the Software as documented. Customers right to use the SQL Server Images ends when Customer no longer has rights to use the Software, and Customer must uninstall the SQL Server Images when its right to use them ends. Microsoft Corporation may disable SQL Server Images at any time.
### 1.2 Restrictions.

View file

@ -10,7 +10,7 @@ versions:
free-pro-team: '*'
---
Effective date: **January 21, 2021**
Effective date: **January 29, 2021**
GitHub provides a great deal of transparency regarding how we use your data, how we collect your data, and with whom we share your data. To that end, we provide this page, which details [our subprocessors](#github-subprocessors), and how we use [cookies](#cookies-on-github).
@ -27,7 +27,6 @@ When we share your information with third party subprocessors, such as our vendo
| Discourse | Community forum software provider | United States | United States |
| Eloqua | Marketing campaign automation | United States | United States |
| Google Apps | Internal company infrastructure | United States | United States |
| Google Analytics | Analytics and performance | United States | United States |
| MailChimp | Customer ticketing mail services provider | United States | United States |
| Mailgun | Transactional mail services provider | United States | United States |
| Microsoft | Microsoft Services | United States | United States |

View file

@ -199,7 +199,14 @@ If we receive a request for information under certain exigent circumstances (whe
### Cost reimbursement
We reserve the right to seek reimbursement for administrative costs associated with responding to requests for information, as allowed by law.
Under state and federal law, GitHub can seek reimbursement for costs associated with compliance with a valid legal demand, such as a subpoena, court order or search warrant. We only charge to recover some costs, and these reimbursements cover only a portion of the costs we actually incur to comply with legal orders.
While we do not charge in emergency situations or in other exigent circumstances, we seek reimbursement for all other legal requests in accordance with the following schedule, unless otherwise required by law:
- Initial search of up to 25 identifiers: Free
- Production of subscriber information/data for up to 5 accounts: Free
- Production of subscriber information/data for more than 5 accounts: $20 per account
- Secondary searches: $10 per search
### Data preservation
@ -215,14 +222,6 @@ c/o Corporation Service Company
2710 Gateway Oaks Drive, Suite 150N
Sacramento, CA 95833-3505
```
Under state and federal law, GitHub can seek reimbursement for costs associated with compliance with a valid legal demand, such as a subpoena, court order or search warrant. We only charge to recover some costs, and these reimbursements cover only a portion of the costs we actually incur to comply with legal orders.
While we do not charge in emergency situations or in other exigent circumstances, we seek reimbursement for all other legal requests in accordance with the following schedule, unless otherwise required by law:
- Initial search of up to 25 identifiers: Free
- Production of subscriber information/data for up to 5 accounts: Free
- Production of subscriber information/data for more than 5 accounts: $20 per account
- Secondary searches: $10 per search
Please make your requests as specific and narrow as possible, including the following information:

View file

@ -16,7 +16,7 @@
[Markdown](http://daringfireball.net/projects/markdown/) is a human-friendly syntax for formatting plain text. Our documentation is written with [GitHub Flavored Markdown](https://docs.github.com/en/github/writing-on-github/about-writing-and-formatting-on-github), a custom version of Markdown used across GitHub.
This site's Markdown rendering is powered by the [`/lib/render-content`](/lib/render-content) and [`hubdown`](https://github.com/electron/hubdown) npm packages, which are in turn built on the [`remark`](https://remark.js.org/) Markdown processor.
This site's Markdown rendering is powered by [`/lib/render-content`](/lib/render-content), which is in turn built on the [`remark`](https://remark.js.org/) Markdown processor.
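As a point of reference, a stripped-down remark pipeline looks roughly like the sketch below. The real `/lib/render-content` module layers many more plugins on top (Liquid templating, emoji, syntax highlighting, link rewriting), so treat this as an illustration rather than the actual implementation:

```javascript
// Minimal remark -> rehype -> HTML sketch; the plugin list is illustrative only
const unified = require('unified')
const markdown = require('remark-parse')
const remark2rehype = require('remark-rehype')
const html = require('rehype-stringify')

async function renderMarkdown (source) {
  const file = await unified()
    .use(markdown)        // parse Markdown into a syntax tree
    .use(remark2rehype)   // convert the Markdown tree to an HTML tree
    .use(html)            // serialize the HTML tree to a string
    .process(source)
  return file.toString()
}

renderMarkdown('# Hello\n\nSome **formatted** text.').then(console.log)
```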
## Callout tags

View file

@ -1,6 +1,6 @@
date: '2021-01-12'
release_candidate: true
intro: Release notes are now published on the documentation site. The new location makes it easier to learn about new releases and features at the same time. Historical release notes are available on [GitHub Enterprise Releases](https://enterprise.github.com/releases).
intro: Release candidate versions should be tested on non-production environments. For more information about the Release Candidate Program, see the [GitHub Blog](https://github.blog/2020-12-03-improving-the-ghes-release-process-release-candidates/) or "[About upgrades to new releases](/admin/overview/about-upgrades-to-new-releases)".
sections:
features:
- heading: GitHub Actions

View file

@ -0,0 +1,23 @@
date: '2021-01-29'
release_candidate: true
intro: Release candidate versions should be tested on non-production environments. For more information about the Release Candidate Program, see the [GitHub Blog](https://github.blog/2020-12-03-improving-the-ghes-release-process-release-candidates/) or "[About upgrades to new releases](/admin/overview/about-upgrades-to-new-releases)."
sections:
bugs:
- heading: Fixes for known issues from Release Candidate 1
notes:
- If you disabled GitHub Actions following an unsuccessful attempt to set it up, you could not create the first user and use the appliance.
- The "Mandatory message viewed" audit log event was not being saved.
- '`ghe-config-apply` needed to run on a replica during an initial setup before `ghe-repl-setup` could run to start replication.'
- Removing yourself as an enterprise owner returned a 404.
- heading: Fixes for other issues
notes:
- Issues with migrations and upgrades to 3.0.0 have been fixed.
- Backup Utilities versioning now works for release candidate versions.
- Generating a support bundle resulted in an error in the orchestrator logs.
- A large restore could result in Redis running out of memory.
- The checkbox to enable GitHub Actions in the Management Console is now visible with any authentication method.
- GitHub Actions can only be enabled if the required storage is also configured.
- '`ghe-repl-status` could silently fail if MSSQL replication is not configured.'
known_issues:
- The known issues for Release Candidate 1 still apply, excluding the bug fixes listed.

View file

@ -1,34 +1,35 @@
The following table shows, for each package manager, whether {% data variables.product.prodname_dependabot %} supports: dependencies in private {% data variables.product.prodname_dotcom %} repositories, and vendored dependencies.
The following table shows, for each package manager:
- The YAML value to use in the *dependabot.yml* file
- The supported versions of the package manager
- Whether dependencies in private {% data variables.product.prodname_dotcom %} repositories are supported
- Whether vendored dependencies are supported
Package manager | Private {% data variables.product.prodname_dotcom %} repositories | Vendoring
--- | :---:| :---:
Bundler: `bundler` | | **✓** |
Cargo: `cargo` | **✓** | |
Composer: `composer` | **✓** | |
Docker: `docker` | **✓** | |
Elixir: `hex` | | |
Elm: `elm` | **✓** | |
git submodule: `gitsubmodule` | **✓** | |
GitHub Actions: `github-actions` | **✓** | |
Go modules: `gomod` | **✓** | **✓** |
Gradle: `gradle` | **✓** | |
Maven: `maven` | **✓** | |
Mix: `mix` | **✓** | |
npm: `npm` | **✓** | |
NuGet: `nuget` | **✓** | |
pip: `pip` | | |
Terraform: `terraform` | **✓** | |
Package manager | YAML value | Supported versions | Private repositories | Vendoring
--- | --- | --- |:---:|:---:
Bundler | `bundler` | v1 | | **✓** |
Cargo | `cargo` | v1 | **✓** | |
Composer | `composer` | v1,v2 | **✓** | |
Docker | `docker` | v1 | **✓** | |
Elixir | `mix` | v1 | **✓** | |
Elm | `elm` | v0.18, v0.19 | **✓** | |
git submodule | `gitsubmodule` | N/A (no version) | **✓** | |
GitHub Actions | `github-actions` | N/A (no version) | **✓** | |
Go modules | `gomod` | v1 | **✓** | **✓** |
Gradle | `gradle` | see (A) below | **✓** | |
Maven | `maven` | see (B) below | **✓** | |
npm | `npm` | v6 | **✓** | |
NuGet | `nuget` | <= 4.8.</br>see (C) below | **✓** | |
pip | `pip` | v20 | | |
pipenv | `pip` | <= 2018.11.26 | | |
pip-compile | `pip` | 5.5.0 | | |
poetry | `pip` | v1 | | |
Terraform | `terraform` | <= 0.11 | **✓** | |
yarn | `npm` | v1 | | |
{% note %}
(A) {% data variables.product.prodname_dependabot %} doesn't run Gradle but supports updates to the following files: `build.gradle` and `build.gradle.kts` (for Kotlin projects).
**Note**: {% data variables.product.prodname_dependabot %} also supports the following package managers:
(B) {% data variables.product.prodname_dependabot %} doesn't run Maven but supports updates to `pom.xml` files.
-`yarn` (v1 only) (specify `npm`)
(C) {% data variables.product.prodname_dependabot %} doesn't run the NuGet CLI but does support most features up until version 4.8.
-`.gradle.kts` files, for Kotlin projects (specify `gradle`)
-`pipenv`, `pip-compile`, and `poetry` (specify `pip`)
For example, if you use `poetry` to manage your Python dependencies and want {% data variables.product.prodname_dependabot %} to monitor your dependency manifest file for new versions, use `package-ecosystem: "pip"` in your *dependabot.yml* file.
{% endnote %}
For package managers such as `pipenv` and `poetry`, you need to use the `pip` YAML value. For example, if you use `poetry` to manage your Python dependencies and want {% data variables.product.prodname_dependabot %} to monitor your dependency manifest file for new versions, use `package-ecosystem: "pip"` in your *dependabot.yml* file.
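For instance, a minimal *dependabot.yml* for a project that manages its Python dependencies with `poetry` might look like the following (the directory and schedule values are illustrative):

```yaml
# Hypothetical example — poetry, pipenv, and pip-compile all use the `pip` value
version: 2
updates:
  - package-ecosystem: "pip"
    directory: "/"            # location of the dependency manifests
    schedule:
      interval: "weekly"
```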

View file

@ -2,6 +2,6 @@ When you select the **Rebase and merge** option on a pull request on {% data var
To rebase and merge pull requests, you must have [write permissions](/articles/repository-permission-levels-for-an-organization/) in the repository, and the repository must [allow rebase merging](/articles/configuring-commit-rebasing-for-pull-requests/).
The rebase and merge behavior on {% data variables.product.product_name %} deviates slightly from `git rebase`. Rebase and merge on {% data variables.product.prodname_dotcom %} will always update the committer information and create new commit SHAs, whereas `git rebase` outside of {% data variables.product.prodname_dotcom %} does not change the committer information when the rebase happens on top of an ancestor commit. For more information about `git rebase`, see [the "Git rebase" chapter from the _Pro Git_ book](https://git-scm.com/docs/git-rebase).
The rebase and merge behavior on {% data variables.product.product_name %} deviates slightly from `git rebase`. Rebase and merge on {% data variables.product.prodname_dotcom %} will always update the committer information and create new commit SHAs, whereas `git rebase` outside of {% data variables.product.prodname_dotcom %} does not change the committer information when the rebase happens on top of an ancestor commit. For more information about `git rebase`, see [the official Git documentation](https://git-scm.com/docs/git-rebase).
For a visual representation of `git rebase`, see [The "Git Branching - Rebasing" chapter from the _Pro Git_ book](https://git-scm.com/book/en/Git-Branching-Rebasing).

View file

@ -1,3 +1,3 @@
Key | Type | Description
----|------|-------------
`action`|`string` | The action that was performed. Can be one of `opened`, `edited`, `closed`, `assigned`, `unassigned`, `review_requested`, `review_request_removed`, `ready_for_review`, `labeled`, `unlabeled`, `synchronize`, `locked`, `unlocked`, or `reopened`. If the action is `closed` and the `merged` key is `false`, the pull request was closed with unmerged commits. If the action is `closed` and the `merged` key is `true`, the pull request was merged.
`action`|`string` | The action that was performed. Can be one of `opened`, `edited`, `closed`, `assigned`, `unassigned`, `review_requested`, `review_request_removed`, `ready_for_review`, `converted_to_draft`, `labeled`, `unlabeled`, `synchronize`, `locked`, `unlocked`, or `reopened`. If the action is `closed` and the `merged` key is `false`, the pull request was closed with unmerged commits. If the action is `closed` and the `merged` key is `true`, the pull request was merged.
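As an illustration, a webhook consumer might branch on this key roughly as follows (the handler is hypothetical; only `action` and `pull_request.merged` come from the payload documented above):

```javascript
// Hypothetical handler for a pull_request webhook delivery
function describePullRequestEvent (payload) {
  const { action, pull_request: pullRequest } = payload

  if (action === 'closed') {
    // closed + merged === true  -> the pull request was merged
    // closed + merged === false -> closed with unmerged commits
    return pullRequest.merged ? 'merged' : 'closed without merging'
  }
  if (action === 'converted_to_draft') {
    return 'converted back to a draft'
  }
  return `pull request ${action}`
}
```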

View file

@ -153,6 +153,7 @@ product_sublanding:
no_result: Sorry, there is no guide that matches your filter.
filters:
type: Type
topic: Topic
all: All
guide_types:
overview: Overview

View file

@ -1,32 +1,39 @@
{% assign currentCategory = siteTree[currentLanguage][currentVersion].products[currentProduct].categories[breadcrumbs.category.href] %}
{% assign maxArticles = 9 %}
<div class="container-xl px-3 px-md-6 pt-3 pb-2">
<h2 class="mb-3 font-mktg">{% data ui.product_sublanding.all_guides %}</h2>
<form class="my-2">
<label for="type" class="text-uppercase f6 text-gray d-block">{% data ui.product_sublanding.filters.type %}</label>
<select class="form-select js-filter-card-filter-dropdown f4 text-bold border-0 rounded-0 border-top box-shadow-none pl-0 js-filter-card-filter-dropdown" name="type" aria-label="guide types">
<option value="">{% data ui.product_sublanding.filters.all %}</option>
{% for type in site.data.ui.product_sublanding.guide_types %}
<option value="{{ type[0] }}">{{ type[1] }}</option>
{% endfor %}
</select>
<form class="mt-2 mb-5 d-flex d-flex">
<div>
<label for="type" class="text-uppercase f6 text-gray d-block">{% data ui.product_sublanding.filters.type %}</label>
<select class="form-select js-filter-card-filter-dropdown f4 text-bold border-0 rounded-0 border-top box-shadow-none pl-0 js-filter-card-filter-dropdown" name="type" aria-label="guide types">
<option value="">{% data ui.product_sublanding.filters.all %}</option>
{% for type in site.data.ui.product_sublanding.guide_types %}
<option value="{{ type[0] }}">{{ type[1] }}</option>
{% endfor %}
</select>
</div>
<div class="mx-4">
<label for="topic" class="text-uppercase f6 text-gray d-block">{% data ui.product_sublanding.filters.topic %}</label>
<select class="form-select js-filter-card-filter-dropdown f4 text-bold border-0 rounded-0 border-top box-shadow-none pl-0 js-filter-card-filter-dropdown" name="topics" aria-label="guide topics">
<option value="">{% data ui.product_sublanding.filters.all %}</option>
{% for topic in page.allTopics %}
<option value="{{ topic }}">{{ topic }}</option>
{% endfor %}
</select>
</div>
</form>
<div class="d-flex flex-wrap mr-0 mr-md-n6 mr-lg-n8">
{% for article in currentCategory.articles %}
{% for article in page.includeGuides %}
{% assign card_display_class = "" %}
{% if forloop.index > maxArticles %}
{% assign card_display_class = "d-none" %}
{% endif %}
{% capture link_card %}
{% link_as_article_card {{ article[1].href }} %}
{% link_as_article_card {{ article.href }} %}
{% endcapture %}
{{ link_card | replace: "<display condition>", card_display_class }}

View file

@ -1,7 +1,14 @@
<div class="d-flex col-12 col-md-4 pr-0 pr-md-6 pr-lg-8 <display condition> js-filter-card" data-type="{{ type }}">
<a class="no-underline d-flex flex-column py-4 border-bottom" href="{{ fullPath }}">
<div class="d-flex col-12 col-md-4 pr-0 pr-md-6 pr-lg-8 <display condition> js-filter-card" data-type="{{ type.key }}" data-topics="{{ topics | join: ',' }}">
<a class="no-underline d-flex flex-column py-3 border-bottom" href="{{ fullPath }}">
<h4 class="h4 text-gray-dark mb-1">{{ title }}</h4>
<div class="text-purple h6 text-uppercase">{{ type }}</div>
<div class="text-purple h6 text-uppercase">{{ type.value }}</div>
<p class="text-gray my-3">{{ intro }}</p>
{% if topics.length %}
<div>
{% for topic in topics %}
<span class="IssueLabel bg-gradient--purple-pink text-white mr-1">{{ topic }}</span>
{% endfor %}
</div>
{% endif %}
</a>
</div>

View file

@ -1 +1,2 @@
<script id="search-options" type="application/json">{{ searchOptions }}</script>
<script src="{{ builtAssets.main.js }}"></script>

View file

@ -1,15 +1,19 @@
import Clipboard from 'clipboard'
export default () => {
const clipboard = new Clipboard('button.js-btn-copy')
const buttons = Array.from(document.querySelectorAll('button.js-btn-copy'))
clipboard.on('success', evt => {
const btn = evt.trigger
const beforeTooltip = btn.getAttribute('aria-label')
btn.setAttribute('aria-label', 'Copied!')
if (!buttons) return
setTimeout(() => {
btn.setAttribute('aria-label', beforeTooltip)
}, 2000)
})
buttons.forEach(button =>
button.addEventListener('click', async evt => {
const text = button.dataset.clipboardText
await navigator.clipboard.writeText(text)
const beforeTooltip = button.getAttribute('aria-label')
button.setAttribute('aria-label', 'Copied!')
setTimeout(() => {
button.setAttribute('aria-label', beforeTooltip)
}, 2000)
})
)
}

View file

@ -1,15 +1,17 @@
const { getPlatformFromUserAgent } = require('platform-utils')
import parseUserAgent from './user-agent'
const supportedPlatforms = ['mac', 'windows', 'linux']
const detectedPlatforms = new Set()
// Emphasize content for the visitor's OS (inferred from user agent string)
export default function displayPlatformSpecificContent () {
let platform = getDefaultPlatform() || getPlatformFromUserAgent()
let platform = getDefaultPlatform() || parseUserAgent().os
// adjust platform names to fit existing mac/windows/linux scheme
if (!platform) platform = 'mac' // default to 'mac' on mobile
if (platform === 'darwin') platform = 'mac'
if (platform === 'ios') platform = 'mac'
if (platform === 'android') platform = 'linux'
if (platform.startsWith('win')) platform = 'windows'
const platformsInContent = findPlatformSpecificContent(platform)

View file

@ -6,30 +6,39 @@ function matchCardBySearch (card, searchString) {
function matchCardByAttribute (card, attribute, value) {
if (attribute in card.dataset) {
return card.dataset[attribute] === value
const allValues = card.dataset[attribute].split(',')
return allValues.some(key => key === value)
}
return false
}
export default function cardsFilter () {
const inputFilter = document.querySelector('.js-filter-card-filter')
const dropdownFilter = document.querySelector('.js-filter-card-filter-dropdown')
const dropdownFilters = document.querySelectorAll('.js-filter-card-filter-dropdown')
const cards = Array.from(document.querySelectorAll('.js-filter-card'))
const showMoreButton = document.querySelector('.js-filter-card-show-more')
const noResults = document.querySelector('.js-filter-card-no-results')
// if jsFilterCardMax not set, assume no limit (well, at 99)
const maxCards = showMoreButton ? parseInt(showMoreButton.dataset.jsFilterCardMax || 99) : null
const noFilter = () => {
showMoreButton.classList.remove('d-none')
for (let index = 0; index < cards.length; index++) {
const card = cards[index]
// Hide all but the first n number of cards
if (index > maxCards - 1) {
card.classList.add('d-none')
} else {
card.classList.remove('d-none')
}
}
}
const filterEventHandler = (evt) => {
const { currentTarget } = evt
const value = currentTarget.value
// Show or hide the "Show more" button if there is a value
if (value) {
showMoreButton.classList.add('d-none')
} else {
showMoreButton.classList.remove('d-none')
}
showMoreButton.classList.add('d-none')
// Track whether or not we had at least one match
let hasMatches = false
@ -37,29 +46,34 @@ export default function cardsFilter () {
for (let index = 0; index < cards.length; index++) {
const card = cards[index]
// Filter was emptied
if (!value) {
// Make sure we don't show the "No results" blurb
hasMatches = true
// Hide all but the first n number of cards
if (index > maxCards - 1) {
card.classList.add('d-none')
} else {
card.classList.remove('d-none')
}
continue
}
let cardMatches = false
if (currentTarget.tagName === 'INPUT') {
// Filter was emptied
if (!value) {
noFilter()
// return hasMatches = true, so we don't show the "No results" blurb
hasMatches = true
continue
}
cardMatches = matchCardBySearch(card, value)
}
if (currentTarget.tagName === 'SELECT' && currentTarget.name) {
cardMatches = matchCardByAttribute(card, currentTarget.name, value)
const matches = []
// check all the other dropdowns
dropdownFilters.forEach(({ name, value }) => {
if (!name || !value) return
matches.push(matchCardByAttribute(card, name, value))
})
// if none of the filters is selected
if (matches.length === 0) {
noFilter()
// return hasMatches = true, so we don't show the "No results" blurb
hasMatches = true
continue
}
cardMatches = matches.every(value => value)
}
if (cardMatches) {
@ -89,8 +103,8 @@ export default function cardsFilter () {
})
}
if (dropdownFilter) {
dropdownFilter.addEventListener('change', filterEventHandler)
if (dropdownFilters) {
dropdownFilters.forEach(filter => filter.addEventListener('change', filterEventHandler))
}
if (showMoreButton) {

View file

@ -1,12 +1,8 @@
import { tags } from './hyperscript'
import { sendEvent } from './events'
const searchWithYourKeyboard = require('search-with-your-keyboard')
const truncate = require('html-truncate')
const languages = require('../lib/languages')
const allVersions = require('../lib/all-versions')
const nonEnterpriseDefaultVersion = require('../lib/non-enterprise-default-version')
import searchWithYourKeyboard from 'search-with-your-keyboard'
import truncate from 'html-truncate'
const languageCodes = Object.keys(languages)
const maxContentLength = 300
let $searchInputContainer
@ -29,8 +25,13 @@ export default function search () {
$searchOverlay = document.querySelector('.search-overlay-desktop')
// There's an index for every version/language combination
version = deriveVersionFromPath()
language = deriveLanguageCodeFromPath()
const {
languages,
versions,
nonEnterpriseDefaultVersion
} = JSON.parse(document.getElementById('search-options').text)
version = deriveVersionFromPath(versions, nonEnterpriseDefaultVersion)
language = deriveLanguageCodeFromPath(languages)
// Find search placeholder text in a <meta> tag, falling back to a default
const $placeholderMeta = document.querySelector('meta[name="site.data.ui.search.placeholder"]')
@ -109,23 +110,16 @@ function closeSearch () {
onSearch()
}
function deriveLanguageCodeFromPath () {
function deriveLanguageCodeFromPath (languageCodes) {
let languageCode = location.pathname.split('/')[1]
if (!languageCodes.includes(languageCode)) languageCode = 'en'
return languageCode
}
function deriveVersionFromPath () {
function deriveVersionFromPath (allVersions, nonEnterpriseDefaultVersion) {
// fall back to the non-enterprise default version (FPT currently) on the homepage, 404 page, etc.
const versionStr = location.pathname.split('/')[2] || nonEnterpriseDefaultVersion
const versionObject = allVersions[versionStr] || allVersions[nonEnterpriseDefaultVersion]
// if GHES, returns the release number like 2.21, 2.22, etc.
// if FPT, returns 'dotcom'
// if GHAE, returns 'ghae'
return versionObject.plan === 'enterprise-server'
? versionObject.currentRelease
: versionObject.miscBaseName
return allVersions[versionStr] || allVersions[nonEnterpriseDefaultVersion]
}
// Wait for the event to stop triggering for X milliseconds before responding
@ -287,7 +281,7 @@ function tmplSearchResult ({ url, breadcrumbs, heading, title, content }) {
)
}
// Convert em to mark tags in search responses
// Convert mark tags in search responses
function markify (text) {
const { mark } = tags
return text

View file

@ -1,8 +1,8 @@
import escape from 'lodash/escape'
const wordsLongerThan18Chars = /[\S]{18,}/g
const camelCaseChars = /([a-z])([A-Z])/g
const underscoresAfter12thChar = /([\w:]{12}[^_]*?)_/g
const slashChars = /([/\\])/g
const { escape } = require('lodash')
// This module improves table rendering on reference pages by inserting a <wbr>
// tag in code terms that use camelcase, slashes, or underscores, inspired by

View file

@ -102,9 +102,11 @@
</div>
</div>
<div class="py-6 border-top border-gray">
{% include 'article-cards' %}
</div>
{% if page.includeGuides %}
<div class="py-6 border-top border-gray">
{% include 'article-cards' %}
</div>
{% endif %}
<div class="border-top">
{% include small-footer %}

View file

@ -1,8 +0,0 @@
const fs = require('fs')
const matter = require('gray-matter')
module.exports = function addFrontmatterToFile (frontmatter, file) {
const { content, data } = matter(fs.readFileSync(file, 'utf8'))
Object.assign(data, frontmatter)
fs.writeFileSync(file, matter.stringify(content, data, { lineWidth: 10000 }))
}

View file

@ -11,5 +11,7 @@ module.exports = [
// Oneoff links that link checkers think are broken but are not.
'https://haveibeenpwned.com/',
'https://www.ilo.org/dyn/normlex/en/f\\?p=NORMLEXPUB:12100:0::NO::P12100_ILO_CODE:P029'
'https://www.ilo.org/dyn/normlex/en/f\\?p=NORMLEXPUB:12100:0::NO::P12100_ILO_CODE:P029',
'https://www.linkedin.com/company/github',
'https://www.facebook.com/'
]

View file

@ -94,6 +94,12 @@ const schema = {
type: 'string',
enum: guideTypes
},
topics: {
type: 'array'
},
includeGuides: {
type: 'array'
},
learningTracks: {
type: 'array'
},

View file

@ -4,7 +4,12 @@ const Link = require('./link')
module.exports = class LinkAsArticleCard extends Link {
async renderPageProps (page, ctx, props) {
const renderedProps = await super.renderPageProps(page, ctx, props)
const { type } = page
return { ...renderedProps, type }
const { type: typeKey, topics = [] } = page
const typeVal = typeKey ? ctx.site.data.ui.product_sublanding.guide_types[typeKey] : null
return {
...renderedProps,
type: { key: typeKey, value: typeVal },
topics
}
}
}

View file

@ -4,13 +4,10 @@ const path = require('path')
const cheerio = require('cheerio')
const patterns = require('./patterns')
const getMapTopicContent = require('./get-map-topic-content')
const rewriteAssetPathsToS3 = require('./rewrite-asset-paths-to-s3')
const rewriteLocalLinks = require('./rewrite-local-links')
const getApplicableVersions = require('./get-applicable-versions')
const encodeBracketedParentheses = require('./encode-bracketed-parentheses')
const generateRedirectsForPermalinks = require('./redirects/permalinks')
const getEnglishHeadings = require('./get-english-headings')
const useEnglishHeadings = require('./use-english-headings')
const getTocItems = require('./get-toc-items')
const pathUtils = require('./path-utils')
const Permalink = require('./permalink')
@ -23,6 +20,7 @@ const slash = require('slash')
const statsd = require('./statsd')
const fmfromf = require('./read-frontmatter')
const getLinkData = require('./get-link-data')
const union = require('lodash/union')
class Page {
static async init (opts) {
@ -72,6 +70,7 @@ class Page {
this.rawProduct = this.product
this.rawPermissions = this.permissions
this.rawLearningTracks = this.learningTracks
this.rawIncludeGuides = this.includeGuides
// a page should only be available in versions that its parent product is available in
const versionsParentProductIsNotAvailableIn = getApplicableVersions(this.versions, this.fullPath)
@ -153,13 +152,13 @@ class Page {
this.markdown = await this.getMarkdown()
}
// use English IDs/anchors for translated headings, so links don't break (see #8572)
if (this.languageCode !== 'en') {
const englishHeadings = getEnglishHeadings(this, context.pages)
context.englishHeadings = englishHeadings
}
this.intro = await renderContent(this.rawIntro, context)
// rewrite local links in the intro to include current language code and GHE version if needed
const introHtml = cheerio.load(this.intro)
rewriteLocalLinks(introHtml, context.currentVersion, context.currentLanguage)
this.intro = introHtml('body').html()
this.introPlainText = await renderContent(this.rawIntro, context, { textOnly: true })
this.title = await renderContent(this.rawTitle, context, { textOnly: true, encodeEntities: true })
this.shortTitle = await renderContent(this.shortTitle, context, { textOnly: true, encodeEntities: true })
@ -190,6 +189,7 @@ class Page {
}))
}
context.relativePath = this.relativePath
const html = await renderContent(markdown, context)
// product frontmatter may contain liquid
@ -217,32 +217,29 @@ class Page {
this.learningTracks = learningTracks
}
const $ = cheerio.load(html)
// set a flag so layout knows whether to render a mac/windows/linux switcher element
this.includesPlatformSpecificContent = $('[class^="platform-"], .mac, .windows, .linux').length > 0
// rewrite asset paths to s3 if it's a dotcom article on any GHE version
// or if it's an enterprise article on any GHE version EXCEPT latest version
rewriteAssetPathsToS3($, context.currentVersion, this.relativePath)
// use English IDs/anchors for translated headings, so links don't break (see #8572)
if (this.languageCode !== 'en') {
const englishHeadings = getEnglishHeadings(this, context.pages)
if (englishHeadings) useEnglishHeadings($, englishHeadings)
if (this.rawIncludeGuides) {
this.allTopics = []
this.includeGuides = await getLinkData(this.rawIncludeGuides, context)
this.includeGuides.map((guide) => {
const { page } = guide
guide.type = page.type
if (page.topics) {
this.allTopics = union(this.allTopics, page.topics)
guide.topics = page.topics
}
delete guide.page
return guide
})
}
// rewrite local links to include current language code and GHE version if needed
rewriteLocalLinks($, context.currentVersion, context.currentLanguage)
// set a flag so layout knows whether to render a mac/windows/linux switcher element
this.includesPlatformSpecificContent = (
html.includes('extended-markdown mac') ||
html.includes('extended-markdown windows') ||
html.includes('extended-markdown linux')
)
// wrap ordered list images in a container div
$('ol > li img').each((i, el) => {
$(el).wrap('<div class="procedural-image-wrapper" />')
})
const cleanedHTML = $('body').html()
return cleanedHTML
return html
}
// Allow other modules (like custom liquid tags) to make one-off requests

View file

@ -0,0 +1,33 @@
const unified = require('unified')
const markdown = require('remark-parse')
const emoji = require('remark-gemoji-to-emoji')
const remark2rehype = require('remark-rehype')
const raw = require('rehype-raw')
const slug = require('rehype-slug')
const autolinkHeadings = require('rehype-autolink-headings')
const highlight = require('rehype-highlight')
const html = require('rehype-stringify')
const graphql = require('highlightjs-graphql').definer
const remarkCodeExtra = require('remark-code-extra')
const codeHeader = require('./plugins/code-header')
const rewriteLocalLinks = require('./plugins/rewrite-local-links')
const useEnglishHeadings = require('./plugins/use-english-headings')
const rewriteAssetPathsToS3 = require('./plugins/rewrite-asset-paths-to-s3')
const wrapInElement = require('./plugins/wrap-in-element')
module.exports = function createProcessor (context) {
return unified()
.use(markdown)
.use(remarkCodeExtra, { transform: codeHeader })
.use(emoji)
.use(remark2rehype, { allowDangerousHTML: true })
.use(slug)
.use(useEnglishHeadings, context)
.use(autolinkHeadings, { behavior: 'wrap' })
.use(highlight, { languages: { graphql }, subset: false })
.use(raw)
.use(rewriteAssetPathsToS3, context)
.use(wrapInElement, { selector: 'ol > li img', wrapper: 'div.procedural-image-wrapper' })
.use(rewriteLocalLinks, { languageCode: context.currentLanguage, version: context.currentVersion })
.use(html)
}

View file

@ -0,0 +1,45 @@
const visit = require('unist-util-visit')
const latestEnterpriseRelease = require('../../enterprise-server-releases').latest
const nonEnterpriseDefaultVersion = require('../../non-enterprise-default-version')
const { getS3BucketPathFromVersion } = require('../../s3-bucket-path-utils')
const allVersions = require('../../all-versions')
const s3BasePath = 'https://github-images.s3.amazonaws.com'
const matcher = node => (
node.type === 'element' &&
node.tagName === 'img' &&
node.properties.src &&
node.properties.src.startsWith('/assets/images')
)
// This module rewrites asset paths on Enterprise versions to S3 paths.
// Source example: /assets/images/foo.png
// Rewritten: https://github-images.s3.amazonaws.com/enterprise/2.20/assets/images/foo.png
// The one exception is Admin pages on the latest GHES release.
module.exports = function rewriteAssetPathsToS3 ({ currentVersion, relativePath }) {
// Bail if we don't have a relativePath in this context
if (!relativePath) return
// skip if this is the homepage
if (relativePath === 'index.md') return
// if the current version is non-enterprise, do not rewrite
if (currentVersion === nonEnterpriseDefaultVersion) return
// the relativePath starts with the product, like /admin/foo or /github/foo
const product = relativePath.split('/')[0]
// if this is an Admin page on the latest GHES release, do not rewrite
if (product === 'admin' && allVersions[currentVersion].currentRelease === latestEnterpriseRelease) return
// if the version is enterprise-server@2.22, use `enterprise/2.22` as the bucket path
// otherwise, use the plan name, e.g., `github-ae`
const bucketPath = getS3BucketPathFromVersion(currentVersion)
return tree => {
visit(tree, matcher, node => {
// Rewrite the node's src
node.properties.src = `${s3BasePath}/${bucketPath}${node.properties.src}`
})
}
}

View file

@ -0,0 +1,102 @@
const path = require('path')
const visit = require('unist-util-visit')
const externalRedirects = Object.keys(require('../../redirects/external-sites'))
const { getPathWithoutLanguage, getVersionStringFromPath } = require('../../path-utils')
const { getNewVersionedPath } = require('../../old-versions-utils')
const patterns = require('../../patterns')
const { deprecated, latest } = require('../../enterprise-server-releases')
const nonEnterpriseDefaultVersion = require('../../non-enterprise-default-version')
const allVersions = require('../../all-versions')
const supportedVersions = Object.keys(allVersions)
const supportedPlans = Object.values(allVersions).map(v => v.plan)
const removeFPTFromPath = require('../../remove-fpt-from-path')
// Matches any <a> tags with an href that starts with `/`
const matcher = node => (
node.type === 'element' &&
node.tagName === 'a' &&
node.properties &&
node.properties.href &&
node.properties.href.startsWith('/')
)
// Content authors write links like `/some/article/path`, but they need to be
// rewritten on the fly to match the current language and page version
module.exports = function rewriteLocalLinks ({ languageCode, version }) {
// There's no languageCode or version passed, so nothing to do
if (!languageCode || !version) return
return tree => {
visit(tree, matcher, node => {
const newHref = getNewHref(node, languageCode, version)
if (newHref) {
node.properties.href = newHref
}
})
}
}
function getNewHref (node, languageCode, version) {
const { href } = node.properties
// Exceptions to link rewriting
if (href.startsWith('/assets')) return
if (href.startsWith('/public')) return
if (externalRedirects.includes(href)) return
let newHref = href
// If the link has a hardcoded plan or version in it, do not update other than adding a language code
// Examples:
// /enterprise-server@2.20/rest/reference/oauth-authorizations
// /enterprise-server/rest/reference/oauth-authorizations (this redirects to the latest version)
// /enterprise-server@latest/rest/reference/oauth-authorizations (this redirects to the latest version)
const firstLinkSegment = href.split('/')[1]
if ([...supportedPlans, ...supportedVersions, 'enterprise-server@latest'].includes(firstLinkSegment)) {
newHref = path.join('/', languageCode, href)
}
// If the link includes a deprecated version, do not update other than adding a language code
// Example: /enterprise/11.10.340/admin/articles/upgrading-to-the-latest-release
const oldEnterpriseVersionNumber = href.match(patterns.getEnterpriseVersionNumber)
if (oldEnterpriseVersionNumber && deprecated.includes(oldEnterpriseVersionNumber[1])) {
newHref = path.join('/', languageCode, href)
}
if (newHref === href) {
// start clean with no language (TOC pages already include the lang codes via lib/liquid-tags/link.js)
const hrefWithoutLang = getPathWithoutLanguage(href)
// normalize any legacy links so they conform to new link structure
newHref = path.posix.join('/', languageCode, getNewVersionedPath(hrefWithoutLang))
// get the current version from the link
const versionFromHref = getVersionStringFromPath(newHref)
// ------ BEGIN ONE-OFF OVERRIDES ------//
// dotcom-only links always point to dotcom
if (node.properties.className && node.properties.className.includes('dotcom-only')) {
version = nonEnterpriseDefaultVersion
}
// desktop links always point to dotcom
if (patterns.desktop.test(hrefWithoutLang)) {
version = nonEnterpriseDefaultVersion
}
// admin links on dotcom always point to Enterprise
if (patterns.adminProduct.test(hrefWithoutLang) && version === nonEnterpriseDefaultVersion) {
version = `enterprise-server@${latest}`
}
// insights links on dotcom always point to Enterprise
if (patterns.insightsProduct.test(hrefWithoutLang) && version === nonEnterpriseDefaultVersion) {
version = `enterprise-server@${latest}`
}
// ------ END ONE-OFF OVERRIDES ------//
// update the version in the link
newHref = removeFPTFromPath(newHref.replace(versionFromHref, version))
}
newHref = newHref.replace(patterns.trailingSlash, '$1')
return newHref
}

View file

@ -0,0 +1,29 @@
const GithubSlugger = require('github-slugger')
const Entities = require('html-entities').XmlEntities
const toString = require('hast-util-to-string')
const visit = require('unist-util-visit')
const slugger = new GithubSlugger()
const entities = new Entities()
const matcher = node => (
node.type === 'element' &&
['h2', 'h3', 'h4'].includes(node.tagName)
)
// replace translated IDs and links in headings with English
module.exports = function useEnglishHeadings ({ englishHeadings }) {
if (!englishHeadings) return
return tree => {
visit(tree, matcher, node => {
slugger.reset()
// Get the plain text content of the heading node
const text = toString(node)
// find English heading in the collection
const englishHeading = englishHeadings[entities.encode(text)]
// get English slug
const englishSlug = slugger.slug(englishHeading)
// use English slug for heading ID and link
node.properties.id = englishSlug
})
}
}

View file

@ -0,0 +1,33 @@
const visit = require('unist-util-visit')
const { selectAll } = require('hast-util-select')
const parseSelector = require('hast-util-parse-selector')
/*
* Attacher
*/
module.exports = options => {
options = options || {}
const selector = options.selector || options.select || 'body'
const wrapper = options.wrapper || options.wrap
/*
* Transformer
*/
return tree => {
if (typeof wrapper !== 'string') {
throw new TypeError('Expected a `string` as wrapper')
}
if (typeof selector !== 'string') {
throw new TypeError('Expected a `string` as selector')
}
for (const match of selectAll(options.selector, tree)) {
visit(tree, match, (node, i, parent) => {
const wrapper = parseSelector('div')
wrapper.children = [node]
parent.children[i] = wrapper
})
}
}
}

View file

@ -1,11 +1,9 @@
const liquid = require('./liquid')
const codeHeader = require('./plugins/code-header')
const hubdown = require('hubdown')
const remarkCodeExtra = require('remark-code-extra')
const cheerio = require('cheerio')
const Entities = require('html-entities').XmlEntities
const entities = new Entities()
const stripHtmlComments = require('strip-html-comments')
const createProcessor = require('./create-processor')
// used below to remove extra newlines in TOC lists
const endLine = '</a>\r?\n'
@ -54,13 +52,9 @@ module.exports = async function renderContent (
// statements so that extra space doesn't mess with list numbering
template = template.replace(/(\r?\n){3}/g, '\n\n')
let { content: html } = await hubdown(template, {
// Disable automatic language guessing in syntax highlighting
highlight: { subset: false },
runBefore: [[
remarkCodeExtra, { transform: codeHeader }
]]
})
const processor = createProcessor(context)
const vFile = await processor.process(template)
let html = vFile.toString()
// Remove unwanted newlines (which appear as spaces) from inline tags inside tables
if (html.includes('<table>')) html = removeNewlinesFromInlineTags(html)

View file

@ -9264,6 +9264,20 @@
},
"descriptionHTML": "<p>A cursor, as given in the <a href=\"https://docs.github.com/rest/overview/resources-in-the-rest-api#link-header\">Link header</a>. If specified, the query only searches for events before this cursor.</p>"
},
{
"name": "order",
"description": "The order of audit log events. To list newest events first, specify `desc`. To list oldest events first, specify `asc`.\n\nThe default is `desc`.",
"in": "query",
"required": false,
"schema": {
"type": "string",
"enum": [
"desc",
"asc"
]
},
"descriptionHTML": "<p>The order of audit log events. To list newest events first, specify <code>desc</code>. To list oldest events first, specify <code>asc</code>.</p>\n<p>The default is <code>desc</code>.</p>"
},
{
"name": "per_page",
"description": "Results per page (max 100)",
@ -9288,7 +9302,7 @@
}
],
"summary": "Get the audit log for an enterprise",
"description": "**Note:** The audit log REST API is currently in beta and is subject to change. To join the beta, talk to your services or sales contact at GitHub.\n\nGets the audit log for an enterprise. To use this endpoint, you must be an enterprise admin, and you must use an access token with the `admin:enterprise` scope.",
"description": "**Note:** The audit log REST API is currently in beta and is subject to change.\n\nGets the audit log for an enterprise. To use this endpoint, you must be an enterprise admin, and you must use an access token with the `admin:enterprise` scope.",
"operationId": "audit-log/get-audit-log",
"tags": [
"audit-log"
@ -9311,7 +9325,7 @@
"subcategoryLabel": "Audit log",
"notes": [],
"bodyParameters": [],
"descriptionHTML": "<p><strong>Note:</strong> The audit log REST API is currently in beta and is subject to change. To join the beta, talk to your services or sales contact at GitHub.</p>\n<p>Gets the audit log for an enterprise. To use this endpoint, you must be an enterprise admin, and you must use an access token with the <code>admin:enterprise</code> scope.</p>",
"descriptionHTML": "<p><strong>Note:</strong> The audit log REST API is currently in beta and is subject to change.</p>\n<p>Gets the audit log for an enterprise. To use this endpoint, you must be an enterprise admin, and you must use an access token with the <code>admin:enterprise</code> scope.</p>",
"responses": [
{
"httpStatusCode": "200",
@ -17709,6 +17723,20 @@
},
"descriptionHTML": "<p>A cursor, as given in the <a href=\"https://docs.github.com/rest/overview/resources-in-the-rest-api#link-header\">Link header</a>. If specified, the query only searches for events before this cursor.</p>"
},
{
"name": "order",
"description": "The order of audit log events. To list newest events first, specify `desc`. To list oldest events first, specify `asc`.\n\nThe default is `desc`.",
"in": "query",
"required": false,
"schema": {
"type": "string",
"enum": [
"desc",
"asc"
]
},
"descriptionHTML": "<p>The order of audit log events. To list newest events first, specify <code>desc</code>. To list oldest events first, specify <code>asc</code>.</p>\n<p>The default is <code>desc</code>.</p>"
},
{
"name": "per_page",
"description": "Results per page (max 100)",
@ -17733,7 +17761,7 @@
}
],
"summary": "Get the audit log for an organization",
"description": "**Note:** The audit log REST API is currently in beta and is subject to change. To join the beta, talk to your services or sales contact at GitHub.\n\nGets the audit log for an organization. For more information, see \"[Reviewing the audit log for your organization](https://docs.github.com/github/setting-up-and-managing-organizations-and-teams/reviewing-the-audit-log-for-your-organization).\"\n\nTo use this endpoint, you must be an organization owner, and you must use an access token with the `admin:org` scope. GitHub Apps must have the `organization_administration` read permission to use this endpoint.",
"description": "**Note:** The audit log REST API is currently in beta and is subject to change.\n\nGets the audit log for an organization. For more information, see \"[Reviewing the audit log for your organization](https://docs.github.com/github/setting-up-and-managing-organizations-and-teams/reviewing-the-audit-log-for-your-organization).\"\n\nTo use this endpoint, you must be an organization owner, and you must use an access token with the `admin:org` scope. GitHub Apps must have the `organization_administration` read permission to use this endpoint.",
"operationId": "orgs/get-audit-log",
"tags": [
"orgs"
@ -17754,7 +17782,7 @@
"categoryLabel": "Orgs",
"notes": [],
"bodyParameters": [],
"descriptionHTML": "<p><strong>Note:</strong> The audit log REST API is currently in beta and is subject to change. To join the beta, talk to your services or sales contact at GitHub.</p>\n<p>Gets the audit log for an organization. For more information, see \"<a href=\"https://docs.github.com/github/setting-up-and-managing-organizations-and-teams/reviewing-the-audit-log-for-your-organization\">Reviewing the audit log for your organization</a>.\"</p>\n<p>To use this endpoint, you must be an organization owner, and you must use an access token with the <code>admin:org</code> scope. GitHub Apps must have the <code>organization_administration</code> read permission to use this endpoint.</p>",
"descriptionHTML": "<p><strong>Note:</strong> The audit log REST API is currently in beta and is subject to change.</p>\n<p>Gets the audit log for an organization. For more information, see \"<a href=\"https://docs.github.com/github/setting-up-and-managing-organizations-and-teams/reviewing-the-audit-log-for-your-organization\">Reviewing the audit log for your organization</a>.\"</p>\n<p>To use this endpoint, you must be an organization owner, and you must use an access token with the <code>admin:org</code> scope. GitHub Apps must have the <code>organization_administration</code> read permission to use this endpoint.</p>",
"responses": [
{
"httpStatusCode": "200",

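The change above adds an `order` query parameter to the enterprise and organization audit log endpoints, accepting `asc` (oldest first) or `desc` (newest first), with `desc` as the default. A hypothetical request that lists the oldest enterprise events first might look like the sketch below; the enterprise slug and token are placeholders, and node-fetch is assumed to be available.

// Hypothetical example: fetch the oldest audit log events first by
// passing order=asc. Requires an enterprise admin token with the
// admin:enterprise scope.
const fetch = require('node-fetch')

async function getOldestAuditLogEvents (enterprise, token) {
  const url = `https://api.github.com/enterprises/${enterprise}/audit-log?order=asc&per_page=100`
  const response = await fetch(url, {
    headers: {
      accept: 'application/vnd.github.v3+json',
      authorization: `token ${token}`
    }
  })
  if (!response.ok) throw new Error(`Audit log request failed: ${response.status}`)
  return response.json()
}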

@ -21311,7 +21311,7 @@
"/enterprises/{enterprise}/audit-log": {
"get": {
"summary": "Get the audit log for an enterprise",
"description": "**Note:** The audit log REST API is currently in beta and is subject to change. To join the beta, talk to your services or sales contact at GitHub.\n\nGets the audit log for an enterprise. To use this endpoint, you must be an enterprise admin, and you must use an access token with the `admin:enterprise` scope.",
"description": "**Note:** The audit log REST API is currently in beta and is subject to change.\n\nGets the audit log for an enterprise. To use this endpoint, you must be an enterprise admin, and you must use an access token with the `admin:enterprise` scope.",
"operationId": "audit-log/get-audit-log",
"tags": [
"audit-log"
@ -21371,6 +21371,19 @@
"type": "string"
}
},
{
"name": "order",
"description": "The order of audit log events. To list newest events first, specify `desc`. To list oldest events first, specify `asc`.\n\nThe default is `desc`.",
"in": "query",
"required": false,
"schema": {
"type": "string",
"enum": [
"desc",
"asc"
]
}
},
{
"name": "per_page",
"description": "Results per page (max 100)",
@ -54686,7 +54699,7 @@
"/orgs/{org}/audit-log": {
"get": {
"summary": "Get the audit log for an organization",
"description": "**Note:** The audit log REST API is currently in beta and is subject to change. To join the beta, talk to your services or sales contact at GitHub.\n\nGets the audit log for an organization. For more information, see \"[Reviewing the audit log for your organization](https://docs.github.com/github/setting-up-and-managing-organizations-and-teams/reviewing-the-audit-log-for-your-organization).\"\n\nTo use this endpoint, you must be an organization owner, and you must use an access token with the `admin:org` scope. GitHub Apps must have the `organization_administration` read permission to use this endpoint.",
"description": "**Note:** The audit log REST API is currently in beta and is subject to change.\n\nGets the audit log for an organization. For more information, see \"[Reviewing the audit log for your organization](https://docs.github.com/github/setting-up-and-managing-organizations-and-teams/reviewing-the-audit-log-for-your-organization).\"\n\nTo use this endpoint, you must be an organization owner, and you must use an access token with the `admin:org` scope. GitHub Apps must have the `organization_administration` read permission to use this endpoint.",
"operationId": "orgs/get-audit-log",
"tags": [
"orgs"
@ -54745,6 +54758,19 @@
"type": "string"
}
},
{
"name": "order",
"description": "The order of audit log events. To list newest events first, specify `desc`. To list oldest events first, specify `asc`.\n\nThe default is `desc`.",
"in": "query",
"required": false,
"schema": {
"type": "string",
"enum": [
"desc",
"asc"
]
}
},
{
"name": "per_page",
"description": "Results per page (max 100)",

21
lib/search/compress.js Normal file

@ -0,0 +1,21 @@
const { promisify } = require('util')
const zlib = require('zlib')

// Promisified Brotli helpers from Node's built-in zlib module
const brotliCompress = promisify(zlib.brotliCompress)
const brotliDecompress = promisify(zlib.brotliDecompress)

// Tune Brotli for UTF-8 text (the search indexes are JSON) and use a
// mid-range quality setting to balance compression ratio against speed
const options = {
  params: {
    [zlib.constants.BROTLI_PARAM_MODE]: zlib.constants.BROTLI_MODE_TEXT,
    [zlib.constants.BROTLI_PARAM_QUALITY]: 6
  }
}

module.exports = {
  async compress (data) {
    return brotliCompress(data, options)
  },

  async decompress (data) {
    return brotliDecompress(data, options)
  }
}
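A hypothetical usage sketch for this module (not part of the commit): compressing a serialized search index before writing it to disk, then decompressing it on read. The file path and records are placeholders.

// Hypothetical usage of lib/search/compress.js with a small JSON payload
const fs = require('fs')
const { compress, decompress } = require('./lib/search/compress')

async function main () {
  const records = JSON.stringify([{ title: 'Example page', url: '/en/example' }])

  const compressed = await compress(records)
  fs.writeFileSync('/tmp/example-records.json.br', compressed)

  const restored = await decompress(fs.readFileSync('/tmp/example-records.json.br'))
  console.log(JSON.parse(restored.toString()))
}

main()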

Binary data
lib/search/indexes/github-docs-2.20-cn-records.json.br Normal file

Binary file not shown.

Binary data
lib/search/indexes/github-docs-2.20-cn.json.br Normal file

Binary file not shown.

Some files were not shown because too many files changed in this diff.