Merge branch 'main' into cklin/codeql-cli-2.11.5
This commit is contained in:
Commit c51babb6c6

@@ -1,20 +1,17 @@
version: 2
updates:
- package-ecosystem: "npm"
- package-ecosystem: npm
directory: "/"
schedule:
interval: "weekly"
day: "thursday" # Gives us a working day to merge this before our typical release
interval: weekly
labels:
- "Update dependencies"
- Update dependencies
ignore:
- dependency-name: "*"
update-types: ["version-update:semver-minor", "version-update:semver-patch"]
- package-ecosystem: "npm"
directory: "/runner"
update-types:
- version-update:semver-minor
- version-update:semver-patch
- package-ecosystem: github-actions
directory: "/"
schedule:
interval: "weekly"
day: "thursday" # Gives us a working day to merge this before our typical release
ignore:
- dependency-name: "*"
update-types: ["version-update:semver-minor", "version-update:semver-patch"]
interval: weekly
@@ -42,11 +42,11 @@ jobs:
uses: ./.github/prepare-test
with:
version: ${{ matrix.version }}
- uses: swift-actions/setup-swift@5cdaa9161ad1f55ae39a5ea1784ef96de72f95d9
- uses: swift-actions/setup-swift@194625b58a582570f61cc707c3b558086c26b723
# Windows doesn't support Swift, and only macOS latest and nightly-latest support Swift 5.7.1.
if: runner.os == 'Linux' || (runner.os == 'macOS' && matrix.version == 'cached')
with:
swift-version: '5.7'
swift-version: 5.7.0
- uses: ./../action/init
with:
languages: javascript
@@ -65,11 +65,11 @@ jobs:
uses: actions/setup-go@v3
with:
go-version: ^1.13.1
- uses: swift-actions/setup-swift@5cdaa9161ad1f55ae39a5ea1784ef96de72f95d9
- uses: swift-actions/setup-swift@194625b58a582570f61cc707c3b558086c26b723
# Only macOS latest and nightly-latest support Swift 5.7.1
if: runner.os == 'Linux' || matrix.version == 'cached'
with:
swift-version: '5.7'
swift-version: 5.7.0

- uses: ./../action/init
with:
@@ -0,0 +1,72 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# pip install ruamel.yaml && python3 sync.py
# to regenerate this file.

name: PR Check - Submit SARIF after failure
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GO111MODULE: auto
on:
push:
branches:
- main
- releases/v1
- releases/v2
pull_request:
types:
- opened
- synchronize
- reopened
- ready_for_review
workflow_dispatch: {}
jobs:
submit-sarif-failure:
strategy:
matrix:
include:
- os: ubuntu-latest
version: latest
- os: ubuntu-latest
version: cached
- os: ubuntu-latest
version: nightly-latest
name: Submit SARIF after failure
timeout-minutes: 45
runs-on: ${{ matrix.os }}
steps:
- name: Check out repository
uses: actions/checkout@v3
- name: Prepare test
id: prepare-test
uses: ./.github/prepare-test
with:
version: ${{ matrix.version }}
- uses: actions/checkout@v3
- uses: ./init
with:
languages: javascript
- name: Fail
# We want this job to pass if the Action correctly uploads the SARIF file for
# the failed run.
# Setting this step to continue on error means that it is marked as completing
# successfully, so will not fail the job.
continue-on-error: true
run: exit 1
- uses: ./analyze
# In a real workflow, this step wouldn't run. Since we used `continue-on-error`
# above, we manually disable it with an `if` condition.
if: false
with:
category: /test-codeql-version:${{ matrix.version }}
env:
# Internal-only environment variable used to indicate that the post-init Action
# should expect to upload a SARIF file for the failed run.
CODEQL_ACTION_EXPECT_UPLOAD_FAILED_SARIF: true
# Make sure the uploading SARIF files feature is enabled.
CODEQL_ACTION_UPLOAD_FAILED_SARIF: true
# Upload the failed SARIF file as an integration test of the API endpoint.
CODEQL_ACTION_TEST_MODE: false
# Mark telemetry for this workflow so it can be treated separately.
CODEQL_ACTION_TESTING_ENVIRONMENT: codeql-action-pr-checks
@@ -42,11 +42,11 @@ jobs:
uses: ./.github/prepare-test
with:
version: ${{ matrix.version }}
- uses: swift-actions/setup-swift@5cdaa9161ad1f55ae39a5ea1784ef96de72f95d9
- uses: swift-actions/setup-swift@194625b58a582570f61cc707c3b558086c26b723
# Only macOS latest and nightly-latest support Swift 5.7.1
if: runner.os == 'Linux' || matrix.version == 'cached'
with:
swift-version: '5.7'
swift-version: 5.7.0
- uses: ./../action/init
with:
languages: swift
@@ -48,11 +48,11 @@ jobs:
uses: ./.github/prepare-test
with:
version: ${{ matrix.version }}
- uses: swift-actions/setup-swift@5cdaa9161ad1f55ae39a5ea1784ef96de72f95d9
- uses: swift-actions/setup-swift@194625b58a582570f61cc707c3b558086c26b723
# Only macOS latest and nightly-latest support Swift 5.7.1
if: runner.os == 'Linux' || matrix.version == 'cached'
with:
swift-version: '5.7'
swift-version: 5.7.0
- uses: ./../action/init
with:
languages: swift
@@ -88,7 +88,7 @@ jobs:
fi

- name: Set up Python
uses: actions/setup-python@v3
uses: actions/setup-python@v4
with:
python-version: 3.8
@@ -156,7 +156,7 @@ jobs:
# Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
- uses: actions/checkout@v3

- uses: actions/setup-python@v3
- uses: actions/setup-python@v4
with:
python-version: ${{ matrix.python_version }}
@@ -29,7 +29,7 @@ jobs:
fetch-depth: 0

- name: Set up Python
uses: actions/setup-python@v3
uses: actions/setup-python@v4
with:
python-version: 3.8
@@ -13,7 +13,7 @@ jobs:

steps:
- name: Setup Python
uses: actions/setup-python@v3
uses: actions/setup-python@v4
with:
python-version: "3.7"
- name: Checkout CodeQL Action
@@ -35,7 +35,7 @@ jobs:
env:
ENTERPRISE_RELEASES_PATH: ${{ github.workspace }}/enterprise-releases/
- name: Commit Changes
uses: peter-evans/create-pull-request@c7f493a8000b8aeb17a1332e326ba76b57cb83eb # v3.4.1
uses: peter-evans/create-pull-request@2b011faafdcbc9ceb11414d64d0573f37c774b04 # v4.2.3
with:
commit-message: Update supported GitHub Enterprise Server versions.
title: Update supported GitHub Enterprise Server versions.
@@ -3,6 +3,8 @@
## [UNRELEASED]

- Update default CodeQL bundle version to 2.11.5. [#1412](https://github.com/github/codeql-action/pull/1412)
- Add a step that tries to upload a SARIF file for the workflow run when that workflow run fails. This will help better surface failed code scanning workflow runs. [#1393](https://github.com/github/codeql-action/pull/1393)
- Python automatic dependency installation will no longer consider dependency code installed in venv as user-written, for projects using Poetry that specify `virtualenvs.in-project = true` in their `poetry.toml`. [#1419](https://github.com/github/codeql-action/pull/1419).

## 2.1.35 - 01 Dec 2022
@@ -12,6 +12,7 @@ inputs:
upload:
description: Upload the SARIF file to Code Scanning
required: false
# If changing this, make sure to update workflow.ts accordingly.
default: "true"
cleanup-level:
description: "Level of cleanup to perform on CodeQL databases at the end of the analyze step. This should either be 'none' to skip cleanup, or be a valid argument for the --mode flag of the CodeQL CLI command 'codeql database cleanup' as documented at https://codeql.github.com/docs/codeql-cli/manual/database-cleanup"
@@ -44,6 +45,7 @@ inputs:
checkout_path:
description: "The path at which the analyzed repository was checked out. Used to relativize any absolute paths in the uploaded SARIF file."
required: false
# If changing this, make sure to update workflow.ts accordingly.
default: ${{ github.workspace }}
ref:
description: "The ref where results will be uploaded. If not provided, the Action will use the GITHUB_REF environment variable. If provided, the sha input must be provided as well. This input is not available in pull requests from forks."
@@ -173,10 +173,10 @@ async function getAutomationID() {
exports.getAutomationID = getAutomationID;
function computeAutomationID(analysis_key, environment) {
let automationID = `${analysis_key}/`;
// the id has to be deterministic so we sort the fields
if (environment !== undefined && environment !== "null") {
const environmentObject = JSON.parse(environment);
for (const entry of Object.entries(environmentObject).sort()) {
const matrix = (0, util_1.parseMatrixInput)(environment);
if (matrix !== undefined) {
// the id has to be deterministic so we sort the fields
for (const entry of Object.entries(matrix).sort()) {
if (typeof entry[1] === "string") {
automationID += `${entry[0]}:${entry[1]}/`;
}
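The hunk above switches computeAutomationID from parsing the raw environment string to going through parseMatrixInput. As a reading aid, a minimal TypeScript sketch of the same logic; parseMatrixInput is re-implemented here only for illustration and is assumed to return undefined for missing or "null" input:

    // Sketch only: mirrors the updated computeAutomationID shown in the hunk above.
    function parseMatrixInput(matrixInput?: string): { [key: string]: unknown } | undefined {
      if (matrixInput === undefined || matrixInput === "null") {
        return undefined; // assumption: no matrix provided
      }
      return JSON.parse(matrixInput) as { [key: string]: unknown };
    }

    function computeAutomationID(analysisKey: string, environment?: string): string {
      let automationID = `${analysisKey}/`;
      const matrix = parseMatrixInput(environment);
      if (matrix !== undefined) {
        // The ID has to be deterministic, so sort the matrix fields before appending them.
        for (const [key, value] of Object.entries(matrix).sort()) {
          if (typeof value === "string") {
            automationID += `${key}:${value}/`;
          }
        }
      }
      return automationID;
    }

With this sketch, computeAutomationID(".github/workflows/codeql.yml:analyze", '{"language":"javascript"}') evaluates to ".github/workflows/codeql.yml:analyze/language:javascript/".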
File diff suppressed because one or more lines are too long
@@ -39,6 +39,7 @@ const feature_flags_1 = require("./feature-flags");
const languages_1 = require("./languages");
const logging_1 = require("./logging");
const repository_1 = require("./repository");
const shared_environment_1 = require("./shared-environment");
const trap_caching_1 = require("./trap-caching");
const upload_lib = __importStar(require("./upload-lib"));
const util = __importStar(require("./util"));
@@ -176,8 +177,9 @@ async function run() {
}
core.setOutput("db-locations", dbLocations);
if (runStats && actionsUtil.getRequiredInput("upload") === "true") {
uploadResult = await upload_lib.uploadFromActions(outputDir, logger);
uploadResult = await upload_lib.uploadFromActions(outputDir, actionsUtil.getRequiredInput("checkout_path"), actionsUtil.getOptionalInput("category"), logger);
core.setOutput("sarif-id", uploadResult.sarifID);
core.exportVariable(shared_environment_1.CODEQL_ACTION_ANALYZE_DID_UPLOAD_SARIF, "true");
}
else {
logger.info("Not uploading results");
File diff suppressed because one or more lines are too long
@@ -207,7 +207,7 @@ async function runQueries(sarifFolder, memoryFlag, addSnippetsFlag, threadsFlag,
return statusReport;
async function runInterpretResults(language, queries, sarifFile, enableDebugLogging) {
const databasePath = util.getCodeQLDatabasePath(config, language);
return await codeql.databaseInterpretResults(databasePath, queries, sarifFile, addSnippetsFlag, threadsFlag, enableDebugLogging ? "-vv" : "-v", automationDetailsId, featureEnablement);
return await codeql.databaseInterpretResults(databasePath, queries, sarifFile, addSnippetsFlag, threadsFlag, enableDebugLogging ? "-vv" : "-v", automationDetailsId);
}
async function runPrintLinesOfCode(language) {
const databasePath = util.getCodeQLDatabasePath(config, language);
File diff suppressed because one or more lines are too long
@@ -36,7 +36,6 @@ const actions_util_1 = require("./actions-util");
const api = __importStar(require("./api-client"));
const defaults = __importStar(require("./defaults.json")); // Referenced from codeql-action-sync-tool!
const error_matcher_1 = require("./error-matcher");
const feature_flags_1 = require("./feature-flags");
const languages_1 = require("./languages");
const toolrunner_error_catcher_1 = require("./toolrunner-error-catcher");
const trap_caching_1 = require("./trap-caching");
@@ -77,6 +76,7 @@ const CODEQL_VERSION_LUA_TRACER_CONFIG = "2.10.0";
exports.CODEQL_VERSION_CONFIG_FILES = "2.10.1";
const CODEQL_VERSION_LUA_TRACING_GO_WINDOWS_FIXED = "2.10.4";
exports.CODEQL_VERSION_GHES_PACK_DOWNLOAD = "2.10.4";
const CODEQL_VERSION_FILE_BASELINE_INFORMATION = "2.11.3";
/**
* This variable controls using the new style of tracing from the CodeQL
* CLI. In particular, with versions above this we will use both indirect
@@ -371,6 +371,7 @@ function setCodeQL(partialCodeql) {
databaseRunQueries: resolveFunction(partialCodeql, "databaseRunQueries"),
databaseInterpretResults: resolveFunction(partialCodeql, "databaseInterpretResults"),
databasePrintBaseline: resolveFunction(partialCodeql, "databasePrintBaseline"),
diagnosticsExport: resolveFunction(partialCodeql, "diagnosticsExport"),
};
return cachedCodeQL;
}
@@ -668,7 +669,7 @@ async function getCodeQLForCmd(cmd, checkVersion) {
}
await (0, toolrunner_error_catcher_1.toolrunnerErrorCatcher)(cmd, codeqlArgs, error_matcher_1.errorMatchers);
},
async databaseInterpretResults(databasePath, querySuitePaths, sarifFile, addSnippetsFlag, threadsFlag, verbosityFlag, automationDetailsId, featureEnablement) {
async databaseInterpretResults(databasePath, querySuitePaths, sarifFile, addSnippetsFlag, threadsFlag, verbosityFlag, automationDetailsId) {
const codeqlArgs = [
"database",
"interpret-results",
@@ -687,7 +688,7 @@ async function getCodeQLForCmd(cmd, checkVersion) {
if (automationDetailsId !== undefined) {
codeqlArgs.push("--sarif-category", automationDetailsId);
}
if (await featureEnablement.getValue(feature_flags_1.Feature.FileBaselineInformationEnabled, this)) {
if (await util.codeQlVersionAbove(this, CODEQL_VERSION_FILE_BASELINE_INFORMATION)) {
codeqlArgs.push("--sarif-add-baseline-file-info");
}
codeqlArgs.push(databasePath);
@@ -772,6 +773,19 @@ async function getCodeQLForCmd(cmd, checkVersion) {
];
await new toolrunner.ToolRunner(cmd, args).exec();
},
async diagnosticsExport(sarifFile, automationDetailsId) {
const args = [
"diagnostics",
"export",
"--format=sarif-latest",
`--output=${sarifFile}`,
...getExtraOptionsFromEnv(["diagnostics", "export"]),
];
if (automationDetailsId !== undefined) {
args.push("--sarif-category", automationDetailsId);
}
await new toolrunner.ToolRunner(cmd, args).exec();
},
};
// To ensure that status reports include the CodeQL CLI version wherever
// possible, we want to call getVersion(), which populates the version value
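The new diagnosticsExport wrapper above runs the CLI as codeql diagnostics export --format=sarif-latest --output=<file>, appending --sarif-category when an automation details ID is supplied. A hedged TypeScript usage sketch; getCodeQL is the factory used elsewhere in this diff, while the category value here is only an illustrative placeholder:

    import { getCodeQL } from "./codeql";

    // Sketch: export a diagnostics-only SARIF file for a run that produced no analysis results.
    async function exportFailedRunDiagnostics(codeQLCmd: string): Promise<void> {
      const codeql = await getCodeQL(codeQLCmd);
      // "../codeql-failed-run.sarif" matches the path the init post step uses in this diff;
      // "/language:javascript" is a made-up category for the example.
      await codeql.diagnosticsExport("../codeql-failed-run.sarif", "/language:javascript");
    }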
File diff suppressed because one or more lines are too long
@@ -311,7 +311,7 @@ for (const [isFeatureEnabled, toolsInput, shouldToolcacheBeBypassed,] of TOOLCAC
sinon.stub(codeqlObject, "getVersion").resolves("2.7.0");
// safeWhich throws because of the test CodeQL object.
sinon.stub(safeWhich, "safeWhich").resolves("");
await codeqlObject.databaseInterpretResults("", [], "", "", "", "-v", "", (0, testing_utils_1.createFeatures)([]));
await codeqlObject.databaseInterpretResults("", [], "", "", "", "-v", "");
t.false(runnerConstructorStub.firstCall.args[1].includes("--sarif-add-query-help"), "--sarif-add-query-help should be absent, but it is present");
});
(0, ava_1.default)("databaseInterpretResults() sets --sarif-add-query-help for 2.7.1", async (t) => {
@@ -320,7 +320,7 @@ for (const [isFeatureEnabled, toolsInput, shouldToolcacheBeBypassed,] of TOOLCAC
sinon.stub(codeqlObject, "getVersion").resolves("2.7.1");
// safeWhich throws because of the test CodeQL object.
sinon.stub(safeWhich, "safeWhich").resolves("");
await codeqlObject.databaseInterpretResults("", [], "", "", "", "-v", "", (0, testing_utils_1.createFeatures)([]));
await codeqlObject.databaseInterpretResults("", [], "", "", "", "-v", "");
t.true(runnerConstructorStub.firstCall.args[1].includes("--sarif-add-query-help"), "--sarif-add-query-help should be present, but it is absent");
});
(0, ava_1.default)("databaseInitCluster() without injected codescanning config", async (t) => {
@@ -571,28 +571,22 @@ const injectedConfigMacro = ava_1.default.macro({
process.env["CODEQL_PASS_CONFIG_TO_CLI"] = origCODEQL_PASS_CONFIG_TO_CLI;
}
});
(0, ava_1.default)("databaseInterpretResults() sets --sarif-add-baseline-file-info when feature enabled", async (t) => {
(0, ava_1.default)("databaseInterpretResults() sets --sarif-add-baseline-file-info for 2.11.3", async (t) => {
const runnerConstructorStub = stubToolRunnerConstructor();
const codeqlObject = await codeql.getCodeQLForTesting();
// We need to set a CodeQL version such that running `databaseInterpretResults` does not crash.
// The version of CodeQL is checked separately to determine feature enablement, and does not
// otherwise impact this test, so set it to 0.0.0.
sinon.stub(codeqlObject, "getVersion").resolves("0.0.0");
sinon.stub(codeqlObject, "getVersion").resolves("2.11.3");
// safeWhich throws because of the test CodeQL object.
sinon.stub(safeWhich, "safeWhich").resolves("");
await codeqlObject.databaseInterpretResults("", [], "", "", "", "-v", "", (0, testing_utils_1.createFeatures)([feature_flags_1.Feature.FileBaselineInformationEnabled]));
await codeqlObject.databaseInterpretResults("", [], "", "", "", "-v", "");
t.true(runnerConstructorStub.firstCall.args[1].includes("--sarif-add-baseline-file-info"), "--sarif-add-baseline-file-info should be present, but it is absent");
});
(0, ava_1.default)("databaseInterpretResults() does not set --sarif-add-baseline-file-info if feature disabled", async (t) => {
(0, ava_1.default)("databaseInterpretResults() does not set --sarif-add-baseline-file-info for 2.11.2", async (t) => {
const runnerConstructorStub = stubToolRunnerConstructor();
const codeqlObject = await codeql.getCodeQLForTesting();
// We need to set a CodeQL version such that running `databaseInterpretResults` does not crash.
// The version of CodeQL is checked upstream to determine feature enablement, so it does not
// affect this test.
sinon.stub(codeqlObject, "getVersion").resolves("0.0.0");
sinon.stub(codeqlObject, "getVersion").resolves("2.11.2");
// safeWhich throws because of the test CodeQL object.
sinon.stub(safeWhich, "safeWhich").resolves("");
await codeqlObject.databaseInterpretResults("", [], "", "", "", "-v", "", (0, testing_utils_1.createFeatures)([]));
await codeqlObject.databaseInterpretResults("", [], "", "", "", "-v", "");
t.false(runnerConstructorStub.firstCall.args[1].includes("--sarif-add-baseline-file-info"), "--sarif-add-baseline-file-info must be absent, but it is present");
});
function stubToolRunnerConstructor() {
File diff suppressed because one or more lines are too long
@@ -30,9 +30,9 @@ var Feature;
Feature["BypassToolcacheKotlinSwiftEnabled"] = "bypass_toolcache_kotlin_swift_enabled";
Feature["CliConfigFileEnabled"] = "cli_config_file_enabled";
Feature["DisableKotlinAnalysisEnabled"] = "disable_kotlin_analysis_enabled";
Feature["FileBaselineInformationEnabled"] = "file_baseline_information_enabled";
Feature["MlPoweredQueriesEnabled"] = "ml_powered_queries_enabled";
Feature["TrapCachingEnabled"] = "trap_caching_enabled";
Feature["UploadFailedSarifEnabled"] = "upload_failed_sarif_enabled";
})(Feature = exports.Feature || (exports.Feature = {}));
exports.featureConfig = {
[Feature.BypassToolcacheEnabled]: {
@@ -55,10 +55,6 @@ exports.featureConfig = {
envVar: "CODEQL_PASS_CONFIG_TO_CLI",
minimumVersion: "2.11.1",
},
[Feature.FileBaselineInformationEnabled]: {
envVar: "CODEQL_FILE_BASELINE_INFORMATION",
minimumVersion: "2.11.3",
},
[Feature.MlPoweredQueriesEnabled]: {
envVar: "CODEQL_ML_POWERED_QUERIES",
minimumVersion: "2.7.5",
@@ -67,6 +63,10 @@ exports.featureConfig = {
envVar: "CODEQL_TRAP_CACHING",
minimumVersion: undefined,
},
[Feature.UploadFailedSarifEnabled]: {
envVar: "CODEQL_ACTION_UPLOAD_FAILED_SARIF",
minimumVersion: "2.11.3",
},
};
exports.FEATURE_FLAGS_FILE_NAME = "cached-feature-flags.json";
/**
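Each featureConfig entry above pairs an override environment variable with an optional minimum CodeQL CLI version. The evaluation logic itself is not part of this hunk, so the TypeScript below is only a rough sketch of that gating pattern under stated assumptions (an explicit "false" wins, a too-old CLI disables the feature, an explicit "true" skips the remote check, otherwise a feature-flag API is consulted); it is not the Action's exact getValue implementation:

    interface FeatureGate {
      envVar: string;
      minimumVersion: string | undefined;
    }

    async function isFeatureEnabled(
      gate: FeatureGate,
      codeqlVersion: string,
      queryApi: () => Promise<boolean>, // assumed remote feature-flag lookup
    ): Promise<boolean> {
      const override = (process.env[gate.envVar] || "").toLocaleLowerCase();
      if (override === "false") {
        return false; // explicit opt-out always wins
      }
      if (gate.minimumVersion !== undefined && !versionAtLeast(codeqlVersion, gate.minimumVersion)) {
        return false; // CLI too old for the feature
      }
      if (override === "true") {
        return true; // explicit opt-in skips the remote check
      }
      return await queryApi();
    }

    // Assumed helper: naive x.y.z comparison, good enough for this sketch.
    function versionAtLeast(actual: string, minimum: string): boolean {
      const a = actual.split(".").map(Number);
      const m = minimum.split(".").map(Number);
      for (let i = 0; i < Math.max(a.length, m.length); i++) {
        const diff = (a[i] || 0) - (m[i] || 0);
        if (diff !== 0) return diff > 0;
      }
      return true;
    }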
@@ -1 +1 @@
{"version":3,"file":"feature-flags.js","sourceRoot":"","sources":["../src/feature-flags.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AAAA,uCAAyB;AACzB,2CAA6B;AAE7B,6CAA4C;AAI5C,6CAA+B;AAM/B,IAAY,OAQX;AARD,WAAY,OAAO;IACjB,8DAAmD,CAAA;IACnD,sFAA2E,CAAA;IAC3E,2DAAgD,CAAA;IAChD,2EAAgE,CAAA;IAChE,+EAAoE,CAAA;IACpE,iEAAsD,CAAA;IACtD,sDAA2C,CAAA;AAC7C,CAAC,EARW,OAAO,GAAP,eAAO,KAAP,eAAO,QAQlB;AAEY,QAAA,aAAa,GAGtB;IACF,CAAC,OAAO,CAAC,sBAAsB,CAAC,EAAE;QAChC,MAAM,EAAE,yBAAyB;QACjC,+EAA+E;QAC/E,iCAAiC;QACjC,cAAc,EAAE,SAAS;KAC1B;IACD,CAAC,OAAO,CAAC,iCAAiC,CAAC,EAAE;QAC3C,MAAM,EAAE,sCAAsC;QAC9C,+EAA+E;QAC/E,iCAAiC;QACjC,cAAc,EAAE,SAAS;KAC1B;IACD,CAAC,OAAO,CAAC,4BAA4B,CAAC,EAAE;QACtC,MAAM,EAAE,gCAAgC;QACxC,cAAc,EAAE,SAAS;KAC1B;IACD,CAAC,OAAO,CAAC,oBAAoB,CAAC,EAAE;QAC9B,MAAM,EAAE,2BAA2B;QACnC,cAAc,EAAE,QAAQ;KACzB;IACD,CAAC,OAAO,CAAC,8BAA8B,CAAC,EAAE;QACxC,MAAM,EAAE,kCAAkC;QAC1C,cAAc,EAAE,QAAQ;KACzB;IACD,CAAC,OAAO,CAAC,uBAAuB,CAAC,EAAE;QACjC,MAAM,EAAE,2BAA2B;QACnC,cAAc,EAAE,OAAO;KACxB;IACD,CAAC,OAAO,CAAC,kBAAkB,CAAC,EAAE;QAC5B,MAAM,EAAE,qBAAqB;QAC7B,cAAc,EAAE,SAAS;KAC1B;CACF,CAAC;AAUW,QAAA,uBAAuB,GAAG,2BAA2B,CAAC;AAEnE;;;;GAIG;AACH,MAAa,QAAQ;IAGnB,YACE,aAAiC,EACjC,aAA4B,EAC5B,OAAe,EACf,MAAc;QAEd,IAAI,CAAC,kBAAkB,GAAG,IAAI,kBAAkB,CAC9C,aAAa,EACb,aAAa,EACb,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE,+BAAuB,CAAC,EAC3C,MAAM,CACP,CAAC;IACJ,CAAC;IAED;;;;;;;;;;;OAWG;IACH,KAAK,CAAC,QAAQ,CAAC,OAAgB,EAAE,MAAe;QAC9C,IAAI,CAAC,MAAM,IAAI,qBAAa,CAAC,OAAO,CAAC,CAAC,cAAc,EAAE;YACpD,MAAM,IAAI,KAAK,CACb,8DAA8D,OAAO,2CAA2C,CACjH,CAAC;SACH;QAED,oDAAoD;QACpD,IAAI,OAAO,KAAK,OAAO,CAAC,sBAAsB,IAAI,IAAI,CAAC,YAAY,EAAE,EAAE;YACrE,OAAO,KAAK,CAAC;SACd;QAED,MAAM,MAAM,GAAG,CACb,OAAO,CAAC,GAAG,CAAC,qBAAa,CAAC,OAAO,CAAC,CAAC,MAAM,CAAC,IAAI,EAAE,CACjD,CAAC,iBAAiB,EAAE,CAAC;QAEtB,sFAAsF;QACtF,IAAI,MAAM,KAAK,OAAO,EAAE;YACtB,OAAO,KAAK,CAAC;SACd;QAED,yEAAyE;QACzE,MAAM,cAAc,GAAG,qBAAa,CAAC,OAAO,CAAC,CAAC,cAAc,CAAC;QAC7D,IAAI,MAAM,IAAI,cAAc,EAAE;YAC5B,IAAI,CAAC,CAAC,MAAM,IAAI,CAAC,kBAAkB,CAAC,MAAM,EAAE,cAAc,CAAC,CAAC,EAAE;gBAC5D,OAAO,KAAK,CAAC;aACd;SACF;QAED,8EAA8E;QAC9E,IAAI,MAAM,KAAK,MAAM,EAAE;YACrB,OAAO,IAAI,CAAC;SACb;QACD,gDAAgD;QAChD,OAAO,MAAM,IAAI,CAAC,kBAAkB,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC;IACzD,CAAC;CACF;AAjED,4BAiEC;AAED,MAAM,kBAAkB;IAGtB,YACmB,aAAiC,EACjC,aAA4B,EAC5B,gBAAwB,EACxB,MAAc;QAHd,kBAAa,GAAb,aAAa,CAAoB;QACjC,kBAAa,GAAb,aAAa,CAAe;QAC5B,qBAAgB,GAAhB,gBAAgB,CAAQ;QACxB,WAAM,GAAN,MAAM,CAAQ;QAE/B,IAAI;IACN,CAAC;IAED,KAAK,CAAC,QAAQ,CAAC,OAAgB;QAC7B,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,cAAc,EAAE,CAAC;QAC7C,IAAI,QAAQ,KAAK,SAAS,EAAE;YAC1B,IAAI,CAAC,MAAM,CAAC,KAAK,CACf,qCAAqC,OAAO,4BAA4B,CACzE,CAAC;YACF,OAAO,KAAK,CAAC;SACd;QACD,MAAM,iBAAiB,GAAG,QAAQ,CAAC,OAAO,CAAC,CAAC;QAC5C,IAAI,iBAAiB,KAAK,SAAS,EAAE;YACnC,IAAI,CAAC,MAAM,CAAC,KAAK,CACf,YAAY,OAAO,uDAAuD,CAC3E,CAAC;YACF,OAAO,KAAK,CAAC;SACd;QACD,OAAO,CAAC,CAAC,iBAAiB,CAAC;IAC7B,CAAC;IAEO,KAAK,CAAC,cAAc;QAC1B,0CAA0C;QAC1C,IAAI,IAAI,CAAC,iBAAiB,KAAK,SAAS,EAAE;YACxC,OAAO,IAAI,CAAC,iBAAiB,CAAC;SAC/B;QAED,wEAAwE;QACxE,MAAM,SAAS,GAAG,MAAM,IAAI,CAAC,cAAc,EAAE,CAAC;QAC9C,IAAI,SAAS,KAAK,SAAS,EAAE;YAC3B,IAAI,CAAC,iBAAiB,GAAG,SAAS,CAAC;YACnC,OAAO,SAAS,CAAC;SAClB;QAED,wCAAwC;QACxC,IAAI,WAAW,GAAG,MAAM,IAAI,CAAC,eAAe,EAAE,CAAC;QAC/C,IAAI,WAAW,KAAK,SAAS,EAAE;YAC7B,WAAW,GAAG,EAAE,CAAC;SAClB;QAED,+BAA+B;QAC/B,IAAI,CAAC,iBAAiB,GAAG,WAAW,CAAC;QAErC,+DAA+D;QAC/D,MAAM,IAAI,CAAC,eAAe,CAAC,WAAW,CAAC,CAAC;QAExC,OAAO,WAAW,CAAC;IACrB,CAAC;IAEO,KAAK,CAAC,cAAc;QAG1B,IAAI;YACF,IAAI,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,gBAAgB,CAAC,EAAE;gBACxC,IAAI,CAAC,MAAM,CAAC,KAAK,CACf,8BAA8B,IAAI,CAAC,gBAAgB,EAAE,CACtD,CAAC;gBACF,OAAO,IAAI,CAAC,KAAK,CAAC,E
AAE,CAAC,YAAY,CAAC,IAAI,CAAC,gBAAgB,EAAE,MAAM,CAAC,CAAC,CAAC;aACnE;SACF;QAAC,OAAO,CAAC,EAAE;YACV,IAAI,CAAC,MAAM,CAAC,OAAO,CACjB,2CAA2C,IAAI,CAAC,gBAAgB,KAAK,CAAC,mCAAmC,CAC1G,CAAC;SACH;QACD,OAAO,SAAS,CAAC;IACnB,CAAC;IAEO,KAAK,CAAC,eAAe,CAC3B,KAAoC;QAEpC,IAAI;YACF,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,4BAA4B,IAAI,CAAC,gBAAgB,EAAE,CAAC,CAAC;YACvE,EAAE,CAAC,aAAa,CAAC,IAAI,CAAC,gBAAgB,EAAE,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,CAAC,CAAC;SAChE;QAAC,OAAO,CAAC,EAAE;YACV,IAAI,CAAC,MAAM,CAAC,OAAO,CACjB,2CAA2C,IAAI,CAAC,gBAAgB,KAAK,CAAC,GAAG,CAC1E,CAAC;SACH;IACH,CAAC;IAEO,KAAK,CAAC,eAAe;QAC3B,iDAAiD;QACjD,IAAI,IAAI,CAAC,aAAa,CAAC,IAAI,KAAK,IAAI,CAAC,aAAa,CAAC,MAAM,EAAE;YACzD,IAAI,CAAC,MAAM,CAAC,KAAK,CACf,oEAAoE,CACrE,CAAC;YACF,OAAO,EAAE,CAAC;SACX;QACD,IAAI;YACF,MAAM,QAAQ,GAAG,MAAM,IAAA,yBAAY,GAAE,CAAC,OAAO,CAC3C,8DAA8D,EAC9D;gBACE,KAAK,EAAE,IAAI,CAAC,aAAa,CAAC,KAAK;gBAC/B,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,IAAI;aAC9B,CACF,CAAC;YACF,OAAO,QAAQ,CAAC,IAAI,CAAC;SACtB;QAAC,OAAO,CAAC,EAAE;YACV,IAAI,IAAI,CAAC,WAAW,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,MAAM,KAAK,GAAG,EAAE;gBAC3C,IAAI,CAAC,MAAM,CAAC,OAAO,CACjB,gGAAgG;oBAC9F,oEAAoE;oBACpE,qFAAqF;oBACrF,kFAAkF,CAAC,EAAE,CACxF,CAAC;aACH;iBAAM;gBACL,kFAAkF;gBAClF,8EAA8E;gBAC9E,2FAA2F;gBAC3F,eAAe;gBACf,MAAM,IAAI,KAAK,CACb,sEAAsE,CAAC,EAAE,CAC1E,CAAC;aACH;SACF;IACH,CAAC;CACF"}
{"version":3,"file":"feature-flags.js","sourceRoot":"","sources":["../src/feature-flags.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AAAA,uCAAyB;AACzB,2CAA6B;AAE7B,6CAA4C;AAI5C,6CAA+B;AAM/B,IAAY,OAQX;AARD,WAAY,OAAO;IACjB,8DAAmD,CAAA;IACnD,sFAA2E,CAAA;IAC3E,2DAAgD,CAAA;IAChD,2EAAgE,CAAA;IAChE,iEAAsD,CAAA;IACtD,sDAA2C,CAAA;IAC3C,mEAAwD,CAAA;AAC1D,CAAC,EARW,OAAO,GAAP,eAAO,KAAP,eAAO,QAQlB;AAEY,QAAA,aAAa,GAGtB;IACF,CAAC,OAAO,CAAC,sBAAsB,CAAC,EAAE;QAChC,MAAM,EAAE,yBAAyB;QACjC,+EAA+E;QAC/E,iCAAiC;QACjC,cAAc,EAAE,SAAS;KAC1B;IACD,CAAC,OAAO,CAAC,iCAAiC,CAAC,EAAE;QAC3C,MAAM,EAAE,sCAAsC;QAC9C,+EAA+E;QAC/E,iCAAiC;QACjC,cAAc,EAAE,SAAS;KAC1B;IACD,CAAC,OAAO,CAAC,4BAA4B,CAAC,EAAE;QACtC,MAAM,EAAE,gCAAgC;QACxC,cAAc,EAAE,SAAS;KAC1B;IACD,CAAC,OAAO,CAAC,oBAAoB,CAAC,EAAE;QAC9B,MAAM,EAAE,2BAA2B;QACnC,cAAc,EAAE,QAAQ;KACzB;IACD,CAAC,OAAO,CAAC,uBAAuB,CAAC,EAAE;QACjC,MAAM,EAAE,2BAA2B;QACnC,cAAc,EAAE,OAAO;KACxB;IACD,CAAC,OAAO,CAAC,kBAAkB,CAAC,EAAE;QAC5B,MAAM,EAAE,qBAAqB;QAC7B,cAAc,EAAE,SAAS;KAC1B;IACD,CAAC,OAAO,CAAC,wBAAwB,CAAC,EAAE;QAClC,MAAM,EAAE,mCAAmC;QAC3C,cAAc,EAAE,QAAQ;KACzB;CACF,CAAC;AAUW,QAAA,uBAAuB,GAAG,2BAA2B,CAAC;AAEnE;;;;GAIG;AACH,MAAa,QAAQ;IAGnB,YACE,aAAiC,EACjC,aAA4B,EAC5B,OAAe,EACf,MAAc;QAEd,IAAI,CAAC,kBAAkB,GAAG,IAAI,kBAAkB,CAC9C,aAAa,EACb,aAAa,EACb,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE,+BAAuB,CAAC,EAC3C,MAAM,CACP,CAAC;IACJ,CAAC;IAED;;;;;;;;;;;OAWG;IACH,KAAK,CAAC,QAAQ,CAAC,OAAgB,EAAE,MAAe;QAC9C,IAAI,CAAC,MAAM,IAAI,qBAAa,CAAC,OAAO,CAAC,CAAC,cAAc,EAAE;YACpD,MAAM,IAAI,KAAK,CACb,8DAA8D,OAAO,2CAA2C,CACjH,CAAC;SACH;QAED,oDAAoD;QACpD,IAAI,OAAO,KAAK,OAAO,CAAC,sBAAsB,IAAI,IAAI,CAAC,YAAY,EAAE,EAAE;YACrE,OAAO,KAAK,CAAC;SACd;QAED,MAAM,MAAM,GAAG,CACb,OAAO,CAAC,GAAG,CAAC,qBAAa,CAAC,OAAO,CAAC,CAAC,MAAM,CAAC,IAAI,EAAE,CACjD,CAAC,iBAAiB,EAAE,CAAC;QAEtB,sFAAsF;QACtF,IAAI,MAAM,KAAK,OAAO,EAAE;YACtB,OAAO,KAAK,CAAC;SACd;QAED,yEAAyE;QACzE,MAAM,cAAc,GAAG,qBAAa,CAAC,OAAO,CAAC,CAAC,cAAc,CAAC;QAC7D,IAAI,MAAM,IAAI,cAAc,EAAE;YAC5B,IAAI,CAAC,CAAC,MAAM,IAAI,CAAC,kBAAkB,CAAC,MAAM,EAAE,cAAc,CAAC,CAAC,EAAE;gBAC5D,OAAO,KAAK,CAAC;aACd;SACF;QAED,8EAA8E;QAC9E,IAAI,MAAM,KAAK,MAAM,EAAE;YACrB,OAAO,IAAI,CAAC;SACb;QACD,gDAAgD;QAChD,OAAO,MAAM,IAAI,CAAC,kBAAkB,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC;IACzD,CAAC;CACF;AAjED,4BAiEC;AAED,MAAM,kBAAkB;IAGtB,YACmB,aAAiC,EACjC,aAA4B,EAC5B,gBAAwB,EACxB,MAAc;QAHd,kBAAa,GAAb,aAAa,CAAoB;QACjC,kBAAa,GAAb,aAAa,CAAe;QAC5B,qBAAgB,GAAhB,gBAAgB,CAAQ;QACxB,WAAM,GAAN,MAAM,CAAQ;QAE/B,IAAI;IACN,CAAC;IAED,KAAK,CAAC,QAAQ,CAAC,OAAgB;QAC7B,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,cAAc,EAAE,CAAC;QAC7C,IAAI,QAAQ,KAAK,SAAS,EAAE;YAC1B,IAAI,CAAC,MAAM,CAAC,KAAK,CACf,qCAAqC,OAAO,4BAA4B,CACzE,CAAC;YACF,OAAO,KAAK,CAAC;SACd;QACD,MAAM,iBAAiB,GAAG,QAAQ,CAAC,OAAO,CAAC,CAAC;QAC5C,IAAI,iBAAiB,KAAK,SAAS,EAAE;YACnC,IAAI,CAAC,MAAM,CAAC,KAAK,CACf,YAAY,OAAO,uDAAuD,CAC3E,CAAC;YACF,OAAO,KAAK,CAAC;SACd;QACD,OAAO,CAAC,CAAC,iBAAiB,CAAC;IAC7B,CAAC;IAEO,KAAK,CAAC,cAAc;QAC1B,0CAA0C;QAC1C,IAAI,IAAI,CAAC,iBAAiB,KAAK,SAAS,EAAE;YACxC,OAAO,IAAI,CAAC,iBAAiB,CAAC;SAC/B;QAED,wEAAwE;QACxE,MAAM,SAAS,GAAG,MAAM,IAAI,CAAC,cAAc,EAAE,CAAC;QAC9C,IAAI,SAAS,KAAK,SAAS,EAAE;YAC3B,IAAI,CAAC,iBAAiB,GAAG,SAAS,CAAC;YACnC,OAAO,SAAS,CAAC;SAClB;QAED,wCAAwC;QACxC,IAAI,WAAW,GAAG,MAAM,IAAI,CAAC,eAAe,EAAE,CAAC;QAC/C,IAAI,WAAW,KAAK,SAAS,EAAE;YAC7B,WAAW,GAAG,EAAE,CAAC;SAClB;QAED,+BAA+B;QAC/B,IAAI,CAAC,iBAAiB,GAAG,WAAW,CAAC;QAErC,+DAA+D;QAC/D,MAAM,IAAI,CAAC,eAAe,CAAC,WAAW,CAAC,CAAC;QAExC,OAAO,WAAW,CAAC;IACrB,CAAC;IAEO,KAAK,CAAC,cAAc;QAG1B,IAAI;YACF,IAAI,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,gBAAgB,CAAC,EAAE;gBACxC,IAAI,CAAC,MAAM,CAAC,KAAK,CACf,8BAA8B,IAAI,CAAC,gBAAgB,EAAE,CACtD,CAAC;gBACF,OAAO,IAAI,CAAC,KAAK,CAAC,E
AAE,CAAC,YAAY,CAAC,IAAI,CAAC,gBAAgB,EAAE,MAAM,CAAC,CAAC,CAAC;aACnE;SACF;QAAC,OAAO,CAAC,EAAE;YACV,IAAI,CAAC,MAAM,CAAC,OAAO,CACjB,2CAA2C,IAAI,CAAC,gBAAgB,KAAK,CAAC,mCAAmC,CAC1G,CAAC;SACH;QACD,OAAO,SAAS,CAAC;IACnB,CAAC;IAEO,KAAK,CAAC,eAAe,CAC3B,KAAoC;QAEpC,IAAI;YACF,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,4BAA4B,IAAI,CAAC,gBAAgB,EAAE,CAAC,CAAC;YACvE,EAAE,CAAC,aAAa,CAAC,IAAI,CAAC,gBAAgB,EAAE,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,CAAC,CAAC;SAChE;QAAC,OAAO,CAAC,EAAE;YACV,IAAI,CAAC,MAAM,CAAC,OAAO,CACjB,2CAA2C,IAAI,CAAC,gBAAgB,KAAK,CAAC,GAAG,CAC1E,CAAC;SACH;IACH,CAAC;IAEO,KAAK,CAAC,eAAe;QAC3B,iDAAiD;QACjD,IAAI,IAAI,CAAC,aAAa,CAAC,IAAI,KAAK,IAAI,CAAC,aAAa,CAAC,MAAM,EAAE;YACzD,IAAI,CAAC,MAAM,CAAC,KAAK,CACf,oEAAoE,CACrE,CAAC;YACF,OAAO,EAAE,CAAC;SACX;QACD,IAAI;YACF,MAAM,QAAQ,GAAG,MAAM,IAAA,yBAAY,GAAE,CAAC,OAAO,CAC3C,8DAA8D,EAC9D;gBACE,KAAK,EAAE,IAAI,CAAC,aAAa,CAAC,KAAK;gBAC/B,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,IAAI;aAC9B,CACF,CAAC;YACF,OAAO,QAAQ,CAAC,IAAI,CAAC;SACtB;QAAC,OAAO,CAAC,EAAE;YACV,IAAI,IAAI,CAAC,WAAW,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,MAAM,KAAK,GAAG,EAAE;gBAC3C,IAAI,CAAC,MAAM,CAAC,OAAO,CACjB,gGAAgG;oBAC9F,oEAAoE;oBACpE,qFAAqF;oBACrF,kFAAkF,CAAC,EAAE,CACxF,CAAC;aACH;iBAAM;gBACL,kFAAkF;gBAClF,8EAA8E;gBAC9E,2FAA2F;gBAC3F,eAAe;gBACf,MAAM,IAAI,KAAK,CACb,sEAAsE,CAAC,EAAE,CAC1E,CAAC;aACH;SACF;IACH,CAAC;CACF"}
@@ -19,19 +19,68 @@ var __importStar = (this && this.__importStar) || function (mod) {
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.run = void 0;
exports.run = exports.uploadFailedSarif = void 0;
const core = __importStar(require("@actions/core"));
const actionsUtil = __importStar(require("./actions-util"));
const codeql_1 = require("./codeql");
const config_utils_1 = require("./config-utils");
const logging_1 = require("./logging");
async function run(uploadDatabaseBundleDebugArtifact, uploadLogsDebugArtifact, printDebugLogs) {
const logger = (0, logging_1.getActionsLogger)();
const feature_flags_1 = require("./feature-flags");
const shared_environment_1 = require("./shared-environment");
const uploadLib = __importStar(require("./upload-lib"));
const util_1 = require("./util");
const workflow_1 = require("./workflow");
async function uploadFailedSarif(config, repositoryNwo, featureEnablement, logger) {
if (!config.codeQLCmd) {
logger.warning("CodeQL command not found. Unable to upload failed SARIF file.");
return;
}
const codeql = await (0, codeql_1.getCodeQL)(config.codeQLCmd);
if (!(await featureEnablement.getValue(feature_flags_1.Feature.UploadFailedSarifEnabled, codeql))) {
logger.debug("Uploading failed SARIF is disabled.");
return;
}
const workflow = await (0, workflow_1.getWorkflow)();
const jobName = (0, util_1.getRequiredEnvParam)("GITHUB_JOB");
const matrix = (0, util_1.parseMatrixInput)(actionsUtil.getRequiredInput("matrix"));
if ((0, workflow_1.getUploadInputOrThrow)(workflow, jobName, matrix) !== "true" ||
(0, util_1.isInTestMode)()) {
logger.debug("Won't upload a failed SARIF file since SARIF upload is disabled.");
return;
}
const category = (0, workflow_1.getCategoryInputOrThrow)(workflow, jobName, matrix);
const checkoutPath = (0, workflow_1.getCheckoutPathInputOrThrow)(workflow, jobName, matrix);
const sarifFile = "../codeql-failed-run.sarif";
await codeql.diagnosticsExport(sarifFile, category);
core.info(`Uploading failed SARIF file ${sarifFile}`);
const uploadResult = await uploadLib.uploadFromActions(sarifFile, checkoutPath, category, logger);
await uploadLib.waitForProcessing(repositoryNwo, uploadResult.sarifID, logger, { isUnsuccessfulExecution: true });
}
exports.uploadFailedSarif = uploadFailedSarif;
async function run(uploadDatabaseBundleDebugArtifact, uploadLogsDebugArtifact, printDebugLogs, repositoryNwo, featureEnablement, logger) {
const config = await (0, config_utils_1.getConfig)(actionsUtil.getTemporaryDirectory(), logger);
if (config === undefined) {
logger.warning("Debugging artifacts are unavailable since the 'init' Action failed before it could produce any.");
return;
}
// Environment variable used to integration test uploading a SARIF file for failed runs
const expectFailedSarifUpload = process.env["CODEQL_ACTION_EXPECT_UPLOAD_FAILED_SARIF"] === "true";
if (process.env[shared_environment_1.CODEQL_ACTION_ANALYZE_DID_UPLOAD_SARIF] !== "true") {
try {
await uploadFailedSarif(config, repositoryNwo, featureEnablement, logger);
}
catch (e) {
if (expectFailedSarifUpload) {
throw new Error("Expected to upload a SARIF file for the failed run, but encountered " +
`the following error: ${e}`);
}
logger.info(`Failed to upload a SARIF file for the failed run. Error: ${e}`);
}
}
else if (expectFailedSarifUpload) {
throw new Error("Expected to upload a SARIF file for the failed run, but didn't.");
}
// Upload appropriate Actions artifacts for debugging
if (config === null || config === void 0 ? void 0 : config.debugMode) {
if (config.debugMode) {
core.info("Debug mode is on. Uploading available database bundles and logs as Actions debugging artifacts...");
await uploadDatabaseBundleDebugArtifact(config, logger);
await uploadLogsDebugArtifact(config);
@@ -1 +1 @@
{"version":3,"file":"init-action-post-helper.js","sourceRoot":"","sources":["../src/init-action-post-helper.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AAAA,oDAAsC;AAEtC,4DAA8C;AAC9C,iDAA2C;AAC3C,uCAA6C;AAEtC,KAAK,UAAU,GAAG,CACvB,iCAA2C,EAC3C,uBAAiC,EACjC,cAAwB;IAExB,MAAM,MAAM,GAAG,IAAA,0BAAgB,GAAE,CAAC;IAElC,MAAM,MAAM,GAAG,MAAM,IAAA,wBAAS,EAAC,WAAW,CAAC,qBAAqB,EAAE,EAAE,MAAM,CAAC,CAAC;IAC5E,IAAI,MAAM,KAAK,SAAS,EAAE;QACxB,MAAM,CAAC,OAAO,CACZ,iGAAiG,CAClG,CAAC;KACH;IAED,qDAAqD;IACrD,IAAI,MAAM,aAAN,MAAM,uBAAN,MAAM,CAAE,SAAS,EAAE;QACrB,IAAI,CAAC,IAAI,CACP,mGAAmG,CACpG,CAAC;QACF,MAAM,iCAAiC,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACxD,MAAM,uBAAuB,CAAC,MAAM,CAAC,CAAC;QAEtC,MAAM,cAAc,CAAC,MAAM,CAAC,CAAC;KAC9B;AACH,CAAC;AAxBD,kBAwBC"}
{"version":3,"file":"init-action-post-helper.js","sourceRoot":"","sources":["../src/init-action-post-helper.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AAAA,oDAAsC;AAEtC,4DAA8C;AAC9C,qCAAqC;AACrC,iDAAmD;AACnD,mDAA6D;AAG7D,6DAA8E;AAC9E,wDAA0C;AAC1C,iCAA6E;AAC7E,yCAKoB;AAEb,KAAK,UAAU,iBAAiB,CACrC,MAAc,EACd,aAA4B,EAC5B,iBAAoC,EACpC,MAAc;IAEd,IAAI,CAAC,MAAM,CAAC,SAAS,EAAE;QACrB,MAAM,CAAC,OAAO,CACZ,+DAA+D,CAChE,CAAC;QACF,OAAO;KACR;IACD,MAAM,MAAM,GAAG,MAAM,IAAA,kBAAS,EAAC,MAAM,CAAC,SAAS,CAAC,CAAC;IACjD,IACE,CAAC,CAAC,MAAM,iBAAiB,CAAC,QAAQ,CAChC,uBAAO,CAAC,wBAAwB,EAChC,MAAM,CACP,CAAC,EACF;QACA,MAAM,CAAC,KAAK,CAAC,qCAAqC,CAAC,CAAC;QACpD,OAAO;KACR;IACD,MAAM,QAAQ,GAAG,MAAM,IAAA,sBAAW,GAAE,CAAC;IACrC,MAAM,OAAO,GAAG,IAAA,0BAAmB,EAAC,YAAY,CAAC,CAAC;IAClD,MAAM,MAAM,GAAG,IAAA,uBAAgB,EAAC,WAAW,CAAC,gBAAgB,CAAC,QAAQ,CAAC,CAAC,CAAC;IACxE,IACE,IAAA,gCAAqB,EAAC,QAAQ,EAAE,OAAO,EAAE,MAAM,CAAC,KAAK,MAAM;QAC3D,IAAA,mBAAY,GAAE,EACd;QACA,MAAM,CAAC,KAAK,CACV,kEAAkE,CACnE,CAAC;QACF,OAAO;KACR;IACD,MAAM,QAAQ,GAAG,IAAA,kCAAuB,EAAC,QAAQ,EAAE,OAAO,EAAE,MAAM,CAAC,CAAC;IACpE,MAAM,YAAY,GAAG,IAAA,sCAA2B,EAAC,QAAQ,EAAE,OAAO,EAAE,MAAM,CAAC,CAAC;IAE5E,MAAM,SAAS,GAAG,4BAA4B,CAAC;IAC/C,MAAM,MAAM,CAAC,iBAAiB,CAAC,SAAS,EAAE,QAAQ,CAAC,CAAC;IAEpD,IAAI,CAAC,IAAI,CAAC,+BAA+B,SAAS,EAAE,CAAC,CAAC;IACtD,MAAM,YAAY,GAAG,MAAM,SAAS,CAAC,iBAAiB,CACpD,SAAS,EACT,YAAY,EACZ,QAAQ,EACR,MAAM,CACP,CAAC;IACF,MAAM,SAAS,CAAC,iBAAiB,CAC/B,aAAa,EACb,YAAY,CAAC,OAAO,EACpB,MAAM,EACN,EAAE,uBAAuB,EAAE,IAAI,EAAE,CAClC,CAAC;AACJ,CAAC;AArDD,8CAqDC;AAEM,KAAK,UAAU,GAAG,CACvB,iCAA2C,EAC3C,uBAAiC,EACjC,cAAwB,EACxB,aAA4B,EAC5B,iBAAoC,EACpC,MAAc;IAEd,MAAM,MAAM,GAAG,MAAM,IAAA,wBAAS,EAAC,WAAW,CAAC,qBAAqB,EAAE,EAAE,MAAM,CAAC,CAAC;IAC5E,IAAI,MAAM,KAAK,SAAS,EAAE;QACxB,MAAM,CAAC,OAAO,CACZ,iGAAiG,CAClG,CAAC;QACF,OAAO;KACR;IAED,uFAAuF;IACvF,MAAM,uBAAuB,GAC3B,OAAO,CAAC,GAAG,CAAC,0CAA0C,CAAC,KAAK,MAAM,CAAC;IAErE,IAAI,OAAO,CAAC,GAAG,CAAC,2DAAsC,CAAC,KAAK,MAAM,EAAE;QAClE,IAAI;YACF,MAAM,iBAAiB,CAAC,MAAM,EAAE,aAAa,EAAE,iBAAiB,EAAE,MAAM,CAAC,CAAC;SAC3E;QAAC,OAAO,CAAC,EAAE;YACV,IAAI,uBAAuB,EAAE;gBAC3B,MAAM,IAAI,KAAK,CACb,sEAAsE;oBACpE,wBAAwB,CAAC,EAAE,CAC9B,CAAC;aACH;YACD,MAAM,CAAC,IAAI,CACT,4DAA4D,CAAC,EAAE,CAChE,CAAC;SACH;KACF;SAAM,IAAI,uBAAuB,EAAE;QAClC,MAAM,IAAI,KAAK,CACb,iEAAiE,CAClE,CAAC;KACH;IAED,qDAAqD;IACrD,IAAI,MAAM,CAAC,SAAS,EAAE;QACpB,IAAI,CAAC,IAAI,CACP,mGAAmG,CACpG,CAAC;QACF,MAAM,iCAAiC,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACxD,MAAM,uBAAuB,CAAC,MAAM,CAAC,CAAC;QAEtC,MAAM,cAAc,CAAC,MAAM,CAAC,CAAC;KAC9B;AACH,CAAC;AAlDD,kBAkDC"}
@@ -24,13 +24,21 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
Object.defineProperty(exports, "__esModule", { value: true });
const ava_1 = __importDefault(require("ava"));
const sinon = __importStar(require("sinon"));
const actionsUtil = __importStar(require("./actions-util"));
const codeql = __importStar(require("./codeql"));
const configUtils = __importStar(require("./config-utils"));
const feature_flags_1 = require("./feature-flags");
const initActionPostHelper = __importStar(require("./init-action-post-helper"));
const logging_1 = require("./logging");
const repository_1 = require("./repository");
const testing_utils_1 = require("./testing-utils");
const uploadLib = __importStar(require("./upload-lib"));
const util = __importStar(require("./util"));
const workflow = __importStar(require("./workflow"));
(0, testing_utils_1.setupTests)(ava_1.default);
(0, ava_1.default)("post: init action with debug mode off", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
process.env["GITHUB_REPOSITORY"] = "github/codeql-action-fake-repository";
process.env["RUNNER_TEMP"] = tmpDir;
const gitHubVersion = {
type: util.GitHubVariant.DOTCOM,
@@ -44,7 +52,7 @@ const util = __importStar(require("./util"));
const uploadDatabaseBundleSpy = sinon.spy();
const uploadLogsSpy = sinon.spy();
const printDebugLogsSpy = sinon.spy();
await initActionPostHelper.run(uploadDatabaseBundleSpy, uploadLogsSpy, printDebugLogsSpy);
await initActionPostHelper.run(uploadDatabaseBundleSpy, uploadLogsSpy, printDebugLogsSpy, (0, repository_1.parseRepositoryNwo)("github/codeql-action"), (0, testing_utils_1.createFeatures)([]), (0, logging_1.getRunnerLogger)(true));
t.assert(uploadDatabaseBundleSpy.notCalled);
t.assert(uploadLogsSpy.notCalled);
t.assert(printDebugLogsSpy.notCalled);
@@ -52,6 +60,7 @@ const util = __importStar(require("./util"));
});
(0, ava_1.default)("post: init action with debug mode on", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
process.env["GITHUB_REPOSITORY"] = "github/codeql-action-fake-repository";
process.env["RUNNER_TEMP"] = tmpDir;
const gitHubVersion = {
type: util.GitHubVariant.DOTCOM,
@@ -65,10 +74,90 @@ const util = __importStar(require("./util"));
const uploadDatabaseBundleSpy = sinon.spy();
const uploadLogsSpy = sinon.spy();
const printDebugLogsSpy = sinon.spy();
await initActionPostHelper.run(uploadDatabaseBundleSpy, uploadLogsSpy, printDebugLogsSpy);
await initActionPostHelper.run(uploadDatabaseBundleSpy, uploadLogsSpy, printDebugLogsSpy, (0, repository_1.parseRepositoryNwo)("github/codeql-action"), (0, testing_utils_1.createFeatures)([]), (0, logging_1.getRunnerLogger)(true));
t.assert(uploadDatabaseBundleSpy.called);
t.assert(uploadLogsSpy.called);
t.assert(printDebugLogsSpy.called);
});
});
(0, ava_1.default)("uploads failed SARIF run for typical workflow", async (t) => {
const actionsWorkflow = createTestWorkflow([
{
name: "Checkout repository",
uses: "actions/checkout@v3",
},
{
name: "Initialize CodeQL",
uses: "github/codeql-action/init@v2",
with: {
languages: "javascript",
},
},
{
name: "Perform CodeQL Analysis",
uses: "github/codeql-action/analyze@v2",
with: {
category: "my-category",
},
},
]);
await testFailedSarifUpload(t, actionsWorkflow, { category: "my-category" });
});
(0, ava_1.default)("uploading failed SARIF run fails when workflow does not reference github/codeql-action", async (t) => {
const actionsWorkflow = createTestWorkflow([
{
name: "Checkout repository",
uses: "actions/checkout@v3",
},
]);
await t.throwsAsync(async () => await testFailedSarifUpload(t, actionsWorkflow));
});
function createTestWorkflow(steps) {
return {
name: "CodeQL",
on: {
push: {
branches: ["main"],
},
pull_request: {
branches: ["main"],
},
},
jobs: {
analyze: {
name: "CodeQL Analysis",
"runs-on": "ubuntu-latest",
steps,
},
},
};
}
async function testFailedSarifUpload(t, actionsWorkflow, { category } = {}) {
const config = {
codeQLCmd: "codeql",
debugMode: true,
languages: [],
packs: [],
};
const messages = [];
process.env["GITHUB_JOB"] = "analyze";
process.env["GITHUB_REPOSITORY"] = "github/codeql-action-fake-repository";
process.env["GITHUB_WORKSPACE"] =
"/home/runner/work/codeql-action/codeql-action";
sinon.stub(actionsUtil, "getRequiredInput").withArgs("matrix").returns("{}");
const codeqlObject = await codeql.getCodeQLForTesting();
sinon.stub(codeql, "getCodeQL").resolves(codeqlObject);
const diagnosticsExportStub = sinon.stub(codeqlObject, "diagnosticsExport");
sinon.stub(workflow, "getWorkflow").resolves(actionsWorkflow);
const uploadFromActions = sinon.stub(uploadLib, "uploadFromActions");
uploadFromActions.resolves({ sarifID: "42" });
const waitForProcessing = sinon.stub(uploadLib, "waitForProcessing");
await initActionPostHelper.uploadFailedSarif(config, (0, repository_1.parseRepositoryNwo)("github/codeql-action"), (0, testing_utils_1.createFeatures)([feature_flags_1.Feature.UploadFailedSarifEnabled]), (0, testing_utils_1.getRecordingLogger)(messages));
t.deepEqual(messages, []);
t.true(diagnosticsExportStub.calledOnceWith(sinon.match.string, category), `Actual args were: ${diagnosticsExportStub.args}`);
t.true(uploadFromActions.calledOnceWith(sinon.match.string, sinon.match.string, category, sinon.match.any), `Actual args were: ${uploadFromActions.args}`);
t.true(waitForProcessing.calledOnceWith(sinon.match.any, "42", sinon.match.any, {
isUnsuccessfulExecution: true,
}));
}
//# sourceMappingURL=init-action-post-helper.test.js.map
File diff suppressed because one or more lines are too long
@@ -26,11 +26,21 @@ var __importStar = (this && this.__importStar) || function (mod) {
Object.defineProperty(exports, "__esModule", { value: true });
const core = __importStar(require("@actions/core"));
const actionsUtil = __importStar(require("./actions-util"));
const api_client_1 = require("./api-client");
const debugArtifacts = __importStar(require("./debug-artifacts"));
const feature_flags_1 = require("./feature-flags");
const initActionPostHelper = __importStar(require("./init-action-post-helper"));
const logging_1 = require("./logging");
const repository_1 = require("./repository");
const util_1 = require("./util");
async function runWrapper() {
try {
await initActionPostHelper.run(debugArtifacts.uploadDatabaseBundleDebugArtifact, debugArtifacts.uploadLogsDebugArtifact, actionsUtil.printDebugLogs);
const logger = (0, logging_1.getActionsLogger)();
const gitHubVersion = await (0, api_client_1.getGitHubVersion)();
(0, util_1.checkGitHubVersionInRange)(gitHubVersion, logger);
const repositoryNwo = (0, repository_1.parseRepositoryNwo)((0, util_1.getRequiredEnvParam)("GITHUB_REPOSITORY"));
const features = new feature_flags_1.Features(gitHubVersion, repositoryNwo, actionsUtil.getTemporaryDirectory(), logger);
await initActionPostHelper.run(debugArtifacts.uploadDatabaseBundleDebugArtifact, debugArtifacts.uploadLogsDebugArtifact, actionsUtil.printDebugLogs, repositoryNwo, features, logger);
}
catch (error) {
core.setFailed(`init post-action step failed: ${error}`);
@@ -1 +1 @@
{"version":3,"file":"init-action-post.js","sourceRoot":"","sources":["../src/init-action-post.ts"],"names":[],"mappings":";AAAA;;;;GAIG;;;;;;;;;;;;;;;;;;;;;AAEH,oDAAsC;AAEtC,4DAA8C;AAC9C,kEAAoD;AACpD,gFAAkE;AAElE,KAAK,UAAU,UAAU;IACvB,IAAI;QACF,MAAM,oBAAoB,CAAC,GAAG,CAC5B,cAAc,CAAC,iCAAiC,EAChD,cAAc,CAAC,uBAAuB,EACtC,WAAW,CAAC,cAAc,CAC3B,CAAC;KACH;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,iCAAiC,KAAK,EAAE,CAAC,CAAC;QACzD,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;KACpB;AACH,CAAC;AAED,KAAK,UAAU,EAAE,CAAC"}
{"version":3,"file":"init-action-post.js","sourceRoot":"","sources":["../src/init-action-post.ts"],"names":[],"mappings":";AAAA;;;;GAIG;;;;;;;;;;;;;;;;;;;;;AAEH,oDAAsC;AAEtC,4DAA8C;AAC9C,6CAAgD;AAChD,kEAAoD;AACpD,mDAA2C;AAC3C,gFAAkE;AAClE,uCAA6C;AAC7C,6CAAkD;AAClD,iCAAwE;AAExE,KAAK,UAAU,UAAU;IACvB,IAAI;QACF,MAAM,MAAM,GAAG,IAAA,0BAAgB,GAAE,CAAC;QAClC,MAAM,aAAa,GAAG,MAAM,IAAA,6BAAgB,GAAE,CAAC;QAC/C,IAAA,gCAAyB,EAAC,aAAa,EAAE,MAAM,CAAC,CAAC;QAEjD,MAAM,aAAa,GAAG,IAAA,+BAAkB,EACtC,IAAA,0BAAmB,EAAC,mBAAmB,CAAC,CACzC,CAAC;QACF,MAAM,QAAQ,GAAG,IAAI,wBAAQ,CAC3B,aAAa,EACb,aAAa,EACb,WAAW,CAAC,qBAAqB,EAAE,EACnC,MAAM,CACP,CAAC;QAEF,MAAM,oBAAoB,CAAC,GAAG,CAC5B,cAAc,CAAC,iCAAiC,EAChD,cAAc,CAAC,uBAAuB,EACtC,WAAW,CAAC,cAAc,EAC1B,aAAa,EACb,QAAQ,EACR,MAAM,CACP,CAAC;KACH;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,iCAAiC,KAAK,EAAE,CAAC,CAAC;QACzD,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;KACpB;AACH,CAAC;AAED,KAAK,UAAU,EAAE,CAAC"}
@@ -1,14 +1,22 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.CODEQL_ACTION_TEST_MODE = exports.CODEQL_ACTION_TESTING_ENVIRONMENT = exports.CODEQL_WORKFLOW_STARTED_AT = exports.ODASA_TRACER_CONFIGURATION = void 0;
exports.ODASA_TRACER_CONFIGURATION = "ODASA_TRACER_CONFIGURATION";
// The time at which the first action (normally init) started executing.
// If a workflow invokes a different action without first invoking the init
// action (i.e. the upload action is being used by a third-party integrator)
// then this variable will be assigned the start time of the action invoked
// rather than the init action.
exports.CODEQL_WORKFLOW_STARTED_AT = "CODEQL_WORKFLOW_STARTED_AT";
exports.ODASA_TRACER_CONFIGURATION = exports.CODEQL_WORKFLOW_STARTED_AT = exports.CODEQL_ACTION_TEST_MODE = exports.CODEQL_ACTION_TESTING_ENVIRONMENT = exports.CODEQL_ACTION_ANALYZE_DID_UPLOAD_SARIF = void 0;
/**
* This environment variable is set to true when the `analyze` Action
* successfully uploads a SARIF file. It does NOT indicate whether the
* SARIF file was processed successfully.
*/
exports.CODEQL_ACTION_ANALYZE_DID_UPLOAD_SARIF = "CODEQL_ACTION_ANALYZE_DID_UPLOAD_SARIF";
exports.CODEQL_ACTION_TESTING_ENVIRONMENT = "CODEQL_ACTION_TESTING_ENVIRONMENT";
/** Used to disable uploading SARIF results or status reports to the GitHub API */
exports.CODEQL_ACTION_TEST_MODE = "CODEQL_ACTION_TEST_MODE";
/**
* The time at which the first action (normally init) started executing.
* If a workflow invokes a different action without first invoking the init
* action (i.e. the upload action is being used by a third-party integrator)
* then this variable will be assigned the start time of the action invoked
* rather than the init action.
*/
exports.CODEQL_WORKFLOW_STARTED_AT = "CODEQL_WORKFLOW_STARTED_AT";
exports.ODASA_TRACER_CONFIGURATION = "ODASA_TRACER_CONFIGURATION";
//# sourceMappingURL=shared-environment.js.map
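The new CODEQL_ACTION_ANALYZE_DID_UPLOAD_SARIF constant is the handshake between the two halves of this change: the analyze action exports it after a successful upload (see the analyze-action hunk earlier), and the init post step only attempts the failed-run upload when it is unset. A condensed TypeScript sketch of that flow, using the names from this diff; uploadFailedRunSarif stands in for the uploadFailedSarif helper added above:

    import * as core from "@actions/core";
    import { CODEQL_ACTION_ANALYZE_DID_UPLOAD_SARIF } from "./shared-environment";

    // In the analyze action, once uploadFromActions has succeeded:
    function markSarifUploaded(): void {
      core.exportVariable(CODEQL_ACTION_ANALYZE_DID_UPLOAD_SARIF, "true");
    }

    // In the init post step: only try the failed-run upload when analyze
    // never reported a successful upload.
    async function maybeUploadFailedSarif(uploadFailedRunSarif: () => Promise<void>): Promise<void> {
      if (process.env[CODEQL_ACTION_ANALYZE_DID_UPLOAD_SARIF] !== "true") {
        await uploadFailedRunSarif();
      }
    }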
@@ -1 +1 @@
{"version":3,"file":"shared-environment.js","sourceRoot":"","sources":["../src/shared-environment.ts"],"names":[],"mappings":";;;AAAa,QAAA,0BAA0B,GAAG,4BAA4B,CAAC;AACvE,wEAAwE;AACxE,2EAA2E;AAC3E,4EAA4E;AAC5E,2EAA2E;AAC3E,+BAA+B;AAClB,QAAA,0BAA0B,GAAG,4BAA4B,CAAC;AAE1D,QAAA,iCAAiC,GAC5C,mCAAmC,CAAC;AAEtC,kFAAkF;AACrE,QAAA,uBAAuB,GAAG,yBAAyB,CAAC"}
{"version":3,"file":"shared-environment.js","sourceRoot":"","sources":["../src/shared-environment.ts"],"names":[],"mappings":";;;AAAA;;;;GAIG;AACU,QAAA,sCAAsC,GACjD,wCAAwC,CAAC;AAE9B,QAAA,iCAAiC,GAC5C,mCAAmC,CAAC;AAEtC,kFAAkF;AACrE,QAAA,uBAAuB,GAAG,yBAAyB,CAAC;AAEjE;;;;;;GAMG;AACU,QAAA,0BAA0B,GAAG,4BAA4B,CAAC;AAE1D,QAAA,0BAA0B,GAAG,4BAA4B,CAAC"}
@@ -128,9 +128,8 @@ function findSarifFilesInDir(sarifPath) {
exports.findSarifFilesInDir = findSarifFilesInDir;
// Uploads a single sarif file or a directory of sarif files
// depending on what the path happens to refer to.
// Returns true iff the upload occurred and succeeded
async function uploadFromActions(sarifPath, logger) {
return await uploadFiles(getSarifFilePaths(sarifPath), (0, repository_1.parseRepositoryNwo)(util.getRequiredEnvParam("GITHUB_REPOSITORY")), await actionsUtil.getCommitOid(actionsUtil.getRequiredInput("checkout_path")), await actionsUtil.getRef(), await actionsUtil.getAnalysisKey(), actionsUtil.getOptionalInput("category"), util.getRequiredEnvParam("GITHUB_WORKFLOW"), workflow.getWorkflowRunID(), actionsUtil.getRequiredInput("checkout_path"), actionsUtil.getRequiredInput("matrix"), logger);
async function uploadFromActions(sarifPath, checkoutPath, category, logger) {
return await uploadFiles(getSarifFilePaths(sarifPath), (0, repository_1.parseRepositoryNwo)(util.getRequiredEnvParam("GITHUB_REPOSITORY")), await actionsUtil.getCommitOid(checkoutPath), await actionsUtil.getRef(), await actionsUtil.getAnalysisKey(), category, util.getRequiredEnvParam("GITHUB_WORKFLOW"), workflow.getWorkflowRunID(), checkoutPath, actionsUtil.getRequiredInput("matrix"), logger);
}
exports.uploadFromActions = uploadFromActions;
function getSarifFilePaths(sarifPath) {
@ -270,48 +269,92 @@ async function uploadFiles(sarifFiles, repositoryNwo, commitOid, ref, analysisKe
|
|||
}
|
||||
const STATUS_CHECK_FREQUENCY_MILLISECONDS = 5 * 1000;
|
||||
const STATUS_CHECK_TIMEOUT_MILLISECONDS = 2 * 60 * 1000;
|
||||
// Waits until either the analysis is successfully processed, a processing error is reported, or STATUS_CHECK_TIMEOUT_MILLISECONDS elapses.
|
||||
async function waitForProcessing(repositoryNwo, sarifID, logger) {
|
||||
/**
|
||||
 * Waits until either the analysis is successfully processed, a processing error
 * is reported, or `STATUS_CHECK_TIMEOUT_MILLISECONDS` elapses.
 *
 * If `isUnsuccessfulExecution` is passed, will throw an error if the analysis
 * processing does not produce a single error mentioning the unsuccessful
 * execution.
 */
async function waitForProcessing(repositoryNwo, sarifID, logger, options = {
    isUnsuccessfulExecution: false,
}) {
    logger.startGroup("Waiting for processing to finish");
    try {
        const client = api.getApiClient();
        const statusCheckingStarted = Date.now();
        // eslint-disable-next-line no-constant-condition
        while (true) {
            if (Date.now() >
                statusCheckingStarted + STATUS_CHECK_TIMEOUT_MILLISECONDS) {
                // If the analysis hasn't finished processing in the allotted time, we continue anyway rather than failing.
                // It's possible the analysis will eventually finish processing, but it's not worth spending more Actions time waiting.
                logger.warning("Timed out waiting for analysis to finish processing. Continuing.");
                break;
            }
            let response = undefined;
            try {
                response = await client.request("GET /repos/:owner/:repo/code-scanning/sarifs/:sarif_id", {
                    owner: repositoryNwo.owner,
                    repo: repositoryNwo.repo,
                    sarif_id: sarifID,
                });
            }
            catch (e) {
                logger.warning(`An error occurred checking the status of the delivery. ${e} It should still be processed in the background, but errors that occur during processing may not be reported.`);
                break;
            }
            const status = response.data.processing_status;
            logger.info(`Analysis upload status is ${status}.`);
            if (status === "pending") {
                logger.debug("Analysis processing is still pending...");
            }
            else if (options.isUnsuccessfulExecution) {
                // We expect a specific processing error for unsuccessful executions, so
                // handle these separately.
                handleProcessingResultForUnsuccessfulExecution(response, status, logger);
                break;
            }
            else if (status === "complete") {
                break;
            }
            else if (status === "failed") {
                throw new Error(`Code Scanning could not process the submitted SARIF file:\n${response.data.errors}`);
            }
            else {
                util.assertNever(status);
            }
            await util.delay(STATUS_CHECK_FREQUENCY_MILLISECONDS);
        }
    }
    finally {
        logger.endGroup();
    }
}
exports.waitForProcessing = waitForProcessing;
/**
 * Checks the processing result for an unsuccessful execution. Throws if the
 * result is not a failure with a single "unsuccessful execution" error.
 */
function handleProcessingResultForUnsuccessfulExecution(response, status, logger) {
    if (status === "failed" &&
        Array.isArray(response.data.errors) &&
        response.data.errors.length === 1 &&
        response.data.errors[0].toString().startsWith("unsuccessful execution")) {
        logger.debug("Successfully uploaded a SARIF file for the unsuccessful execution. Received expected " +
            '"unsuccessful execution" error, and no other errors.');
    }
    else {
        const shortMessage = "Failed to upload a SARIF file for the unsuccessful execution. Code scanning status " +
            "information for the repository may be out of date as a result.";
        const longMessage = shortMessage +
            (status === "failed"
                ? ` Processing errors: ${response.data.errors}`
                : ' Encountered no processing errors, but expected to receive an "unsuccessful execution" error.');
        logger.debug(longMessage);
        throw new Error(shortMessage);
    }
}
function validateUniqueCategory(sarif) {
    var _a, _b, _c;
    // duplicate categories are allowed in the same sarif file
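For context, a minimal sketch of how a caller might drive `waitForProcessing` together with `uploadFromActions`, based on the signatures in this diff; the wrapper function and its literal values are illustrative only, and mirror what `uploadFailedSarif` in `init-action-post-helper.ts` does later in this diff.

```typescript
// Illustrative sketch: upload a SARIF file for a failed run and poll until
// Code Scanning reports the expected "unsuccessful execution" error.
import { getActionsLogger } from "./logging";
import { parseRepositoryNwo } from "./repository";
import * as uploadLib from "./upload-lib";
import { getRequiredEnvParam } from "./util";

async function submitFailedRunSarif(sarifFile: string, checkoutPath: string) {
  const logger = getActionsLogger();
  const repositoryNwo = parseRepositoryNwo(
    getRequiredEnvParam("GITHUB_REPOSITORY")
  );
  const uploadResult = await uploadLib.uploadFromActions(
    sarifFile,
    checkoutPath,
    undefined, // category
    logger
  );
  // Throws unless processing ends with exactly one "unsuccessful execution" error.
  await uploadLib.waitForProcessing(
    repositoryNwo,
    uploadResult.sarifID,
    logger,
    { isUnsuccessfulExecution: true }
  );
}
```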
File diff suppressed because one or more lines are too long
|
@ -43,7 +43,7 @@ async function run() {
|
|||
return;
|
||||
}
|
||||
try {
|
||||
const uploadResult = await upload_lib.uploadFromActions(actionsUtil.getRequiredInput("sarif_file"), (0, logging_1.getActionsLogger)());
|
||||
const uploadResult = await upload_lib.uploadFromActions(actionsUtil.getRequiredInput("sarif_file"), actionsUtil.getRequiredInput("checkout_path"), actionsUtil.getOptionalInput("category"), (0, logging_1.getActionsLogger)());
|
||||
core.setOutput("sarif-id", uploadResult.sarifID);
|
||||
// We don't upload results in test mode, so don't wait for processing
|
||||
if ((0, util_1.isInTestMode)()) {
|
||||
|
|
|
@ -1 +1 @@
|
|||
{"version":3,"file":"upload-sarif-action.js","sourceRoot":"","sources":["../src/upload-sarif-action.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;AAAA,oDAAsC;AAEtC,4DAA8C;AAC9C,uCAA6C;AAC7C,6CAAkD;AAClD,yDAA2C;AAC3C,iCAKgB;AAEhB,8CAA8C;AAC9C,MAAM,GAAG,GAAG,OAAO,CAAC,iBAAiB,CAAC,CAAC;AAMvC,KAAK,UAAU,uBAAuB,CACpC,SAAe,EACf,WAA0C;IAE1C,MAAM,gBAAgB,GAAG,MAAM,WAAW,CAAC,sBAAsB,CAC/D,cAAc,EACd,SAAS,EACT,SAAS,CACV,CAAC;IACF,MAAM,YAAY,GAA4B;QAC5C,GAAG,gBAAgB;QACnB,GAAG,WAAW;KACf,CAAC;IACF,MAAM,WAAW,CAAC,gBAAgB,CAAC,YAAY,CAAC,CAAC;AACnD,CAAC;AAED,KAAK,UAAU,GAAG;IAChB,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;IAC7B,IAAA,4BAAqB,EAAC,GAAG,CAAC,OAAO,CAAC,CAAC;IACnC,MAAM,IAAA,yBAAkB,EAAC,GAAG,CAAC,OAAO,CAAC,CAAC;IACtC,IACE,CAAC,CAAC,MAAM,WAAW,CAAC,gBAAgB,CAClC,MAAM,WAAW,CAAC,sBAAsB,CACtC,cAAc,EACd,UAAU,EACV,SAAS,CACV,CACF,CAAC,EACF;QACA,OAAO;KACR;IAED,IAAI;QACF,MAAM,YAAY,GAAG,MAAM,UAAU,CAAC,iBAAiB,CACrD,WAAW,CAAC,gBAAgB,CAAC,YAAY,CAAC,EAC1C,IAAA,0BAAgB,GAAE,CACnB,CAAC;QACF,IAAI,CAAC,SAAS,CAAC,UAAU,EAAE,YAAY,CAAC,OAAO,CAAC,CAAC;QAEjD,qEAAqE;QACrE,IAAI,IAAA,mBAAY,GAAE,EAAE;YAClB,IAAI,CAAC,KAAK,CAAC,mDAAmD,CAAC,CAAC;SACjE;aAAM,IAAI,WAAW,CAAC,gBAAgB,CAAC,qBAAqB,CAAC,KAAK,MAAM,EAAE;YACzE,MAAM,UAAU,CAAC,iBAAiB,CAChC,IAAA,+BAAkB,EAAC,IAAA,0BAAmB,EAAC,mBAAmB,CAAC,CAAC,EAC5D,YAAY,CAAC,OAAO,EACpB,IAAA,0BAAgB,GAAE,CACnB,CAAC;SACH;QACD,MAAM,uBAAuB,CAAC,SAAS,EAAE,YAAY,CAAC,YAAY,CAAC,CAAC;KACrE;IAAC,OAAO,KAAK,EAAE;QACd,MAAM,OAAO,GAAG,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;QACvE,MAAM,KAAK,GAAG,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;QACnE,IAAI,CAAC,SAAS,CAAC,OAAO,CAAC,CAAC;QACxB,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;QACnB,MAAM,WAAW,CAAC,gBAAgB,CAChC,MAAM,WAAW,CAAC,sBAAsB,CACtC,cAAc,EACd,WAAW,CAAC,gBAAgB,CAAC,KAAK,CAAC,EACnC,SAAS,EACT,OAAO,EACP,KAAK,CACN,CACF,CAAC;QACF,OAAO;KACR;AACH,CAAC;AAED,KAAK,UAAU,UAAU;IACvB,IAAI;QACF,MAAM,GAAG,EAAE,CAAC;KACb;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,sCAAsC,KAAK,EAAE,CAAC,CAAC;QAC9D,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;KACpB;AACH,CAAC;AAED,KAAK,UAAU,EAAE,CAAC"}
|
||||
{"version":3,"file":"upload-sarif-action.js","sourceRoot":"","sources":["../src/upload-sarif-action.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;AAAA,oDAAsC;AAEtC,4DAA8C;AAC9C,uCAA6C;AAC7C,6CAAkD;AAClD,yDAA2C;AAC3C,iCAKgB;AAEhB,8CAA8C;AAC9C,MAAM,GAAG,GAAG,OAAO,CAAC,iBAAiB,CAAC,CAAC;AAMvC,KAAK,UAAU,uBAAuB,CACpC,SAAe,EACf,WAA0C;IAE1C,MAAM,gBAAgB,GAAG,MAAM,WAAW,CAAC,sBAAsB,CAC/D,cAAc,EACd,SAAS,EACT,SAAS,CACV,CAAC;IACF,MAAM,YAAY,GAA4B;QAC5C,GAAG,gBAAgB;QACnB,GAAG,WAAW;KACf,CAAC;IACF,MAAM,WAAW,CAAC,gBAAgB,CAAC,YAAY,CAAC,CAAC;AACnD,CAAC;AAED,KAAK,UAAU,GAAG;IAChB,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;IAC7B,IAAA,4BAAqB,EAAC,GAAG,CAAC,OAAO,CAAC,CAAC;IACnC,MAAM,IAAA,yBAAkB,EAAC,GAAG,CAAC,OAAO,CAAC,CAAC;IACtC,IACE,CAAC,CAAC,MAAM,WAAW,CAAC,gBAAgB,CAClC,MAAM,WAAW,CAAC,sBAAsB,CACtC,cAAc,EACd,UAAU,EACV,SAAS,CACV,CACF,CAAC,EACF;QACA,OAAO;KACR;IAED,IAAI;QACF,MAAM,YAAY,GAAG,MAAM,UAAU,CAAC,iBAAiB,CACrD,WAAW,CAAC,gBAAgB,CAAC,YAAY,CAAC,EAC1C,WAAW,CAAC,gBAAgB,CAAC,eAAe,CAAC,EAC7C,WAAW,CAAC,gBAAgB,CAAC,UAAU,CAAC,EACxC,IAAA,0BAAgB,GAAE,CACnB,CAAC;QACF,IAAI,CAAC,SAAS,CAAC,UAAU,EAAE,YAAY,CAAC,OAAO,CAAC,CAAC;QAEjD,qEAAqE;QACrE,IAAI,IAAA,mBAAY,GAAE,EAAE;YAClB,IAAI,CAAC,KAAK,CAAC,mDAAmD,CAAC,CAAC;SACjE;aAAM,IAAI,WAAW,CAAC,gBAAgB,CAAC,qBAAqB,CAAC,KAAK,MAAM,EAAE;YACzE,MAAM,UAAU,CAAC,iBAAiB,CAChC,IAAA,+BAAkB,EAAC,IAAA,0BAAmB,EAAC,mBAAmB,CAAC,CAAC,EAC5D,YAAY,CAAC,OAAO,EACpB,IAAA,0BAAgB,GAAE,CACnB,CAAC;SACH;QACD,MAAM,uBAAuB,CAAC,SAAS,EAAE,YAAY,CAAC,YAAY,CAAC,CAAC;KACrE;IAAC,OAAO,KAAK,EAAE;QACd,MAAM,OAAO,GAAG,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;QACvE,MAAM,KAAK,GAAG,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;QACnE,IAAI,CAAC,SAAS,CAAC,OAAO,CAAC,CAAC;QACxB,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;QACnB,MAAM,WAAW,CAAC,gBAAgB,CAChC,MAAM,WAAW,CAAC,sBAAsB,CACtC,cAAc,EACd,WAAW,CAAC,gBAAgB,CAAC,KAAK,CAAC,EACnC,SAAS,EACT,OAAO,EACP,KAAK,CACN,CACF,CAAC;QACF,OAAO;KACR;AACH,CAAC;AAED,KAAK,UAAU,UAAU;IACvB,IAAI;QACF,MAAM,GAAG,EAAE,CAAC;KACb;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,sCAAsC,KAAK,EAAE,CAAC,CAAC;QAC9D,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;KACpB;AACH,CAAC;AAED,KAAK,UAAU,EAAE,CAAC"}
|
|
@ -22,7 +22,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
|
|||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.shouldBypassToolcache = exports.isHostedRunner = exports.checkForTimeout = exports.withTimeout = exports.tryGetFolderBytes = exports.listFolder = exports.doesDirectoryExist = exports.logCodeScanningConfigInCli = exports.useCodeScanningConfigInCli = exports.isInTestMode = exports.checkActionVersion = exports.getMlPoweredJsQueriesStatus = exports.getMlPoweredJsQueriesPack = exports.ML_POWERED_JS_QUERIES_PACK_NAME = exports.isGoodVersion = exports.delay = exports.bundleDb = exports.codeQlVersionAbove = exports.getCachedCodeQlVersion = exports.cacheCodeQlVersion = exports.isHTTPError = exports.UserError = exports.HTTPError = exports.getRequiredEnvParam = exports.enrichEnvironment = exports.initializeEnvironment = exports.EnvVar = exports.assertNever = exports.apiVersionInRange = exports.DisallowedAPIVersionReason = exports.checkGitHubVersionInRange = exports.getGitHubVersion = exports.GitHubVariant = exports.parseGitHubUrl = exports.getCodeQLDatabasePath = exports.getThreadsFlag = exports.getThreadsFlagValue = exports.getAddSnippetsFlag = exports.getMemoryFlag = exports.getMemoryFlagValue = exports.withTmpDir = exports.getToolNames = exports.getExtraOptionsEnvParam = exports.DID_AUTOBUILD_GO_ENV_VAR_NAME = exports.DEFAULT_DEBUG_DATABASE_NAME = exports.DEFAULT_DEBUG_ARTIFACT_NAME = exports.GITHUB_DOTCOM_URL = void 0;
|
||||
exports.parseMatrixInput = exports.shouldBypassToolcache = exports.isHostedRunner = exports.checkForTimeout = exports.withTimeout = exports.tryGetFolderBytes = exports.listFolder = exports.doesDirectoryExist = exports.logCodeScanningConfigInCli = exports.useCodeScanningConfigInCli = exports.isInTestMode = exports.checkActionVersion = exports.getMlPoweredJsQueriesStatus = exports.getMlPoweredJsQueriesPack = exports.ML_POWERED_JS_QUERIES_PACK_NAME = exports.isGoodVersion = exports.delay = exports.bundleDb = exports.codeQlVersionAbove = exports.getCachedCodeQlVersion = exports.cacheCodeQlVersion = exports.isHTTPError = exports.UserError = exports.HTTPError = exports.getRequiredEnvParam = exports.enrichEnvironment = exports.initializeEnvironment = exports.EnvVar = exports.assertNever = exports.apiVersionInRange = exports.DisallowedAPIVersionReason = exports.checkGitHubVersionInRange = exports.getGitHubVersion = exports.GitHubVariant = exports.parseGitHubUrl = exports.getCodeQLDatabasePath = exports.getThreadsFlag = exports.getThreadsFlagValue = exports.getAddSnippetsFlag = exports.getMemoryFlag = exports.getMemoryFlagValue = exports.withTmpDir = exports.getToolNames = exports.getExtraOptionsEnvParam = exports.DID_AUTOBUILD_GO_ENV_VAR_NAME = exports.DEFAULT_DEBUG_DATABASE_NAME = exports.DEFAULT_DEBUG_ARTIFACT_NAME = exports.GITHUB_DOTCOM_URL = void 0;
|
||||
const fs = __importStar(require("fs"));
|
||||
const os = __importStar(require("os"));
|
||||
const path = __importStar(require("path"));
|
||||
|
@ -750,4 +750,11 @@ async function shouldBypassToolcache(featuresEnablement, codeqlUrl, languagesInp
|
|||
return bypass;
|
||||
}
|
||||
exports.shouldBypassToolcache = shouldBypassToolcache;
|
||||
function parseMatrixInput(matrixInput) {
|
||||
if (matrixInput === undefined || matrixInput === "null") {
|
||||
return undefined;
|
||||
}
|
||||
return JSON.parse(matrixInput);
|
||||
}
|
||||
exports.parseMatrixInput = parseMatrixInput;
|
||||
//# sourceMappingURL=util.js.map
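As a usage note for the new helper: the `matrix` Actions input arrives as a JSON string, or the literal string "null" when the job has no matrix. The call below is hypothetical; the literal values are made up for illustration.

```typescript
import { parseMatrixInput } from "./util";

// A job with a matrix passes it through as serialized JSON.
parseMatrixInput('{"language": "javascript", "os": "ubuntu-latest"}');
// => { language: "javascript", os: "ubuntu-latest" }

// Jobs without a matrix pass the string "null", which is treated as "no matrix".
parseMatrixInput("null"); // => undefined
parseMatrixInput(undefined); // => undefined
```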
|
File diff suppressed because one or more lines are too long
|
@ -19,7 +19,7 @@ var __importStar = (this && this.__importStar) || function (mod) {
|
|||
return result;
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.getCategoryInputOrThrow = exports.getWorkflowRunID = exports.getWorkflowPath = exports.getWorkflow = exports.formatWorkflowCause = exports.formatWorkflowErrors = exports.validateWorkflow = exports.getWorkflowErrors = exports.WorkflowErrors = exports.patternIsSuperset = void 0;
|
||||
exports.getCheckoutPathInputOrThrow = exports.getUploadInputOrThrow = exports.getCategoryInputOrThrow = exports.getWorkflowRunID = exports.getWorkflowPath = exports.getWorkflow = exports.formatWorkflowCause = exports.formatWorkflowErrors = exports.validateWorkflow = exports.getWorkflowErrors = exports.WorkflowErrors = exports.patternIsSuperset = void 0;
|
||||
const fs = __importStar(require("fs"));
|
||||
const path = __importStar(require("path"));
|
||||
const core = __importStar(require("@actions/core"));
|
||||
|
@ -264,34 +264,48 @@ function getStepsCallingAction(job, actionName) {
|
|||
* determine that no such input is passed to the Action.
|
||||
*/
|
||||
function getInputOrThrow(workflow, jobName, actionName, inputName, matrixVars) {
|
||||
var _a;
|
||||
const preamble = `Could not get ${inputName} input to ${actionName} since`;
|
||||
if (!workflow.jobs) {
|
||||
throw new Error(`Could not get ${inputName} input to ${actionName} since the workflow has no jobs.`);
|
||||
throw new Error(`${preamble} the workflow has no jobs.`);
|
||||
}
|
||||
if (!workflow.jobs[jobName]) {
|
||||
throw new Error(`Could not get ${inputName} input to ${actionName} since the workflow has no job named ${jobName}.`);
|
||||
throw new Error(`${preamble} the workflow has no job named ${jobName}.`);
|
||||
}
|
||||
const inputs = getStepsCallingAction(workflow.jobs[jobName], actionName)
|
||||
.map((step) => { var _a; return (_a = step.with) === null || _a === void 0 ? void 0 : _a[inputName]; })
|
||||
.filter((input) => input !== undefined)
|
||||
.map((input) => input);
|
||||
if (inputs.length === 0) {
|
||||
return undefined;
|
||||
const stepsCallingAction = getStepsCallingAction(workflow.jobs[jobName], actionName);
|
||||
if (stepsCallingAction.length === 0) {
|
||||
throw new Error(`${preamble} the ${jobName} job does not call ${actionName}.`);
|
||||
}
|
||||
if (!inputs.every((input) => input === inputs[0])) {
|
||||
throw new Error(`Could not get ${inputName} input to ${actionName} since there were multiple steps calling ` +
|
||||
`${actionName} with different values for ${inputName}.`);
|
||||
else if (stepsCallingAction.length > 1) {
|
||||
throw new Error(`${preamble} the ${jobName} job calls ${actionName} multiple times.`);
|
||||
}
|
||||
// Make a basic attempt to substitute matrix variables
|
||||
// First normalize by removing whitespace
|
||||
let input = inputs[0].replace(/\${{\s+/, "${{").replace(/\s+}}/, "}}");
|
||||
for (const [key, value] of Object.entries(matrixVars)) {
|
||||
input = input.replace(`\${{matrix.${key}}}`, value);
|
||||
let input = (_a = stepsCallingAction[0].with) === null || _a === void 0 ? void 0 : _a[inputName];
|
||||
if (input !== undefined && matrixVars !== undefined) {
|
||||
// Make a basic attempt to substitute matrix variables
|
||||
// First normalize by removing whitespace
|
||||
input = input.replace(/\${{\s+/, "${{").replace(/\s+}}/, "}}");
|
||||
for (const [key, value] of Object.entries(matrixVars)) {
|
||||
input = input.replace(`\${{matrix.${key}}}`, value);
|
||||
}
|
||||
}
|
||||
if (input.includes("${{")) {
|
||||
if (input !== undefined && input.includes("${{")) {
|
||||
throw new Error(`Could not get ${inputName} input to ${actionName} since it contained an unrecognized dynamic value.`);
|
||||
}
|
||||
return input;
|
||||
}
|
||||
/**
|
||||
* Get the expected name of the analyze Action.
|
||||
*
|
||||
* This allows us to test workflow parsing functionality as a CodeQL Action PR check.
|
||||
*/
|
||||
function getAnalyzeActionName() {
|
||||
if ((0, util_1.getRequiredEnvParam)("GITHUB_REPOSITORY") === "github/codeql-action") {
|
||||
return "./analyze";
|
||||
}
|
||||
else {
|
||||
return "github/codeql-action/analyze";
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Makes a best effort attempt to retrieve the category input for the particular job,
|
||||
* given a set of matrix variables.
|
||||
|
@ -302,7 +316,35 @@ function getInputOrThrow(workflow, jobName, actionName, inputName, matrixVars) {
|
|||
* @throws an error if the category input could not be determined
|
||||
*/
|
||||
function getCategoryInputOrThrow(workflow, jobName, matrixVars) {
|
||||
return getInputOrThrow(workflow, jobName, "github/codeql-action/analyze", "category", matrixVars);
|
||||
return getInputOrThrow(workflow, jobName, getAnalyzeActionName(), "category", matrixVars);
|
||||
}
|
||||
exports.getCategoryInputOrThrow = getCategoryInputOrThrow;
|
||||
/**
|
||||
* Makes a best effort attempt to retrieve the upload input for the particular job,
|
||||
* given a set of matrix variables.
|
||||
*
|
||||
* Typically you'll want to wrap this function in a try/catch block and handle the error.
|
||||
*
|
||||
* @returns the upload input
|
||||
* @throws an error if the upload input could not be determined
|
||||
*/
|
||||
function getUploadInputOrThrow(workflow, jobName, matrixVars) {
|
||||
return (getInputOrThrow(workflow, jobName, getAnalyzeActionName(), "upload", matrixVars) || "true" // if unspecified, upload defaults to true
|
||||
);
|
||||
}
|
||||
exports.getUploadInputOrThrow = getUploadInputOrThrow;
|
||||
/**
|
||||
* Makes a best effort attempt to retrieve the checkout_path input for the
|
||||
* particular job, given a set of matrix variables.
|
||||
*
|
||||
* Typically you'll want to wrap this function in a try/catch block and handle the error.
|
||||
*
|
||||
* @returns the checkout_path input
|
||||
* @throws an error if the checkout_path input could not be determined
|
||||
*/
|
||||
function getCheckoutPathInputOrThrow(workflow, jobName, matrixVars) {
|
||||
return (getInputOrThrow(workflow, jobName, getAnalyzeActionName(), "checkout_path", matrixVars) || (0, util_1.getRequiredEnvParam)("GITHUB_WORKSPACE") // if unspecified, checkout_path defaults to ${{ github.workspace }}
|
||||
);
|
||||
}
|
||||
exports.getCheckoutPathInputOrThrow = getCheckoutPathInputOrThrow;
|
||||
//# sourceMappingURL=workflow.js.map
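A hedged sketch of how the new input helpers can be used against a parsed workflow file. The workflow YAML, repository name, and matrix values below are illustrative; the expected results follow from the defaulting logic above ("true" for `upload`, `$GITHUB_WORKSPACE` for `checkout_path`).

```typescript
import * as yaml from "js-yaml";
import {
  getCategoryInputOrThrow,
  getCheckoutPathInputOrThrow,
  getUploadInputOrThrow,
  Workflow,
} from "./workflow";

// getAnalyzeActionName() and the checkout_path default read these variables.
process.env["GITHUB_REPOSITORY"] = "my-org/my-repo";
process.env["GITHUB_WORKSPACE"] = "/home/runner/work/my-repo/my-repo";

const workflow = yaml.load(`
jobs:
  analysis:
    runs-on: ubuntu-latest
    steps:
      - uses: github/codeql-action/analyze@v2
        with:
          category: "/language:\${{ matrix.language }}"
`) as Workflow;

const matrixVars = { language: "javascript" };
getCategoryInputOrThrow(workflow, "analysis", matrixVars); // "/language:javascript"
getUploadInputOrThrow(workflow, "analysis", matrixVars); // "true" (default when unspecified)
getCheckoutPathInputOrThrow(workflow, "analysis", matrixVars); // the GITHUB_WORKSPACE value
```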
|
File diff suppressed because one or more lines are too long
|
@ -356,6 +356,7 @@ function errorCodes(actual, expected) {
|
|||
`)), []));
|
||||
});
|
||||
(0, ava_1.default)("getCategoryInputOrThrow returns category for simple workflow with category", (t) => {
|
||||
process.env["GITHUB_REPOSITORY"] = "github/codeql-action-fake-repository";
|
||||
t.is((0, workflow_1.getCategoryInputOrThrow)(yaml.load(`
|
||||
jobs:
|
||||
analysis:
|
||||
|
@ -369,6 +370,7 @@ function errorCodes(actual, expected) {
|
|||
`), "analysis", {}), "some-category");
|
||||
});
|
||||
(0, ava_1.default)("getCategoryInputOrThrow returns undefined for simple workflow without category", (t) => {
|
||||
process.env["GITHUB_REPOSITORY"] = "github/codeql-action-fake-repository";
|
||||
t.is((0, workflow_1.getCategoryInputOrThrow)(yaml.load(`
|
||||
jobs:
|
||||
analysis:
|
||||
|
@ -380,6 +382,7 @@ function errorCodes(actual, expected) {
|
|||
`), "analysis", {}), undefined);
|
||||
});
|
||||
(0, ava_1.default)("getCategoryInputOrThrow returns category for workflow with multiple jobs", (t) => {
|
||||
process.env["GITHUB_REPOSITORY"] = "github/codeql-action-fake-repository";
|
||||
t.is((0, workflow_1.getCategoryInputOrThrow)(yaml.load(`
|
||||
jobs:
|
||||
foo:
|
||||
|
@ -403,6 +406,7 @@ function errorCodes(actual, expected) {
|
|||
`), "bar", {}), "bar-category");
|
||||
});
|
||||
(0, ava_1.default)("getCategoryInputOrThrow finds category for workflow with language matrix", (t) => {
|
||||
process.env["GITHUB_REPOSITORY"] = "github/codeql-action-fake-repository";
|
||||
t.is((0, workflow_1.getCategoryInputOrThrow)(yaml.load(`
|
||||
jobs:
|
||||
analysis:
|
||||
|
@ -421,6 +425,7 @@ function errorCodes(actual, expected) {
|
|||
`), "analysis", { language: "javascript" }), "/language:javascript");
|
||||
});
|
||||
(0, ava_1.default)("getCategoryInputOrThrow throws error for workflow with dynamic category", (t) => {
|
||||
process.env["GITHUB_REPOSITORY"] = "github/codeql-action-fake-repository";
|
||||
t.throws(() => (0, workflow_1.getCategoryInputOrThrow)(yaml.load(`
|
||||
jobs:
|
||||
analysis:
|
||||
|
@ -435,7 +440,8 @@ function errorCodes(actual, expected) {
|
|||
"an unrecognized dynamic value.",
|
||||
});
|
||||
});
|
||||
(0, ava_1.default)("getCategoryInputOrThrow throws error for workflow with multiple categories", (t) => {
|
||||
(0, ava_1.default)("getCategoryInputOrThrow throws error for workflow with multiple calls to analyze", (t) => {
|
||||
process.env["GITHUB_REPOSITORY"] = "github/codeql-action-fake-repository";
|
||||
t.throws(() => (0, workflow_1.getCategoryInputOrThrow)(yaml.load(`
|
||||
jobs:
|
||||
analysis:
|
||||
|
@ -450,8 +456,8 @@ function errorCodes(actual, expected) {
|
|||
with:
|
||||
category: another-category
|
||||
`), "analysis", {}), {
|
||||
message: "Could not get category input to github/codeql-action/analyze since there were multiple steps " +
|
||||
"calling github/codeql-action/analyze with different values for category.",
|
||||
message: "Could not get category input to github/codeql-action/analyze since the analysis job " +
|
||||
"calls github/codeql-action/analyze multiple times.",
|
||||
});
|
||||
});
|
||||
//# sourceMappingURL=workflow.test.js.map
|
File diff suppressed because one or more lines are too long
|
@ -2,11 +2,11 @@ name: "Export file baseline information"
|
|||
description: "Tests that file baseline information is exported when the feature is enabled"
|
||||
versions: ["nightly-latest"]
|
||||
steps:
|
||||
- uses: swift-actions/setup-swift@5cdaa9161ad1f55ae39a5ea1784ef96de72f95d9
|
||||
- uses: swift-actions/setup-swift@194625b58a582570f61cc707c3b558086c26b723
|
||||
# Windows doesn't support Swift, and only macOS latest and nightly-latest support Swift 5.7.1.
|
||||
if: runner.os == 'Linux' || (runner.os == 'macOS' && matrix.version == 'cached')
|
||||
with:
|
||||
swift-version: "5.7"
|
||||
swift-version: "5.7.0"
|
||||
- uses: ./../action/init
|
||||
with:
|
||||
languages: javascript
|
||||
|
|
|
@ -4,11 +4,11 @@ operatingSystems: ["ubuntu", "macos"]
|
|||
env:
|
||||
CODEQL_ENABLE_EXPERIMENTAL_FEATURES_SWIFT: "true" # Remove when Swift is GA.
|
||||
steps:
|
||||
- uses: swift-actions/setup-swift@5cdaa9161ad1f55ae39a5ea1784ef96de72f95d9
|
||||
- uses: swift-actions/setup-swift@194625b58a582570f61cc707c3b558086c26b723
|
||||
# Only macOS latest and nightly-latest support Swift 5.7.1
|
||||
if: runner.os == 'Linux' || matrix.version == 'cached'
|
||||
with:
|
||||
swift-version: "5.7"
|
||||
swift-version: "5.7.0"
|
||||
|
||||
- uses: ./../action/init
|
||||
with:
|
||||
|
|
|
@ -0,0 +1,34 @@
|
|||
name: Submit SARIF after failure
|
||||
description: Check that a SARIF file is submitted for the workflow run if it fails
|
||||
versions: ["latest", "cached", "nightly-latest"]
|
||||
operatingSystems: ["ubuntu"]
|
||||
|
||||
env:
|
||||
# Internal-only environment variable used to indicate that the post-init Action
|
||||
# should expect to upload a SARIF file for the failed run.
|
||||
CODEQL_ACTION_EXPECT_UPLOAD_FAILED_SARIF: true
|
||||
# Make sure the uploading SARIF files feature is enabled.
|
||||
CODEQL_ACTION_UPLOAD_FAILED_SARIF: true
|
||||
# Upload the failed SARIF file as an integration test of the API endpoint.
|
||||
CODEQL_ACTION_TEST_MODE: false
|
||||
# Mark telemetry for this workflow so it can be treated separately.
|
||||
CODEQL_ACTION_TESTING_ENVIRONMENT: codeql-action-pr-checks
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: ./init
|
||||
with:
|
||||
languages: javascript
|
||||
- name: Fail
|
||||
# We want this job to pass if the Action correctly uploads the SARIF file for
|
||||
# the failed run.
|
||||
# Setting this step to continue on error means that it is marked as completing
|
||||
# successfully, so will not fail the job.
|
||||
continue-on-error: true
|
||||
run: exit 1
|
||||
- uses: ./analyze
|
||||
# In a real workflow, this step wouldn't run. Since we used `continue-on-error`
|
||||
# above, we manually disable it with an `if` condition.
|
||||
if: false
|
||||
with:
|
||||
category: "/test-codeql-version:${{ matrix.version }}"
|
|
@ -6,11 +6,11 @@ operatingSystems: ["macos"]
|
|||
env:
|
||||
CODEQL_ENABLE_EXPERIMENTAL_FEATURES_SWIFT: "true"
|
||||
steps:
|
||||
- uses: swift-actions/setup-swift@5cdaa9161ad1f55ae39a5ea1784ef96de72f95d9
|
||||
- uses: swift-actions/setup-swift@194625b58a582570f61cc707c3b558086c26b723
|
||||
# Only macOS latest and nightly-latest support Swift 5.7.1
|
||||
if: runner.os == 'Linux' || matrix.version == 'cached'
|
||||
with:
|
||||
swift-version: "5.7"
|
||||
swift-version: "5.7.0"
|
||||
- uses: ./../action/init
|
||||
with:
|
||||
languages: swift
|
||||
|
|
|
@ -6,11 +6,11 @@ env:
|
|||
CODEQL_ENABLE_EXPERIMENTAL_FEATURES_SWIFT: "true"
|
||||
DOTNET_GENERATE_ASPNET_CERTIFICATE: "false"
|
||||
steps:
|
||||
- uses: swift-actions/setup-swift@5cdaa9161ad1f55ae39a5ea1784ef96de72f95d9
|
||||
- uses: swift-actions/setup-swift@194625b58a582570f61cc707c3b558086c26b723
|
||||
# Only macOS latest and nightly-latest support Swift 5.7.1
|
||||
if: runner.os == 'Linux' || matrix.version == 'cached'
|
||||
with:
|
||||
swift-version: "5.7"
|
||||
swift-version: "5.7.0"
|
||||
- uses: ./../action/init
|
||||
with:
|
||||
languages: swift
|
||||
|
|
|
@ -115,7 +115,8 @@ for file in os.listdir('checks'):
|
|||
checkJob[key] = checkSpecification[key]
|
||||
|
||||
checkJob['env'] = checkJob.get('env', {})
|
||||
checkJob['env']['CODEQL_ACTION_TEST_MODE'] = True
|
||||
if 'CODEQL_ACTION_TEST_MODE' not in checkJob['env']:
|
||||
checkJob['env']['CODEQL_ACTION_TEST_MODE'] = True
|
||||
checkName = file[:len(file) - 4]
|
||||
|
||||
with open(f"../.github/workflows/__{checkName}.yml", 'w') as output_stream:
|
||||
|
|
|
@ -33,10 +33,17 @@ def _check_output(command, extra_env={}):
|
|||
|
||||
def install_packages_with_poetry():
|
||||
|
||||
# To handle poetry 1.2, which started to use keyring interaction MUCH more, we need
|
||||
# add a workaround. See
|
||||
# https://github.com/python-poetry/poetry/issues/2692#issuecomment-1235683370
|
||||
extra_poetry_env = {"PYTHON_KEYRING_BACKEND": "keyring.backends.null.Keyring"}
|
||||
extra_poetry_env = {
|
||||
# To handle poetry 1.2, which started to use keyring interaction MUCH more, we need
|
||||
# add a workaround. See
|
||||
# https://github.com/python-poetry/poetry/issues/2692#issuecomment-1235683370
|
||||
"PYTHON_KEYRING_BACKEND": "keyring.backends.null.Keyring",
|
||||
# Projects that specify `virtualenvs.in-project = true` in their poetry.toml
|
||||
# would get the venv created inside the repo directory, which would cause CodeQL
|
||||
# to consider it as user-written code. We don't want this to happen. see
|
||||
# https://python-poetry.org/docs/configuration/#virtualenvsin-project
|
||||
"POETRY_VIRTUALENVS_IN_PROJECT": "False",
|
||||
}
|
||||
|
||||
command = [sys.executable, '-m', 'poetry']
|
||||
if sys.platform.startswith('win32'):
|
||||
|
|
|
@ -0,0 +1,2 @@
|
|||
[virtualenvs]
|
||||
in-project = true
|
|
@ -17,6 +17,7 @@ import {
|
|||
GITHUB_DOTCOM_URL,
|
||||
isHTTPError,
|
||||
isInTestMode,
|
||||
parseMatrixInput,
|
||||
UserError,
|
||||
} from "./util";
|
||||
import { getWorkflowPath } from "./workflow";
|
||||
|
@ -192,10 +193,10 @@ export function computeAutomationID(
|
|||
): string {
|
||||
let automationID = `${analysis_key}/`;
|
||||
|
||||
// the id has to be deterministic so we sort the fields
|
||||
if (environment !== undefined && environment !== "null") {
|
||||
const environmentObject = JSON.parse(environment);
|
||||
for (const entry of Object.entries(environmentObject).sort()) {
|
||||
const matrix = parseMatrixInput(environment);
|
||||
if (matrix !== undefined) {
|
||||
// the id has to be deterministic so we sort the fields
|
||||
for (const entry of Object.entries(matrix).sort()) {
|
||||
if (typeof entry[1] === "string") {
|
||||
automationID += `${entry[0]}:${entry[1]}/`;
|
||||
} else {
|
||||
|
|
|
@ -24,6 +24,7 @@ import { Features } from "./feature-flags";
|
|||
import { Language } from "./languages";
|
||||
import { getActionsLogger, Logger } from "./logging";
|
||||
import { parseRepositoryNwo } from "./repository";
|
||||
import { CODEQL_ACTION_ANALYZE_DID_UPLOAD_SARIF } from "./shared-environment";
|
||||
import { getTotalCacheSize, uploadTrapCaches } from "./trap-caching";
|
||||
import * as upload_lib from "./upload-lib";
|
||||
import { UploadResult } from "./upload-lib";
|
||||
|
@ -271,8 +272,14 @@ async function run() {
|
|||
core.setOutput("db-locations", dbLocations);
|
||||
|
||||
if (runStats && actionsUtil.getRequiredInput("upload") === "true") {
|
||||
uploadResult = await upload_lib.uploadFromActions(outputDir, logger);
|
||||
uploadResult = await upload_lib.uploadFromActions(
|
||||
outputDir,
|
||||
actionsUtil.getRequiredInput("checkout_path"),
|
||||
actionsUtil.getOptionalInput("category"),
|
||||
logger
|
||||
);
|
||||
core.setOutput("sarif-id", uploadResult.sarifID);
|
||||
core.exportVariable(CODEQL_ACTION_ANALYZE_DID_UPLOAD_SARIF, "true");
|
||||
} else {
|
||||
logger.info("Not uploading results");
|
||||
}
|
||||
|
|
|
@ -355,8 +355,7 @@ export async function runQueries(
|
|||
addSnippetsFlag,
|
||||
threadsFlag,
|
||||
enableDebugLogging ? "-vv" : "-v",
|
||||
automationDetailsId,
|
||||
featureEnablement
|
||||
automationDetailsId
|
||||
);
|
||||
}
|
||||
|
||||
|
|
|
@ -445,16 +445,7 @@ test("databaseInterpretResults() does not set --sarif-add-query-help for 2.7.0",
|
|||
sinon.stub(codeqlObject, "getVersion").resolves("2.7.0");
|
||||
// safeWhich throws because of the test CodeQL object.
|
||||
sinon.stub(safeWhich, "safeWhich").resolves("");
|
||||
await codeqlObject.databaseInterpretResults(
|
||||
"",
|
||||
[],
|
||||
"",
|
||||
"",
|
||||
"",
|
||||
"-v",
|
||||
"",
|
||||
createFeatures([])
|
||||
);
|
||||
await codeqlObject.databaseInterpretResults("", [], "", "", "", "-v", "");
|
||||
t.false(
|
||||
runnerConstructorStub.firstCall.args[1].includes("--sarif-add-query-help"),
|
||||
"--sarif-add-query-help should be absent, but it is present"
|
||||
|
@ -467,16 +458,7 @@ test("databaseInterpretResults() sets --sarif-add-query-help for 2.7.1", async (
|
|||
sinon.stub(codeqlObject, "getVersion").resolves("2.7.1");
|
||||
// safeWhich throws because of the test CodeQL object.
|
||||
sinon.stub(safeWhich, "safeWhich").resolves("");
|
||||
await codeqlObject.databaseInterpretResults(
|
||||
"",
|
||||
[],
|
||||
"",
|
||||
"",
|
||||
"",
|
||||
"-v",
|
||||
"",
|
||||
createFeatures([])
|
||||
);
|
||||
await codeqlObject.databaseInterpretResults("", [], "", "", "", "-v", "");
|
||||
t.true(
|
||||
runnerConstructorStub.firstCall.args[1].includes("--sarif-add-query-help"),
|
||||
"--sarif-add-query-help should be present, but it is absent"
|
||||
|
@ -865,25 +847,13 @@ test("does not use injected config", async (t: ExecutionContext<unknown>) => {
|
|||
}
|
||||
});
|
||||
|
||||
test("databaseInterpretResults() sets --sarif-add-baseline-file-info when feature enabled", async (t) => {
|
||||
test("databaseInterpretResults() sets --sarif-add-baseline-file-info for 2.11.3", async (t) => {
|
||||
const runnerConstructorStub = stubToolRunnerConstructor();
|
||||
const codeqlObject = await codeql.getCodeQLForTesting();
|
||||
// We need to set a CodeQL version such that running `databaseInterpretResults` does not crash.
|
||||
// The version of CodeQL is checked separately to determine feature enablement, and does not
|
||||
// otherwise impact this test, so set it to 0.0.0.
|
||||
sinon.stub(codeqlObject, "getVersion").resolves("0.0.0");
|
||||
sinon.stub(codeqlObject, "getVersion").resolves("2.11.3");
|
||||
// safeWhich throws because of the test CodeQL object.
|
||||
sinon.stub(safeWhich, "safeWhich").resolves("");
|
||||
await codeqlObject.databaseInterpretResults(
|
||||
"",
|
||||
[],
|
||||
"",
|
||||
"",
|
||||
"",
|
||||
"-v",
|
||||
"",
|
||||
createFeatures([Feature.FileBaselineInformationEnabled])
|
||||
);
|
||||
await codeqlObject.databaseInterpretResults("", [], "", "", "", "-v", "");
|
||||
t.true(
|
||||
runnerConstructorStub.firstCall.args[1].includes(
|
||||
"--sarif-add-baseline-file-info"
|
||||
|
@ -892,25 +862,13 @@ test("databaseInterpretResults() sets --sarif-add-baseline-file-info when featur
|
|||
);
|
||||
});
|
||||
|
||||
test("databaseInterpretResults() does not set --sarif-add-baseline-file-info if feature disabled", async (t) => {
|
||||
test("databaseInterpretResults() does not set --sarif-add-baseline-file-info for 2.11.2", async (t) => {
|
||||
const runnerConstructorStub = stubToolRunnerConstructor();
|
||||
const codeqlObject = await codeql.getCodeQLForTesting();
|
||||
// We need to set a CodeQL version such that running `databaseInterpretResults` does not crash.
|
||||
// The version of CodeQL is checked upstream to determine feature enablement, so it does not
|
||||
// affect this test.
|
||||
sinon.stub(codeqlObject, "getVersion").resolves("0.0.0");
|
||||
sinon.stub(codeqlObject, "getVersion").resolves("2.11.2");
|
||||
// safeWhich throws because of the test CodeQL object.
|
||||
sinon.stub(safeWhich, "safeWhich").resolves("");
|
||||
await codeqlObject.databaseInterpretResults(
|
||||
"",
|
||||
[],
|
||||
"",
|
||||
"",
|
||||
"",
|
||||
"-v",
|
||||
"",
|
||||
createFeatures([])
|
||||
);
|
||||
await codeqlObject.databaseInterpretResults("", [], "", "", "", "-v", "");
|
||||
t.false(
|
||||
runnerConstructorStub.firstCall.args[1].includes(
|
||||
"--sarif-add-baseline-file-info"
|
||||
|
|
|
@ -15,7 +15,7 @@ import * as api from "./api-client";
|
|||
import { Config } from "./config-utils";
|
||||
import * as defaults from "./defaults.json"; // Referenced from codeql-action-sync-tool!
|
||||
import { errorMatchers } from "./error-matcher";
|
||||
import { Feature, FeatureEnablement } from "./feature-flags";
|
||||
import { FeatureEnablement } from "./feature-flags";
|
||||
import { isTracedLanguage, Language } from "./languages";
|
||||
import { Logger } from "./logging";
|
||||
import { toolrunnerErrorCatcher } from "./toolrunner-error-catcher";
|
||||
|
@ -172,13 +172,19 @@ export interface CodeQL {
|
|||
addSnippetsFlag: string,
|
||||
threadsFlag: string,
|
||||
verbosityFlag: string | undefined,
|
||||
automationDetailsId: string | undefined,
|
||||
featureEnablement: FeatureEnablement
|
||||
automationDetailsId: string | undefined
|
||||
): Promise<string>;
|
||||
/**
|
||||
* Run 'codeql database print-baseline'.
|
||||
*/
|
||||
databasePrintBaseline(databasePath: string): Promise<string>;
|
||||
/**
|
||||
* Run 'codeql diagnostics export'.
|
||||
*/
|
||||
diagnosticsExport(
|
||||
sarifFile: string,
|
||||
automationDetailsId: string | undefined
|
||||
): Promise<void>;
|
||||
}
|
||||
|
||||
export interface ResolveLanguagesOutput {
|
||||
|
@ -250,6 +256,7 @@ const CODEQL_VERSION_LUA_TRACER_CONFIG = "2.10.0";
|
|||
export const CODEQL_VERSION_CONFIG_FILES = "2.10.1";
|
||||
const CODEQL_VERSION_LUA_TRACING_GO_WINDOWS_FIXED = "2.10.4";
|
||||
export const CODEQL_VERSION_GHES_PACK_DOWNLOAD = "2.10.4";
|
||||
const CODEQL_VERSION_FILE_BASELINE_INFORMATION = "2.11.3";
|
||||
|
||||
/**
|
||||
* This variable controls using the new style of tracing from the CodeQL
|
||||
|
@ -634,6 +641,7 @@ export function setCodeQL(partialCodeql: Partial<CodeQL>): CodeQL {
|
|||
partialCodeql,
|
||||
"databasePrintBaseline"
|
||||
),
|
||||
diagnosticsExport: resolveFunction(partialCodeql, "diagnosticsExport"),
|
||||
};
|
||||
return cachedCodeQL;
|
||||
}
|
||||
|
@ -675,7 +683,7 @@ async function getCodeQLForCmd(
|
|||
cmd: string,
|
||||
checkVersion: boolean
|
||||
): Promise<CodeQL> {
|
||||
const codeql = {
|
||||
const codeql: CodeQL = {
|
||||
getPath() {
|
||||
return cmd;
|
||||
},
|
||||
|
@ -1025,8 +1033,7 @@ async function getCodeQLForCmd(
|
|||
addSnippetsFlag: string,
|
||||
threadsFlag: string,
|
||||
verbosityFlag: string,
|
||||
automationDetailsId: string | undefined,
|
||||
featureEnablement: FeatureEnablement
|
||||
automationDetailsId: string | undefined
|
||||
): Promise<string> {
|
||||
const codeqlArgs = [
|
||||
"database",
|
||||
|
@ -1047,9 +1054,9 @@ async function getCodeQLForCmd(
|
|||
codeqlArgs.push("--sarif-category", automationDetailsId);
|
||||
}
|
||||
if (
|
||||
await featureEnablement.getValue(
|
||||
Feature.FileBaselineInformationEnabled,
|
||||
this
|
||||
await util.codeQlVersionAbove(
|
||||
this,
|
||||
CODEQL_VERSION_FILE_BASELINE_INFORMATION
|
||||
)
|
||||
) {
|
||||
codeqlArgs.push("--sarif-add-baseline-file-info");
|
||||
|
@ -1156,6 +1163,22 @@ async function getCodeQLForCmd(
|
|||
];
|
||||
await new toolrunner.ToolRunner(cmd, args).exec();
|
||||
},
|
||||
async diagnosticsExport(
|
||||
sarifFile: string,
|
||||
automationDetailsId: string | undefined
|
||||
): Promise<void> {
|
||||
const args = [
|
||||
"diagnostics",
|
||||
"export",
|
||||
"--format=sarif-latest",
|
||||
`--output=${sarifFile}`,
|
||||
...getExtraOptionsFromEnv(["diagnostics", "export"]),
|
||||
];
|
||||
if (automationDetailsId !== undefined) {
|
||||
args.push("--sarif-category", automationDetailsId);
|
||||
}
|
||||
await new toolrunner.ToolRunner(cmd, args).exec();
|
||||
},
|
||||
};
|
||||
// To ensure that status reports include the CodeQL CLI version wherever
|
||||
// possible, we want to call getVersion(), which populates the version value
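A brief usage sketch for the new `diagnosticsExport` wrapper, mirroring how `uploadFailedSarif` in `init-action-post-helper.ts` calls it later in this diff; the wrapper function and file path here are illustrative.

```typescript
import { getCodeQL } from "./codeql";

// Illustrative: export a diagnostics-only SARIF file for a run that never
// reached the analyze step, tagging it with the analyze step's category.
async function exportFailedRunDiagnostics(
  codeQLCmd: string,
  category: string | undefined
) {
  const codeql = await getCodeQL(codeQLCmd);
  // Runs `codeql diagnostics export --format=sarif-latest --output=<file>`,
  // adding `--sarif-category` when a category is provided.
  await codeql.diagnosticsExport("../codeql-failed-run.sarif", category);
}
```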
|
||||
|
|
|
@ -16,9 +16,9 @@ export enum Feature {
|
|||
BypassToolcacheKotlinSwiftEnabled = "bypass_toolcache_kotlin_swift_enabled",
|
||||
CliConfigFileEnabled = "cli_config_file_enabled",
|
||||
DisableKotlinAnalysisEnabled = "disable_kotlin_analysis_enabled",
|
||||
FileBaselineInformationEnabled = "file_baseline_information_enabled",
|
||||
MlPoweredQueriesEnabled = "ml_powered_queries_enabled",
|
||||
TrapCachingEnabled = "trap_caching_enabled",
|
||||
UploadFailedSarifEnabled = "upload_failed_sarif_enabled",
|
||||
}
|
||||
|
||||
export const featureConfig: Record<
|
||||
|
@ -45,10 +45,6 @@ export const featureConfig: Record<
|
|||
envVar: "CODEQL_PASS_CONFIG_TO_CLI",
|
||||
minimumVersion: "2.11.1",
|
||||
},
|
||||
[Feature.FileBaselineInformationEnabled]: {
|
||||
envVar: "CODEQL_FILE_BASELINE_INFORMATION",
|
||||
minimumVersion: "2.11.3",
|
||||
},
|
||||
[Feature.MlPoweredQueriesEnabled]: {
|
||||
envVar: "CODEQL_ML_POWERED_QUERIES",
|
||||
minimumVersion: "2.7.5",
|
||||
|
@ -57,6 +53,10 @@ export const featureConfig: Record<
|
|||
envVar: "CODEQL_TRAP_CACHING",
|
||||
minimumVersion: undefined,
|
||||
},
|
||||
[Feature.UploadFailedSarifEnabled]: {
|
||||
envVar: "CODEQL_ACTION_UPLOAD_FAILED_SARIF",
|
||||
minimumVersion: "2.11.3",
|
||||
},
|
||||
};
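For orientation, a simplified sketch (not the actual `Features` implementation) of how an entry's `envVar` and `minimumVersion` fields could gate a feature locally; the helper below is hypothetical.

```typescript
import { CodeQL } from "./codeql";
import { Feature, featureConfig } from "./feature-flags";
import * as util from "./util";

// Hypothetical helper: a feature is only usable if it has not been explicitly
// disabled via its environment variable and the CodeQL CLI is new enough.
async function meetsLocalRequirements(
  feature: Feature,
  codeql: CodeQL
): Promise<boolean> {
  const { envVar, minimumVersion } = featureConfig[feature];
  if (process.env[envVar] === "false") {
    return false; // explicit opt-out
  }
  if (
    minimumVersion !== undefined &&
    !(await util.codeQlVersionAbove(codeql, minimumVersion))
  ) {
    return false; // CLI predates the feature
  }
  return true;
}
```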
|
||||
|
||||
/**
|
||||
|
|
|
@ -1,15 +1,27 @@
|
|||
import test from "ava";
|
||||
import test, { ExecutionContext } from "ava";
|
||||
import * as sinon from "sinon";
|
||||
|
||||
import * as actionsUtil from "./actions-util";
|
||||
import * as codeql from "./codeql";
|
||||
import * as configUtils from "./config-utils";
|
||||
import { Feature } from "./feature-flags";
|
||||
import * as initActionPostHelper from "./init-action-post-helper";
|
||||
import { setupTests } from "./testing-utils";
|
||||
import { getRunnerLogger } from "./logging";
|
||||
import { parseRepositoryNwo } from "./repository";
|
||||
import {
|
||||
createFeatures,
|
||||
getRecordingLogger,
|
||||
setupTests,
|
||||
} from "./testing-utils";
|
||||
import * as uploadLib from "./upload-lib";
|
||||
import * as util from "./util";
|
||||
import * as workflow from "./workflow";
|
||||
|
||||
setupTests(test);
|
||||
|
||||
test("post: init action with debug mode off", async (t) => {
|
||||
return await util.withTmpDir(async (tmpDir) => {
|
||||
process.env["GITHUB_REPOSITORY"] = "github/codeql-action-fake-repository";
|
||||
process.env["RUNNER_TEMP"] = tmpDir;
|
||||
|
||||
const gitHubVersion: util.GitHubVersion = {
|
||||
|
@ -29,7 +41,10 @@ test("post: init action with debug mode off", async (t) => {
|
|||
await initActionPostHelper.run(
|
||||
uploadDatabaseBundleSpy,
|
||||
uploadLogsSpy,
|
||||
printDebugLogsSpy
|
||||
printDebugLogsSpy,
|
||||
parseRepositoryNwo("github/codeql-action"),
|
||||
createFeatures([]),
|
||||
getRunnerLogger(true)
|
||||
);
|
||||
|
||||
t.assert(uploadDatabaseBundleSpy.notCalled);
|
||||
|
@ -40,6 +55,7 @@ test("post: init action with debug mode off", async (t) => {
|
|||
|
||||
test("post: init action with debug mode on", async (t) => {
|
||||
return await util.withTmpDir(async (tmpDir) => {
|
||||
process.env["GITHUB_REPOSITORY"] = "github/codeql-action-fake-repository";
|
||||
process.env["RUNNER_TEMP"] = tmpDir;
|
||||
|
||||
const gitHubVersion: util.GitHubVersion = {
|
||||
|
@ -59,7 +75,10 @@ test("post: init action with debug mode on", async (t) => {
|
|||
await initActionPostHelper.run(
|
||||
uploadDatabaseBundleSpy,
|
||||
uploadLogsSpy,
|
||||
printDebugLogsSpy
|
||||
printDebugLogsSpy,
|
||||
parseRepositoryNwo("github/codeql-action"),
|
||||
createFeatures([]),
|
||||
getRunnerLogger(true)
|
||||
);
|
||||
|
||||
t.assert(uploadDatabaseBundleSpy.called);
|
||||
|
@ -67,3 +86,117 @@ test("post: init action with debug mode on", async (t) => {
|
|||
t.assert(printDebugLogsSpy.called);
|
||||
});
|
||||
});
|
||||
|
||||
test("uploads failed SARIF run for typical workflow", async (t) => {
|
||||
const actionsWorkflow = createTestWorkflow([
|
||||
{
|
||||
name: "Checkout repository",
|
||||
uses: "actions/checkout@v3",
|
||||
},
|
||||
{
|
||||
name: "Initialize CodeQL",
|
||||
uses: "github/codeql-action/init@v2",
|
||||
with: {
|
||||
languages: "javascript",
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "Perform CodeQL Analysis",
|
||||
uses: "github/codeql-action/analyze@v2",
|
||||
with: {
|
||||
category: "my-category",
|
||||
},
|
||||
},
|
||||
]);
|
||||
await testFailedSarifUpload(t, actionsWorkflow, { category: "my-category" });
|
||||
});
|
||||
|
||||
test("uploading failed SARIF run fails when workflow does not reference github/codeql-action", async (t) => {
|
||||
const actionsWorkflow = createTestWorkflow([
|
||||
{
|
||||
name: "Checkout repository",
|
||||
uses: "actions/checkout@v3",
|
||||
},
|
||||
]);
|
||||
await t.throwsAsync(
|
||||
async () => await testFailedSarifUpload(t, actionsWorkflow)
|
||||
);
|
||||
});
|
||||
|
||||
function createTestWorkflow(
|
||||
steps: workflow.WorkflowJobStep[]
|
||||
): workflow.Workflow {
|
||||
return {
|
||||
name: "CodeQL",
|
||||
on: {
|
||||
push: {
|
||||
branches: ["main"],
|
||||
},
|
||||
pull_request: {
|
||||
branches: ["main"],
|
||||
},
|
||||
},
|
||||
jobs: {
|
||||
analyze: {
|
||||
name: "CodeQL Analysis",
|
||||
"runs-on": "ubuntu-latest",
|
||||
steps,
|
||||
},
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
async function testFailedSarifUpload(
|
||||
t: ExecutionContext<unknown>,
|
||||
actionsWorkflow: workflow.Workflow,
|
||||
{ category }: { category?: string } = {}
|
||||
): Promise<void> {
|
||||
const config = {
|
||||
codeQLCmd: "codeql",
|
||||
debugMode: true,
|
||||
languages: [],
|
||||
packs: [],
|
||||
} as unknown as configUtils.Config;
|
||||
const messages = [];
|
||||
process.env["GITHUB_JOB"] = "analyze";
|
||||
process.env["GITHUB_REPOSITORY"] = "github/codeql-action-fake-repository";
|
||||
process.env["GITHUB_WORKSPACE"] =
|
||||
"/home/runner/work/codeql-action/codeql-action";
|
||||
sinon.stub(actionsUtil, "getRequiredInput").withArgs("matrix").returns("{}");
|
||||
|
||||
const codeqlObject = await codeql.getCodeQLForTesting();
|
||||
sinon.stub(codeql, "getCodeQL").resolves(codeqlObject);
|
||||
const diagnosticsExportStub = sinon.stub(codeqlObject, "diagnosticsExport");
|
||||
|
||||
sinon.stub(workflow, "getWorkflow").resolves(actionsWorkflow);
|
||||
|
||||
const uploadFromActions = sinon.stub(uploadLib, "uploadFromActions");
|
||||
uploadFromActions.resolves({ sarifID: "42" } as uploadLib.UploadResult);
|
||||
const waitForProcessing = sinon.stub(uploadLib, "waitForProcessing");
|
||||
|
||||
await initActionPostHelper.uploadFailedSarif(
|
||||
config,
|
||||
parseRepositoryNwo("github/codeql-action"),
|
||||
createFeatures([Feature.UploadFailedSarifEnabled]),
|
||||
getRecordingLogger(messages)
|
||||
);
|
||||
t.deepEqual(messages, []);
|
||||
t.true(
|
||||
diagnosticsExportStub.calledOnceWith(sinon.match.string, category),
|
||||
`Actual args were: ${diagnosticsExportStub.args}`
|
||||
);
|
||||
t.true(
|
||||
uploadFromActions.calledOnceWith(
|
||||
sinon.match.string,
|
||||
sinon.match.string,
|
||||
category,
|
||||
sinon.match.any
|
||||
),
|
||||
`Actual args were: ${uploadFromActions.args}`
|
||||
);
|
||||
t.true(
|
||||
waitForProcessing.calledOnceWith(sinon.match.any, "42", sinon.match.any, {
|
||||
isUnsuccessfulExecution: true,
|
||||
})
|
||||
);
|
||||
}
|
||||
|
|
|
@ -1,25 +1,118 @@
|
|||
import * as core from "@actions/core";
|
||||
|
||||
import * as actionsUtil from "./actions-util";
|
||||
import { getConfig } from "./config-utils";
|
||||
import { getActionsLogger } from "./logging";
|
||||
import { getCodeQL } from "./codeql";
|
||||
import { Config, getConfig } from "./config-utils";
|
||||
import { Feature, FeatureEnablement } from "./feature-flags";
|
||||
import { Logger } from "./logging";
|
||||
import { RepositoryNwo } from "./repository";
|
||||
import { CODEQL_ACTION_ANALYZE_DID_UPLOAD_SARIF } from "./shared-environment";
|
||||
import * as uploadLib from "./upload-lib";
|
||||
import { getRequiredEnvParam, isInTestMode, parseMatrixInput } from "./util";
|
||||
import {
|
||||
getCategoryInputOrThrow,
|
||||
getCheckoutPathInputOrThrow,
|
||||
getUploadInputOrThrow,
|
||||
getWorkflow,
|
||||
} from "./workflow";
|
||||
|
||||
export async function uploadFailedSarif(
|
||||
config: Config,
|
||||
repositoryNwo: RepositoryNwo,
|
||||
featureEnablement: FeatureEnablement,
|
||||
logger: Logger
|
||||
) {
|
||||
if (!config.codeQLCmd) {
|
||||
logger.warning(
|
||||
"CodeQL command not found. Unable to upload failed SARIF file."
|
||||
);
|
||||
return;
|
||||
}
|
||||
const codeql = await getCodeQL(config.codeQLCmd);
|
||||
if (
|
||||
!(await featureEnablement.getValue(
|
||||
Feature.UploadFailedSarifEnabled,
|
||||
codeql
|
||||
))
|
||||
) {
|
||||
logger.debug("Uploading failed SARIF is disabled.");
|
||||
return;
|
||||
}
|
||||
const workflow = await getWorkflow();
|
||||
const jobName = getRequiredEnvParam("GITHUB_JOB");
|
||||
const matrix = parseMatrixInput(actionsUtil.getRequiredInput("matrix"));
|
||||
if (
|
||||
getUploadInputOrThrow(workflow, jobName, matrix) !== "true" ||
|
||||
isInTestMode()
|
||||
) {
|
||||
logger.debug(
|
||||
"Won't upload a failed SARIF file since SARIF upload is disabled."
|
||||
);
|
||||
return;
|
||||
}
|
||||
const category = getCategoryInputOrThrow(workflow, jobName, matrix);
|
||||
const checkoutPath = getCheckoutPathInputOrThrow(workflow, jobName, matrix);
|
||||
|
||||
const sarifFile = "../codeql-failed-run.sarif";
|
||||
await codeql.diagnosticsExport(sarifFile, category);
|
||||
|
||||
core.info(`Uploading failed SARIF file ${sarifFile}`);
|
||||
const uploadResult = await uploadLib.uploadFromActions(
|
||||
sarifFile,
|
||||
checkoutPath,
|
||||
category,
|
||||
logger
|
||||
);
|
||||
await uploadLib.waitForProcessing(
|
||||
repositoryNwo,
|
||||
uploadResult.sarifID,
|
||||
logger,
|
||||
{ isUnsuccessfulExecution: true }
|
||||
);
|
||||
}
|
||||
|
||||
export async function run(
|
||||
uploadDatabaseBundleDebugArtifact: Function,
|
||||
uploadLogsDebugArtifact: Function,
|
||||
printDebugLogs: Function
|
||||
printDebugLogs: Function,
|
||||
repositoryNwo: RepositoryNwo,
|
||||
featureEnablement: FeatureEnablement,
|
||||
logger: Logger
|
||||
) {
|
||||
const logger = getActionsLogger();
|
||||
|
||||
const config = await getConfig(actionsUtil.getTemporaryDirectory(), logger);
|
||||
if (config === undefined) {
|
||||
logger.warning(
|
||||
"Debugging artifacts are unavailable since the 'init' Action failed before it could produce any."
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
// Environment variable used to integration test uploading a SARIF file for failed runs
|
||||
const expectFailedSarifUpload =
|
||||
process.env["CODEQL_ACTION_EXPECT_UPLOAD_FAILED_SARIF"] === "true";
|
||||
|
||||
if (process.env[CODEQL_ACTION_ANALYZE_DID_UPLOAD_SARIF] !== "true") {
|
||||
try {
|
||||
await uploadFailedSarif(config, repositoryNwo, featureEnablement, logger);
|
||||
} catch (e) {
|
||||
if (expectFailedSarifUpload) {
|
||||
throw new Error(
|
||||
"Expected to upload a SARIF file for the failed run, but encountered " +
|
||||
`the following error: ${e}`
|
||||
);
|
||||
}
|
||||
logger.info(
|
||||
`Failed to upload a SARIF file for the failed run. Error: ${e}`
|
||||
);
|
||||
}
|
||||
} else if (expectFailedSarifUpload) {
|
||||
throw new Error(
|
||||
"Expected to upload a SARIF file for the failed run, but didn't."
|
||||
);
|
||||
}
|
||||
|
||||
// Upload appropriate Actions artifacts for debugging
|
||||
if (config?.debugMode) {
|
||||
if (config.debugMode) {
|
||||
core.info(
|
||||
"Debug mode is on. Uploading available database bundles and logs as Actions debugging artifacts..."
|
||||
);
|
||||
|
|
|
@ -7,15 +7,37 @@
|
|||
import * as core from "@actions/core";
|
||||
|
||||
import * as actionsUtil from "./actions-util";
|
||||
import { getGitHubVersion } from "./api-client";
|
||||
import * as debugArtifacts from "./debug-artifacts";
|
||||
import { Features } from "./feature-flags";
|
||||
import * as initActionPostHelper from "./init-action-post-helper";
|
||||
import { getActionsLogger } from "./logging";
|
||||
import { parseRepositoryNwo } from "./repository";
|
||||
import { checkGitHubVersionInRange, getRequiredEnvParam } from "./util";
|
||||
|
||||
async function runWrapper() {
|
||||
try {
|
||||
const logger = getActionsLogger();
|
||||
const gitHubVersion = await getGitHubVersion();
|
||||
checkGitHubVersionInRange(gitHubVersion, logger);
|
||||
|
||||
const repositoryNwo = parseRepositoryNwo(
|
||||
getRequiredEnvParam("GITHUB_REPOSITORY")
|
||||
);
|
||||
const features = new Features(
|
||||
gitHubVersion,
|
||||
repositoryNwo,
|
||||
actionsUtil.getTemporaryDirectory(),
|
||||
logger
|
||||
);
|
||||
|
||||
await initActionPostHelper.run(
|
||||
debugArtifacts.uploadDatabaseBundleDebugArtifact,
|
||||
debugArtifacts.uploadLogsDebugArtifact,
|
||||
actionsUtil.printDebugLogs
|
||||
actionsUtil.printDebugLogs,
|
||||
repositoryNwo,
|
||||
features,
|
||||
logger
|
||||
);
|
||||
} catch (error) {
|
||||
core.setFailed(`init post-action step failed: ${error}`);
|
||||
|
|
|
@ -1,13 +1,24 @@
|
|||
export const ODASA_TRACER_CONFIGURATION = "ODASA_TRACER_CONFIGURATION";
|
||||
// The time at which the first action (normally init) started executing.
|
||||
// If a workflow invokes a different action without first invoking the init
|
||||
// action (i.e. the upload action is being used by a third-party integrator)
|
||||
// then this variable will be assigned the start time of the action invoked
|
||||
// rather that the init action.
|
||||
export const CODEQL_WORKFLOW_STARTED_AT = "CODEQL_WORKFLOW_STARTED_AT";
|
||||
/**
|
||||
* This environment variable is set to true when the `analyze` Action
|
||||
* successfully uploads a SARIF file. It does NOT indicate whether the
|
||||
* SARIF file was processed successfully.
|
||||
*/
|
||||
export const CODEQL_ACTION_ANALYZE_DID_UPLOAD_SARIF =
|
||||
"CODEQL_ACTION_ANALYZE_DID_UPLOAD_SARIF";
|
||||
|
||||
export const CODEQL_ACTION_TESTING_ENVIRONMENT =
|
||||
"CODEQL_ACTION_TESTING_ENVIRONMENT";
|
||||
|
||||
/** Used to disable uploading SARIF results or status reports to the GitHub API */
|
||||
export const CODEQL_ACTION_TEST_MODE = "CODEQL_ACTION_TEST_MODE";
|
||||
|
||||
/**
|
||||
* The time at which the first action (normally init) started executing.
|
||||
* If a workflow invokes a different action without first invoking the init
|
||||
* action (i.e. the upload action is being used by a third-party integrator)
|
||||
* then this variable will be assigned the start time of the action invoked
|
||||
* rather that the init action.
|
||||
*/
|
||||
export const CODEQL_WORKFLOW_STARTED_AT = "CODEQL_WORKFLOW_STARTED_AT";
|
||||
|
||||
export const ODASA_TRACER_CONFIGURATION = "ODASA_TRACER_CONFIGURATION";
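The comments above describe a simple handshake between the analyze Action and the init Action's post step; in sketch form (both snippets appear in fuller form elsewhere in this diff):

```typescript
import * as core from "@actions/core";
import { CODEQL_ACTION_ANALYZE_DID_UPLOAD_SARIF } from "./shared-environment";

// analyze Action, after a successful SARIF upload:
core.exportVariable(CODEQL_ACTION_ANALYZE_DID_UPLOAD_SARIF, "true");

// init Action post step: only attempt a failed-run SARIF upload if the
// analyze Action never managed to upload one itself.
if (process.env[CODEQL_ACTION_ANALYZE_DID_UPLOAD_SARIF] !== "true") {
  // ...export diagnostics and upload the failed-run SARIF...
}
```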
|
||||
|
|
|
@ -158,23 +158,22 @@ export function findSarifFilesInDir(sarifPath: string): string[] {
|
|||
|
||||
// Uploads a single sarif file or a directory of sarif files
|
||||
// depending on what the path happens to refer to.
|
||||
// Returns true iff the upload occurred and succeeded
|
||||
export async function uploadFromActions(
|
||||
sarifPath: string,
|
||||
checkoutPath: string,
|
||||
category: string | undefined,
|
||||
logger: Logger
|
||||
): Promise<UploadResult> {
|
||||
return await uploadFiles(
|
||||
getSarifFilePaths(sarifPath),
|
||||
parseRepositoryNwo(util.getRequiredEnvParam("GITHUB_REPOSITORY")),
|
||||
await actionsUtil.getCommitOid(
|
||||
actionsUtil.getRequiredInput("checkout_path")
|
||||
),
|
||||
await actionsUtil.getCommitOid(checkoutPath),
|
||||
await actionsUtil.getRef(),
|
||||
await actionsUtil.getAnalysisKey(),
|
||||
actionsUtil.getOptionalInput("category"),
|
||||
category,
|
||||
util.getRequiredEnvParam("GITHUB_WORKFLOW"),
|
||||
workflow.getWorkflowRunID(),
|
||||
actionsUtil.getRequiredInput("checkout_path"),
|
||||
checkoutPath,
|
||||
actionsUtil.getRequiredInput("matrix"),
|
||||
logger
|
||||
);
|
||||
|
@ -386,60 +385,119 @@ async function uploadFiles(
|
|||
const STATUS_CHECK_FREQUENCY_MILLISECONDS = 5 * 1000;
|
||||
const STATUS_CHECK_TIMEOUT_MILLISECONDS = 2 * 60 * 1000;
|
||||
|
||||
// Waits until either the analysis is successfully processed, a processing error is reported, or STATUS_CHECK_TIMEOUT_MILLISECONDS elapses.
|
||||
type ProcessingStatus = "pending" | "complete" | "failed";
|
||||
|
||||
/**
|
||||
* Waits until either the analysis is successfully processed, a processing error
|
||||
* is reported, or `STATUS_CHECK_TIMEOUT_MILLISECONDS` elapses.
|
||||
*
|
||||
 * If `isUnsuccessfulExecution` is passed, will throw an error if the analysis
 * processing does not produce a single error mentioning the unsuccessful
 * execution.
 */
export async function waitForProcessing(
  repositoryNwo: RepositoryNwo,
  sarifID: string,
  logger: Logger
  logger: Logger,
  options: { isUnsuccessfulExecution: boolean } = {
    isUnsuccessfulExecution: false,
  }
): Promise<void> {
  logger.startGroup("Waiting for processing to finish");
  const client = api.getApiClient();
  try {
    const client = api.getApiClient();

    const statusCheckingStarted = Date.now();
    // eslint-disable-next-line no-constant-condition
    while (true) {
      if (
        Date.now() >
        statusCheckingStarted + STATUS_CHECK_TIMEOUT_MILLISECONDS
      ) {
        // If the analysis hasn't finished processing in the allotted time, we continue anyway rather than failing.
        // It's possible the analysis will eventually finish processing, but it's not worth spending more Actions time waiting.
        logger.warning(
          "Timed out waiting for analysis to finish processing. Continuing."
        );
        break;
      }
      let response: OctokitResponse<any> | undefined = undefined;
      try {
        response = await client.request(
          "GET /repos/:owner/:repo/code-scanning/sarifs/:sarif_id",
          {
            owner: repositoryNwo.owner,
            repo: repositoryNwo.repo,
            sarif_id: sarifID,
          }
        );
      } catch (e) {
        logger.warning(
          `An error occurred checking the status of the delivery. ${e} It should still be processed in the background, but errors that occur during processing may not be reported.`
        );
        break;
      }
      const status = response.data.processing_status;
      logger.info(`Analysis upload status is ${status}.`);
      if (status === "complete") {
        break;
      } else if (status === "pending") {
        logger.debug("Analysis processing is still pending...");
      } else if (status === "failed") {
        throw new Error(
          `Code Scanning could not process the submitted SARIF file:\n${response.data.errors}`
        );
      }
    const statusCheckingStarted = Date.now();
    // eslint-disable-next-line no-constant-condition
    while (true) {
      if (
        Date.now() >
        statusCheckingStarted + STATUS_CHECK_TIMEOUT_MILLISECONDS
      ) {
        // If the analysis hasn't finished processing in the allotted time, we continue anyway rather than failing.
        // It's possible the analysis will eventually finish processing, but it's not worth spending more Actions time waiting.
        logger.warning(
          "Timed out waiting for analysis to finish processing. Continuing."
        );
        break;
      }
      let response: OctokitResponse<any> | undefined = undefined;
      try {
        response = await client.request(
          "GET /repos/:owner/:repo/code-scanning/sarifs/:sarif_id",
          {
            owner: repositoryNwo.owner,
            repo: repositoryNwo.repo,
            sarif_id: sarifID,
          }
        );
      } catch (e) {
        logger.warning(
          `An error occurred checking the status of the delivery. ${e} It should still be processed in the background, but errors that occur during processing may not be reported.`
        );
        break;
      }
      const status = response.data.processing_status as ProcessingStatus;
      logger.info(`Analysis upload status is ${status}.`);

      await util.delay(STATUS_CHECK_FREQUENCY_MILLISECONDS);
      if (status === "pending") {
        logger.debug("Analysis processing is still pending...");
      } else if (options.isUnsuccessfulExecution) {
        // We expect a specific processing error for unsuccessful executions, so
        // handle these separately.
        handleProcessingResultForUnsuccessfulExecution(
          response,
          status,
          logger
        );
        break;
      } else if (status === "complete") {
        break;
      } else if (status === "failed") {
        throw new Error(
          `Code Scanning could not process the submitted SARIF file:\n${response.data.errors}`
        );
      } else {
        util.assertNever(status);
      }

      await util.delay(STATUS_CHECK_FREQUENCY_MILLISECONDS);
    }
  } finally {
    logger.endGroup();
  }
}
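As an aside (not part of this commit), here is a minimal sketch of how a caller such as the upload-sarif entry point might opt into the new behaviour. The helper name, import paths, and the parseRepositoryNwo call are assumptions based on the surrounding codebase, not code from this diff.

// Sketch only: assumes ./upload-lib exports waitForProcessing as defined above,
// and that ./logging, ./repository, and ./util provide the helpers named here.
import { getActionsLogger } from "./logging";
import { parseRepositoryNwo } from "./repository";
import * as upload_lib from "./upload-lib";
import { getRequiredEnvParam } from "./util";

async function waitForFailureSarif(sarifID: string): Promise<void> {
  const logger = getActionsLogger();
  const repositoryNwo = parseRepositoryNwo(
    getRequiredEnvParam("GITHUB_REPOSITORY")
  );
  // On the "analysis failed" path we expect Code Scanning to report exactly one
  // "unsuccessful execution" processing error, so opt into the stricter check.
  await upload_lib.waitForProcessing(repositoryNwo, sarifID, logger, {
    isUnsuccessfulExecution: true,
  });
}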

/**
 * Checks the processing result for an unsuccessful execution. Throws if the
 * result is not a failure with a single "unsuccessful execution" error.
 */
function handleProcessingResultForUnsuccessfulExecution(
  response: OctokitResponse<any, number>,
  status: Exclude<ProcessingStatus, "pending">,
  logger: Logger
): void {
  if (
    status === "failed" &&
    Array.isArray(response.data.errors) &&
    response.data.errors.length === 1 &&
    response.data.errors[0].toString().startsWith("unsuccessful execution")
  ) {
    logger.debug(
      "Successfully uploaded a SARIF file for the unsuccessful execution. Received expected " +
        '"unsuccessful execution" error, and no other errors.'
    );
  } else {
    const shortMessage =
      "Failed to upload a SARIF file for the unsuccessful execution. Code scanning status " +
      "information for the repository may be out of date as a result.";
    const longMessage =
      shortMessage +
      (status === "failed"
        ? ` Processing errors: ${response.data.errors}`
        : ' Encountered no processing errors, but expected to receive an "unsuccessful execution" error.');
    logger.debug(longMessage);
    throw new Error(shortMessage);
  }
  logger.endGroup();
}
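For reference, a hedged illustration of the response shape this check accepts; the error text below is invented, and only the "failed" status, the single-element errors array, and the "unsuccessful execution" prefix matter.

// Illustrative data only (not taken from the API documentation).
const exampleData = {
  processing_status: "failed",
  errors: ["unsuccessful execution: analysis did not complete"], // hypothetical message
};
const accepted =
  exampleData.processing_status === "failed" &&
  Array.isArray(exampleData.errors) &&
  exampleData.errors.length === 1 &&
  exampleData.errors[0].toString().startsWith("unsuccessful execution");
// accepted === true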

export function validateUniqueCategory(sarif: SarifFile): void {

@@ -53,6 +53,8 @@ async function run() {
  try {
    const uploadResult = await upload_lib.uploadFromActions(
      actionsUtil.getRequiredInput("sarif_file"),
      actionsUtil.getRequiredInput("checkout_path"),
      actionsUtil.getOptionalInput("category"),
      getActionsLogger()
    );
    core.setOutput("sarif-id", uploadResult.sarifID);

@@ -892,3 +892,12 @@ export async function shouldBypassToolcache(
  }
  return bypass;
}

export function parseMatrixInput(
  matrixInput: string | undefined
): { [key: string]: string } | undefined {
  if (matrixInput === undefined || matrixInput === "null") {
    return undefined;
  }
  return JSON.parse(matrixInput);
}
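A brief usage sketch for the new helper; the example values are hypothetical, but mirror the JSON that `toJSON(matrix)` produces when a workflow forwards its matrix to the action.

// Assumed import path: the hunk context above suggests this helper lives in src/util.ts.
import { parseMatrixInput } from "./util";

parseMatrixInput(undefined); // => undefined
parseMatrixInput("null"); // => undefined; toJSON(matrix) serializes an unused matrix as "null"
parseMatrixInput('{"language":"javascript","os":"ubuntu-latest"}');
// => { language: "javascript", os: "ubuntu-latest" }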

@@ -525,6 +525,7 @@ test("getWorkflowErrors() should not report an error if PRs are totally unconfig
});

test("getCategoryInputOrThrow returns category for simple workflow with category", (t) => {
  process.env["GITHUB_REPOSITORY"] = "github/codeql-action-fake-repository";
  t.is(
    getCategoryInputOrThrow(
      yaml.load(`

@@ -546,6 +547,7 @@ test("getCategoryInputOrThrow returns category for simple workflow with category
});

test("getCategoryInputOrThrow returns undefined for simple workflow without category", (t) => {
  process.env["GITHUB_REPOSITORY"] = "github/codeql-action-fake-repository";
  t.is(
    getCategoryInputOrThrow(
      yaml.load(`

@@ -565,6 +567,7 @@ test("getCategoryInputOrThrow returns undefined for simple workflow without cate
});

test("getCategoryInputOrThrow returns category for workflow with multiple jobs", (t) => {
  process.env["GITHUB_REPOSITORY"] = "github/codeql-action-fake-repository";
  t.is(
    getCategoryInputOrThrow(
      yaml.load(`

@@ -596,6 +599,7 @@ test("getCategoryInputOrThrow returns category for workflow with multiple jobs",
});

test("getCategoryInputOrThrow finds category for workflow with language matrix", (t) => {
  process.env["GITHUB_REPOSITORY"] = "github/codeql-action-fake-repository";
  t.is(
    getCategoryInputOrThrow(
      yaml.load(`

@@ -622,6 +626,7 @@ test("getCategoryInputOrThrow finds category for workflow with language matrix",
});

test("getCategoryInputOrThrow throws error for workflow with dynamic category", (t) => {
  process.env["GITHUB_REPOSITORY"] = "github/codeql-action-fake-repository";
  t.throws(
    () =>
      getCategoryInputOrThrow(

@@ -646,7 +651,8 @@ test("getCategoryInputOrThrow throws error for workflow with dynamic category",
  );
});

test("getCategoryInputOrThrow throws error for workflow with multiple categories", (t) => {
test("getCategoryInputOrThrow throws error for workflow with multiple calls to analyze", (t) => {
  process.env["GITHUB_REPOSITORY"] = "github/codeql-action-fake-repository";
  t.throws(
    () =>
      getCategoryInputOrThrow(

@@ -669,8 +675,8 @@ test("getCategoryInputOrThrow throws error for workflow with multiple categories
      ),
    {
      message:
        "Could not get category input to github/codeql-action/analyze since there were multiple steps " +
        "calling github/codeql-action/analyze with different values for category.",
        "Could not get category input to github/codeql-action/analyze since the analysis job " +
        "calls github/codeql-action/analyze multiple times.",
    }
  );
});

src/workflow.ts (127 changed lines)
@@ -7,13 +7,16 @@ import * as yaml from "js-yaml";
import * as api from "./api-client";
import { getRequiredEnvParam } from "./util";

interface WorkflowJobStep {
  run: any;
export interface WorkflowJobStep {
  name?: string;
  run?: any;
  uses?: string;
  with?: { [key: string]: string };
}

interface WorkflowJob {
  name?: string;
  "runs-on"?: string;
  steps?: WorkflowJobStep[];
}

@@ -33,6 +36,7 @@ interface WorkflowTriggers {
}

export interface Workflow {
  name?: string;
  jobs?: { [key: string]: WorkflowJob };
  on?: string | string[] | WorkflowTriggers;
}

@@ -321,42 +325,42 @@ function getInputOrThrow(
  jobName: string,
  actionName: string,
  inputName: string,
  matrixVars: { [key: string]: string }
  matrixVars: { [key: string]: string } | undefined
) {
  const preamble = `Could not get ${inputName} input to ${actionName} since`;
  if (!workflow.jobs) {
    throw new Error(
      `Could not get ${inputName} input to ${actionName} since the workflow has no jobs.`
    );
    throw new Error(`${preamble} the workflow has no jobs.`);
  }
  if (!workflow.jobs[jobName]) {
    throw new Error(`${preamble} the workflow has no job named ${jobName}.`);
  }

  const stepsCallingAction = getStepsCallingAction(
    workflow.jobs[jobName],
    actionName
  );

  if (stepsCallingAction.length === 0) {
    throw new Error(
      `Could not get ${inputName} input to ${actionName} since the workflow has no job named ${jobName}.`
      `${preamble} the ${jobName} job does not call ${actionName}.`
    );
  } else if (stepsCallingAction.length > 1) {
    throw new Error(
      `${preamble} the ${jobName} job calls ${actionName} multiple times.`
    );
  }

  const inputs = getStepsCallingAction(workflow.jobs[jobName], actionName)
    .map((step) => step.with?.[inputName])
    .filter((input) => input !== undefined)
    .map((input) => input!);
  let input = stepsCallingAction[0].with?.[inputName];

  if (inputs.length === 0) {
    return undefined;
  if (input !== undefined && matrixVars !== undefined) {
    // Make a basic attempt to substitute matrix variables
    // First normalize by removing whitespace
    input = input.replace(/\${{\s+/, "${{").replace(/\s+}}/, "}}");
    for (const [key, value] of Object.entries(matrixVars)) {
      input = input.replace(`\${{matrix.${key}}}`, value);
    }
  }
  if (!inputs.every((input) => input === inputs[0])) {
    throw new Error(
      `Could not get ${inputName} input to ${actionName} since there were multiple steps calling ` +
        `${actionName} with different values for ${inputName}.`
    );
  }

  // Make a basic attempt to substitute matrix variables
  // First normalize by removing whitespace
  let input = inputs[0].replace(/\${{\s+/, "${{").replace(/\s+}}/, "}}");
  for (const [key, value] of Object.entries(matrixVars)) {
    input = input.replace(`\${{matrix.${key}}}`, value);
  }

  if (input.includes("${{")) {
  if (input !== undefined && input.includes("${{")) {
    throw new Error(
      `Could not get ${inputName} input to ${actionName} since it contained an unrecognized dynamic value.`
    );

@@ -364,6 +368,19 @@ function getInputOrThrow(
  return input;
}
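To make the substitution above concrete, here is a standalone sketch of the same normalize-then-replace steps applied to a made-up input value:

// Self-contained illustration of the matrix substitution performed in getInputOrThrow.
const matrixVars = { language: "javascript" };
let example = "/language:${{ matrix.language }}"; // a typical category value

// Normalize "${{ matrix.language }}" to "${{matrix.language}}" by stripping whitespace,
// then replace each known matrix variable with its concrete value.
example = example.replace(/\${{\s+/, "${{").replace(/\s+}}/, "}}");
for (const [key, value] of Object.entries(matrixVars)) {
  example = example.replace(`\${{matrix.${key}}}`, value);
}
// example === "/language:javascript"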

/**
 * Get the expected name of the analyze Action.
 *
 * This allows us to test workflow parsing functionality as a CodeQL Action PR check.
 */
function getAnalyzeActionName() {
  if (getRequiredEnvParam("GITHUB_REPOSITORY") === "github/codeql-action") {
    return "./analyze";
  } else {
    return "github/codeql-action/analyze";
  }
}

/**
 * Makes a best effort attempt to retrieve the category input for the particular job,
 * given a set of matrix variables.

@@ -376,13 +393,63 @@ function getInputOrThrow(
export function getCategoryInputOrThrow(
  workflow: Workflow,
  jobName: string,
  matrixVars: { [key: string]: string }
  matrixVars: { [key: string]: string } | undefined
): string | undefined {
  return getInputOrThrow(
    workflow,
    jobName,
    "github/codeql-action/analyze",
    getAnalyzeActionName(),
    "category",
    matrixVars
  );
}
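A hedged usage sketch that ties this together with parseMatrixInput: the workflow YAML and job name below are invented, and it assumes the step's `uses:` reference is matched against the action name returned by getAnalyzeActionName.

// Sketch only; not part of this commit.
import * as yaml from "js-yaml";
import { getCategoryInputOrThrow, Workflow } from "./workflow";

// getAnalyzeActionName() consults GITHUB_REPOSITORY, so set it as the tests above do.
process.env["GITHUB_REPOSITORY"] = "github/codeql-action-fake-repository";

const workflow = yaml.load(`
jobs:
  analysis:
    steps:
      - uses: github/codeql-action/analyze@v2
        with:
          category: "/language:\${{ matrix.language }}"
`) as Workflow;

const category = getCategoryInputOrThrow(workflow, "analysis", {
  language: "javascript",
});
// category === "/language:javascript"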

/**
 * Makes a best effort attempt to retrieve the upload input for the particular job,
 * given a set of matrix variables.
 *
 * Typically you'll want to wrap this function in a try/catch block and handle the error.
 *
 * @returns the upload input
 * @throws an error if the upload input could not be determined
 */
export function getUploadInputOrThrow(
  workflow: Workflow,
  jobName: string,
  matrixVars: { [key: string]: string } | undefined
): string {
  return (
    getInputOrThrow(
      workflow,
      jobName,
      getAnalyzeActionName(),
      "upload",
      matrixVars
    ) || "true" // if unspecified, upload defaults to true
  );
}

/**
 * Makes a best effort attempt to retrieve the checkout_path input for the
 * particular job, given a set of matrix variables.
 *
 * Typically you'll want to wrap this function in a try/catch block and handle the error.
 *
 * @returns the checkout_path input
 * @throws an error if the checkout_path input could not be determined
 */
export function getCheckoutPathInputOrThrow(
  workflow: Workflow,
  jobName: string,
  matrixVars: { [key: string]: string } | undefined
): string {
  return (
    getInputOrThrow(
      workflow,
      jobName,
      getAnalyzeActionName(),
      "checkout_path",
      matrixVars
    ) || getRequiredEnvParam("GITHUB_WORKSPACE") // if unspecified, checkout_path defaults to ${{ github.workspace }}
  );
}
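Finally, a short hedged sketch of the fallback behaviour noted in the comments above, for an invented job whose analyze step sets neither input; it assumes GITHUB_WORKSPACE is set, as it is on an Actions runner.

// Sketch only; the workflow and job name are invented, and the same
// uses-reference matching assumption as the previous sketch applies.
import * as yaml from "js-yaml";
import {
  getCheckoutPathInputOrThrow,
  getUploadInputOrThrow,
  Workflow,
} from "./workflow";

process.env["GITHUB_REPOSITORY"] = "github/codeql-action-fake-repository";

const workflow = yaml.load(`
jobs:
  analysis:
    steps:
      - uses: github/codeql-action/analyze@v2
`) as Workflow;

// Neither input is set on the step, so the documented defaults apply.
const upload = getUploadInputOrThrow(workflow, "analysis", undefined);
// upload === "true"
const checkoutPath = getCheckoutPathInputOrThrow(workflow, "analysis", undefined);
// checkoutPath === process.env["GITHUB_WORKSPACE"]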