"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    var desc = Object.getOwnPropertyDescriptor(m, k);
    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
        desc = { enumerable: true, get: function() { return m[k]; } };
    }
    Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
    __setModuleDefault(result, mod);
    return result;
};
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.pruneInvalidResults = exports.validateUniqueCategory = exports.waitForProcessing = exports.buildPayload = exports.validateSarifFileSchema = exports.uploadFromActions = exports.findSarifFilesInDir = exports.populateRunAutomationDetails = void 0;
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const process_1 = require("process");
const zlib_1 = __importDefault(require("zlib"));
const core = __importStar(require("@actions/core"));
const file_url_1 = __importDefault(require("file-url"));
const jsonschema = __importStar(require("jsonschema"));
const actionsUtil = __importStar(require("./actions-util"));
const api = __importStar(require("./api-client"));
const environment_1 = require("./environment");
const fingerprints = __importStar(require("./fingerprints"));
const repository_1 = require("./repository");
const util = __importStar(require("./util"));
const util_1 = require("./util");
// Takes a list of paths to sarif files and combines them together,
// returning the contents of the combined sarif file.
function combineSarifFiles(sarifFiles) {
    const combinedSarif = {
        version: null,
        runs: [],
    };
    for (const sarifFile of sarifFiles) {
        const sarifObject = JSON.parse(fs.readFileSync(sarifFile, "utf8"));
        // Check SARIF version
        if (combinedSarif.version === null) {
            combinedSarif.version = sarifObject.version;
        }
        else if (combinedSarif.version !== sarifObject.version) {
            throw new InvalidRequestError(`Different SARIF versions encountered: ${combinedSarif.version} and ${sarifObject.version}`);
        }
        combinedSarif.runs.push(...sarifObject.runs);
    }
    return combinedSarif;
}
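// Illustrative example (not part of the original module): combining two files
// that both declare version "2.1.0" concatenates their `runs` arrays:
//   a.sarif: { "version": "2.1.0", "runs": [runA] }
//   b.sarif: { "version": "2.1.0", "runs": [runB] }
//   result:  { version: "2.1.0", runs: [runA, runB] }
// Mixing versions (e.g. "2.1.0" and "2.0.0") throws an InvalidRequestError.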
// Populates the run.automationDetails.id field using the analysis_key and environment,
// and returns the updated SARIF contents.
function populateRunAutomationDetails(sarif, category, analysis_key, environment) {
    const automationID = getAutomationID(category, analysis_key, environment);
    if (automationID !== undefined) {
        for (const run of sarif.runs || []) {
            if (run.automationDetails === undefined) {
                run.automationDetails = {
                    id: automationID,
                };
            }
        }
        return sarif;
    }
    return sarif;
}
exports.populateRunAutomationDetails = populateRunAutomationDetails;
function getAutomationID(category, analysis_key, environment) {
    if (category !== undefined) {
        let automationID = category;
        if (!automationID.endsWith("/")) {
            automationID += "/";
        }
        return automationID;
    }
    return api.computeAutomationID(analysis_key, environment);
}
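// Illustrative examples (not part of the original module):
//   getAutomationID("ci/linux", key, env)  => "ci/linux/"  (trailing slash added)
//   getAutomationID("ci/linux/", key, env) => "ci/linux/"  (unchanged)
//   getAutomationID(undefined, key, env)   => api.computeAutomationID(key, env)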
// Upload the given payload.
// If the request fails then this will retry a small number of times.
async function uploadPayload(payload, repositoryNwo, logger) {
    logger.info("Uploading results");
    // If in test mode we don't want to upload the results
    if (util.isInTestMode()) {
        const payloadSaveFile = path.join(actionsUtil.getTemporaryDirectory(), "payload.json");
        logger.info(`In test mode. Results are not uploaded. Saving to ${payloadSaveFile}`);
        logger.info(`Payload: ${JSON.stringify(payload, null, 2)}`);
        fs.writeFileSync(payloadSaveFile, JSON.stringify(payload, null, 2));
        return;
    }
    const client = api.getApiClient();
    const response = await client.request("PUT /repos/:owner/:repo/code-scanning/analysis", {
        owner: repositoryNwo.owner,
        repo: repositoryNwo.repo,
        data: payload,
    });
    logger.debug(`response status: ${response.status}`);
    logger.info("Successfully uploaded results");
    return response.data.id;
}
// Recursively walks a directory and returns all SARIF files it finds.
// Does not follow symlinks.
function findSarifFilesInDir(sarifPath) {
    const sarifFiles = [];
    const walkSarifFiles = (dir) => {
        const entries = fs.readdirSync(dir, { withFileTypes: true });
        for (const entry of entries) {
            if (entry.isFile() && entry.name.endsWith(".sarif")) {
                sarifFiles.push(path.resolve(dir, entry.name));
            }
            else if (entry.isDirectory()) {
                walkSarifFiles(path.resolve(dir, entry.name));
            }
        }
    };
    walkSarifFiles(sarifPath);
    return sarifFiles;
}
exports.findSarifFilesInDir = findSarifFilesInDir;
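// Illustrative example (not part of the original module): for a tree like
//   results/
//     js.sarif
//     ruby/rb.sarif
//     notes.txt
// findSarifFilesInDir("results") returns the absolute paths of js.sarif and
// ruby/rb.sarif only; non-SARIF files are skipped and symlinks are not followed.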
/**
 * Uploads a single SARIF file or a directory of SARIF files depending on what `sarifPath` refers
 * to.
 *
 * @param considerInvalidRequestUserError Whether an invalid request, for example one with a
 *                                        `sarifPath` that does not exist, should be considered a
 *                                        user error.
 */
async function uploadFromActions(sarifPath, checkoutPath, category, logger, { considerInvalidRequestUserError, }) {
    try {
        return await uploadFiles(getSarifFilePaths(sarifPath), (0, repository_1.parseRepositoryNwo)(util.getRequiredEnvParam("GITHUB_REPOSITORY")), await actionsUtil.getCommitOid(checkoutPath), await actionsUtil.getRef(), await api.getAnalysisKey(), category, util.getRequiredEnvParam("GITHUB_WORKFLOW"), actionsUtil.getWorkflowRunID(), actionsUtil.getWorkflowRunAttempt(), checkoutPath, actionsUtil.getRequiredInput("matrix"), logger);
    }
    catch (e) {
        if (e instanceof InvalidRequestError && considerInvalidRequestUserError) {
            throw new util_1.UserError(e.message);
        }
        throw e;
    }
}
exports.uploadFromActions = uploadFromActions;
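// Hypothetical usage sketch (argument values are illustrative, not from this file):
//   const { statusReport, sarifID } = await uploadFromActions(
//     "/tmp/results",    // a .sarif file, or a directory of .sarif files
//     process.cwd(),     // checkout path
//     "my-category",     // optional category (may be undefined)
//     logger,
//     { considerInvalidRequestUserError: true });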
function getSarifFilePaths(sarifPath) {
    if (!fs.existsSync(sarifPath)) {
        throw new InvalidRequestError(`Path does not exist: ${sarifPath}`);
    }
    let sarifFiles;
    if (fs.lstatSync(sarifPath).isDirectory()) {
        sarifFiles = findSarifFilesInDir(sarifPath);
        if (sarifFiles.length === 0) {
            throw new InvalidRequestError(`No SARIF files found to upload in "${sarifPath}".`);
        }
    }
    else {
        sarifFiles = [sarifPath];
    }
    return sarifFiles;
}
// Counts the number of results in the given SARIF file
function countResultsInSarif(sarif) {
    let numResults = 0;
    let parsedSarif;
    try {
        parsedSarif = JSON.parse(sarif);
    }
    catch (e) {
        throw new InvalidRequestError(`Invalid SARIF. JSON syntax error: ${(0, util_1.wrapError)(e).message}`);
    }
    if (!Array.isArray(parsedSarif.runs)) {
        throw new InvalidRequestError("Invalid SARIF. Missing 'runs' array.");
    }
    for (const run of parsedSarif.runs) {
        if (!Array.isArray(run.results)) {
            throw new InvalidRequestError("Invalid SARIF. Missing 'results' array in run.");
        }
        numResults += run.results.length;
    }
    return numResults;
}
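// Illustrative example (not part of the original module):
//   countResultsInSarif('{"runs":[{"results":[{},{},{}]},{"results":[{},{}]}]}') === 5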
// Validates that the given file path refers to a valid SARIF file.
// Throws an error if the file is invalid.
function validateSarifFileSchema(sarifFilePath, logger) {
    const sarif = JSON.parse(fs.readFileSync(sarifFilePath, "utf8"));
    const schema = require("../src/sarif-schema-2.1.0.json");
    const result = new jsonschema.Validator().validate(sarif, schema);
    // Filter out errors about invalid URIs in the artifactLocation field, since rejecting
    // these would be a breaking change. See https://github.com/github/codeql-action/issues/1703
    const errors = (result.errors || []).filter((err) => err.argument !== "uri-reference");
    const warnings = (result.errors || []).filter((err) => err.argument === "uri-reference");
    for (const warning of warnings) {
        logger.info(`Warning: '${warning.instance}' is not a valid URI in '${warning.property}'.`);
    }
    if (errors.length) {
        // Output the more verbose error messages in groups as these may be very large.
        for (const error of errors) {
            logger.startGroup(`Error details: ${error.stack}`);
            logger.info(JSON.stringify(error, null, 2));
            logger.endGroup();
        }
        // Set the main error message to the stacks of all the errors.
        // This should be of a manageable size and may even give enough to fix the error.
        const sarifErrors = errors.map((e) => `- ${e.stack}`);
        throw new InvalidRequestError(`Unable to upload "${sarifFilePath}" as it is not valid SARIF:\n${sarifErrors.join("\n")}`);
    }
}
exports.validateSarifFileSchema = validateSarifFileSchema;
// buildPayload constructs a map ready to be uploaded to the API from the given
// parameters, respecting the current mode and target GitHub instance version.
function buildPayload(commitOid, ref, analysisKey, analysisName, zippedSarif, workflowRunID, workflowRunAttempt, checkoutURI, environment, toolNames, mergeBaseCommitOid) {
    const payloadObj = {
        commit_oid: commitOid,
        ref,
        analysis_key: analysisKey,
        analysis_name: analysisName,
        sarif: zippedSarif,
        workflow_run_id: workflowRunID,
        workflow_run_attempt: workflowRunAttempt,
        checkout_uri: checkoutURI,
        environment,
        started_at: process.env[environment_1.EnvVar.WORKFLOW_STARTED_AT],
        tool_names: toolNames,
        base_ref: undefined,
        base_sha: undefined,
    };
    if (actionsUtil.getWorkflowEventName() === "pull_request") {
        if (commitOid === util.getRequiredEnvParam("GITHUB_SHA") &&
            mergeBaseCommitOid) {
            // We're uploading results for the merge commit
            // and were able to determine the merge base.
            // So we use that as the most accurate base.
            payloadObj.base_ref = `refs/heads/${util.getRequiredEnvParam("GITHUB_BASE_REF")}`;
            payloadObj.base_sha = mergeBaseCommitOid;
        }
        else if (process.env.GITHUB_EVENT_PATH) {
            // Either we're not uploading results for the merge commit
            // or we could not determine the merge base.
            // Using the PR base is the only option here
            const githubEvent = JSON.parse(fs.readFileSync(process.env.GITHUB_EVENT_PATH, "utf8"));
            payloadObj.base_ref = `refs/heads/${githubEvent.pull_request.base.ref}`;
            payloadObj.base_sha = githubEvent.pull_request.base.sha;
        }
    }
    return payloadObj;
}
exports.buildPayload = buildPayload;
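// Illustrative payload shape (field values here are invented for the example):
//   {
//     commit_oid: "deadbeef...",
//     ref: "refs/heads/main",
//     analysis_key: ".github/workflows/codeql.yml:analyze",
//     sarif: "<base64 of gzipped SARIF JSON>",
//     ...
//   }
// On pull_request events, base_ref/base_sha are additionally filled in.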
// Uploads the given set of sarif files.
// Returns a status report for the upload and the ID assigned to the SARIF by the API.
async function uploadFiles(sarifFiles, repositoryNwo, commitOid, ref, analysisKey, category, analysisName, workflowRunID, workflowRunAttempt, sourceRoot, environment, logger) {
    logger.startGroup("Uploading results");
    logger.info(`Processing sarif files: ${JSON.stringify(sarifFiles)}`);
    // Validate that the files we were asked to upload are all valid SARIF files
    for (const file of sarifFiles) {
        validateSarifFileSchema(file, logger);
    }
    let sarif = combineSarifFiles(sarifFiles);
    sarif = await fingerprints.addFingerprints(sarif, sourceRoot, logger);
    sarif = populateRunAutomationDetails(sarif, category, analysisKey, environment);
    if (process_1.env["CODEQL_DISABLE_SARIF_PRUNING"] !== "true")
        sarif = pruneInvalidResults(sarif, logger);
    const toolNames = util.getToolNames(sarif);
    validateUniqueCategory(sarif);
    const sarifPayload = JSON.stringify(sarif);
    const zippedSarif = zlib_1.default.gzipSync(sarifPayload).toString("base64");
    const checkoutURI = (0, file_url_1.default)(sourceRoot);
    const payload = buildPayload(commitOid, ref, analysisKey, analysisName, zippedSarif, workflowRunID, workflowRunAttempt, checkoutURI, environment, toolNames, await actionsUtil.determineMergeBaseCommitOid());
    // Log some useful debug info about the upload
    const rawUploadSizeBytes = sarifPayload.length;
    logger.debug(`Raw upload size: ${rawUploadSizeBytes} bytes`);
    const zippedUploadSizeBytes = zippedSarif.length;
    logger.debug(`Base64 zipped upload size: ${zippedUploadSizeBytes} bytes`);
    const numResultInSarif = countResultsInSarif(sarifPayload);
    logger.debug(`Number of results in upload: ${numResultInSarif}`);
    // Make the upload
    const sarifID = await uploadPayload(payload, repositoryNwo, logger);
    logger.endGroup();
    return {
        statusReport: {
            raw_upload_size_bytes: rawUploadSizeBytes,
            zipped_upload_size_bytes: zippedUploadSizeBytes,
            num_results_in_sarif: numResultInSarif,
        },
        sarifID,
    };
}
const STATUS_CHECK_FREQUENCY_MILLISECONDS = 5 * 1000;
const STATUS_CHECK_TIMEOUT_MILLISECONDS = 2 * 60 * 1000;
/**
 * Waits until either the analysis is successfully processed, a processing error
 * is reported, or `STATUS_CHECK_TIMEOUT_MILLISECONDS` elapses.
 *
 * If `isUnsuccessfulExecution` is passed, will throw an error if the analysis
 * processing does not produce a single error mentioning the unsuccessful
 * execution.
 */
async function waitForProcessing(repositoryNwo, sarifID, logger, options = {
    isUnsuccessfulExecution: false,
}) {
    logger.startGroup("Waiting for processing to finish");
    try {
        const client = api.getApiClient();
        const statusCheckingStarted = Date.now();
        // eslint-disable-next-line no-constant-condition
        while (true) {
            if (Date.now() >
                statusCheckingStarted + STATUS_CHECK_TIMEOUT_MILLISECONDS) {
                // If the analysis hasn't finished processing in the allotted time, we continue anyway rather than failing.
                // It's possible the analysis will eventually finish processing, but it's not worth spending more
                // Actions time waiting.
                logger.warning("Timed out waiting for analysis to finish processing. Continuing.");
                break;
            }
            let response = undefined;
            try {
                response = await client.request("GET /repos/:owner/:repo/code-scanning/sarifs/:sarif_id", {
                    owner: repositoryNwo.owner,
                    repo: repositoryNwo.repo,
                    sarif_id: sarifID,
                });
            }
            catch (e) {
                logger.warning(`An error occurred checking the status of the delivery. ${e} It should still be processed in the background, but errors that occur during processing may not be reported.`);
                break;
            }
            const status = response.data.processing_status;
            logger.info(`Analysis upload status is ${status}.`);
            if (status === "pending") {
                logger.debug("Analysis processing is still pending...");
            }
            else if (options.isUnsuccessfulExecution) {
                // We expect a specific processing error for unsuccessful executions, so
                // handle these separately.
                handleProcessingResultForUnsuccessfulExecution(response, status, logger);
                break;
            }
            else if (status === "complete") {
                break;
            }
            else if (status === "failed") {
                const message = `Code Scanning could not process the submitted SARIF file:\n${response.data.errors}`;
                throw shouldConsiderAsUserError(response.data.errors)
                    ? new util_1.UserError(message)
                    : new InvalidRequestError(message);
            }
            else {
                util.assertNever(status);
            }
            await util.delay(STATUS_CHECK_FREQUENCY_MILLISECONDS, {
                allowProcessExit: false,
            });
        }
    }
    finally {
        logger.endGroup();
    }
}
exports.waitForProcessing = waitForProcessing;
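// Hypothetical usage sketch (not from this file): after an upload returns a
// sarifID, poll until Code Scanning has processed the analysis:
//   await waitForProcessing(repositoryNwo, sarifID, logger);
// This polls every 5 seconds and, on timeout after 2 minutes, continues with
// a warning rather than failing.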
/**
 * Returns whether the provided processing errors should be considered a user error.
 */
function shouldConsiderAsUserError(processingErrors) {
    return (processingErrors.length === 1 &&
        processingErrors[0] ===
            "CodeQL analyses from advanced configurations cannot be processed when the default setup is enabled");
}
/**
 * Checks the processing result for an unsuccessful execution. Logs a warning
 * if the result is a failure without the expected single "unsuccessful
 * execution" error.
 */
function handleProcessingResultForUnsuccessfulExecution(response, status, logger) {
    if (status === "failed" &&
        Array.isArray(response.data.errors) &&
        response.data.errors.length === 1 &&
        response.data.errors[0].toString().startsWith("unsuccessful execution")) {
        logger.debug("Successfully uploaded a SARIF file for the unsuccessful execution. Received expected " +
            '"unsuccessful execution" processing error, and no other errors.');
    }
    else if (status === "failed") {
        logger.warning(`Failed to upload a SARIF file for the unsuccessful execution. Code scanning status ` +
            `information for the repository may be out of date as a result. Processing errors: ${response.data.errors}`);
    }
    else if (status === "complete") {
        // There is a known transient issue with the code scanning API where it sometimes reports
        // `complete` for an unsuccessful execution submission.
        logger.debug("Uploaded a SARIF file for the unsuccessful execution, but did not receive the expected " +
            '"unsuccessful execution" processing error. This is a known transient issue with the ' +
            "code scanning API, and does not cause out of date code scanning status information.");
    }
    else {
        util.assertNever(status);
    }
}
function validateUniqueCategory(sarif) {
    // duplicate categories are allowed in the same sarif file
    // but not across multiple sarif files
    const categories = {};
    for (const run of sarif.runs) {
        const id = run?.automationDetails?.id;
        const tool = run.tool?.driver?.name;
        const category = `${sanitize(id)}_${sanitize(tool)}`;
        categories[category] = { id, tool };
    }
    for (const [category, { id, tool }] of Object.entries(categories)) {
        const sentinelEnvVar = `CODEQL_UPLOAD_SARIF_${category}`;
        if (process.env[sentinelEnvVar]) {
            throw new InvalidRequestError("Aborting upload: only one run of the codeql/analyze or codeql/upload-sarif actions is allowed per job per tool/category. " +
                "The easiest fix is to specify a unique value for the `category` input. If .runs[].automationDetails.id is specified " +
                "in the sarif file, that will take precedence over your configured `category`. " +
                `Category: (${id ? id : "none"}) Tool: (${tool ? tool : "none"})`);
        }
        core.exportVariable(sentinelEnvVar, sentinelEnvVar);
    }
}
exports.validateUniqueCategory = validateUniqueCategory;
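// Illustrative example (not part of the original module): a run with
// automationDetails.id "ci/" and tool name "CodeQL" produces the sentinel
// environment variable CODEQL_UPLOAD_SARIF_CI__CODEQL; a second upload in the
// same job that maps to the same variable aborts with an InvalidRequestError.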
/**
 * Sanitizes a string to be used as an environment variable name.
 * This will replace all non-alphanumeric characters with underscores.
 * There could still be some false category clashes if two uploads
 * occur that differ only in their non-alphanumeric characters. This is
 * unlikely.
 *
 * @param str the initial value to sanitize
 */
function sanitize(str) {
    return (str ?? "_").replace(/[^a-zA-Z0-9_]/g, "_").toLocaleUpperCase();
}
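// Illustrative examples (not part of the original module):
//   sanitize("my-category/1") => "MY_CATEGORY_1"
//   sanitize(undefined)       => "_"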
function pruneInvalidResults(sarif, logger) {
    let pruned = 0;
    const newRuns = [];
    for (const run of sarif.runs || []) {
        if (run.tool?.driver?.name === "CodeQL" &&
            run.tool?.driver?.semanticVersion === "2.11.2") {
            // Version 2.11.2 of the CodeQL CLI had many false positives in the
            // rb/weak-cryptographic-algorithm query which we prune here. The
            // issue is tracked in https://github.com/github/codeql/issues/11107.
            const newResults = [];
            for (const result of run.results || []) {
                if (result.ruleId === "rb/weak-cryptographic-algorithm" &&
                    (result.message?.text?.includes(" MD5 ") ||
                        result.message?.text?.includes(" SHA1 "))) {
                    pruned += 1;
                    continue;
                }
                newResults.push(result);
            }
            newRuns.push({ ...run, results: newResults });
        }
        else {
            newRuns.push(run);
        }
    }
    if (pruned > 0) {
        logger.info(`Pruned ${pruned} results believed to be invalid from SARIF file.`);
    }
    return { ...sarif, runs: newRuns };
}
exports.pruneInvalidResults = pruneInvalidResults;
/**
 * An error that occurred due to an invalid SARIF upload request.
 */
class InvalidRequestError extends Error {
    constructor(message) {
        super(message);
    }
}
//# sourceMappingURL=upload-lib.js.map