Merge pull request #1242 from github/aeisenberg/analysis-results-on-restart
Commit 0efd02979e
@@ -1,3 +1,4 @@
+import * as fs from 'fs-extra';
 import * as os from 'os';
 import * as path from 'path';
 import { CancellationToken, ExtensionContext } from 'vscode';
@@ -12,7 +13,8 @@ import { sarifParser } from '../sarif-parser';
 import { extractAnalysisAlerts } from './sarif-processing';
 import { CodeQLCliServer } from '../cli';
 import { extractRawResults } from './bqrs-processing';
-import { getErrorMessage } from '../pure/helpers-pure';
+import { asyncFilter, getErrorMessage } from '../pure/helpers-pure';
+import { createDownloadPath } from './download-link';

 export class AnalysesResultsManager {
   // Store for the results of various analyses for each remote query.
@@ -44,13 +46,22 @@ export class AnalysesResultsManager {
     await this.downloadSingleAnalysisResults(analysisSummary, credentials, publishResults);
   }

-  public async downloadAnalysesResults(
-    allAnalysesToDownload: AnalysisSummary[],
-    token: CancellationToken | undefined,
-    publishResults: (analysesResults: AnalysisResults[]) => Promise<void>
+  /**
+   * Loads the analysis results. For each analysis result, if it is not downloaded yet,
+   * it will be downloaded. If it is already downloaded, it will be loaded into memory.
+   * If it is already in memory, this will be a no-op.
+   *
+   * @param allAnalysesToLoad List of analyses to ensure are downloaded and in memory
+   * @param token Optional cancellation token
+   * @param publishResults Optional function to publish the results after loading
+   */
+  public async loadAnalysesResults(
+    allAnalysesToLoad: AnalysisSummary[],
+    token?: CancellationToken,
+    publishResults: (analysesResults: AnalysisResults[]) => Promise<void> = () => Promise.resolve()
   ): Promise<void> {
     // Filter out analyses that we have already in memory.
-    const analysesToDownload = allAnalysesToDownload.filter(x => !this.isAnalysisInMemory(x));
+    const analysesToDownload = allAnalysesToLoad.filter(x => !this.isAnalysisInMemory(x));

     const credentials = await Credentials.initialize(this.ctx);

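For context, a minimal usage sketch of the renamed method (the arm, summaries, token, and publish names are illustrative and not part of this change):

    // Ensure the given analyses are downloaded and loaded into memory.
    // publishResults defaults to a no-op, so it can be omitted entirely.
    await arm.loadAnalysesResults(summaries);

    // Or pass a cancellation token and a callback that publishes results as they load.
    await arm.loadAnalysesResults(summaries, token, results => publish(results));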
@@ -151,6 +162,21 @@ export class AnalysesResultsManager {
     void publishResults([...resultsForQuery]);
   }

+  public async loadDownloadedAnalyses(
+    allAnalysesToCheck: AnalysisSummary[]
+  ) {
+    // Find all analyses that are already downloaded.
+    const allDownloadedAnalyses = await asyncFilter(allAnalysesToCheck, x => this.isAnalysisDownloaded(x));
+    // Now, ensure that all of these analyses are in memory. Some may already be in memory. These are ignored.
+    await this.loadAnalysesResults(allDownloadedAnalyses);
+  }
+
+  private async isAnalysisDownloaded(analysis: AnalysisSummary): Promise<boolean> {
+    return await fs.pathExists(createDownloadPath(this.storagePath, analysis.downloadLink));
+  }
+
   private async readBqrsResults(filePath: string, fileLinkPrefix: string): Promise<AnalysisRawResults> {
     return await extractRawResults(this.cliServer, this.logger, filePath, fileLinkPrefix);
   }
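The asyncFilter helper imported from helpers-pure is used to keep only the analyses whose artifacts already exist on disk. Its implementation is not part of this diff; a minimal sketch of an async filter with that usage shape could look like this:

    // Evaluate the async predicate for every element in parallel,
    // then keep the elements whose predicate resolved to true.
    async function asyncFilter<T>(arr: T[], predicate: (item: T) => Promise<boolean>): Promise<T[]> {
      const results = await Promise.all(arr.map(predicate));
      return arr.filter((_, i) => results[i]);
    }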
@@ -1,3 +1,5 @@
+import * as path from 'path';
+
 /**
  * Represents a link to an artifact to be downloaded.
  */
@@ -23,3 +25,16 @@ export interface DownloadLink {
    */
   queryId: string;
 }
+
+/**
+ * Converts a downloadLink to the path where the artifact should be stored.
+ *
+ * @param storagePath The base directory to store artifacts in.
+ * @param downloadLink The DownloadLink
+ * @param extension An optional file extension to append to the artifact (no `.`).
+ *
+ * @returns A full path to the download location of the artifact
+ */
+export function createDownloadPath(storagePath: string, downloadLink: DownloadLink, extension = '') {
+  return path.join(storagePath, downloadLink.queryId, downloadLink.id + (extension ? `.${extension}` : ''));
+}
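The helper maps an artifact to <storagePath>/<queryId>/<id>, with an optional extension appended, which the new unit tests below exercise. A quick sketch (the link value is illustrative):

    import { DownloadLink, createDownloadPath } from './download-link';

    const link: DownloadLink = { id: 'abc', urlPath: '', innerFilePath: '', queryId: 'def' };
    createDownloadPath('storage', link);        // path.join('storage', 'def', 'abc')
    createDownloadPath('storage', link, 'zip'); // path.join('storage', 'def', 'abc.zip')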
@@ -5,7 +5,7 @@ import { showAndLogWarningMessage, tmpDir } from '../helpers';
 import { Credentials } from '../authentication';
 import { logger } from '../logging';
 import { RemoteQueryWorkflowResult } from './remote-query-workflow-result';
-import { DownloadLink } from './download-link';
+import { DownloadLink, createDownloadPath } from './download-link';
 import { RemoteQuery } from './remote-query';
 import { RemoteQueryFailureIndexItem, RemoteQueryResultIndex, RemoteQuerySuccessIndexItem } from './remote-query-result-index';

@@ -82,14 +82,14 @@ export async function downloadArtifactFromLink(

   const octokit = await credentials.getOctokit();

-  const extractedPath = path.join(storagePath, downloadLink.queryId, downloadLink.id);
+  const extractedPath = createDownloadPath(storagePath, downloadLink);

   // first check if we already have the artifact
   if (!(await fs.pathExists(extractedPath))) {
     // Download the zipped artifact.
     const response = await octokit.request(`GET ${downloadLink.urlPath}/zip`, {});

-    const zipFilePath = path.join(storagePath, downloadLink.queryId, `${downloadLink.id}.zip`);
+    const zipFilePath = createDownloadPath(storagePath, downloadLink, 'zip');
     await saveFile(`${zipFilePath}`, response.data as ArrayBuffer);

     // Extract the zipped artifact.
@@ -46,11 +46,15 @@ export class RemoteQueriesInterfaceManager {
     this.getPanel().reveal(undefined, true);

     await this.waitForPanelLoaded();
+    const model = this.buildViewModel(query, queryResult);
     await this.postMessage({
       t: 'setRemoteQueryResult',
-      queryResult: this.buildViewModel(query, queryResult)
+      queryResult: model
     });

+    // Ensure all pre-downloaded artifacts are loaded into memory
+    await this.analysesResultsManager.loadDownloadedAnalyses(model.analysisSummaries);
+
     await this.setAnalysisResults(this.analysesResultsManager.getAnalysesResults(queryResult.queryId));
   }

@@ -213,7 +217,7 @@ export class RemoteQueriesInterfaceManager {
   }

   private async downloadAllAnalysesResults(msg: RemoteQueryDownloadAllAnalysesResultsMessage): Promise<void> {
-    await this.analysesResultsManager.downloadAnalysesResults(
+    await this.analysesResultsManager.loadAnalysesResults(
       msg.analysisSummaries,
       undefined,
       results => this.setAnalysisResults(results));
@@ -187,7 +187,7 @@ export class RemoteQueriesManager extends DisposableObject {
       fileSize: String(a.fileSizeInBytes)
     }));

-    await this.analysesResultsManager.downloadAnalysesResults(
+    await this.analysesResultsManager.loadAnalysesResults(
       analysesToDownload,
       token,
       results => this.interfaceManager.setAnalysisResults(results));
@@ -22,6 +22,17 @@
         "innerFilePath": "results.sarif",
         "queryId": "MRVA Integration test 1-6sBi6oaky_fxqXW2NA4bx"
       }
     },
+    {
+      "nwo": "hucairz/i-dont-exist",
+      "resultCount": 5,
+      "fileSizeInBytes": 81237,
+      "downloadLink": {
+        "id": "999999",
+        "urlPath": "/these/results/will/never/be/downloaded/999999",
+        "innerFilePath": "results.sarif",
+        "queryId": "MRVA Integration test 2-UL-vbKAjP8ffObxjsp7hN"
+      }
+    }
   ],
   "analysisFailures": [],
@@ -0,0 +1,36 @@
+import { expect } from 'chai';
+import 'mocha';
+import * as path from 'path';
+
+import { DownloadLink, createDownloadPath } from '../../remote-queries/download-link';
+
+describe('createDownloadPath', () => {
+  it('should return the correct path', () => {
+    const downloadLink: DownloadLink = {
+      id: 'abc',
+      urlPath: '',
+      innerFilePath: '',
+      queryId: 'def'
+    };
+    const expectedPath = path.join('storage', 'def', 'abc');
+
+    const actualPath = createDownloadPath('storage', downloadLink);
+
+    expect(actualPath).to.equal(expectedPath);
+  });
+
+  it('should return the correct path with extension', () => {
+    const downloadLink: DownloadLink = {
+      id: 'abc',
+      urlPath: '',
+      innerFilePath: '',
+      queryId: 'def'
+    };
+
+    const expectedPath = path.join('storage', 'def', 'abc.zip');
+
+    const actualPath = createDownloadPath('storage', downloadLink, 'zip');
+
+    expect(actualPath).to.equal(expectedPath);
+  });
+});
@@ -245,8 +245,8 @@ describe('Remote queries and query history manager', function() {

   it('should download two artifacts at once', async () => {
     const publisher = sandbox.spy();
-    const analysisSummaries = [...remoteQueryResult0.analysisSummaries];
-    await arm.downloadAnalysesResults(analysisSummaries, undefined, publisher);
+    const analysisSummaries = [remoteQueryResult0.analysisSummaries[0], remoteQueryResult0.analysisSummaries[1]];
+    await arm.loadAnalysesResults(analysisSummaries, undefined, publisher);

     const trimmed = publisher.getCalls().map(call => call.args[0]).map(args => {
       args.forEach((analysisResult: any) => delete analysisResult.interpretedResults);
@@ -287,7 +287,7 @@ describe('Remote queries and query history manager', function() {
     const analysisSummaries = [...remoteQueryResult0.analysisSummaries];

     try {
-      await arm.downloadAnalysesResults(analysisSummaries, {
+      await arm.loadAnalysesResults(analysisSummaries, {
         isCancellationRequested: true
       } as CancellationToken, publisher);
       expect.fail('Should have thrown');
@@ -300,11 +300,11 @@ describe('Remote queries and query history manager', function() {

   it('should get the analysis results', async () => {
     const publisher = sandbox.spy();
-    const analysisSummaries0 = [...remoteQueryResult0.analysisSummaries];
+    const analysisSummaries0 = [remoteQueryResult0.analysisSummaries[0], remoteQueryResult0.analysisSummaries[1]];
     const analysisSummaries1 = [...remoteQueryResult1.analysisSummaries];

-    await arm.downloadAnalysesResults(analysisSummaries0, undefined, publisher);
-    await arm.downloadAnalysesResults(analysisSummaries1, undefined, publisher);
+    await arm.loadAnalysesResults(analysisSummaries0, undefined, publisher);
+    await arm.loadAnalysesResults(analysisSummaries1, undefined, publisher);

     const result0 = arm.getAnalysesResults(rawQueryHistory[0].queryId);
     const result0Again = arm.getAnalysesResults(rawQueryHistory[0].queryId);
@@ -323,7 +323,7 @@ describe('Remote queries and query history manager', function() {
   it.skip('should read sarif', async () => {
     const publisher = sandbox.spy();
     const analysisSummaries0 = [remoteQueryResult0.analysisSummaries[0]];
-    await arm.downloadAnalysesResults(analysisSummaries0, undefined, publisher);
+    await arm.loadAnalysesResults(analysisSummaries0, undefined, publisher);

     const sarif = fs.readJSONSync(path.join(STORAGE_DIR, 'queries', rawQueryHistory[0].queryId, '171543249', 'results.sarif'));
     const queryResults = sarif.runs
@@ -332,6 +332,29 @@ describe('Remote queries and query history manager', function() {

     expect(publisher.getCall(1).args[0][0].results).to.deep.eq(queryResults);
   });
+
+  it('should check if an artifact is downloaded and not in memory', async () => {
+    // Load remoteQueryResult0.analysisSummaries[1] into memory
+    await arm.downloadAnalysisResults(remoteQueryResult0.analysisSummaries[1], () => Promise.resolve());
+
+    // on disk
+    expect(await (arm as any).isAnalysisDownloaded(remoteQueryResult0.analysisSummaries[0])).to.be.true;
+
+    // in memory
+    expect(await (arm as any).isAnalysisDownloaded(remoteQueryResult0.analysisSummaries[1])).to.be.true;
+
+    // not downloaded
+    expect(await (arm as any).isAnalysisDownloaded(remoteQueryResult0.analysisSummaries[2])).to.be.false;
+  });
+
+  it('should load downloaded artifacts', async () => {
+    await arm.loadDownloadedAnalyses(remoteQueryResult0.analysisSummaries);
+    const queryId = rawQueryHistory[0].queryId;
+    const analysesResultsNwos = arm.getAnalysesResults(queryId).map(ar => ar.nwo).sort();
+    expect(analysesResultsNwos[0]).to.eq('github/vscode-codeql');
+    expect(analysesResultsNwos[1]).to.eq('other/hucairz');
+    expect(analysesResultsNwos.length).to.eq(2);
+  });
 });

 async function copyHistoryState() {