Address comments from PR
- Rename queryStorageLocation -> queryStorageDir - Extract scrubber to its own module - Add more comments - Rename source -> cancellationSource - Ensure cancellationSource is disposed
This commit is contained in:
Родитель
7785dfead2
Коммит
64ac33e3bb
|
@ -28,6 +28,7 @@ export interface FullLocationLink extends LocationLink {
|
|||
* @param dbm The database manager
|
||||
* @param uriString The selected source file and location
|
||||
* @param keyType The contextual query type to run
|
||||
* @param queryStorageDir The directory to store the query results
|
||||
* @param progress A progress callback
|
||||
* @param token A CancellationToken
|
||||
* @param filter A function that will filter extraneous results
|
||||
|
@ -38,7 +39,7 @@ export async function getLocationsForUriString(
|
|||
dbm: DatabaseManager,
|
||||
uriString: string,
|
||||
keyType: KeyType,
|
||||
queryStorageLocation: string,
|
||||
queryStorageDir: string,
|
||||
progress: ProgressCallback,
|
||||
token: CancellationToken,
|
||||
filter: (src: string, dest: string) => boolean
|
||||
|
@ -70,7 +71,7 @@ export async function getLocationsForUriString(
|
|||
qs,
|
||||
db,
|
||||
initialInfo,
|
||||
queryStorageLocation,
|
||||
queryStorageDir,
|
||||
progress,
|
||||
token,
|
||||
templates
|
||||
|
|
|
@ -42,7 +42,7 @@ export class TemplateQueryDefinitionProvider implements DefinitionProvider {
|
|||
private cli: CodeQLCliServer,
|
||||
private qs: QueryServerClient,
|
||||
private dbm: DatabaseManager,
|
||||
private queryStorageLocation: string,
|
||||
private queryStorageDir: string,
|
||||
) {
|
||||
this.cache = new CachedOperation<LocationLink[]>(this.getDefinitions.bind(this));
|
||||
}
|
||||
|
@ -70,7 +70,7 @@ export class TemplateQueryDefinitionProvider implements DefinitionProvider {
|
|||
this.dbm,
|
||||
uriString,
|
||||
KeyType.DefinitionQuery,
|
||||
this.queryStorageLocation,
|
||||
this.queryStorageDir,
|
||||
progress,
|
||||
token,
|
||||
(src, _dest) => src === uriString
|
||||
|
@ -86,7 +86,7 @@ export class TemplateQueryReferenceProvider implements ReferenceProvider {
|
|||
private cli: CodeQLCliServer,
|
||||
private qs: QueryServerClient,
|
||||
private dbm: DatabaseManager,
|
||||
private queryStorageLocation: string,
|
||||
private queryStorageDir: string,
|
||||
) {
|
||||
this.cache = new CachedOperation<FullLocationLink[]>(this.getReferences.bind(this));
|
||||
}
|
||||
|
@ -119,7 +119,7 @@ export class TemplateQueryReferenceProvider implements ReferenceProvider {
|
|||
this.dbm,
|
||||
uriString,
|
||||
KeyType.DefinitionQuery,
|
||||
this.queryStorageLocation,
|
||||
this.queryStorageDir,
|
||||
progress,
|
||||
token,
|
||||
(src, _dest) => src === uriString
|
||||
|
@ -140,7 +140,7 @@ export class TemplatePrintAstProvider {
|
|||
private cli: CodeQLCliServer,
|
||||
private qs: QueryServerClient,
|
||||
private dbm: DatabaseManager,
|
||||
private queryStorageLocation: string,
|
||||
private queryStorageDir: string,
|
||||
) {
|
||||
this.cache = new CachedOperation<QueryWithDb>(this.getAst.bind(this));
|
||||
}
|
||||
|
@ -221,7 +221,7 @@ export class TemplatePrintAstProvider {
|
|||
this.qs,
|
||||
db,
|
||||
initialInfo,
|
||||
this.queryStorageLocation,
|
||||
this.queryStorageDir,
|
||||
progress,
|
||||
token,
|
||||
templates
|
||||
|
|
|
@ -436,13 +436,13 @@ async function activateWithInstalledDistribution(
|
|||
ctx.subscriptions.push(queryHistoryConfigurationListener);
|
||||
const showResults = async (item: FullCompletedQueryInfo) =>
|
||||
showResultsForCompletedQuery(item, WebviewReveal.Forced);
|
||||
const queryStorageLocation = path.join(ctx.globalStorageUri.fsPath, 'queries');
|
||||
await fs.ensureDir(queryStorageLocation);
|
||||
const queryStorageDir = path.join(ctx.globalStorageUri.fsPath, 'queries');
|
||||
await fs.ensureDir(queryStorageDir);
|
||||
|
||||
const qhm = new QueryHistoryManager(
|
||||
qs,
|
||||
dbm,
|
||||
queryStorageLocation,
|
||||
queryStorageDir,
|
||||
ctx,
|
||||
queryHistoryConfigurationListener,
|
||||
showResults,
|
||||
|
@ -519,7 +519,7 @@ async function activateWithInstalledDistribution(
|
|||
qs,
|
||||
databaseItem,
|
||||
initialInfo,
|
||||
queryStorageLocation,
|
||||
queryStorageDir,
|
||||
progress,
|
||||
source.token,
|
||||
);
|
||||
|
@ -996,16 +996,16 @@ async function activateWithInstalledDistribution(
|
|||
void logger.log('Registering jump-to-definition handlers.');
|
||||
languages.registerDefinitionProvider(
|
||||
{ scheme: archiveFilesystemProvider.zipArchiveScheme },
|
||||
new TemplateQueryDefinitionProvider(cliServer, qs, dbm, queryStorageLocation)
|
||||
new TemplateQueryDefinitionProvider(cliServer, qs, dbm, queryStorageDir)
|
||||
);
|
||||
|
||||
languages.registerReferenceProvider(
|
||||
{ scheme: archiveFilesystemProvider.zipArchiveScheme },
|
||||
new TemplateQueryReferenceProvider(cliServer, qs, dbm, queryStorageLocation)
|
||||
new TemplateQueryReferenceProvider(cliServer, qs, dbm, queryStorageDir)
|
||||
);
|
||||
|
||||
const astViewer = new AstViewer();
|
||||
const templateProvider = new TemplatePrintAstProvider(cliServer, qs, dbm, queryStorageLocation);
|
||||
const templateProvider = new TemplatePrintAstProvider(cliServer, qs, dbm, queryStorageDir);
|
||||
|
||||
ctx.subscriptions.push(astViewer);
|
||||
ctx.subscriptions.push(commandRunnerWithProgress('codeQL.viewAst', async (
|
||||
|
|
|
@ -0,0 +1,135 @@
|
|||
import * as fs from 'fs-extra';
|
||||
import * as os from 'os';
|
||||
import * as path from 'path';
|
||||
import { Disposable, ExtensionContext } from 'vscode';
|
||||
import { logger } from './logging';
|
||||
|
||||
const LAST_SCRUB_TIME_KEY = 'lastScrubTime';
|
||||
|
||||
type Counter = {
|
||||
increment: () => void;
|
||||
};
|
||||
|
||||
/**
|
||||
 * Registers an interval timer that will periodically check for queries old enough
|
||||
* to be deleted.
|
||||
*
|
||||
* Note that this scrubber will clean all queries from all workspaces. It should not
|
||||
* run too often and it should only run from one workspace at a time.
|
||||
*
|
||||
* Generally, `wakeInterval` should be significantly shorter than `throttleTime`.
|
||||
*
|
||||
* @param wakeInterval How often to check to see if the job should run.
|
||||
* @param throttleTime How often to actually run the job.
|
||||
 * @param maxQueryTime The maximum age of a query before it is ready for deletion.
|
||||
* @param queryDirectory The directory containing all queries.
|
||||
* @param ctx The extension context.
|
||||
*/
|
||||
export function registerQueryHistoryScubber(
|
||||
wakeInterval: number,
|
||||
throttleTime: number,
|
||||
maxQueryTime: number,
|
||||
queryDirectory: string,
|
||||
ctx: ExtensionContext,
|
||||
|
||||
// optional counter to keep track of how many times the scrubber has run
|
||||
counter?: Counter
|
||||
): Disposable {
|
||||
const deregister = setInterval(scrubber, wakeInterval, throttleTime, maxQueryTime, queryDirectory, ctx, counter);
|
||||
|
||||
return {
|
||||
dispose: () => {
|
||||
clearInterval(deregister);
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
async function scrubber(
|
||||
throttleTime: number,
|
||||
maxQueryTime: number,
|
||||
queryDirectory: string,
|
||||
ctx: ExtensionContext,
|
||||
counter?: Counter
|
||||
) {
|
||||
const lastScrubTime = ctx.globalState.get<number>(LAST_SCRUB_TIME_KEY);
|
||||
const now = Date.now();
|
||||
|
||||
// If we have never scrubbed before, or if the last scrub was more than `throttleTime` ago,
|
||||
// then scrub again.
|
||||
if (lastScrubTime === undefined || now - lastScrubTime >= throttleTime) {
|
||||
await ctx.globalState.update(LAST_SCRUB_TIME_KEY, now);
|
||||
|
||||
let scrubCount = 0; // total number of directories deleted
|
||||
try {
|
||||
counter?.increment();
|
||||
void logger.log('Scrubbing query directory. Removing old queries.');
|
||||
if (!(await fs.pathExists(queryDirectory))) {
|
||||
void logger.log(`Cannot scrub. Query directory does not exist: ${queryDirectory}`);
|
||||
return;
|
||||
}
|
||||
|
||||
const baseNames = await fs.readdir(queryDirectory);
|
||||
const errors: string[] = [];
|
||||
for (const baseName of baseNames) {
|
||||
const dir = path.join(queryDirectory, baseName);
|
||||
const scrubResult = await scrubDirectory(dir, now, maxQueryTime);
|
||||
if (scrubResult.errorMsg) {
|
||||
errors.push(scrubResult.errorMsg);
|
||||
}
|
||||
if (scrubResult.deleted) {
|
||||
scrubCount++;
|
||||
}
|
||||
}
|
||||
|
||||
if (errors.length) {
|
||||
throw new Error(os.EOL + errors.join(os.EOL));
|
||||
}
|
||||
} catch (e) {
|
||||
void logger.log(`Error while scrubbing queries: ${e}`);
|
||||
} finally {
|
||||
void logger.log(`Scrubbed ${scrubCount} old queries.`);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async function scrubDirectory(dir: string, now: number, maxQueryTime: number): Promise<{
|
||||
errorMsg?: string,
|
||||
deleted: boolean
|
||||
}> {
|
||||
const timestampFile = path.join(dir, 'timestamp');
|
||||
try {
|
||||
let deleted = true;
|
||||
if (!(await fs.stat(dir)).isDirectory()) {
|
||||
void logger.log(` ${dir} is not a directory. Deleting.`);
|
||||
await fs.remove(dir);
|
||||
} else if (!(await fs.pathExists(timestampFile))) {
|
||||
void logger.log(` ${dir} has no timestamp file. Deleting.`);
|
||||
await fs.remove(dir);
|
||||
} else if (!(await fs.stat(timestampFile)).isFile()) {
|
||||
void logger.log(` ${timestampFile} is not a file. Deleting.`);
|
||||
await fs.remove(dir);
|
||||
} else {
|
||||
const timestampText = await fs.readFile(timestampFile, 'utf8');
|
||||
const timestamp = parseInt(timestampText, 10);
|
||||
|
||||
if (Number.isNaN(timestamp)) {
|
||||
void logger.log(` ${dir} has invalid timestamp '${timestampText}'. Deleting.`);
|
||||
await fs.remove(dir);
|
||||
} else if (now - timestamp > maxQueryTime) {
|
||||
void logger.log(` ${dir} is older than ${maxQueryTime / 1000} seconds. Deleting.`);
|
||||
await fs.remove(dir);
|
||||
} else {
|
||||
void logger.log(` ${dir} is not older than ${maxQueryTime / 1000} seconds. Keeping.`);
|
||||
deleted = false;
|
||||
}
|
||||
}
|
||||
return {
|
||||
deleted
|
||||
};
|
||||
} catch (err) {
|
||||
return {
|
||||
errorMsg: ` Could not delete '${dir}': ${err}`,
|
||||
deleted: false
|
||||
};
|
||||
}
|
||||
}
|
|
@ -1,5 +1,4 @@
|
|||
import * as path from 'path';
|
||||
import * as fs from 'fs-extra';
|
||||
import {
|
||||
commands,
|
||||
Disposable,
|
||||
|
@ -32,6 +31,7 @@ import { commandRunner } from './commandRunner';
|
|||
import { assertNever } from './pure/helpers-pure';
|
||||
import { FullCompletedQueryInfo, FullQueryInfo, QueryStatus } from './query-results';
|
||||
import { DatabaseManager } from './databases';
|
||||
import { registerQueryHistoryScubber } from './query-history-scrubber';
|
||||
|
||||
/**
|
||||
* query-history.ts
|
||||
|
@ -258,13 +258,13 @@ export class QueryHistoryManager extends DisposableObject {
|
|||
treeView: TreeView<FullQueryInfo>;
|
||||
lastItemClick: { time: Date; item: FullQueryInfo } | undefined;
|
||||
compareWithItem: FullQueryInfo | undefined;
|
||||
queryHistoryScrubber: Disposable;
|
||||
queryHistoryScrubber: Disposable | undefined;
|
||||
private queryMetadataStorageLocation;
|
||||
|
||||
constructor(
|
||||
private qs: QueryServerClient,
|
||||
private dbm: DatabaseManager,
|
||||
private queryStorageLocation: string,
|
||||
private queryStorageDir: string,
|
||||
ctx: ExtensionContext,
|
||||
private queryHistoryConfigListener: QueryHistoryConfig,
|
||||
private selectedCallback: (item: FullCompletedQueryInfo) => Promise<void>,
|
||||
|
@ -397,19 +397,15 @@ export class QueryHistoryManager extends DisposableObject {
|
|||
}
|
||||
)
|
||||
);
|
||||
|
||||
// There are two configuration items that affect the query history:
|
||||
// 1. The ttl for query history items.
|
||||
// 2. The default label for query history items.
|
||||
// When either of these change, must refresh the tree view.
|
||||
this.push(
|
||||
queryHistoryConfigListener.onDidChangeConfiguration(() => {
|
||||
this.treeDataProvider.refresh();
|
||||
// recreate the history scrubber
|
||||
this.queryHistoryScrubber.dispose();
|
||||
this.queryHistoryScrubber = this.push(
|
||||
registerQueryHistoryScubber(
|
||||
ONE_HOUR_IN_MS, TWO_HOURS_IN_MS,
|
||||
queryHistoryConfigListener.ttlInMillis,
|
||||
this.queryStorageLocation,
|
||||
ctx
|
||||
)
|
||||
);
|
||||
this.registerQueryHistoryScrubber(queryHistoryConfigListener, ctx);
|
||||
})
|
||||
);
|
||||
|
||||
|
@ -428,19 +424,28 @@ export class QueryHistoryManager extends DisposableObject {
|
|||
},
|
||||
}));
|
||||
|
||||
// Register the query history scrubber
|
||||
this.registerQueryHistoryScrubber(queryHistoryConfigListener, ctx);
|
||||
}
|
||||
|
||||
/**
|
||||
* Register and create the history scrubber.
|
||||
*/
|
||||
private registerQueryHistoryScrubber(queryHistoryConfigListener: QueryHistoryConfig, ctx: ExtensionContext) {
|
||||
this.queryHistoryScrubber?.dispose();
|
||||
// Every hour check if we need to re-run the query history scrubber.
|
||||
this.queryHistoryScrubber = this.push(
|
||||
registerQueryHistoryScubber(
|
||||
ONE_HOUR_IN_MS, TWO_HOURS_IN_MS,
|
||||
ONE_HOUR_IN_MS,
|
||||
TWO_HOURS_IN_MS,
|
||||
queryHistoryConfigListener.ttlInMillis,
|
||||
path.join(ctx.globalStorageUri.fsPath, 'queries'),
|
||||
this.queryStorageDir,
|
||||
ctx
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
async readQueryHistory(): Promise<void> {
|
||||
void logger.log(`Reading cached query history from '${this.queryMetadataStorageLocation}'.`);
|
||||
const history = await FullQueryInfo.slurp(this.queryMetadataStorageLocation, this.queryHistoryConfigListener);
|
||||
this.treeDataProvider.allHistory = history;
|
||||
}
|
||||
|
@ -501,7 +506,10 @@ export class QueryHistoryManager extends DisposableObject {
|
|||
if (item.status !== QueryStatus.InProgress) {
|
||||
this.treeDataProvider.remove(item);
|
||||
item.completedQuery?.dispose();
|
||||
await item.completedQuery?.query.cleanUp();
|
||||
|
||||
// User has explicitly asked for this query to be removed.
|
||||
// We need to delete it from disk as well.
|
||||
await item.completedQuery?.query.deleteQuery();
|
||||
}
|
||||
}));
|
||||
await this.writeQueryHistory();
|
||||
|
@ -951,106 +959,3 @@ the file in the file explorer and dragging it into the workspace.`
|
|||
this.treeDataProvider.refresh();
|
||||
}
|
||||
}
|
||||
|
||||
const LAST_SCRUB_TIME_KEY = 'lastScrubTime';
|
||||
|
||||
/**
|
||||
 * Registers an interval timer that will periodically check for queries old enough
|
||||
* to be deleted.
|
||||
*
|
||||
* Note that this scrubber will clean all queries from all workspaces. It should not
|
||||
* run too often and it should only run from one workspace at a time.
|
||||
*
|
||||
* Generally, `wakeInterval` should be significantly shorter than `throttleTime`.
|
||||
*
|
||||
* @param wakeInterval How often to check to see if the job should run.
|
||||
* @param throttleTime How often to actually run the job.
|
||||
 * @param maxQueryTime The maximum age of a query before it is ready for deletion.
|
||||
* @param queryDirectory The directory containing all queries.
|
||||
* @param ctx The extension context.
|
||||
*/
|
||||
export function registerQueryHistoryScubber(
|
||||
wakeInterval: number,
|
||||
throttleTime: number,
|
||||
maxQueryTime: number,
|
||||
queryDirectory: string,
|
||||
ctx: ExtensionContext,
|
||||
|
||||
// optional counter to keep track of how many times the scrubber has run
|
||||
counter?: {
|
||||
increment: () => void;
|
||||
}
|
||||
): Disposable {
|
||||
const deregister = setInterval(async () => {
|
||||
const lastScrubTime = ctx.globalState.get<number>(LAST_SCRUB_TIME_KEY);
|
||||
const now = Date.now();
|
||||
if (lastScrubTime === undefined || now - lastScrubTime >= throttleTime) {
|
||||
let scrubCount = 0;
|
||||
try {
|
||||
counter?.increment();
|
||||
void logger.log('Scrubbing query directory. Removing old queries.');
|
||||
// do a scrub
|
||||
if (!(await fs.pathExists(queryDirectory))) {
|
||||
void logger.log(`Query directory does not exist: ${queryDirectory}`);
|
||||
return;
|
||||
}
|
||||
|
||||
const baseNames = await fs.readdir(queryDirectory);
|
||||
const errors: string[] = [];
|
||||
for (const baseName of baseNames) {
|
||||
const dir = path.join(queryDirectory, baseName);
|
||||
const timestampFile = path.join(dir, 'timestamp');
|
||||
try {
|
||||
if (!(await fs.stat(dir)).isDirectory()) {
|
||||
void logger.log(` ${dir} is not a directory. Deleting.`);
|
||||
await fs.remove(dir);
|
||||
scrubCount++;
|
||||
} else if (!(await fs.pathExists(timestampFile))) {
|
||||
void logger.log(` ${dir} has no timestamp file. Deleting.`);
|
||||
await fs.remove(dir);
|
||||
scrubCount++;
|
||||
} else if (!(await fs.stat(timestampFile)).isFile()) {
|
||||
void logger.log(` ${timestampFile} is not a file. Deleting.`);
|
||||
await fs.remove(dir);
|
||||
scrubCount++;
|
||||
} else {
|
||||
const timestampText = await fs.readFile(timestampFile, 'utf8');
|
||||
const timestamp = parseInt(timestampText, 10);
|
||||
|
||||
if (Number.isNaN(timestamp)) {
|
||||
void logger.log(` ${dir} has invalid timestamp '${timestampText}'. Deleting.`);
|
||||
await fs.remove(dir);
|
||||
scrubCount++;
|
||||
} else if (now - timestamp > maxQueryTime) {
|
||||
void logger.log(` ${dir} is older than ${maxQueryTime / 1000} seconds. Deleting.`);
|
||||
await fs.remove(dir);
|
||||
scrubCount++;
|
||||
} else {
|
||||
void logger.log(` ${dir} is not older than ${maxQueryTime / 1000} seconds. Keeping.`);
|
||||
}
|
||||
}
|
||||
} catch (err) {
|
||||
errors.push(` Could not delete '${dir}': ${err}`);
|
||||
}
|
||||
}
|
||||
|
||||
if (errors.length) {
|
||||
throw new Error('\n' + errors.join('\n'));
|
||||
}
|
||||
} catch (e) {
|
||||
void logger.log(`Error while scrubbing query directory: ${e}`);
|
||||
} finally {
|
||||
|
||||
// keep track of when we last scrubbed
|
||||
await ctx.globalState.update(LAST_SCRUB_TIME_KEY, now);
|
||||
void logger.log(`Scrubbed ${scrubCount} queries.`);
|
||||
}
|
||||
}
|
||||
}, wakeInterval);
|
||||
|
||||
return {
|
||||
dispose: () => {
|
||||
clearInterval(deregister);
|
||||
}
|
||||
};
|
||||
}
|
||||
|
|
|
@ -270,15 +270,16 @@ export class FullQueryInfo {
|
|||
constructor(
|
||||
public readonly initialInfo: InitialQueryInfo,
|
||||
config: QueryHistoryConfig,
|
||||
private source?: CancellationTokenSource // used to cancel in progress queries
|
||||
private cancellationSource?: CancellationTokenSource // used to cancel in progress queries
|
||||
) {
|
||||
this.setConfig(config);
|
||||
}
|
||||
|
||||
cancel() {
|
||||
this.source?.cancel();
|
||||
this.cancellationSource?.cancel();
|
||||
// query is no longer in progress, can delete the cancellation token source
|
||||
delete this.source;
|
||||
this.cancellationSource?.dispose();
|
||||
delete this.cancellationSource;
|
||||
}
|
||||
|
||||
get startTime() {
|
||||
|
@ -361,7 +362,10 @@ export class FullQueryInfo {
|
|||
|
||||
completeThisQuery(info: QueryWithResults) {
|
||||
this.completedQuery = new CompletedQueryInfo(info);
|
||||
delete this.source;
|
||||
|
||||
// dispose of the cancellation token source and also ensure the source is not serialized as JSON
|
||||
this.cancellationSource?.dispose();
|
||||
delete this.cancellationSource;
|
||||
}
|
||||
|
||||
/**
|
||||
|
|
|
@ -298,7 +298,7 @@ export class QueryEvaluationInfo {
|
|||
return this.csvPath;
|
||||
}
|
||||
|
||||
async cleanUp(): Promise<void> {
|
||||
async deleteQuery(): Promise<void> {
|
||||
await fs.remove(this.querySaveDir);
|
||||
}
|
||||
}
|
||||
|
@ -598,7 +598,7 @@ export async function compileAndRunQueryAgainstDatabase(
|
|||
qs: qsClient.QueryServerClient,
|
||||
dbItem: DatabaseItem,
|
||||
initialInfo: InitialQueryInfo,
|
||||
queryStorageLocation: string,
|
||||
queryStorageDir: string,
|
||||
progress: ProgressCallback,
|
||||
token: CancellationToken,
|
||||
templates?: messages.TemplateDefinitions,
|
||||
|
@ -664,7 +664,7 @@ export async function compileAndRunQueryAgainstDatabase(
|
|||
|
||||
const hasMetadataFile = (await dbItem.hasMetadataFile());
|
||||
const query = new QueryEvaluationInfo(
|
||||
path.join(queryStorageLocation, initialInfo.id),
|
||||
path.join(queryStorageDir, initialInfo.id),
|
||||
dbItem.databaseUri.fsPath,
|
||||
hasMetadataFile,
|
||||
packConfig.dbscheme,
|
||||
|
|
|
@ -8,7 +8,8 @@ import * as sinon from 'sinon';
|
|||
|
||||
import * as chaiAsPromised from 'chai-as-promised';
|
||||
import { logger } from '../../logging';
|
||||
import { QueryHistoryManager, HistoryTreeDataProvider, SortOrder, registerQueryHistoryScubber } from '../../query-history';
|
||||
import { QueryHistoryManager, HistoryTreeDataProvider, SortOrder } from '../../query-history';
|
||||
import { registerQueryHistoryScubber } from '../../query-history-scrubber';
|
||||
import { QueryEvaluationInfo, QueryWithResults, tmpDir } from '../../run-queries';
|
||||
import { QueryHistoryConfigListener } from '../../config';
|
||||
import * as messages from '../../pure/messages';
|
||||
|
|
Загрузка…
Ссылка в новой задаче