Merge from vscode 2b0b913632
(#4880)
This commit is contained in:
Родитель
9bd7e30d18
Коммит
cb5bcf2248
|
@ -28,6 +28,20 @@
|
|||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"type": "npm",
|
||||
"script": "strict-initialization-watch",
|
||||
"label": "TS - Strict Initialization",
|
||||
"isBackground": true,
|
||||
"presentation": {
|
||||
"reveal": "never"
|
||||
},
|
||||
"problemMatcher": {
|
||||
"base": "$tsc-watch",
|
||||
"owner": "typescript-strict-initialization",
|
||||
"applyTo": "allDocuments"
|
||||
}
|
||||
},
|
||||
{
|
||||
"type": "gulp",
|
||||
"task": "tslint",
|
||||
|
|
2
.yarnrc
2
.yarnrc
|
@ -1,3 +1,3 @@
|
|||
disturl "https://atom.io/download/electron"
|
||||
target "3.1.6"
|
||||
target "3.1.8"
|
||||
runtime "electron"
|
||||
|
|
|
@ -84,7 +84,8 @@ const vscodeResources = [
|
|||
'out-build/vs/workbench/browser/media/*-theme.css',
|
||||
'out-build/vs/workbench/contrib/debug/**/*.json',
|
||||
'out-build/vs/workbench/contrib/externalTerminal/**/*.scpt',
|
||||
'out-build/vs/workbench/contrib/webview/electron-browser/webview-pre.js',
|
||||
'out-build/vs/workbench/contrib/webview/browser/pre/*.js',
|
||||
'out-build/vs/workbench/contrib/webview/electron-browser/pre/*.js',
|
||||
'out-build/vs/**/markdown.css',
|
||||
'out-build/vs/workbench/contrib/tasks/**/*.json',
|
||||
'out-build/vs/workbench/contrib/welcome/walkThrough/**/*.md',
|
||||
|
|
|
@ -43,7 +43,7 @@
|
|||
"request": "^2.85.0",
|
||||
"tslint": "^5.9.1",
|
||||
"service-downloader": "github:anthonydresser/service-downloader#0.1.5",
|
||||
"typescript": "3.3.1",
|
||||
"typescript": "3.4.1",
|
||||
"vsce": "1.48.0",
|
||||
"xml2js": "^0.4.17"
|
||||
},
|
||||
|
|
|
@ -3201,10 +3201,10 @@ typed-rest-client@^0.9.0:
|
|||
tunnel "0.0.4"
|
||||
underscore "1.8.3"
|
||||
|
||||
typescript@3.3.1:
|
||||
version "3.3.1"
|
||||
resolved "https://registry.yarnpkg.com/typescript/-/typescript-3.3.1.tgz#6de14e1db4b8a006ac535e482c8ba018c55f750b"
|
||||
integrity sha512-cTmIDFW7O0IHbn1DPYjkiebHxwtCMU+eTy30ZtJNBPF9j2O1ITu5XH2YnBeVRKWHqF+3JQwWJv0Q0aUgX8W7IA==
|
||||
typescript@3.4.1:
|
||||
version "3.4.1"
|
||||
resolved "https://registry.yarnpkg.com/typescript/-/typescript-3.4.1.tgz#b6691be11a881ffa9a05765a205cb7383f3b63c6"
|
||||
integrity sha512-3NSMb2VzDQm8oBTLH6Nj55VVtUEpe/rgkIzMir0qVoLyjDZlnMBva0U6vDiV3IH+sl/Yu6oP5QwsAQtHPmDd2Q==
|
||||
|
||||
uc.micro@^1.0.1, uc.micro@^1.0.5:
|
||||
version "1.0.5"
|
||||
|
|
|
@ -73,12 +73,12 @@
|
|||
"git": {
|
||||
"name": "electron",
|
||||
"repositoryUrl": "https://github.com/electron/electron",
|
||||
"commitHash": "73158a6419a3e2da9e4d523e1131052abd28fbbb"
|
||||
"commitHash": "e84a6860e35e14b4031b88bb9b49841cdb89a305"
|
||||
}
|
||||
},
|
||||
"isOnlyProductionDependency": true,
|
||||
"license": "MIT",
|
||||
"version": "3.1.6"
|
||||
"version": "3.1.8"
|
||||
},
|
||||
{
|
||||
"component": {
|
||||
|
|
|
@ -0,0 +1,16 @@
|
|||
{
|
||||
"version": "0.2.0",
|
||||
"configurations": [
|
||||
{
|
||||
"name": "Run Server Ready Extension",
|
||||
"type": "extensionHost",
|
||||
"request": "launch",
|
||||
"args": [
|
||||
"--extensionDevelopmentPath=${workspaceFolder}",
|
||||
],
|
||||
"outFiles": [
|
||||
"${workspaceFolder}/out/**/*.js"
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
|
@ -1445,6 +1445,7 @@ export class CommandCenter {
|
|||
const quickpick = window.createQuickPick();
|
||||
quickpick.items = picks;
|
||||
quickpick.placeholder = placeHolder;
|
||||
quickpick.ignoreFocusOut = true;
|
||||
quickpick.show();
|
||||
|
||||
const choice = await new Promise<QuickPickItem | undefined>(c => quickpick.onDidAccept(() => c(quickpick.activeItems[0])));
|
||||
|
|
|
@ -5,15 +5,18 @@
|
|||
|
||||
import { Command } from '../commandManager';
|
||||
import { MarkdownPreviewManager } from '../features/previewManager';
|
||||
import { MarkdownEngine } from '../markdownEngine';
|
||||
|
||||
export class RefreshPreviewCommand implements Command {
|
||||
public readonly id = 'markdown.preview.refresh';
|
||||
|
||||
public constructor(
|
||||
private readonly webviewManager: MarkdownPreviewManager
|
||||
private readonly webviewManager: MarkdownPreviewManager,
|
||||
private readonly engine: MarkdownEngine
|
||||
) { }
|
||||
|
||||
public execute() {
|
||||
this.engine.cleanCache();
|
||||
this.webviewManager.refresh();
|
||||
}
|
||||
}
|
|
@ -78,7 +78,7 @@ function registerMarkdownCommands(
|
|||
commandManager.register(new commands.ShowPreviewToSideCommand(previewManager, telemetryReporter));
|
||||
commandManager.register(new commands.ShowLockedPreviewToSideCommand(previewManager, telemetryReporter));
|
||||
commandManager.register(new commands.ShowSourceCommand(previewManager));
|
||||
commandManager.register(new commands.RefreshPreviewCommand(previewManager));
|
||||
commandManager.register(new commands.RefreshPreviewCommand(previewManager, engine));
|
||||
commandManager.register(new commands.MoveCursorToPositionCommand());
|
||||
commandManager.register(new commands.ShowPreviewSecuritySelectorCommand(previewSecuritySelector, previewManager));
|
||||
commandManager.register(new commands.OpenDocumentLinkCommand(engine));
|
||||
|
|
|
@ -81,8 +81,10 @@ export default class LinkProvider implements vscode.DocumentLinkProvider {
|
|||
const base = document.uri.scheme === 'file' ? path.dirname(document.uri.fsPath) : '';
|
||||
const text = document.getText();
|
||||
|
||||
return this.providerInlineLinks(text, document, base)
|
||||
.concat(this.provideReferenceLinks(text, document, base));
|
||||
return [
|
||||
...this.providerInlineLinks(text, document, base),
|
||||
...this.provideReferenceLinks(text, document, base)
|
||||
];
|
||||
}
|
||||
|
||||
private providerInlineLinks(
|
||||
|
|
|
@ -7,24 +7,36 @@ import { Token } from 'markdown-it';
|
|||
import * as vscode from 'vscode';
|
||||
import { MarkdownEngine } from '../markdownEngine';
|
||||
import { TableOfContentsProvider } from '../tableOfContentsProvider';
|
||||
import { flatten } from '../util/arrays';
|
||||
|
||||
const rangeLimit = 5000;
|
||||
|
||||
const isStartRegion = (t: string) => /^\s*<!--\s*#?region\b.*-->/.test(t);
|
||||
const isEndRegion = (t: string) => /^\s*<!--\s*#?endregion\b.*-->/.test(t);
|
||||
|
||||
const isRegionMarker = (token: Token) =>
|
||||
token.type === 'html_block' && (isStartRegion(token.content) || isEndRegion(token.content));
|
||||
|
||||
export default class MarkdownFoldingProvider implements vscode.FoldingRangeProvider {
|
||||
|
||||
constructor(
|
||||
private readonly engine: MarkdownEngine
|
||||
) { }
|
||||
|
||||
public async provideFoldingRanges(
|
||||
document: vscode.TextDocument,
|
||||
_: vscode.FoldingContext,
|
||||
_token: vscode.CancellationToken
|
||||
): Promise<vscode.FoldingRange[]> {
|
||||
const foldables = await Promise.all([
|
||||
this.getRegions(document),
|
||||
this.getHeaderFoldingRanges(document),
|
||||
this.getBlockFoldingRanges(document)
|
||||
]);
|
||||
return flatten(foldables).slice(0, rangeLimit);
|
||||
}
|
||||
|
||||
private async getRegions(document: vscode.TextDocument): Promise<vscode.FoldingRange[]> {
|
||||
|
||||
const isStartRegion = (t: string) => /^\s*<!--\s*#?region\b.*-->/.test(t);
|
||||
const isEndRegion = (t: string) => /^\s*<!--\s*#?endregion\b.*-->/.test(t);
|
||||
|
||||
const isRegionMarker = (token: Token) => token.type === 'html_block' &&
|
||||
(isStartRegion(token.content) || isEndRegion(token.content));
|
||||
|
||||
|
||||
const tokens = await this.engine.parse(document);
|
||||
const regionMarkers = tokens.filter(isRegionMarker)
|
||||
.map(token => ({ line: token.map[0], isStart: isStartRegion(token.content) }));
|
||||
|
@ -44,18 +56,6 @@ export default class MarkdownFoldingProvider implements vscode.FoldingRangeProvi
|
|||
.filter((region: vscode.FoldingRange | null): region is vscode.FoldingRange => !!region);
|
||||
}
|
||||
|
||||
public async provideFoldingRanges(
|
||||
document: vscode.TextDocument,
|
||||
_: vscode.FoldingContext,
|
||||
_token: vscode.CancellationToken
|
||||
): Promise<vscode.FoldingRange[]> {
|
||||
const foldables = await Promise.all([
|
||||
this.getRegions(document),
|
||||
this.getHeaderFoldingRanges(document),
|
||||
this.getBlockFoldingRanges(document)]);
|
||||
return ([] as vscode.FoldingRange[]).concat.apply([], foldables).slice(0, rangeLimit);
|
||||
}
|
||||
|
||||
private async getHeaderFoldingRanges(document: vscode.TextDocument) {
|
||||
const tocProvider = new TableOfContentsProvider(this.engine, document);
|
||||
const toc = await tocProvider.getToc();
|
||||
|
@ -70,7 +70,7 @@ export default class MarkdownFoldingProvider implements vscode.FoldingRangeProvi
|
|||
|
||||
private async getBlockFoldingRanges(document: vscode.TextDocument): Promise<vscode.FoldingRange[]> {
|
||||
|
||||
const isFoldableToken = (token: Token) => {
|
||||
const isFoldableToken = (token: Token): boolean => {
|
||||
switch (token.type) {
|
||||
case 'fence':
|
||||
case 'list_item_open':
|
||||
|
|
|
@ -9,6 +9,7 @@ import { isMarkdownFile } from '../util/file';
|
|||
import { Lazy, lazy } from '../util/lazy';
|
||||
import MDDocumentSymbolProvider from './documentSymbolProvider';
|
||||
import { SkinnyTextDocument } from '../tableOfContentsProvider';
|
||||
import { flatten } from '../util/arrays';
|
||||
|
||||
export interface WorkspaceMarkdownDocumentProvider {
|
||||
getAllMarkdownDocuments(): Thenable<Iterable<SkinnyTextDocument>>;
|
||||
|
@ -108,7 +109,7 @@ export default class MarkdownWorkspaceSymbolProvider extends Disposable implemen
|
|||
}
|
||||
|
||||
const allSymbolsSets = await Promise.all(Array.from(this._symbolCache.values()).map(x => x.value));
|
||||
const allSymbols: vscode.SymbolInformation[] = Array.prototype.concat.apply([], allSymbolsSets);
|
||||
const allSymbols = flatten(allSymbolsSets);
|
||||
return allSymbols.filter(symbolInformation => symbolInformation.name.toLowerCase().indexOf(query.toLowerCase()) !== -1);
|
||||
}
|
||||
|
||||
|
|
|
@ -47,6 +47,11 @@ class TokenCache {
|
|||
};
|
||||
this.tokens = tokens;
|
||||
}
|
||||
|
||||
public clean(): void {
|
||||
this.cachedDocument = undefined;
|
||||
this.tokens = undefined;
|
||||
}
|
||||
}
|
||||
|
||||
export class MarkdownEngine {
|
||||
|
@ -154,6 +159,10 @@ export class MarkdownEngine {
|
|||
return this.tokenize(document, config, engine);
|
||||
}
|
||||
|
||||
public cleanCache(): void {
|
||||
this._tokenCache.clean();
|
||||
}
|
||||
|
||||
private getConfig(resource: vscode.Uri): MarkdownItConfig {
|
||||
const config = vscode.workspace.getConfiguration('markdown', resource);
|
||||
return {
|
||||
|
|
|
@ -4,4 +4,5 @@
|
|||
*--------------------------------------------------------------------------------------------*/
|
||||
|
||||
/// <reference path='../../../../src/vs/vscode.d.ts'/>
|
||||
/// <reference path='../../../../src/vs/vscode.proposed.d.ts'/>
|
||||
/// <reference types='@types/node'/>
|
||||
|
|
|
@ -16,3 +16,7 @@ export function equals<T>(one: ReadonlyArray<T>, other: ReadonlyArray<T>, itemEq
|
|||
|
||||
return true;
|
||||
}
|
||||
|
||||
export function flatten<T>(arr: ReadonlyArray<T>[]): T[] {
|
||||
return ([] as T[]).concat.apply([], arr);
|
||||
}
|
|
@ -5,7 +5,7 @@
|
|||
|
||||
import * as vscode from 'vscode';
|
||||
|
||||
const knownSchemes = ['http:', 'https:', 'file:', 'mailto:', 'data:', 'vscode-resource:'];
|
||||
const knownSchemes = ['http:', 'https:', 'file:', 'mailto:', 'data:', `${vscode.env.uriScheme}:`, 'vscode:', 'vscode-insiders:', 'vscode-resource:'];
|
||||
|
||||
export function getUriForLinkWithKnownExternalScheme(
|
||||
link: string,
|
||||
|
@ -15,4 +15,4 @@ export function getUriForLinkWithKnownExternalScheme(
|
|||
}
|
||||
|
||||
return undefined;
|
||||
}
|
||||
}
|
||||
|
|
|
@ -24,7 +24,8 @@
|
|||
"monaco-compile-check": "tsc -p src/tsconfig.monaco.json --noEmit",
|
||||
"tslint": "node node_modules/tslint/bin/tslint -c tslint-gci.json -p src/tsconfig.json",
|
||||
"strict-null-check": "tsc -p src/tsconfig.strictNullChecks.json",
|
||||
"strict-null-check-watch": "tsc -p src/tsconfig.strictNullChecks.json --watch"
|
||||
"strict-null-check-watch": "tsc -p src/tsconfig.strictNullChecks.json --watch",
|
||||
"strict-initialization-watch": "tsc --watch -p src/tsconfig.json --noEmit --strictPropertyInitialization"
|
||||
},
|
||||
"dependencies": {
|
||||
"@angular/animations": "~4.1.3",
|
||||
|
@ -155,7 +156,7 @@
|
|||
"tslint": "^5.11.0",
|
||||
"tslint-microsoft-contrib": "^6.0.0",
|
||||
"typemoq": "^0.3.2",
|
||||
"typescript": "3.3.1",
|
||||
"typescript": "3.4.1",
|
||||
"typescript-formatter": "7.1.0",
|
||||
"typescript-tslint-plugin": "^0.0.7",
|
||||
"uglify-es": "^3.0.18",
|
||||
|
|
|
@ -233,7 +233,7 @@ export class Dropdown extends Disposable {
|
|||
this._layoutTree();
|
||||
return { dispose: () => { } };
|
||||
},
|
||||
onDOMEvent: e => {
|
||||
onDOMEvent: (e: any) => {
|
||||
if (!DOM.isAncestor(e.srcElement, this.$el.getHTMLElement()) && !DOM.isAncestor(e.srcElement, this.$treeContainer.getHTMLElement())) {
|
||||
this._input.validate();
|
||||
this._onBlur.fire();
|
||||
|
|
|
@ -23,8 +23,9 @@ import { ComponentBase } from 'sql/parts/modelComponents/componentBase';
|
|||
import { IComponent, IComponentDescriptor, IModelStore, ComponentEventType } from 'sql/parts/modelComponents/interfaces';
|
||||
import { IInstantiationService } from 'vs/platform/instantiation/common/instantiation';
|
||||
import { IContextKeyService, IContextKey } from 'vs/platform/contextkey/common/contextkey';
|
||||
import { WebviewElement, WebviewOptions, WebviewContentOptions } from 'vs/workbench/contrib/webview/electron-browser/webviewElement';
|
||||
import { WebviewElement } from 'vs/workbench/contrib/webview/electron-browser/webviewElement';
|
||||
import { IWorkbenchLayoutService, Parts } from 'vs/workbench/services/layout/browser/layoutService';
|
||||
import { WebviewContentOptions } from 'vs/workbench/contrib/webview/common/webview';
|
||||
|
||||
function reviveWebviewOptions(options: vscode.WebviewOptions): vscode.WebviewOptions {
|
||||
return {
|
||||
|
|
|
@ -164,7 +164,7 @@ export class NotebookComponent extends AngularDisposable implements OnInit, OnDe
|
|||
|
||||
//Saves scrollTop value on scroll change
|
||||
public scrollHandler(event: Event){
|
||||
this._scrollTop = event.srcElement.scrollTop;
|
||||
this._scrollTop = (<any>event.srcElement).scrollTop;
|
||||
}
|
||||
|
||||
public unselectActiveCell() {
|
||||
|
|
|
@ -14,6 +14,34 @@ import { TreeNode } from 'sql/parts/objectExplorer/common/treeNode';
|
|||
import errors = require('vs/base/common/errors');
|
||||
import { IConnectionProfile } from 'sql/platform/connection/common/interfaces';
|
||||
|
||||
|
||||
export interface IExpandableTree extends ITree {
|
||||
// {{SQL CARBON EDIT }} - add back deleted VS Code tree methods
|
||||
/**
|
||||
* Returns a list of the currently expanded elements.
|
||||
*/
|
||||
getExpandedElements(): any[];
|
||||
|
||||
/**
|
||||
* Returns a number between 0 and 1 representing how much the tree is scroll down. 0 means all the way
|
||||
* to the top; 1 means all the way down.
|
||||
*/
|
||||
getScrollPosition(): number;
|
||||
|
||||
/**
|
||||
* Sets the scroll position with a number between 0 and 1 representing how much the tree is scroll down. 0 means all the way
|
||||
* to the top; 1 means all the way down.
|
||||
*/
|
||||
setScrollPosition(pos: number): void;
|
||||
|
||||
/**
|
||||
* Returns the total height of the tree's content.
|
||||
*/
|
||||
getContentHeight(): number;
|
||||
// {{SQL CARBON EDIT }} - end block
|
||||
}
|
||||
|
||||
|
||||
export class TreeUpdateUtils {
|
||||
|
||||
public static isInDragAndDrop: boolean = false;
|
||||
|
@ -22,6 +50,9 @@ export class TreeUpdateUtils {
|
|||
* Set input for the tree.
|
||||
*/
|
||||
public static structuralTreeUpdate(tree: ITree, viewKey: string, connectionManagementService: IConnectionManagementService, providers?: string[]): Promise<void> {
|
||||
// convert to old VS Code tree interface with expandable methods
|
||||
let expandableTree: IExpandableTree = <IExpandableTree>tree;
|
||||
|
||||
let selectedElement: any;
|
||||
let targetsToExpand: any[];
|
||||
if (tree) {
|
||||
|
@ -29,7 +60,7 @@ export class TreeUpdateUtils {
|
|||
if (selection && selection.length === 1) {
|
||||
selectedElement = <any>selection[0];
|
||||
}
|
||||
targetsToExpand = tree.getExpandedElements();
|
||||
targetsToExpand = expandableTree.getExpandedElements();
|
||||
}
|
||||
let groups;
|
||||
let treeInput = new ConnectionProfileGroup('root', null, undefined, undefined, undefined);
|
||||
|
@ -59,6 +90,9 @@ export class TreeUpdateUtils {
|
|||
* Set input for the registered servers tree.
|
||||
*/
|
||||
public static registeredServerUpdate(tree: ITree, connectionManagementService: IConnectionManagementService, elementToSelect?: any): Promise<void> {
|
||||
// convert to old VS Code tree interface with expandable methods
|
||||
let expandableTree: IExpandableTree = <IExpandableTree>tree;
|
||||
|
||||
let selectedElement: any = elementToSelect;
|
||||
let targetsToExpand: any[];
|
||||
|
||||
|
@ -72,7 +106,7 @@ export class TreeUpdateUtils {
|
|||
selectedElement = <any>selection[0];
|
||||
}
|
||||
}
|
||||
targetsToExpand = tree.getExpandedElements();
|
||||
targetsToExpand = expandableTree.getExpandedElements();
|
||||
if (selectedElement && targetsToExpand.indexOf(selectedElement) === -1) {
|
||||
targetsToExpand.push(selectedElement);
|
||||
}
|
||||
|
|
|
@ -30,6 +30,7 @@ import { IDisposable, dispose } from 'vs/base/common/lifecycle';
|
|||
import { IEditorService } from 'vs/workbench/services/editor/common/editorService';
|
||||
import { ScrollbarVisibility } from 'vs/base/common/scrollable';
|
||||
import { IClipboardService } from 'vs/platform/clipboard/common/clipboardService';
|
||||
import { IExpandableTree } from 'sql/parts/objectExplorer/viewlet/treeUpdateUtils';
|
||||
|
||||
export interface IResultMessageIntern extends IResultMessage {
|
||||
id?: string;
|
||||
|
@ -109,8 +110,11 @@ export class MessagePanel extends ViewletPanel {
|
|||
}, { keyboardSupport: false, horizontalScrollMode: ScrollbarVisibility.Auto });
|
||||
this.disposables.push(this.tree);
|
||||
this.tree.onDidScroll(e => {
|
||||
// convert to old VS Code tree interface with expandable methods
|
||||
let expandableTree: IExpandableTree = <IExpandableTree>this.tree;
|
||||
|
||||
if (this.state) {
|
||||
this.state.scrollPosition = this.tree.getScrollPosition();
|
||||
this.state.scrollPosition = expandableTree.getScrollPosition();
|
||||
}
|
||||
});
|
||||
this.onDidChange(e => {
|
||||
|
@ -178,13 +182,16 @@ export class MessagePanel extends ViewletPanel {
|
|||
}
|
||||
|
||||
protected layoutBody(size: number): void {
|
||||
const previousScrollPosition = this.tree.getScrollPosition();
|
||||
// convert to old VS Code tree interface with expandable methods
|
||||
let expandableTree: IExpandableTree = <IExpandableTree>this.tree;
|
||||
|
||||
const previousScrollPosition = expandableTree.getScrollPosition();
|
||||
this.tree.layout(size);
|
||||
if (this.state && this.state.scrollPosition) {
|
||||
this.tree.setScrollPosition(this.state.scrollPosition);
|
||||
expandableTree.setScrollPosition(this.state.scrollPosition);
|
||||
} else {
|
||||
if (previousScrollPosition === 1) {
|
||||
this.tree.setScrollPosition(1);
|
||||
expandableTree.setScrollPosition(1);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -214,17 +221,19 @@ export class MessagePanel extends ViewletPanel {
|
|||
if (hasError) {
|
||||
this.setExpanded(true);
|
||||
}
|
||||
// convert to old VS Code tree interface with expandable methods
|
||||
let expandableTree: IExpandableTree = <IExpandableTree>this.tree;
|
||||
if (this.state.scrollPosition) {
|
||||
this.tree.refresh(this.model).then(() => {
|
||||
// Restore the previous scroll position when switching between tabs
|
||||
this.tree.setScrollPosition(this.state.scrollPosition);
|
||||
expandableTree.setScrollPosition(this.state.scrollPosition);
|
||||
});
|
||||
} else {
|
||||
const previousScrollPosition = this.tree.getScrollPosition();
|
||||
const previousScrollPosition = expandableTree.getScrollPosition();
|
||||
this.tree.refresh(this.model).then(() => {
|
||||
// Scroll to the end if the user was already at the end otherwise leave the current scroll position
|
||||
if (previousScrollPosition === 1) {
|
||||
this.tree.setScrollPosition(1);
|
||||
expandableTree.setScrollPosition(1);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
@ -244,8 +253,10 @@ export class MessagePanel extends ViewletPanel {
|
|||
|
||||
public set state(val: MessagePanelState) {
|
||||
this._state = val;
|
||||
// convert to old VS Code tree interface with expandable methods
|
||||
let expandableTree: IExpandableTree = <IExpandableTree>this.tree;
|
||||
if (this.state.scrollPosition) {
|
||||
this.tree.setScrollPosition(this.state.scrollPosition);
|
||||
expandableTree.setScrollPosition(this.state.scrollPosition);
|
||||
}
|
||||
this.setExpanded(!this.state.collapsed);
|
||||
}
|
||||
|
|
|
@ -14,6 +14,7 @@ import { ITree } from 'vs/base/parts/tree/browser/tree';
|
|||
import { IDisposable, dispose } from 'vs/base/common/lifecycle';
|
||||
import { DefaultFilter, DefaultDragAndDrop, DefaultAccessibilityProvider } from 'vs/base/parts/tree/browser/treeDefaults';
|
||||
import { localize } from 'vs/nls';
|
||||
import { hide, $, append } from 'vs/base/browser/dom';
|
||||
|
||||
import { TaskHistoryRenderer } from 'sql/parts/taskHistory/viewlet/taskHistoryRenderer';
|
||||
import { TaskHistoryDataSource } from 'sql/parts/taskHistory/viewlet/taskHistoryDataSource';
|
||||
|
@ -22,7 +23,7 @@ import { TaskHistoryActionProvider } from 'sql/parts/taskHistory/viewlet/taskHis
|
|||
import { ITaskService } from 'sql/platform/taskHistory/common/taskService';
|
||||
import { TaskNode, TaskStatus } from 'sql/parts/taskHistory/common/taskNode';
|
||||
import { IErrorMessageService } from 'sql/platform/errorMessage/common/errorMessageService';
|
||||
import { hide, $, append } from 'vs/base/browser/dom';
|
||||
import { IExpandableTree } from 'sql/parts/objectExplorer/viewlet/treeUpdateUtils';
|
||||
|
||||
/**
|
||||
* TaskHistoryView implements the dynamic tree view.
|
||||
|
@ -112,7 +113,9 @@ export class TaskHistoryView {
|
|||
if (selection && selection.length === 1) {
|
||||
selectedElement = <any>selection[0];
|
||||
}
|
||||
targetsToExpand = this._tree.getExpandedElements();
|
||||
// convert to old VS Code tree interface with expandable methods
|
||||
let expandableTree: IExpandableTree = <IExpandableTree>this._tree;
|
||||
targetsToExpand = expandableTree.getExpandedElements();
|
||||
}
|
||||
|
||||
//Get the tree Input
|
||||
|
|
|
@ -12,7 +12,7 @@ import { Modal } from 'sql/workbench/browser/modal/modal';
|
|||
import { IConnectionManagementService, INewConnectionParams } from 'sql/platform/connection/common/connectionManagement';
|
||||
import * as DialogHelper from 'sql/workbench/browser/modal/dialogHelper';
|
||||
import { TreeCreationUtils } from 'sql/parts/objectExplorer/viewlet/treeCreationUtils';
|
||||
import { TreeUpdateUtils } from 'sql/parts/objectExplorer/viewlet/treeUpdateUtils';
|
||||
import { TreeUpdateUtils, IExpandableTree } from 'sql/parts/objectExplorer/viewlet/treeUpdateUtils';
|
||||
import { ConnectionProfile } from 'sql/platform/connection/common/connectionProfile';
|
||||
import { TabbedPanel, PanelTabIdentifier } from 'sql/base/browser/ui/panel/panel';
|
||||
import { RecentConnectionTreeController, RecentConnectionActionsProvider } from 'sql/parts/connection/connectionDialog/recentConnectionTreeController';
|
||||
|
@ -178,11 +178,14 @@ export class ConnectionDialogWidget extends Modal {
|
|||
});
|
||||
|
||||
this._panel.onTabChange(async c => {
|
||||
if (c === savedConnectionTabId && this._savedConnectionTree.getContentHeight() === 0) {
|
||||
// convert to old VS Code tree interface with expandable methods
|
||||
let expandableTree: IExpandableTree = <IExpandableTree>this._savedConnectionTree;
|
||||
|
||||
if (c === savedConnectionTabId && expandableTree.getContentHeight() === 0) {
|
||||
// Update saved connection tree
|
||||
await TreeUpdateUtils.structuralTreeUpdate(this._savedConnectionTree, 'saved', this._connectionManagementService, this._providers);
|
||||
|
||||
if (this._savedConnectionTree.getContentHeight() > 0) {
|
||||
if (expandableTree.getContentHeight() > 0) {
|
||||
this._noSavedConnectionBuilder.hide();
|
||||
this._savedConnectionBuilder.show();
|
||||
} else {
|
||||
|
|
|
@ -18,6 +18,7 @@ import { IInstantiationService } from 'vs/platform/instantiation/common/instanti
|
|||
import { attachListStyler } from 'vs/platform/theme/common/styler';
|
||||
import { IThemeService } from 'vs/platform/theme/common/themeService';
|
||||
import { ITree } from 'vs/base/parts/tree/browser/tree';
|
||||
import { IExpandableTree } from 'sql/parts/objectExplorer/viewlet/treeUpdateUtils';
|
||||
|
||||
/**
|
||||
* Implements tree view for file browser
|
||||
|
@ -96,7 +97,9 @@ export class FileBrowserTreeView implements IDisposable {
|
|||
if (selection && selection.length === 1) {
|
||||
selectedElement = <any>selection[0];
|
||||
}
|
||||
targetsToExpand = this._tree.getExpandedElements();
|
||||
// convert to old VS Code tree interface with expandable methods
|
||||
let expandableTree: IExpandableTree = <IExpandableTree>this._tree;
|
||||
targetsToExpand = expandableTree.getExpandedElements();
|
||||
}
|
||||
|
||||
if (rootNode) {
|
||||
|
|
|
@ -171,8 +171,8 @@ suite('Insights Utils tests', function () {
|
|||
|
||||
});
|
||||
|
||||
suiteTeardown(done => {
|
||||
suiteTeardown(() => {
|
||||
// Clean up our test files
|
||||
pfs.del(testRootPath).then(done());
|
||||
return pfs.rimraf(testRootPath);
|
||||
});
|
||||
});
|
||||
|
|
|
@ -36,4 +36,7 @@ export class ContextKeyServiceStub implements IContextKeyService {
|
|||
return undefined;
|
||||
}
|
||||
|
||||
bufferChangeEvents(callback: Function): void {
|
||||
}
|
||||
|
||||
}
|
|
@ -1,4 +1,4 @@
|
|||
// Type definitions for Electron 3.1.6
|
||||
// Type definitions for Electron 3.1.8
|
||||
// Project: http://electronjs.org/
|
||||
// Definitions by: The Electron Team <https://github.com/electron/electron>
|
||||
// Definitions: https://github.com/electron/electron-typescript-definitions
|
||||
|
@ -86,7 +86,7 @@ declare namespace Electron {
|
|||
webviewTag: WebviewTag;
|
||||
}
|
||||
|
||||
interface AllElectron extends MainInterface, RendererInterface { }
|
||||
interface AllElectron extends MainInterface, RendererInterface {}
|
||||
|
||||
const app: App;
|
||||
const autoUpdater: AutoUpdater;
|
||||
|
|
|
@ -40,9 +40,10 @@ declare module 'yauzl' {
|
|||
}
|
||||
|
||||
export interface IOptions {
|
||||
autoClose: boolean;
|
||||
autoClose?: boolean;
|
||||
lazyEntries?: boolean;
|
||||
}
|
||||
|
||||
export function open(path: string, callback: (err?: Error, zipfile?: ZipFile) => void): void;
|
||||
export function open(path: string, options: IOptions, callback: (err?: Error, zipfile?: ZipFile) => void): void;
|
||||
export function open(path: string, options: IOptions | undefined, callback: (err?: Error, zipfile?: ZipFile) => void): void;
|
||||
}
|
|
@ -29,7 +29,7 @@ export class DelayedDragHandler extends Disposable {
|
|||
}));
|
||||
|
||||
['dragleave', 'drop', 'dragend'].forEach(type => {
|
||||
this._register(addDisposableListener(container, type as 'dragleave' | 'drop' | 'dragend', () => {
|
||||
this._register(addDisposableListener(container, type, () => {
|
||||
this.clearDragTimeout();
|
||||
}));
|
||||
});
|
||||
|
|
|
@ -0,0 +1,15 @@
|
|||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the MIT License. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
|
||||
export function createSHA1(content: string): Thenable<string> {
|
||||
if (typeof require !== 'undefined') {
|
||||
const _crypto: typeof crypto = require.__$__nodeRequire('crypto');
|
||||
return Promise.resolve(_crypto['createHash']('sha1').update(content).digest('hex'));
|
||||
}
|
||||
return crypto.subtle.digest('SHA-1', new TextEncoder().encode(content)).then(buffer => {
|
||||
// https://developer.mozilla.org/en-US/docs/Web/API/SubtleCrypto/digest#Converting_a_digest_to_a_hex_string
|
||||
return Array.prototype.map.call(new Uint8Array(buffer), (value: number) => `00${value.toString(16)}`.slice(-2)).join('');
|
||||
});
|
||||
}
|
|
@ -678,7 +678,7 @@ export class ActionBar extends Disposable implements IActionRunner {
|
|||
|
||||
focus(index?: number): void;
|
||||
focus(selectFirst?: boolean): void;
|
||||
focus(arg?: any): void {
|
||||
focus(arg?: number | boolean): void {
|
||||
let selectFirst: boolean = false;
|
||||
let index: number | undefined = undefined;
|
||||
if (arg === undefined) {
|
||||
|
|
|
@ -334,7 +334,7 @@ export class BreadcrumbsWidget {
|
|||
|
||||
private _onClick(event: IMouseEvent): void {
|
||||
for (let el: HTMLElement | null = event.target; el; el = el.parentElement) {
|
||||
let idx = this._nodes.indexOf(el as any);
|
||||
let idx = this._nodes.indexOf(el as HTMLDivElement);
|
||||
if (idx >= 0) {
|
||||
this._focus(idx, event);
|
||||
this._select(idx, event);
|
||||
|
|
|
@ -40,7 +40,7 @@ export class Button extends Disposable {
|
|||
private buttonForeground: Color | undefined;
|
||||
private buttonBorder: Color | undefined;
|
||||
|
||||
private _onDidClick = this._register(new Emitter<any>());
|
||||
private _onDidClick = this._register(new Emitter<Event>());
|
||||
get onDidClick(): BaseEvent<Event> { return this._onDidClick.event; }
|
||||
|
||||
private focusTracker: DOM.IFocusTracker;
|
||||
|
|
|
@ -120,7 +120,7 @@ export class ContextView extends Disposable {
|
|||
|
||||
setContainer(container: HTMLElement | null): void {
|
||||
if (this.container) {
|
||||
this.toDisposeOnSetContainer = dispose(this.toDisposeOnSetContainer);
|
||||
dispose(this.toDisposeOnSetContainer);
|
||||
this.container.removeChild(this.view);
|
||||
this.container = null;
|
||||
}
|
||||
|
|
|
@ -19,7 +19,9 @@
|
|||
.monaco-workbench .dialog-box {
|
||||
display: flex;
|
||||
flex-direction: column-reverse;
|
||||
width: min-content;
|
||||
min-width: 500px;
|
||||
max-width: 90%;
|
||||
min-height: 75px;
|
||||
padding: 5px;
|
||||
}
|
||||
|
@ -134,12 +136,13 @@
|
|||
overflow: hidden; /* buttons row should never overflow */
|
||||
}
|
||||
|
||||
.monaco-workbench .monaco-workbench .dialog-box > .dialog-buttons-row {
|
||||
.monaco-workbench .dialog-box > .dialog-buttons-row {
|
||||
display: flex;
|
||||
white-space: nowrap;
|
||||
}
|
||||
|
||||
/** Dialog: Buttons */
|
||||
.monaco-workbench .monaco-workbench .dialog-box > .dialog-buttons-row > .dialog-buttons {
|
||||
.monaco-workbench .dialog-box > .dialog-buttons-row > .dialog-buttons {
|
||||
display: flex;
|
||||
overflow: hidden;
|
||||
}
|
||||
|
|
|
@ -272,12 +272,12 @@ export class DropdownMenuActionItem extends BaseActionItem {
|
|||
private contextMenuProvider: IContextMenuProvider;
|
||||
private actionItemProvider?: IActionItemProvider;
|
||||
private keybindings?: (action: IAction) => ResolvedKeybinding | undefined;
|
||||
private clazz: string;
|
||||
private clazz: string | undefined;
|
||||
private anchorAlignmentProvider: (() => AnchorAlignment) | undefined;
|
||||
|
||||
constructor(action: IAction, menuActions: IAction[], contextMenuProvider: IContextMenuProvider, actionItemProvider: IActionItemProvider | undefined, actionRunner: IActionRunner, keybindings: ((action: IAction) => ResolvedKeybinding | undefined) | undefined, clazz: string, anchorAlignmentProvider?: () => AnchorAlignment);
|
||||
constructor(action: IAction, actionProvider: IActionProvider, contextMenuProvider: IContextMenuProvider, actionItemProvider: IActionItemProvider | undefined, actionRunner: IActionRunner, keybindings: ((action: IAction) => ResolvedKeybinding) | undefined, clazz: string, anchorAlignmentProvider?: () => AnchorAlignment);
|
||||
constructor(action: IAction, menuActionsOrProvider: any, contextMenuProvider: IContextMenuProvider, actionItemProvider: IActionItemProvider | undefined, actionRunner: IActionRunner, keybindings: ((action: IAction) => ResolvedKeybinding | undefined) | undefined, clazz: string, anchorAlignmentProvider?: () => AnchorAlignment) {
|
||||
constructor(action: IAction, menuActions: IAction[], contextMenuProvider: IContextMenuProvider, actionItemProvider: IActionItemProvider | undefined, actionRunner: IActionRunner, keybindings: ((action: IAction) => ResolvedKeybinding | undefined) | undefined, clazz: string | undefined, anchorAlignmentProvider?: () => AnchorAlignment);
|
||||
constructor(action: IAction, actionProvider: IActionProvider, contextMenuProvider: IContextMenuProvider, actionItemProvider: IActionItemProvider | undefined, actionRunner: IActionRunner, keybindings: ((action: IAction) => ResolvedKeybinding) | undefined, clazz: string | undefined, anchorAlignmentProvider?: () => AnchorAlignment);
|
||||
constructor(action: IAction, menuActionsOrProvider: any, contextMenuProvider: IContextMenuProvider, actionItemProvider: IActionItemProvider | undefined, actionRunner: IActionRunner, keybindings: ((action: IAction) => ResolvedKeybinding | undefined) | undefined, clazz: string | undefined, anchorAlignmentProvider?: () => AnchorAlignment) {
|
||||
super(null, action);
|
||||
|
||||
this.menuActionsOrProvider = menuActionsOrProvider;
|
||||
|
@ -292,7 +292,9 @@ export class DropdownMenuActionItem extends BaseActionItem {
|
|||
render(container: HTMLElement): void {
|
||||
const labelRenderer: ILabelRenderer = (el: HTMLElement): IDisposable | null => {
|
||||
this.element = append(el, $('a.action-label.icon'));
|
||||
addClasses(this.element, this.clazz);
|
||||
if (this.clazz) {
|
||||
addClasses(this.element, this.clazz);
|
||||
}
|
||||
|
||||
this.element.tabIndex = 0;
|
||||
this.element.setAttribute('role', 'button');
|
||||
|
|
|
@ -457,7 +457,7 @@ export class SerializableGrid<T extends ISerializableView> extends Grid<T> {
|
|||
return { children, box };
|
||||
|
||||
} else if (json.type === 'leaf') {
|
||||
const view = deserializer.fromJSON(json.data) as T;
|
||||
const view: T = deserializer.fromJSON(json.data);
|
||||
return { view, box };
|
||||
}
|
||||
|
||||
|
@ -481,9 +481,9 @@ export class SerializableGrid<T extends ISerializableView> extends Grid<T> {
|
|||
throw new Error('Invalid JSON: \'height\' property must be a number.');
|
||||
}
|
||||
|
||||
const orientation = json.orientation as Orientation;
|
||||
const width = json.width as number;
|
||||
const height = json.height as number;
|
||||
const orientation = json.orientation;
|
||||
const width = json.width;
|
||||
const height = json.height;
|
||||
const box: Box = { top: 0, left: 0, width, height };
|
||||
|
||||
const root = SerializableGrid.deserializeNode(json.root, orientation, box, deserializer) as GridBranchNode<T>;
|
||||
|
|
|
@ -3,7 +3,6 @@
|
|||
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
|
||||
import * as dom from 'vs/base/browser/dom';
|
||||
import * as objects from 'vs/base/common/objects';
|
||||
import { renderOcticons } from 'vs/base/browser/ui/octiconLabel/octiconLabel';
|
||||
import { escape } from 'vs/base/common/strings';
|
||||
|
@ -54,10 +53,9 @@ export class HighlightedLabel {
|
|||
this.render();
|
||||
}
|
||||
|
||||
private render() {
|
||||
dom.clearNode(this.domNode);
|
||||
private render(): void {
|
||||
|
||||
let htmlContent: string[] = [];
|
||||
let htmlContent = '';
|
||||
let pos = 0;
|
||||
|
||||
for (const highlight of this.highlights) {
|
||||
|
@ -65,27 +63,27 @@ export class HighlightedLabel {
|
|||
continue;
|
||||
}
|
||||
if (pos < highlight.start) {
|
||||
htmlContent.push('<span>');
|
||||
htmlContent += '<span>';
|
||||
const substring = this.text.substring(pos, highlight.start);
|
||||
htmlContent.push(this.supportOcticons ? renderOcticons(substring) : escape(substring));
|
||||
htmlContent.push('</span>');
|
||||
htmlContent += this.supportOcticons ? renderOcticons(substring) : escape(substring);
|
||||
htmlContent += '</span>';
|
||||
pos = highlight.end;
|
||||
}
|
||||
htmlContent.push('<span class="highlight">');
|
||||
htmlContent += '<span class="highlight">';
|
||||
const substring = this.text.substring(highlight.start, highlight.end);
|
||||
htmlContent.push(this.supportOcticons ? renderOcticons(substring) : escape(substring));
|
||||
htmlContent.push('</span>');
|
||||
htmlContent += this.supportOcticons ? renderOcticons(substring) : escape(substring);
|
||||
htmlContent += '</span>';
|
||||
pos = highlight.end;
|
||||
}
|
||||
|
||||
if (pos < this.text.length) {
|
||||
htmlContent.push('<span>');
|
||||
htmlContent += '<span>';
|
||||
const substring = this.text.substring(pos);
|
||||
htmlContent.push(this.supportOcticons ? renderOcticons(substring) : escape(substring));
|
||||
htmlContent.push('</span>');
|
||||
htmlContent += this.supportOcticons ? renderOcticons(substring) : escape(substring);
|
||||
htmlContent += '</span>';
|
||||
}
|
||||
|
||||
this.domNode.innerHTML = htmlContent.join('');
|
||||
this.domNode.innerHTML = htmlContent;
|
||||
this.domNode.title = this.title;
|
||||
this.didEverRender = true;
|
||||
}
|
||||
|
|
|
@ -33,7 +33,7 @@ export interface KeybindingLabelOptions {
|
|||
export class KeybindingLabel {
|
||||
|
||||
private domNode: HTMLElement;
|
||||
private keybinding: ResolvedKeybinding | null | undefined;
|
||||
private keybinding: ResolvedKeybinding | undefined;
|
||||
private matches: Matches | undefined;
|
||||
private didEverRender: boolean;
|
||||
|
||||
|
@ -47,7 +47,7 @@ export class KeybindingLabel {
|
|||
return this.domNode;
|
||||
}
|
||||
|
||||
set(keybinding: ResolvedKeybinding | null | undefined, matches?: Matches) {
|
||||
set(keybinding: ResolvedKeybinding | undefined, matches?: Matches) {
|
||||
if (this.didEverRender && this.keybinding === keybinding && KeybindingLabel.areSame(this.matches, matches)) {
|
||||
return;
|
||||
}
|
||||
|
|
|
@ -84,7 +84,7 @@ export function consolidate(groups: IRangedGroup[]): IRangedGroup[] {
|
|||
* collection.
|
||||
*/
|
||||
function concat(...groups: IRangedGroup[][]): IRangedGroup[] {
|
||||
return consolidate(groups.reduce((r, g) => r.concat(g), [] as IRangedGroup[]));
|
||||
return consolidate(groups.reduce((r, g) => r.concat(g), []));
|
||||
}
|
||||
|
||||
export class RangeMap {
|
||||
|
|
|
@ -10,6 +10,7 @@ import { Color } from 'vs/base/common/color';
|
|||
import { mixin } from 'vs/base/common/objects';
|
||||
import { removeClasses, addClass, hasClass, addClasses, removeClass, hide, show } from 'vs/base/browser/dom';
|
||||
import { RunOnceScheduler } from 'vs/base/common/async';
|
||||
import { isNumber } from 'vs/base/common/types';
|
||||
|
||||
const css_done = 'done';
|
||||
const css_active = 'active';
|
||||
|
@ -146,7 +147,7 @@ export class ProgressBar extends Disposable {
|
|||
* Finds out if this progress bar is configured with total work
|
||||
*/
|
||||
hasTotal(): boolean {
|
||||
return !isNaN(this.totalWork as number);
|
||||
return isNumber(this.totalWork);
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -172,10 +173,11 @@ export class ProgressBar extends Disposable {
|
|||
}
|
||||
|
||||
private doSetWorked(value: number): ProgressBar {
|
||||
assert.ok(!isNaN(this.totalWork as number), 'Total work not set');
|
||||
assert.ok(isNumber(this.totalWork), 'Total work not set');
|
||||
const totalWork = this.totalWork!;
|
||||
|
||||
this.workedVal = value;
|
||||
this.workedVal = Math.min(this.totalWork as number, this.workedVal);
|
||||
this.workedVal = Math.min(totalWork, this.workedVal);
|
||||
|
||||
if (hasClass(this.element, css_infinite)) {
|
||||
removeClass(this.element, css_infinite);
|
||||
|
@ -193,7 +195,7 @@ export class ProgressBar extends Disposable {
|
|||
addClass(this.element, css_discrete);
|
||||
}
|
||||
|
||||
this.bit.style.width = 100 * (this.workedVal / (this.totalWork as number)) + '%';
|
||||
this.bit.style.width = 100 * (this.workedVal / (totalWork)) + '%';
|
||||
|
||||
return this;
|
||||
}
|
||||
|
|
|
@ -818,18 +818,32 @@ class Trait<T> {
|
|||
return;
|
||||
}
|
||||
|
||||
const identityProvider = this.identityProvider;
|
||||
const nodesByIdentity = new Map<string, ITreeNode<T, any>>();
|
||||
this.nodes.forEach(node => nodesByIdentity.set(identityProvider.getId(node.element).toString(), node));
|
||||
const deletedNodesIdSet = new Set<string>();
|
||||
const deletedNodesVisitor = (node: ITreeNode<T, any>) => deletedNodesIdSet.add(this.identityProvider!.getId(node.element).toString());
|
||||
deletedNodes.forEach(node => dfs(node, deletedNodesVisitor));
|
||||
|
||||
const toDeleteByIdentity = new Map<string, ITreeNode<T, any>>();
|
||||
const toRemoveSetter = (node: ITreeNode<T, any>) => toDeleteByIdentity.set(identityProvider.getId(node.element).toString(), node);
|
||||
const toRemoveDeleter = (node: { element: T; }) => toDeleteByIdentity.delete(identityProvider.getId(node.element).toString());
|
||||
deletedNodes.forEach(node => dfs(node, toRemoveSetter));
|
||||
insertedNodes.forEach(node => dfs(node, toRemoveDeleter));
|
||||
const insertedNodesMap = new Map<string, ITreeNode<T, any>>();
|
||||
const insertedNodesVisitor = (node: ITreeNode<T, any>) => insertedNodesMap.set(this.identityProvider!.getId(node.element).toString(), node);
|
||||
insertedNodes.forEach(node => dfs(node, insertedNodesVisitor));
|
||||
|
||||
toDeleteByIdentity.forEach((_, id) => nodesByIdentity.delete(id));
|
||||
this.set(values(nodesByIdentity));
|
||||
const nodes: ITreeNode<T, any>[] = [];
|
||||
|
||||
for (const node of this.nodes) {
|
||||
const id = this.identityProvider.getId(node.element).toString();
|
||||
const wasDeleted = deletedNodesIdSet.has(id);
|
||||
|
||||
if (!wasDeleted) {
|
||||
nodes.push(node);
|
||||
} else {
|
||||
const insertedNode = insertedNodesMap.get(id);
|
||||
|
||||
if (insertedNode) {
|
||||
nodes.push(insertedNode);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
this.set(nodes);
|
||||
}
|
||||
|
||||
private createNodeSet(): Set<ITreeNode<T, any>> {
|
||||
|
|
|
@ -659,19 +659,6 @@ export class RunOnceWorker<T> extends RunOnceScheduler {
|
|||
}
|
||||
}
|
||||
|
||||
export function nfcall(fn: Function, ...args: any[]): Promise<any>;
|
||||
export function nfcall<T>(fn: Function, ...args: any[]): Promise<T>;
|
||||
export function nfcall(fn: Function, ...args: any[]): any {
|
||||
return new Promise((c, e) => fn(...args, (err: any, result: any) => err ? e(err) : c(result)));
|
||||
}
|
||||
|
||||
export function ninvoke(thisArg: any, fn: Function, ...args: any[]): Promise<any>;
|
||||
export function ninvoke<T>(thisArg: any, fn: Function, ...args: any[]): Promise<T>;
|
||||
export function ninvoke(thisArg: any, fn: Function, ...args: any[]): any {
|
||||
return new Promise((resolve, reject) => fn.call(thisArg, ...args, (err: any, result: any) => err ? reject(err) : resolve(result)));
|
||||
}
|
||||
|
||||
|
||||
//#region -- run on idle tricks ------------
|
||||
|
||||
export interface IdleDeadline {
|
||||
|
|
|
@ -6,6 +6,9 @@
|
|||
declare var Buffer: any;
|
||||
const hasBuffer = (typeof Buffer !== 'undefined');
|
||||
|
||||
let textEncoder: TextEncoder | null;
|
||||
let textDecoder: TextDecoder | null;
|
||||
|
||||
export class VSBuffer {
|
||||
|
||||
public static alloc(byteLength: number): VSBuffer {
|
||||
|
@ -21,7 +24,14 @@ export class VSBuffer {
|
|||
}
|
||||
|
||||
public static fromString(source: string): VSBuffer {
|
||||
return new VSBuffer(Buffer.from(source));
|
||||
if (hasBuffer) {
|
||||
return new VSBuffer(Buffer.from(source));
|
||||
} else {
|
||||
if (!textEncoder) {
|
||||
textEncoder = new TextEncoder();
|
||||
}
|
||||
return new VSBuffer(textEncoder.encode(source));
|
||||
}
|
||||
}
|
||||
|
||||
public static concat(buffers: VSBuffer[], totalLength?: number): VSBuffer {
|
||||
|
@ -52,7 +62,14 @@ export class VSBuffer {
|
|||
}
|
||||
|
||||
public toString(): string {
|
||||
return this.buffer.toString();
|
||||
if (hasBuffer) {
|
||||
return this.buffer.toString();
|
||||
} else {
|
||||
if (!textDecoder) {
|
||||
textDecoder = new TextDecoder();
|
||||
}
|
||||
return textDecoder.decode(this.buffer);
|
||||
}
|
||||
}
|
||||
|
||||
public slice(start?: number, end?: number): VSBuffer {
|
||||
|
|
|
@ -87,7 +87,12 @@ class MutableToken implements CancellationToken {
|
|||
|
||||
export class CancellationTokenSource {
|
||||
|
||||
private _token?: CancellationToken;
|
||||
private _token?: CancellationToken = undefined;
|
||||
private _parentListener?: IDisposable = undefined;
|
||||
|
||||
constructor(parent?: CancellationToken) {
|
||||
this._parentListener = parent && parent.onCancellationRequested(this.cancel, this);
|
||||
}
|
||||
|
||||
get token(): CancellationToken {
|
||||
if (!this._token) {
|
||||
|
@ -112,6 +117,9 @@ export class CancellationTokenSource {
|
|||
}
|
||||
|
||||
dispose(): void {
|
||||
if (this._parentListener) {
|
||||
this._parentListener.dispose();
|
||||
}
|
||||
if (!this._token) {
|
||||
// ensure to initialize with an empty token if we had none
|
||||
this._token = CancellationToken.None;
|
||||
|
|
|
@ -130,7 +130,7 @@ export namespace Event {
|
|||
* @param leading Whether the event should fire in the leading phase of the timeout.
|
||||
* @param leakWarningThreshold The leak warning threshold override.
|
||||
*/
|
||||
export function debounce<T>(event: Event<T>, merge: (last: T, event: T) => T, delay?: number, leading?: boolean, leakWarningThreshold?: number): Event<T>;
|
||||
export function debounce<T>(event: Event<T>, merge: (last: T | undefined, event: T) => T, delay?: number, leading?: boolean, leakWarningThreshold?: number): Event<T>;
|
||||
export function debounce<I, O>(event: Event<I>, merge: (last: O | undefined, event: I) => O, delay?: number, leading?: boolean, leakWarningThreshold?: number): Event<O>;
|
||||
export function debounce<I, O>(event: Event<I>, merge: (last: O | undefined, event: I) => O, delay: number = 100, leading = false, leakWarningThreshold?: number): Event<O> {
|
||||
|
||||
|
@ -488,7 +488,7 @@ export class Emitter<T> {
|
|||
private readonly _leakageMon?: LeakageMonitor;
|
||||
private _disposed: boolean = false;
|
||||
private _event?: Event<T>;
|
||||
private _deliveryQueue: [Listener<T>, T][];
|
||||
private _deliveryQueue?: LinkedList<[Listener<T>, T]>;
|
||||
protected _listeners?: LinkedList<Listener<T>>;
|
||||
|
||||
constructor(options?: EmitterOptions) {
|
||||
|
@ -570,14 +570,14 @@ export class Emitter<T> {
|
|||
// the driver of this
|
||||
|
||||
if (!this._deliveryQueue) {
|
||||
this._deliveryQueue = [];
|
||||
this._deliveryQueue = new LinkedList();
|
||||
}
|
||||
|
||||
for (let iter = this._listeners.iterator(), e = iter.next(); !e.done; e = iter.next()) {
|
||||
this._deliveryQueue.push([e.value, event]);
|
||||
}
|
||||
|
||||
while (this._deliveryQueue.length > 0) {
|
||||
while (this._deliveryQueue.size > 0) {
|
||||
const [listener, event] = this._deliveryQueue.shift()!;
|
||||
try {
|
||||
if (typeof listener === 'function') {
|
||||
|
@ -594,10 +594,10 @@ export class Emitter<T> {
|
|||
|
||||
dispose() {
|
||||
if (this._listeners) {
|
||||
this._listeners = undefined;
|
||||
this._listeners.clear();
|
||||
}
|
||||
if (this._deliveryQueue) {
|
||||
this._deliveryQueue.length = 0;
|
||||
this._deliveryQueue.clear();
|
||||
}
|
||||
if (this._leakageMon) {
|
||||
this._leakageMon.dispose();
|
||||
|
@ -606,6 +606,51 @@ export class Emitter<T> {
|
|||
}
|
||||
}
|
||||
|
||||
export class PauseableEmitter<T> extends Emitter<T> {
|
||||
|
||||
private _isPaused = 0;
|
||||
private _eventQueue = new LinkedList<T>();
|
||||
private _mergeFn?: (input: T[]) => T;
|
||||
|
||||
constructor(options?: EmitterOptions & { merge?: (input: T[]) => T }) {
|
||||
super(options);
|
||||
this._mergeFn = options && options.merge;
|
||||
}
|
||||
|
||||
pause(): void {
|
||||
this._isPaused++;
|
||||
}
|
||||
|
||||
resume(): void {
|
||||
if (this._isPaused !== 0 && --this._isPaused === 0) {
|
||||
if (this._mergeFn) {
|
||||
// use the merge function to create a single composite
|
||||
// event. make a copy in case firing pauses this emitter
|
||||
const events = this._eventQueue.toArray();
|
||||
this._eventQueue.clear();
|
||||
super.fire(this._mergeFn(events));
|
||||
|
||||
} else {
|
||||
// no merging, fire each event individually and test
|
||||
// that this emitter isn't paused halfway through
|
||||
while (!this._isPaused && this._eventQueue.size !== 0) {
|
||||
super.fire(this._eventQueue.shift()!);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fire(event: T): void {
|
||||
if (this._listeners) {
|
||||
if (this._isPaused !== 0) {
|
||||
this._eventQueue.push(event);
|
||||
} else {
|
||||
super.fire(event);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export interface IWaitUntil {
|
||||
waitUntil(thenable: Promise<any>): void;
|
||||
}
|
||||
|
|
|
@ -4,9 +4,9 @@
|
|||
*--------------------------------------------------------------------------------------------*/
|
||||
|
||||
import { isWindows } from 'vs/base/common/platform';
|
||||
import { startsWithIgnoreCase, equalsIgnoreCase } from 'vs/base/common/strings';
|
||||
import { startsWithIgnoreCase, equalsIgnoreCase, endsWith, rtrim } from 'vs/base/common/strings';
|
||||
import { CharCode } from 'vs/base/common/charCode';
|
||||
import { sep, posix } from 'vs/base/common/path';
|
||||
import { sep, posix, isAbsolute, join, normalize } from 'vs/base/common/path';
|
||||
|
||||
function isPathSeparator(code: number) {
|
||||
return code === CharCode.Slash || code === CharCode.Backslash;
|
||||
|
@ -227,4 +227,56 @@ export function isEqualOrParent(path: string, candidate: string, ignoreCase?: bo
|
|||
|
||||
export function isWindowsDriveLetter(char0: number): boolean {
|
||||
return char0 >= CharCode.A && char0 <= CharCode.Z || char0 >= CharCode.a && char0 <= CharCode.z;
|
||||
}
|
||||
|
||||
export function sanitizeFilePath(candidate: string, cwd: string): string {
|
||||
|
||||
// Special case: allow to open a drive letter without trailing backslash
|
||||
if (isWindows && endsWith(candidate, ':')) {
|
||||
candidate += sep;
|
||||
}
|
||||
|
||||
// Ensure absolute
|
||||
if (!isAbsolute(candidate)) {
|
||||
candidate = join(cwd, candidate);
|
||||
}
|
||||
|
||||
// Ensure normalized
|
||||
candidate = normalize(candidate);
|
||||
|
||||
// Ensure no trailing slash/backslash
|
||||
if (isWindows) {
|
||||
candidate = rtrim(candidate, sep);
|
||||
|
||||
// Special case: allow to open drive root ('C:\')
|
||||
if (endsWith(candidate, ':')) {
|
||||
candidate += sep;
|
||||
}
|
||||
|
||||
} else {
|
||||
candidate = rtrim(candidate, sep);
|
||||
|
||||
// Special case: allow to open root ('/')
|
||||
if (!candidate) {
|
||||
candidate = sep;
|
||||
}
|
||||
}
|
||||
|
||||
return candidate;
|
||||
}
|
||||
|
||||
export function isRootOrDriveLetter(path: string): boolean {
|
||||
const pathNormalized = normalize(path);
|
||||
|
||||
if (isWindows) {
|
||||
if (path.length > 3) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return isWindowsDriveLetter(pathNormalized.charCodeAt(0))
|
||||
&& pathNormalized.charCodeAt(1) === CharCode.Colon
|
||||
&& (path.length === 2 || pathNormalized.charCodeAt(2) === CharCode.Backslash);
|
||||
}
|
||||
|
||||
return pathNormalized === posix.sep;
|
||||
}
|
|
@ -12,7 +12,7 @@ import { CharCode } from 'vs/base/common/charCode';
|
|||
import { isThenable } from 'vs/base/common/async';
|
||||
|
||||
export interface IExpression {
|
||||
[pattern: string]: boolean | SiblingClause | any;
|
||||
[pattern: string]: boolean | SiblingClause;
|
||||
}
|
||||
|
||||
export interface IRelativePattern {
|
||||
|
@ -429,7 +429,7 @@ function toRegExp(pattern: string): ParsedStringPattern {
|
|||
*/
|
||||
export function match(pattern: string | IRelativePattern, path: string): boolean;
|
||||
export function match(expression: IExpression, path: string, hasSibling?: (name: string) => boolean): string /* the matching pattern */;
|
||||
export function match(arg1: string | IExpression | IRelativePattern, path: string, hasSibling?: (name: string) => boolean): any {
|
||||
export function match(arg1: string | IExpression | IRelativePattern, path: string, hasSibling?: (name: string) => boolean): boolean | string | null | Promise<string | null> {
|
||||
if (!arg1 || typeof path !== 'string') {
|
||||
return false;
|
||||
}
|
||||
|
@ -447,14 +447,14 @@ export function match(arg1: string | IExpression | IRelativePattern, path: strin
|
|||
*/
|
||||
export function parse(pattern: string | IRelativePattern, options?: IGlobOptions): ParsedPattern;
|
||||
export function parse(expression: IExpression, options?: IGlobOptions): ParsedExpression;
|
||||
export function parse(arg1: string | IExpression | IRelativePattern, options: IGlobOptions = {}): any {
|
||||
export function parse(arg1: string | IExpression | IRelativePattern, options: IGlobOptions = {}): ParsedPattern | ParsedExpression {
|
||||
if (!arg1) {
|
||||
return FALSE;
|
||||
}
|
||||
|
||||
// Glob with String
|
||||
if (typeof arg1 === 'string' || isRelativePattern(arg1)) {
|
||||
const parsedPattern = parsePattern(arg1 as string | IRelativePattern, options);
|
||||
const parsedPattern = parsePattern(arg1, options);
|
||||
if (parsedPattern === NULL) {
|
||||
return FALSE;
|
||||
}
|
||||
|
@ -512,23 +512,12 @@ function listToMap(list: string[]) {
|
|||
return map;
|
||||
}
|
||||
|
||||
export function isRelativePattern(obj: any): obj is IRelativePattern {
|
||||
export function isRelativePattern(obj: unknown): obj is IRelativePattern {
|
||||
const rp = obj as IRelativePattern;
|
||||
|
||||
return rp && typeof rp.base === 'string' && typeof rp.pattern === 'string';
|
||||
}
|
||||
|
||||
/**
|
||||
* Same as `parse`, but the ParsedExpression is guaranteed to return a Promise
|
||||
*/
|
||||
export function parseToAsync(expression: IExpression, options?: IGlobOptions): ParsedExpression {
|
||||
const parsedExpression = parse(expression, options);
|
||||
return (path: string, basename?: string, hasSibling?: (name: string) => boolean | Promise<boolean>): string | null | Promise<string | null> => {
|
||||
const result = parsedExpression(path, basename, hasSibling);
|
||||
return isThenable(result) ? result : Promise.resolve(result);
|
||||
};
|
||||
}
|
||||
|
||||
export function getBasenameTerms(patternOrExpression: ParsedPattern | ParsedExpression): string[] {
|
||||
return (<ParsedStringPattern>patternOrExpression).allBasenames || [];
|
||||
}
|
||||
|
@ -613,7 +602,7 @@ function parsedExpression(expression: IExpression, options: IGlobOptions): Parse
|
|||
return resultExpression;
|
||||
}
|
||||
|
||||
function parseExpressionPattern(pattern: string, value: any, options: IGlobOptions): (ParsedStringPattern | ParsedExpressionPattern) {
|
||||
function parseExpressionPattern(pattern: string, value: boolean | SiblingClause, options: IGlobOptions): (ParsedStringPattern | ParsedExpressionPattern) {
|
||||
if (value === false) {
|
||||
return NULL; // pattern is disabled
|
||||
}
|
||||
|
|
|
@ -6,19 +6,24 @@
|
|||
import { Iterator, IteratorResult, FIN } from 'vs/base/common/iterator';
|
||||
|
||||
class Node<E> {
|
||||
|
||||
static readonly Undefined = new Node<any>(undefined);
|
||||
|
||||
element: E;
|
||||
next: Node<E> | undefined;
|
||||
prev: Node<E> | undefined;
|
||||
next: Node<E>;
|
||||
prev: Node<E>;
|
||||
|
||||
constructor(element: E) {
|
||||
this.element = element;
|
||||
this.next = Node.Undefined;
|
||||
this.prev = Node.Undefined;
|
||||
}
|
||||
}
|
||||
|
||||
export class LinkedList<E> {
|
||||
|
||||
private _first: Node<E> | undefined;
|
||||
private _last: Node<E> | undefined;
|
||||
private _first: Node<E> = Node.Undefined;
|
||||
private _last: Node<E> = Node.Undefined;
|
||||
private _size: number = 0;
|
||||
|
||||
get size(): number {
|
||||
|
@ -26,12 +31,12 @@ export class LinkedList<E> {
|
|||
}
|
||||
|
||||
isEmpty(): boolean {
|
||||
return !this._first;
|
||||
return this._first === Node.Undefined;
|
||||
}
|
||||
|
||||
clear(): void {
|
||||
this._first = undefined;
|
||||
this._last = undefined;
|
||||
this._first = Node.Undefined;
|
||||
this._last = Node.Undefined;
|
||||
this._size = 0;
|
||||
}
|
||||
|
||||
|
@ -45,7 +50,7 @@ export class LinkedList<E> {
|
|||
|
||||
private _insert(element: E, atTheEnd: boolean): () => void {
|
||||
const newNode = new Node(element);
|
||||
if (!this._first) {
|
||||
if (this._first === Node.Undefined) {
|
||||
this._first = newNode;
|
||||
this._last = newNode;
|
||||
|
||||
|
@ -64,12 +69,18 @@ export class LinkedList<E> {
|
|||
oldFirst.prev = newNode;
|
||||
}
|
||||
this._size += 1;
|
||||
return this._remove.bind(this, newNode);
|
||||
|
||||
let didRemove = false;
|
||||
return () => {
|
||||
if (!didRemove) {
|
||||
didRemove = true;
|
||||
this._remove(newNode);
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
shift(): E | undefined {
|
||||
if (!this._first) {
|
||||
if (this._first === Node.Undefined) {
|
||||
return undefined;
|
||||
} else {
|
||||
const res = this._first.element;
|
||||
|
@ -79,7 +90,7 @@ export class LinkedList<E> {
|
|||
}
|
||||
|
||||
pop(): E | undefined {
|
||||
if (!this._last) {
|
||||
if (this._last === Node.Undefined) {
|
||||
return undefined;
|
||||
} else {
|
||||
const res = this._last.element;
|
||||
|
@ -89,38 +100,30 @@ export class LinkedList<E> {
|
|||
}
|
||||
|
||||
private _remove(node: Node<E>): void {
|
||||
let candidate: Node<E> | undefined = this._first;
|
||||
while (candidate instanceof Node) {
|
||||
if (candidate !== node) {
|
||||
candidate = candidate.next;
|
||||
continue;
|
||||
}
|
||||
if (candidate.prev && candidate.next) {
|
||||
// middle
|
||||
const anchor = candidate.prev;
|
||||
anchor.next = candidate.next;
|
||||
candidate.next.prev = anchor;
|
||||
if (node.prev !== Node.Undefined && node.next !== Node.Undefined) {
|
||||
// middle
|
||||
const anchor = node.prev;
|
||||
anchor.next = node.next;
|
||||
node.next.prev = anchor;
|
||||
|
||||
} else if (!candidate.prev && !candidate.next) {
|
||||
// only node
|
||||
this._first = undefined;
|
||||
this._last = undefined;
|
||||
} else if (node.prev === Node.Undefined && node.next === Node.Undefined) {
|
||||
// only node
|
||||
this._first = Node.Undefined;
|
||||
this._last = Node.Undefined;
|
||||
|
||||
} else if (!candidate.next) {
|
||||
// last
|
||||
this._last = this._last!.prev!;
|
||||
this._last.next = undefined;
|
||||
} else if (node.next === Node.Undefined) {
|
||||
// last
|
||||
this._last = this._last!.prev!;
|
||||
this._last.next = Node.Undefined;
|
||||
|
||||
} else if (!candidate.prev) {
|
||||
// first
|
||||
this._first = this._first!.next!;
|
||||
this._first.prev = undefined;
|
||||
}
|
||||
|
||||
// done
|
||||
this._size -= 1;
|
||||
break;
|
||||
} else if (node.prev === Node.Undefined) {
|
||||
// first
|
||||
this._first = this._first!.next!;
|
||||
this._first.prev = Node.Undefined;
|
||||
}
|
||||
|
||||
// done
|
||||
this._size -= 1;
|
||||
}
|
||||
|
||||
iterator(): Iterator<E> {
|
||||
|
@ -128,7 +131,7 @@ export class LinkedList<E> {
|
|||
let node = this._first;
|
||||
return {
|
||||
next(): IteratorResult<E> {
|
||||
if (!node) {
|
||||
if (node === Node.Undefined) {
|
||||
return FIN;
|
||||
}
|
||||
|
||||
|
@ -145,7 +148,7 @@ export class LinkedList<E> {
|
|||
|
||||
toArray(): E[] {
|
||||
const result: E[] = [];
|
||||
for (let node = this._first; node instanceof Node; node = node.next) {
|
||||
for (let node = this._first; node !== Node.Undefined; node = node.next) {
|
||||
result.push(node.element);
|
||||
}
|
||||
return result;
|
||||
|
|
|
@ -175,34 +175,6 @@ export function equals(one: any, other: any): boolean {
|
|||
return true;
|
||||
}
|
||||
|
||||
function arrayToHash(array: string[]): { [name: string]: true } {
|
||||
const result: any = {};
|
||||
for (const e of array) {
|
||||
result[e] = true;
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* Given an array of strings, returns a function which, given a string
|
||||
* returns true or false whether the string is in that array.
|
||||
*/
|
||||
export function createKeywordMatcher(arr: string[], caseInsensitive: boolean = false): (str: string) => boolean {
|
||||
if (caseInsensitive) {
|
||||
arr = arr.map(function (x) { return x.toLowerCase(); });
|
||||
}
|
||||
const hash = arrayToHash(arr);
|
||||
if (caseInsensitive) {
|
||||
return function (word) {
|
||||
return hash[word.toLowerCase()] !== undefined && hash.hasOwnProperty(word.toLowerCase());
|
||||
};
|
||||
} else {
|
||||
return function (word) {
|
||||
return hash[word] !== undefined && hash.hasOwnProperty(word);
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Calls JSON.Stringify with a replacer to break apart any circular references.
|
||||
* This prevents JSON.stringify from throwing the exception
|
||||
|
|
|
@ -87,6 +87,17 @@ export const enum TerminateResponseCode {
|
|||
ProcessNotFound = 3,
|
||||
}
|
||||
|
||||
export interface ProcessItem {
|
||||
name: string;
|
||||
cmd: string;
|
||||
pid: number;
|
||||
ppid: number;
|
||||
load: number;
|
||||
mem: number;
|
||||
|
||||
children?: ProcessItem[];
|
||||
}
|
||||
|
||||
/**
|
||||
* Sanitizes a VS Code process environment by removing all Electron/VS Code-related values.
|
||||
*/
|
||||
|
|
|
@ -159,27 +159,6 @@ export function validateConstraint(arg: any, constraint: TypeConstraint | undefi
|
|||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new object of the provided class and will call the constructor with
|
||||
* any additional argument supplied.
|
||||
*/
|
||||
export function create(ctor: Function, ...args: any[]): any {
|
||||
if (isNativeClass(ctor)) {
|
||||
return new (ctor as any)(...args);
|
||||
} else {
|
||||
const obj = Object.create(ctor.prototype);
|
||||
ctor.apply(obj, args);
|
||||
return obj;
|
||||
}
|
||||
}
|
||||
|
||||
// https://stackoverflow.com/a/32235645/1499159
|
||||
function isNativeClass(thing: any): boolean {
|
||||
return typeof thing === 'function'
|
||||
&& thing.hasOwnProperty('prototype')
|
||||
&& !thing.hasOwnProperty('arguments');
|
||||
}
|
||||
|
||||
export function getAllPropertyNames(obj: object): string[] {
|
||||
let res: string[] = [];
|
||||
let proto = Object.getPrototypeOf(obj);
|
||||
|
@ -202,4 +181,4 @@ export function withNullAsUndefined<T>(x: T | null): T | undefined {
|
|||
*/
|
||||
export function withUndefinedAsNull<T>(x: T | undefined): T | null {
|
||||
return typeof x === 'undefined' ? null : x;
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,112 +0,0 @@
|
|||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
|
||||
export function toUint8ArrayBuffer(str: string): ArrayBuffer {
|
||||
|
||||
if (typeof TextEncoder !== 'undefined') {
|
||||
return new TextEncoder().encode(str).buffer;
|
||||
}
|
||||
|
||||
let i: number, len: number, length = 0, charCode = 0, trailCharCode = 0, codepoint = 0;
|
||||
|
||||
// First pass, for the size
|
||||
for (i = 0, len = str.length; i < len; i++) {
|
||||
charCode = str.charCodeAt(i);
|
||||
|
||||
// Surrogate pair
|
||||
if (charCode >= 0xD800 && charCode < 0xDC00) {
|
||||
trailCharCode = str.charCodeAt(++i);
|
||||
|
||||
if (!(trailCharCode >= 0xDC00 && trailCharCode < 0xE000)) {
|
||||
throw new Error('Invalid char code');
|
||||
}
|
||||
|
||||
// Code point can be obtained by subtracting 0xD800 and 0xDC00 from both char codes respectively
|
||||
// and joining the 10 least significant bits from each, finally adding 0x10000.
|
||||
codepoint = ((((charCode - 0xD800) & 0x3FF) << 10) | ((trailCharCode - 0xDC00) & 0x3FF)) + 0x10000;
|
||||
|
||||
} else {
|
||||
codepoint = charCode;
|
||||
}
|
||||
|
||||
length += byteSizeInUTF8(codepoint);
|
||||
}
|
||||
|
||||
let result = new ArrayBuffer(length);
|
||||
let view = new Uint8Array(result);
|
||||
let pos = 0;
|
||||
|
||||
// Second pass, for the data
|
||||
for (i = 0, len = str.length; i < len; i++) {
|
||||
charCode = str.charCodeAt(i);
|
||||
|
||||
if (charCode >= 0xD800 && charCode < 0xDC00) {
|
||||
trailCharCode = str.charCodeAt(++i);
|
||||
codepoint = ((((charCode - 0xD800) & 0x3FF) << 10) | ((trailCharCode - 0xDC00) & 0x3FF)) + 0x10000;
|
||||
} else {
|
||||
codepoint = charCode;
|
||||
}
|
||||
|
||||
pos += writeUTF8(codepoint, view, pos);
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
function byteSizeInUTF8(codePoint: number): number {
|
||||
codePoint = codePoint >>> 0;
|
||||
|
||||
if (codePoint < 0x80) {
|
||||
return 1;
|
||||
} else if (codePoint < 0x800) {
|
||||
return 2;
|
||||
} else if (codePoint < 0x10000) {
|
||||
return 3;
|
||||
} else if (codePoint < 0x200000) {
|
||||
return 4;
|
||||
} else if (codePoint < 0x4000000) {
|
||||
return 5;
|
||||
} else if (codePoint < 0x80000000) {
|
||||
return 6;
|
||||
} else {
|
||||
throw new Error('Code point 0x' + toHexString(codePoint) + ' not encodable in UTF8.');
|
||||
}
|
||||
}
|
||||
|
||||
function writeUTF8(codePoint: number, buffer: Uint8Array, pos: number): number {
|
||||
|
||||
// How many bits needed for codePoint
|
||||
let byteSize = byteSizeInUTF8(codePoint);
|
||||
|
||||
// 0xxxxxxx
|
||||
if (byteSize === 1) {
|
||||
buffer[pos] = codePoint;
|
||||
return 1;
|
||||
}
|
||||
|
||||
// 110xxxxx 10xxxxxx
|
||||
// 1110xxxx 10xxxxxx 10xxxxxx
|
||||
// 11110xxx 10xxxxxx 10xxxxxx 10xxxxxx
|
||||
// 111110xx 10xxxxxx 10xxxxxx 10xxxxxx 10xxxxxx
|
||||
// 1111110x 10xxxxxx 10xxxxxx 10xxxxxx 10xxxxxx 10xxxxxx
|
||||
|
||||
// first byte
|
||||
buffer[pos] = ((0xFC << (6 - byteSize)) | (codePoint >>> (6 * (byteSize - 1)))) & 0xFF;
|
||||
|
||||
// successive bytes
|
||||
for (let i = 1; i < byteSize; i++) {
|
||||
buffer[pos + i] = (0x80 | (0x3F & (codePoint >>> (6 * (byteSize - i - 1))))) & 0xFF;
|
||||
}
|
||||
|
||||
return byteSize;
|
||||
}
|
||||
|
||||
function leftPad(value: string, length: number, char: string = '0'): string {
|
||||
return new Array(length - value.length + 1).join(char) + value;
|
||||
}
|
||||
|
||||
function toHexString(value: number, bitsize: number = 32): string {
|
||||
return leftPad((value >>> 0).toString(16), bitsize / 4);
|
||||
}
|
|
@ -4,13 +4,13 @@
|
|||
*--------------------------------------------------------------------------------------------*/
|
||||
|
||||
import * as fs from 'fs';
|
||||
import { dirname, basename } from 'vs/base/common/path';
|
||||
import { dirname } from 'vs/base/common/path';
|
||||
import * as objects from 'vs/base/common/objects';
|
||||
import { IDisposable, dispose } from 'vs/base/common/lifecycle';
|
||||
import { Event, Emitter } from 'vs/base/common/event';
|
||||
import * as json from 'vs/base/common/json';
|
||||
import * as extfs from 'vs/base/node/extfs';
|
||||
import { isWindows } from 'vs/base/common/platform';
|
||||
import { readlink, statLink } from 'vs/base/node/pfs';
|
||||
import { watchFolder, watchFile } from 'vs/base/node/watcher';
|
||||
|
||||
export interface IConfigurationChangeEvent<T> {
|
||||
config: T;
|
||||
|
@ -48,11 +48,9 @@ export class ConfigWatcher<T> implements IConfigWatcher<T>, IDisposable {
|
|||
private timeoutHandle: NodeJS.Timer | null;
|
||||
private disposables: IDisposable[];
|
||||
private readonly _onDidUpdateConfiguration: Emitter<IConfigurationChangeEvent<T>>;
|
||||
private configName: string;
|
||||
|
||||
constructor(private _path: string, private options: IConfigOptions<T> = { defaultConfig: Object.create(null), onError: error => console.error(error) }) {
|
||||
this.disposables = [];
|
||||
this.configName = basename(this._path);
|
||||
|
||||
this._onDidUpdateConfiguration = new Emitter<IConfigurationChangeEvent<T>>();
|
||||
this.disposables.push(this._onDidUpdateConfiguration);
|
||||
|
@ -61,15 +59,15 @@ export class ConfigWatcher<T> implements IConfigWatcher<T>, IDisposable {
|
|||
this.initAsync();
|
||||
}
|
||||
|
||||
public get path(): string {
|
||||
get path(): string {
|
||||
return this._path;
|
||||
}
|
||||
|
||||
public get hasParseErrors(): boolean {
|
||||
get hasParseErrors(): boolean {
|
||||
return this.parseErrors && this.parseErrors.length > 0;
|
||||
}
|
||||
|
||||
public get onDidUpdateConfiguration(): Event<IConfigurationChangeEvent<T>> {
|
||||
get onDidUpdateConfiguration(): Event<IConfigurationChangeEvent<T>> {
|
||||
return this._onDidUpdateConfiguration.event;
|
||||
}
|
||||
|
||||
|
@ -126,50 +124,31 @@ export class ConfigWatcher<T> implements IConfigWatcher<T>, IDisposable {
|
|||
this.watch(parentFolder, true);
|
||||
|
||||
// Check if the path is a symlink and watch its target if so
|
||||
fs.lstat(this._path, (err, stat) => {
|
||||
if (err || stat.isDirectory()) {
|
||||
return; // path is not a valid file
|
||||
}
|
||||
|
||||
// We found a symlink
|
||||
if (stat.isSymbolicLink()) {
|
||||
fs.readlink(this._path, (err, realPath) => {
|
||||
if (err) {
|
||||
return; // path is not a valid symlink
|
||||
}
|
||||
|
||||
this.watch(realPath, false);
|
||||
});
|
||||
}
|
||||
});
|
||||
this.handleSymbolicLink().then(undefined, error => { /* ignore error */ });
|
||||
}
|
||||
|
||||
private watch(path: string, isParentFolder: boolean): void {
|
||||
private async handleSymbolicLink(): Promise<void> {
|
||||
const { stat, isSymbolicLink } = await statLink(this._path);
|
||||
if (isSymbolicLink && !stat.isDirectory()) {
|
||||
const realPath = await readlink(this._path);
|
||||
|
||||
this.watch(realPath, false);
|
||||
}
|
||||
}
|
||||
|
||||
private watch(path: string, isFolder: boolean): void {
|
||||
if (this.disposed) {
|
||||
return; // avoid watchers that will never get disposed by checking for being disposed
|
||||
}
|
||||
|
||||
this.disposables.push(extfs.watch(path,
|
||||
(type, file) => this.onConfigFileChange(type, file, isParentFolder),
|
||||
(error: string) => this.options.onError(error)
|
||||
));
|
||||
if (isFolder) {
|
||||
this.disposables.push(watchFolder(path, (type, path) => path === this._path ? this.onConfigFileChange() : undefined, error => this.options.onError(error)));
|
||||
} else {
|
||||
this.disposables.push(watchFile(path, (type, path) => this.onConfigFileChange(), error => this.options.onError(error)));
|
||||
}
|
||||
}
|
||||
|
||||
private onConfigFileChange(eventType: string, filename: string | undefined, isParentFolder: boolean): void {
|
||||
if (isParentFolder) {
|
||||
|
||||
// Windows: in some cases the filename contains artifacts from the absolute path
|
||||
// see https://github.com/nodejs/node/issues/19170
|
||||
// As such, we have to ensure that the filename basename is used for comparison.
|
||||
if (isWindows && filename && filename !== this.configName) {
|
||||
filename = basename(filename);
|
||||
}
|
||||
|
||||
if (filename !== this.configName) {
|
||||
return; // a change to a sibling file that is not our config file
|
||||
}
|
||||
}
|
||||
|
||||
private onConfigFileChange(): void {
|
||||
if (this.timeoutHandle) {
|
||||
global.clearTimeout(this.timeoutHandle);
|
||||
this.timeoutHandle = null;
|
||||
|
@ -179,7 +158,7 @@ export class ConfigWatcher<T> implements IConfigWatcher<T>, IDisposable {
|
|||
this.timeoutHandle = global.setTimeout(() => this.reload(), this.options.changeBufferDelay || 0);
|
||||
}
|
||||
|
||||
public reload(callback?: (config: T) => void): void {
|
||||
reload(callback?: (config: T) => void): void {
|
||||
this.loadAsync(currentConfig => {
|
||||
if (!objects.equals(currentConfig, this.cache)) {
|
||||
this.updateCache(currentConfig);
|
||||
|
@ -193,7 +172,7 @@ export class ConfigWatcher<T> implements IConfigWatcher<T>, IDisposable {
|
|||
});
|
||||
}
|
||||
|
||||
public getConfig(): T {
|
||||
getConfig(): T {
|
||||
this.ensureLoaded();
|
||||
|
||||
return this.cache;
|
||||
|
@ -205,7 +184,7 @@ export class ConfigWatcher<T> implements IConfigWatcher<T>, IDisposable {
|
|||
}
|
||||
}
|
||||
|
||||
public dispose(): void {
|
||||
dispose(): void {
|
||||
this.disposed = true;
|
||||
this.disposables = dispose(this.disposables);
|
||||
}
|
||||
|
|
|
@ -56,7 +56,7 @@ for PID in "$@"; do
|
|||
PROCESS_TIME_BEFORE=${PROCESS_BEFORE_TIMES[$ITER]}
|
||||
let PROCESS_DELTA=$PROCESS_TIME_AFTER-$PROCESS_TIME_BEFORE
|
||||
let TOTAL_DELTA=$TOTAL_TIME_AFTER-$TOTAL_TIME_BEFORE
|
||||
CPU_USAGE=`echo "100*$PROCESS_DELTA/$TOTAL_DELTA" | bc -l`
|
||||
CPU_USAGE=`echo "$((100*$PROCESS_DELTA/$TOTAL_DELTA))"`
|
||||
|
||||
# Parent script reads from stdout, so echo result to be read
|
||||
echo $CPU_USAGE
|
||||
|
|
|
@ -23,7 +23,7 @@ export class LineDecoder {
|
|||
this.remaining = null;
|
||||
}
|
||||
|
||||
public write(buffer: Buffer): string[] {
|
||||
write(buffer: Buffer): string[] {
|
||||
const result: string[] = [];
|
||||
const value = this.remaining
|
||||
? this.remaining + this.stringDecoder.write(buffer)
|
||||
|
@ -56,7 +56,7 @@ export class LineDecoder {
|
|||
return result;
|
||||
}
|
||||
|
||||
public end(): string | null {
|
||||
end(): string | null {
|
||||
return this.remaining;
|
||||
}
|
||||
}
|
|
@ -36,7 +36,7 @@ export function toDecodeStream(readable: Readable, options: IDecodeStreamOptions
|
|||
readable.pipe(new class extends Writable {
|
||||
|
||||
private _decodeStream: NodeJS.ReadWriteStream;
|
||||
private _decodeStreamConstruction: Promise<any>;
|
||||
private _decodeStreamConstruction: Promise<void>;
|
||||
private _buffer: Buffer[] = [];
|
||||
private _bytesBuffered = 0;
|
||||
|
||||
|
|
|
@ -1,707 +0,0 @@
|
|||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
|
||||
import * as fs from 'fs';
|
||||
import * as paths from 'vs/base/common/path';
|
||||
import { nfcall } from 'vs/base/common/async';
|
||||
import { normalizeNFC } from 'vs/base/common/normalization';
|
||||
import * as platform from 'vs/base/common/platform';
|
||||
import * as strings from 'vs/base/common/strings';
|
||||
import * as uuid from 'vs/base/common/uuid';
|
||||
import { encode, encodeStream } from 'vs/base/node/encoding';
|
||||
import * as flow from 'vs/base/node/flow';
|
||||
import { CancellationToken } from 'vs/base/common/cancellation';
|
||||
import { IDisposable, toDisposable, Disposable } from 'vs/base/common/lifecycle';
|
||||
|
||||
const loop = flow.loop;
|
||||
|
||||
export function readdirSync(path: string): string[] {
|
||||
// Mac: uses NFD unicode form on disk, but we want NFC
|
||||
// See also https://github.com/nodejs/node/issues/2165
|
||||
if (platform.isMacintosh) {
|
||||
return fs.readdirSync(path).map(c => normalizeNFC(c));
|
||||
}
|
||||
|
||||
return fs.readdirSync(path);
|
||||
}
|
||||
|
||||
export function readdir(path: string, callback: (error: Error | null, files: string[]) => void): void {
|
||||
// Mac: uses NFD unicode form on disk, but we want NFC
|
||||
// See also https://github.com/nodejs/node/issues/2165
|
||||
if (platform.isMacintosh) {
|
||||
return fs.readdir(path, (error, children) => {
|
||||
if (error) {
|
||||
return callback(error, []);
|
||||
}
|
||||
|
||||
return callback(null, children.map(c => normalizeNFC(c)));
|
||||
});
|
||||
}
|
||||
|
||||
return fs.readdir(path, callback);
|
||||
}
|
||||
|
||||
export interface IStatAndLink {
|
||||
stat: fs.Stats;
|
||||
isSymbolicLink: boolean;
|
||||
}
|
||||
|
||||
export function statLink(path: string, callback: (error: Error | null, statAndIsLink: IStatAndLink | null) => void): void {
|
||||
fs.lstat(path, (error, lstat) => {
|
||||
if (error || lstat.isSymbolicLink()) {
|
||||
fs.stat(path, (error, stat) => {
|
||||
if (error) {
|
||||
return callback(error, null);
|
||||
}
|
||||
|
||||
callback(null, { stat, isSymbolicLink: lstat && lstat.isSymbolicLink() });
|
||||
});
|
||||
} else {
|
||||
callback(null, { stat: lstat, isSymbolicLink: false });
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
export function copy(source: string, target: string, callback: (error: Error | null) => void, copiedSourcesIn?: { [path: string]: boolean }): void {
|
||||
const copiedSources = copiedSourcesIn ? copiedSourcesIn : Object.create(null);
|
||||
|
||||
fs.stat(source, (error, stat) => {
|
||||
if (error) {
|
||||
return callback(error);
|
||||
}
|
||||
|
||||
if (!stat.isDirectory()) {
|
||||
return doCopyFile(source, target, stat.mode & 511, callback);
|
||||
}
|
||||
|
||||
if (copiedSources[source]) {
|
||||
return callback(null); // escape when there are cycles (can happen with symlinks)
|
||||
}
|
||||
|
||||
copiedSources[source] = true; // remember as copied
|
||||
|
||||
const proceed = function () {
|
||||
readdir(source, (err, files) => {
|
||||
loop(files, (file: string, clb: (error: Error | null, result: string[]) => void) => {
|
||||
copy(paths.join(source, file), paths.join(target, file), (error: Error) => clb(error, []), copiedSources);
|
||||
}, callback);
|
||||
});
|
||||
};
|
||||
|
||||
mkdirp(target, stat.mode & 511).then(proceed, proceed);
|
||||
});
|
||||
}
|
||||
|
||||
function doCopyFile(source: string, target: string, mode: number, callback: (error: Error) => void): void {
|
||||
const reader = fs.createReadStream(source);
|
||||
const writer = fs.createWriteStream(target, { mode });
|
||||
|
||||
let finished = false;
|
||||
const finish = (error?: Error) => {
|
||||
if (!finished) {
|
||||
finished = true;
|
||||
|
||||
// in error cases, pass to callback
|
||||
if (error) {
|
||||
callback(error);
|
||||
}
|
||||
|
||||
// we need to explicitly chmod because of https://github.com/nodejs/node/issues/1104
|
||||
else {
|
||||
fs.chmod(target, mode, callback);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
// handle errors properly
|
||||
reader.once('error', error => finish(error));
|
||||
writer.once('error', error => finish(error));
|
||||
|
||||
// we are done (underlying fd has been closed)
|
||||
writer.once('close', () => finish());
|
||||
|
||||
// start piping
|
||||
reader.pipe(writer);
|
||||
}
|
||||
|
||||
export function mkdirp(path: string, mode?: number, token?: CancellationToken): Promise<boolean> {
|
||||
const mkdir = (): Promise<null> => {
|
||||
return nfcall(fs.mkdir, path, mode).then(undefined, (mkdirErr: NodeJS.ErrnoException) => {
|
||||
|
||||
// ENOENT: a parent folder does not exist yet
|
||||
if (mkdirErr.code === 'ENOENT') {
|
||||
return Promise.reject(mkdirErr);
|
||||
}
|
||||
|
||||
// Any other error: check if folder exists and
|
||||
// return normally in that case if its a folder
|
||||
return nfcall(fs.stat, path).then((stat: fs.Stats) => {
|
||||
if (!stat.isDirectory()) {
|
||||
return Promise.reject(new Error(`'${path}' exists and is not a directory.`));
|
||||
}
|
||||
|
||||
return null;
|
||||
}, statErr => {
|
||||
return Promise.reject(mkdirErr); // bubble up original mkdir error
|
||||
});
|
||||
});
|
||||
};
|
||||
|
||||
// stop at root
|
||||
if (path === paths.dirname(path)) {
|
||||
return Promise.resolve(true);
|
||||
}
|
||||
|
||||
// recursively mkdir
|
||||
return mkdir().then(undefined, (err: NodeJS.ErrnoException) => {
|
||||
|
||||
// Respect cancellation
|
||||
if (token && token.isCancellationRequested) {
|
||||
return Promise.resolve(false);
|
||||
}
|
||||
|
||||
// ENOENT: a parent folder does not exist yet, continue
|
||||
// to create the parent folder and then try again.
|
||||
if (err.code === 'ENOENT') {
|
||||
return mkdirp(paths.dirname(path), mode).then(mkdir);
|
||||
}
|
||||
|
||||
// Any other error
|
||||
return Promise.reject(err);
|
||||
});
|
||||
}
|
||||
|
||||
// Deletes the given path by first moving it out of the workspace. This has two benefits. For one, the operation can return fast because
|
||||
// after the rename, the contents are out of the workspace although not yet deleted. The greater benefit however is that this operation
|
||||
// will fail in case any file is used by another process. fs.unlink() in node will not bail if a file unlinked is used by another process.
|
||||
// However, the consequences are bad as outlined in all the related bugs from https://github.com/joyent/node/issues/7164
|
||||
export function del(path: string, tmpFolder: string, callback: (error: Error | null) => void, done?: (error: Error | null) => void): void {
|
||||
fs.exists(path, exists => {
|
||||
if (!exists) {
|
||||
return callback(null);
|
||||
}
|
||||
|
||||
fs.stat(path, (err, stat) => {
|
||||
if (err || !stat) {
|
||||
return callback(err);
|
||||
}
|
||||
|
||||
// Special windows workaround: A file or folder that ends with a "." cannot be moved to another place
|
||||
// because it is not a valid file name. In this case, we really have to do the deletion without prior move.
|
||||
if (path[path.length - 1] === '.' || strings.endsWith(path, './') || strings.endsWith(path, '.\\')) {
|
||||
return rmRecursive(path, callback);
|
||||
}
|
||||
|
||||
const pathInTemp = paths.join(tmpFolder, uuid.generateUuid());
|
||||
fs.rename(path, pathInTemp, (error: Error | null) => {
|
||||
if (error) {
|
||||
return rmRecursive(path, callback); // if rename fails, delete without tmp dir
|
||||
}
|
||||
|
||||
// Return early since the move succeeded
|
||||
callback(null);
|
||||
|
||||
// do the heavy deletion outside the callers callback
|
||||
rmRecursive(pathInTemp, error => {
|
||||
if (done) {
|
||||
done(error);
|
||||
}
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
function rmRecursive(path: string, callback: (error: Error | null) => void): void {
|
||||
if (path === paths.win32.sep || path === paths.posix.sep) {
|
||||
return callback(new Error('Will not delete root!'));
|
||||
}
|
||||
|
||||
fs.exists(path, exists => {
|
||||
if (!exists) {
|
||||
callback(null);
|
||||
} else {
|
||||
fs.lstat(path, (err, stat) => {
|
||||
if (err || !stat) {
|
||||
callback(err);
|
||||
} else if (!stat.isDirectory() || stat.isSymbolicLink() /* !!! never recurse into links when deleting !!! */) {
|
||||
const mode = stat.mode;
|
||||
if (!(mode & 128)) { // 128 === 0200
|
||||
fs.chmod(path, mode | 128, (err: Error) => { // 128 === 0200
|
||||
if (err) {
|
||||
callback(err);
|
||||
} else {
|
||||
fs.unlink(path, callback);
|
||||
}
|
||||
});
|
||||
} else {
|
||||
fs.unlink(path, callback);
|
||||
}
|
||||
} else {
|
||||
readdir(path, (err, children) => {
|
||||
if (err || !children) {
|
||||
callback(err);
|
||||
} else if (children.length === 0) {
|
||||
fs.rmdir(path, callback);
|
||||
} else {
|
||||
let firstError: Error | null = null;
|
||||
let childrenLeft = children.length;
|
||||
children.forEach(child => {
|
||||
rmRecursive(paths.join(path, child), (err: Error) => {
|
||||
childrenLeft--;
|
||||
if (err) {
|
||||
firstError = firstError || err;
|
||||
}
|
||||
|
||||
if (childrenLeft === 0) {
|
||||
if (firstError) {
|
||||
callback(firstError);
|
||||
} else {
|
||||
fs.rmdir(path, callback);
|
||||
}
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
||||
});
|
||||
}
|
||||
});
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
export function delSync(path: string): void {
|
||||
if (path === paths.win32.sep || path === paths.posix.sep) {
|
||||
throw new Error('Will not delete root!');
|
||||
}
|
||||
|
||||
try {
|
||||
const stat = fs.lstatSync(path);
|
||||
if (stat.isDirectory() && !stat.isSymbolicLink()) {
|
||||
readdirSync(path).forEach(child => delSync(paths.join(path, child)));
|
||||
fs.rmdirSync(path);
|
||||
} else {
|
||||
fs.unlinkSync(path);
|
||||
}
|
||||
} catch (err) {
|
||||
if (err.code === 'ENOENT') {
|
||||
return; // not found
|
||||
}
|
||||
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
|
||||
export function mv(source: string, target: string, callback: (error: Error | null) => void): void {
|
||||
if (source === target) {
|
||||
return callback(null);
|
||||
}
|
||||
|
||||
function updateMtime(err: Error | null): void {
|
||||
if (err) {
|
||||
return callback(err);
|
||||
}
|
||||
|
||||
fs.lstat(target, (error, stat) => {
|
||||
if (error) {
|
||||
return callback(error);
|
||||
}
|
||||
|
||||
if (stat.isDirectory() || stat.isSymbolicLink()) {
|
||||
return callback(null);
|
||||
}
|
||||
|
||||
fs.open(target, 'a', null, (err: Error, fd: number) => {
|
||||
if (err) {
|
||||
return callback(err);
|
||||
}
|
||||
|
||||
fs.futimes(fd, stat.atime, new Date(), (err: Error) => {
|
||||
if (err) {
|
||||
return callback(err);
|
||||
}
|
||||
|
||||
fs.close(fd, callback);
|
||||
});
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
// Try native rename()
|
||||
fs.rename(source, target, (err: Error) => {
|
||||
if (!err) {
|
||||
return updateMtime(null);
|
||||
}
|
||||
|
||||
// In two cases we fallback to classic copy and delete:
|
||||
//
|
||||
// 1.) The EXDEV error indicates that source and target are on different devices
|
||||
// In this case, fallback to using a copy() operation as there is no way to
|
||||
// rename() between different devices.
|
||||
//
|
||||
// 2.) The user tries to rename a file/folder that ends with a dot. This is not
|
||||
// really possible to move then, at least on UNC devices.
|
||||
if (err && source.toLowerCase() !== target.toLowerCase() && ((<any>err).code === 'EXDEV') || strings.endsWith(source, '.')) {
|
||||
return copy(source, target, (err: Error) => {
|
||||
if (err) {
|
||||
return callback(err);
|
||||
}
|
||||
|
||||
rmRecursive(source, updateMtime);
|
||||
});
|
||||
}
|
||||
|
||||
return callback(err);
|
||||
});
|
||||
}
|
||||
|
||||
export interface IWriteFileOptions {
|
||||
mode?: number;
|
||||
flag?: string;
|
||||
encoding?: {
|
||||
charset: string;
|
||||
addBOM: boolean;
|
||||
};
|
||||
}
|
||||
|
||||
interface IEnsuredWriteFileOptions extends IWriteFileOptions {
|
||||
mode: number;
|
||||
flag: string;
|
||||
}
|
||||
|
||||
let canFlush = true;
|
||||
export function writeFileAndFlush(path: string, data: string | Buffer | NodeJS.ReadableStream | Uint8Array, options: IWriteFileOptions, callback: (error?: Error) => void): void {
|
||||
const ensuredOptions = ensureWriteOptions(options);
|
||||
|
||||
if (typeof data === 'string' || Buffer.isBuffer(data) || data instanceof Uint8Array) {
|
||||
doWriteFileAndFlush(path, data, ensuredOptions, callback);
|
||||
} else {
|
||||
doWriteFileStreamAndFlush(path, data, ensuredOptions, callback);
|
||||
}
|
||||
}
|
||||
|
||||
function doWriteFileStreamAndFlush(path: string, reader: NodeJS.ReadableStream, options: IEnsuredWriteFileOptions, callback: (error?: Error) => void): void {
|
||||
|
||||
// finish only once
|
||||
let finished = false;
|
||||
const finish = (error?: Error) => {
|
||||
if (!finished) {
|
||||
finished = true;
|
||||
|
||||
// in error cases we need to manually close streams
|
||||
// if the write stream was successfully opened
|
||||
if (error) {
|
||||
if (isOpen) {
|
||||
writer.once('close', () => callback(error));
|
||||
writer.destroy();
|
||||
} else {
|
||||
callback(error);
|
||||
}
|
||||
}
|
||||
|
||||
// otherwise just return without error
|
||||
else {
|
||||
callback();
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
// create writer to target. we set autoClose: false because we want to use the streams
|
||||
// file descriptor to call fs.fdatasync to ensure the data is flushed to disk
|
||||
const writer = fs.createWriteStream(path, { mode: options.mode, flags: options.flag, autoClose: false });
|
||||
|
||||
// Event: 'open'
|
||||
// Purpose: save the fd for later use and start piping
|
||||
// Notes: will not be called when there is an error opening the file descriptor!
|
||||
let fd: number;
|
||||
let isOpen: boolean;
|
||||
writer.once('open', descriptor => {
|
||||
fd = descriptor;
|
||||
isOpen = true;
|
||||
|
||||
// if an encoding is provided, we need to pipe the stream through
|
||||
// an encoder stream and forward the encoding related options
|
||||
if (options.encoding) {
|
||||
reader = reader.pipe(encodeStream(options.encoding.charset, { addBOM: options.encoding.addBOM }));
|
||||
}
|
||||
|
||||
// start data piping only when we got a successful open. this ensures that we do
|
||||
// not consume the stream when an error happens and helps to fix this issue:
|
||||
// https://github.com/Microsoft/vscode/issues/42542
|
||||
reader.pipe(writer);
|
||||
});
|
||||
|
||||
// Event: 'error'
|
||||
// Purpose: to return the error to the outside and to close the write stream (does not happen automatically)
|
||||
reader.once('error', error => finish(error));
|
||||
writer.once('error', error => finish(error));
|
||||
|
||||
// Event: 'finish'
|
||||
// Purpose: use fs.fdatasync to flush the contents to disk
|
||||
// Notes: event is called when the writer has finished writing to the underlying resource. we must call writer.close()
|
||||
// because we have created the WriteStream with autoClose: false
|
||||
writer.once('finish', () => {
|
||||
|
||||
// flush to disk
|
||||
if (canFlush && isOpen) {
|
||||
fs.fdatasync(fd, (syncError: Error) => {
|
||||
|
||||
// In some exotic setups it is well possible that node fails to sync
|
||||
// In that case we disable flushing and warn to the console
|
||||
if (syncError) {
|
||||
console.warn('[node.js fs] fdatasync is now disabled for this session because it failed: ', syncError);
|
||||
canFlush = false;
|
||||
}
|
||||
|
||||
writer.destroy();
|
||||
});
|
||||
} else {
|
||||
writer.destroy();
|
||||
}
|
||||
});
|
||||
|
||||
// Event: 'close'
|
||||
// Purpose: signal we are done to the outside
|
||||
// Notes: event is called when the writer's filedescriptor is closed
|
||||
writer.once('close', () => finish());
|
||||
}
|
||||
|
||||
// Calls fs.writeFile() followed by a fs.sync() call to flush the changes to disk
|
||||
// We do this in cases where we want to make sure the data is really on disk and
|
||||
// not in some cache.
|
||||
//
|
||||
// See https://github.com/nodejs/node/blob/v5.10.0/lib/fs.js#L1194
|
||||
function doWriteFileAndFlush(path: string, data: string | Buffer | Uint8Array, options: IEnsuredWriteFileOptions, callback: (error?: Error) => void): void {
|
||||
if (options.encoding) {
|
||||
data = encode(data instanceof Uint8Array ? Buffer.from(data) : data, options.encoding.charset, { addBOM: options.encoding.addBOM });
|
||||
}
|
||||
|
||||
if (!canFlush) {
|
||||
return fs.writeFile(path, data, { mode: options.mode, flag: options.flag }, callback);
|
||||
}
|
||||
|
||||
// Open the file with same flags and mode as fs.writeFile()
|
||||
fs.open(path, options.flag, options.mode, (openError, fd) => {
|
||||
if (openError) {
|
||||
return callback(openError);
|
||||
}
|
||||
|
||||
// It is valid to pass a fd handle to fs.writeFile() and this will keep the handle open!
|
||||
fs.writeFile(fd, data, writeError => {
|
||||
if (writeError) {
|
||||
return fs.close(fd, () => callback(writeError)); // still need to close the handle on error!
|
||||
}
|
||||
|
||||
// Flush contents (not metadata) of the file to disk
|
||||
fs.fdatasync(fd, (syncError: Error) => {
|
||||
|
||||
// In some exotic setups it is well possible that node fails to sync
|
||||
// In that case we disable flushing and warn to the console
|
||||
if (syncError) {
|
||||
console.warn('[node.js fs] fdatasync is now disabled for this session because it failed: ', syncError);
|
||||
canFlush = false;
|
||||
}
|
||||
|
||||
return fs.close(fd, closeError => callback(closeError));
|
||||
});
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
export function writeFileAndFlushSync(path: string, data: string | Buffer, options?: IWriteFileOptions): void {
|
||||
const ensuredOptions = ensureWriteOptions(options);
|
||||
|
||||
if (ensuredOptions.encoding) {
|
||||
data = encode(data, ensuredOptions.encoding.charset, { addBOM: ensuredOptions.encoding.addBOM });
|
||||
}
|
||||
|
||||
if (!canFlush) {
|
||||
return fs.writeFileSync(path, data, { mode: ensuredOptions.mode, flag: ensuredOptions.flag });
|
||||
}
|
||||
|
||||
// Open the file with same flags and mode as fs.writeFile()
|
||||
const fd = fs.openSync(path, ensuredOptions.flag, ensuredOptions.mode);
|
||||
|
||||
try {
|
||||
|
||||
// It is valid to pass a fd handle to fs.writeFile() and this will keep the handle open!
|
||||
fs.writeFileSync(fd, data);
|
||||
|
||||
// Flush contents (not metadata) of the file to disk
|
||||
try {
|
||||
fs.fdatasyncSync(fd);
|
||||
} catch (syncError) {
|
||||
console.warn('[node.js fs] fdatasyncSync is now disabled for this session because it failed: ', syncError);
|
||||
canFlush = false;
|
||||
}
|
||||
} finally {
|
||||
fs.closeSync(fd);
|
||||
}
|
||||
}
|
||||
|
||||
function ensureWriteOptions(options?: IWriteFileOptions): IEnsuredWriteFileOptions {
|
||||
if (!options) {
|
||||
return { mode: 0o666, flag: 'w' };
|
||||
}
|
||||
|
||||
return {
|
||||
mode: typeof options.mode === 'number' ? options.mode : 0o666,
|
||||
flag: typeof options.flag === 'string' ? options.flag : 'w',
|
||||
encoding: options.encoding
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Copied from: https://github.com/Microsoft/vscode-node-debug/blob/master/src/node/pathUtilities.ts#L83
|
||||
*
|
||||
* Given an absolute, normalized, and existing file path 'realcase' returns the exact path that the file has on disk.
|
||||
* On a case insensitive file system, the returned path might differ from the original path by character casing.
|
||||
* On a case sensitive file system, the returned path will always be identical to the original path.
|
||||
* In case of errors, null is returned. But you cannot use this function to verify that a path exists.
|
||||
* realcaseSync does not handle '..' or '.' path segments and it does not take the locale into account.
|
||||
*/
|
||||
export function realcaseSync(path: string): string | null {
	const dir = paths.dirname(path);
	if (path === dir) {	// end recursion: dirname() is a fixed point at the root / drive letter
		return path;
	}

	const name = (paths.basename(path) /* can be '' for windows drive letters */ || path).toLowerCase();
	try {
		// Compare the last segment case-insensitively against the real directory listing.
		const entries = readdirSync(dir);
		const found = entries.filter(e => e.toLowerCase() === name);	// use a case insensitive search
		if (found.length === 1) {
			// on a case sensitive filesystem we cannot determine here, whether the file exists or not, hence we need the 'file exists' precondition
			const prefix = realcaseSync(dir);   // recurse
			if (prefix) {
				return paths.join(prefix, found[0]);
			}
		} else if (found.length > 1) {
			// must be a case sensitive $filesystem
			// NOTE(review): 'name' is lowercased but 'found' holds original-case entries,
			// so this indexOf only matches when the on-disk name is entirely lowercase —
			// TODO confirm this is the intended tie-break.
			const ix = found.indexOf(name);
			if (ix >= 0) {	// case sensitive
				const prefix = realcaseSync(dir);   // recurse
				if (prefix) {
					return paths.join(prefix, found[ix]);
				}
			}
		}
	} catch (error) {
		// silently ignore error (contract is to return null on any failure)
	}

	return null;
}
|
||||
|
||||
export function realpathSync(path: string): string {
|
||||
try {
|
||||
return fs.realpathSync(path);
|
||||
} catch (error) {
|
||||
|
||||
// We hit an error calling fs.realpathSync(). Since fs.realpathSync() is doing some path normalization
|
||||
// we now do a similar normalization and then try again if we can access the path with read
|
||||
// permissions at least. If that succeeds, we return that path.
|
||||
// fs.realpath() is resolving symlinks and that can fail in certain cases. The workaround is
|
||||
// to not resolve links but to simply see if the path is read accessible or not.
|
||||
const normalizedPath = normalizePath(path);
|
||||
fs.accessSync(normalizedPath, fs.constants.R_OK); // throws in case of an error
|
||||
|
||||
return normalizedPath;
|
||||
}
|
||||
}
|
||||
|
||||
export function realpath(path: string, callback: (error: Error | null, realpath: string) => void): void {
|
||||
return fs.realpath(path, (error, realpath) => {
|
||||
if (!error) {
|
||||
return callback(null, realpath);
|
||||
}
|
||||
|
||||
// We hit an error calling fs.realpath(). Since fs.realpath() is doing some path normalization
|
||||
// we now do a similar normalization and then try again if we can access the path with read
|
||||
// permissions at least. If that succeeds, we return that path.
|
||||
// fs.realpath() is resolving symlinks and that can fail in certain cases. The workaround is
|
||||
// to not resolve links but to simply see if the path is read accessible or not.
|
||||
const normalizedPath = normalizePath(path);
|
||||
|
||||
return fs.access(normalizedPath, fs.constants.R_OK, error => {
|
||||
return callback(error, normalizedPath);
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
function normalizePath(path: string): string {
|
||||
return strings.rtrim(paths.normalize(path), paths.sep);
|
||||
}
|
||||
|
||||
// Watches the given path with fs.watch and reports change events via onChange
// and failures via onError. Returns a disposable that tears the watcher down;
// if the watcher cannot be created at all, errors are reported only when the
// path actually exists and Disposable.None is returned.
export function watch(path: string, onChange: (type: string, path?: string) => void, onError: (error: string) => void): IDisposable {
	try {
		const watcher = fs.watch(path);

		watcher.on('change', (type, raw) => {
			let file: string | undefined;
			if (raw) { // https://github.com/Microsoft/vscode/issues/38191 (filename can be missing)
				file = raw.toString();
				if (platform.isMacintosh) {
					// Mac: uses NFD unicode form on disk, but we want NFC
					// See also https://github.com/nodejs/node/issues/2165
					file = normalizeNFC(file);
				}
			}

			onChange(type, file);
		});

		watcher.on('error', (code: number, signal: string) => onError(`Failed to watch ${path} for changes (${code}, ${signal})`));

		// Caller disposes to stop watching; listeners are removed first so the
		// close cannot trigger further callbacks.
		return toDisposable(() => {
			watcher.removeAllListeners();
			watcher.close();
		});
	} catch (error) {
		// fs.watch can throw synchronously (e.g. missing path); only surface the
		// error when the path exists — a vanished path is not worth reporting.
		fs.exists(path, exists => {
			if (exists) {
				onError(`Failed to watch ${path} for changes (${error.toString()})`);
			}
		});
	}

	return Disposable.None;
}
|
||||
|
||||
export function sanitizeFilePath(candidate: string, cwd: string): string {
|
||||
|
||||
// Special case: allow to open a drive letter without trailing backslash
|
||||
if (platform.isWindows && strings.endsWith(candidate, ':')) {
|
||||
candidate += paths.sep;
|
||||
}
|
||||
|
||||
// Ensure absolute
|
||||
if (!paths.isAbsolute(candidate)) {
|
||||
candidate = paths.join(cwd, candidate);
|
||||
}
|
||||
|
||||
// Ensure normalized
|
||||
candidate = paths.normalize(candidate);
|
||||
|
||||
// Ensure no trailing slash/backslash
|
||||
if (platform.isWindows) {
|
||||
candidate = strings.rtrim(candidate, paths.sep);
|
||||
|
||||
// Special case: allow to open drive root ('C:\')
|
||||
if (strings.endsWith(candidate, ':')) {
|
||||
candidate += paths.sep;
|
||||
}
|
||||
|
||||
} else {
|
||||
candidate = strings.rtrim(candidate, paths.sep);
|
||||
|
||||
// Special case: allow to open root ('/')
|
||||
if (!candidate) {
|
||||
candidate = paths.sep;
|
||||
}
|
||||
}
|
||||
|
||||
return candidate;
|
||||
}
|
|
@ -0,0 +1,91 @@
|
|||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the MIT License. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
|
||||
import * as fs from 'fs';
|
||||
import { rtrim } from 'vs/base/common/strings';
|
||||
import { sep, join, normalize, dirname, basename } from 'vs/base/common/path';
|
||||
import { readdirSync } from 'vs/base/node/pfs';
|
||||
import { promisify } from 'util';
|
||||
|
||||
/**
|
||||
* Copied from: https://github.com/Microsoft/vscode-node-debug/blob/master/src/node/pathUtilities.ts#L83
|
||||
*
|
||||
* Given an absolute, normalized, and existing file path 'realcase' returns the exact path that the file has on disk.
|
||||
* On a case insensitive file system, the returned path might differ from the original path by character casing.
|
||||
* On a case sensitive file system, the returned path will always be identical to the original path.
|
||||
* In case of errors, null is returned. But you cannot use this function to verify that a path exists.
|
||||
* realcaseSync does not handle '..' or '.' path segments and it does not take the locale into account.
|
||||
*/
|
||||
export function realcaseSync(path: string): string | null {
	const dir = dirname(path);
	if (path === dir) {	// end recursion: dirname() is a fixed point at the root / drive letter
		return path;
	}

	const name = (basename(path) /* can be '' for windows drive letters */ || path).toLowerCase();
	try {
		// Compare the last segment case-insensitively against the real directory listing.
		const entries = readdirSync(dir);
		const found = entries.filter(e => e.toLowerCase() === name);	// use a case insensitive search
		if (found.length === 1) {
			// on a case sensitive filesystem we cannot determine here, whether the file exists or not, hence we need the 'file exists' precondition
			const prefix = realcaseSync(dir);   // recurse
			if (prefix) {
				return join(prefix, found[0]);
			}
		} else if (found.length > 1) {
			// must be a case sensitive $filesystem
			// NOTE(review): 'name' is lowercased but 'found' holds original-case entries,
			// so this indexOf only matches when the on-disk name is entirely lowercase —
			// TODO confirm this is the intended tie-break.
			const ix = found.indexOf(name);
			if (ix >= 0) {	// case sensitive
				const prefix = realcaseSync(dir);   // recurse
				if (prefix) {
					return join(prefix, found[ix]);
				}
			}
		}
	} catch (error) {
		// silently ignore error (contract is to return null on any failure)
	}

	return null;
}
|
||||
|
||||
export async function realpath(path: string): Promise<string> {
|
||||
try {
|
||||
return await promisify(fs.realpath)(path);
|
||||
} catch (error) {
|
||||
|
||||
// We hit an error calling fs.realpath(). Since fs.realpath() is doing some path normalization
|
||||
// we now do a similar normalization and then try again if we can access the path with read
|
||||
// permissions at least. If that succeeds, we return that path.
|
||||
// fs.realpath() is resolving symlinks and that can fail in certain cases. The workaround is
|
||||
// to not resolve links but to simply see if the path is read accessible or not.
|
||||
const normalizedPath = normalizePath(path);
|
||||
|
||||
await promisify(fs.access)(normalizedPath, fs.constants.R_OK);
|
||||
|
||||
return normalizedPath;
|
||||
}
|
||||
}
|
||||
|
||||
// Synchronous variant of realpath() above with the same fallback contract.
export function realpathSync(path: string): string {
	try {
		return fs.realpathSync(path);
	} catch (error) {

		// We hit an error calling fs.realpathSync(). Since fs.realpathSync() is doing some path normalization
		// we now do a similar normalization and then try again if we can access the path with read
		// permissions at least. If that succeeds, we return that path.
		// fs.realpath() is resolving symlinks and that can fail in certain cases. The workaround is
		// to not resolve links but to simply see if the path is read accessible or not.
		const normalizedPath = normalizePath(path);
		fs.accessSync(normalizedPath, fs.constants.R_OK); // throws in case of an error

		return normalizedPath;
	}
}
|
||||
|
||||
function normalizePath(path: string): string {
|
||||
return rtrim(normalize(path), sep);
|
||||
}
|
|
@ -1,187 +0,0 @@
|
|||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
|
||||
import * as assert from 'assert';
|
||||
|
||||
/**
|
||||
* Executes the given function (fn) over the given array of items (list) in parallel and returns the resulting errors and results as
|
||||
* array to the callback (callback). The resulting errors and results are evaluated by calling the provided callback function.
|
||||
*/
|
||||
export function parallel<T, E>(list: T[], fn: (item: T, callback: (err: Error | null, result: E | null) => void) => void, callback: (err: Array<Error | null> | null, result: E[]) => void): void {
|
||||
const results = new Array(list.length);
|
||||
const errors = new Array<Error | null>(list.length);
|
||||
let didErrorOccur = false;
|
||||
let doneCount = 0;
|
||||
|
||||
if (list.length === 0) {
|
||||
return callback(null, []);
|
||||
}
|
||||
|
||||
list.forEach((item, index) => {
|
||||
fn(item, (error, result) => {
|
||||
if (error) {
|
||||
didErrorOccur = true;
|
||||
results[index] = null;
|
||||
errors[index] = error;
|
||||
} else {
|
||||
results[index] = result;
|
||||
errors[index] = null;
|
||||
}
|
||||
|
||||
if (++doneCount === list.length) {
|
||||
return callback(didErrorOccur ? errors : null, results);
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Executes the given function (fn) over the given array of items (param) in sequential order and returns the first occurred error or the result as
|
||||
* array to the callback (callback). The resulting errors and results are evaluated by calling the provided callback function. The first param can
|
||||
* either be a function that returns an array of results to loop in async fashion or be an array of items already.
|
||||
*/
|
||||
export function loop<T, E>(param: (callback: (error: Error, result: T[]) => void) => void, fn: (item: T, callback: (error: Error | null, result: E | null) => void, index: number, total: number) => void, callback: (error: Error | null, result: E[] | null) => void): void;
export function loop<T, E>(param: T[], fn: (item: T, callback: (error: Error | null, result: E | null) => void, index: number, total: number) => void, callback: (error: Error | null, result: E[] | null) => void): void;
export function loop<E>(param: any, fn: (item: any, callback: (error: Error | null, result: E | null) => void, index: number, total: number) => void, callback: (error: Error | null, result: E[] | null) => void): void {

	// Assert
	assert.ok(param, 'Missing first parameter');
	assert.ok(typeof (fn) === 'function', 'Second parameter must be a function that is called for each element');
	assert.ok(typeof (callback) === 'function', 'Third parameter must be a function that is called on error and success');

	// Param is function, execute to retrieve array
	if (typeof (param) === 'function') {
		try {
			param((error: Error, result: E[]) => {
				if (error) {
					callback(error, null);
				} else {
					// re-enter with the produced array
					loop(result, fn, callback);
				}
			});
		} catch (error) {
			callback(error, null);
		}
	}

	// Expect the param to be an array and loop over it
	else {
		const results: E[] = [];

		// Sequential looper: each element is processed only after the previous
		// one's callback fired; iteration resumes on the next tick.
		const looper: (i: number) => void = function (i: number): void {

			// Still work to do
			if (i < param.length) {

				// Execute function on array element
				try {
					fn(param[i], (error: any, result: E) => {

						// A method might only send a boolean value as return value (e.g. fs.exists), support this case gracefully
						if (error === true || error === false) {
							result = error;
							error = null;
						}

						// Quit looping on error (iteration simply stops; callback fires once with the error)
						if (error) {
							callback(error, null);
						}

						// Otherwise push result on stack and continue looping
						else {
							if (result) { //Could be that provided function is not returning a result
								results.push(result);
							}

							// next tick avoids unbounded recursion on long arrays
							process.nextTick(() => {
								looper(i + 1);
							});
						}
					}, i, param.length);
				} catch (error) {
					callback(error, null);
				}
			}

			// Done looping, pass back results too callback function
			else {
				callback(null, results);
			}
		};

		// Start looping with first element in array
		looper(0);
	}
}
|
||||
|
||||
// Drives the public sequence() helper: the first entry of 'sequences' is the
// error handler, the rest are run one after another via loop(). Note that
// splice() mutates the caller-provided array.
function Sequence(sequences: { (...param: any[]): void; }[]): void {

	// Assert
	assert.ok(sequences.length > 1, 'Need at least one error handler and one function to process sequence');
	sequences.forEach((sequence) => {
		assert.ok(typeof (sequence) === 'function');
	});

	// Execute in Loop
	const errorHandler = sequences.splice(0, 1)[0]; //Remove error handler
	let sequenceResult: any = null;

	loop(sequences, (sequence, clb) => {
		const sequenceFunction = function (error: any, result: any): void {

			// A method might only send a boolean value as return value (e.g. fs.exists), support this case gracefully
			if (error === true || error === false) {
				result = error;
				error = null;
			}

			// Handle Error and Result
			if (error) {
				clb(error, null);
			} else {
				sequenceResult = result; //Remember result of sequence
				clb(null, null); //Don't pass on result to Looper as we are not aggregating it
			}
		};

		// We call the sequence function setting "this" to be the callback we define here
		// and we pass in the "sequenceResult" as first argument. Doing all this avoids having
		// to pass in a callback to the sequence because the callback is already "this".
		try {
			sequence.call(sequenceFunction, sequenceResult);
		} catch (error) {
			clb(error, null);
		}
	}, (error, result) => {
		// Only failures reach the user-provided handler; the aggregate result is
		// intentionally discarded (sequenceResult carried the value step-to-step).
		if (error) {
			errorHandler(error);
		}
	});
}
|
||||
|
||||
/**
|
||||
* Takes a variable list of functions to execute in sequence. The first function must be the error handler and the
|
||||
* following functions can do arbitrary work. "this" must be used as callback value for async functions to continue
|
||||
* through the sequence:
|
||||
* sequence(
|
||||
* function errorHandler(error) {
|
||||
* clb(error, null);
|
||||
* },
|
||||
*
|
||||
* function doSomethingAsync() {
|
||||
* fs.doAsync(path, this);
|
||||
* },
|
||||
*
|
||||
* function done(result) {
|
||||
* clb(null, result);
|
||||
* }
|
||||
* );
|
||||
*/
|
||||
export function sequence(errorHandler: (error: Error) => void, ...sequences: Function[]): void;
|
||||
export function sequence(sequences: Function[]): void;
|
||||
export function sequence(sequences: any): void {
|
||||
Sequence((Array.isArray(sequences)) ? sequences : Array.prototype.slice.call(arguments));
|
||||
}
|
|
@ -3,68 +3,200 @@
|
|||
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
|
||||
import * as extfs from 'vs/base/node/extfs';
|
||||
import { join } from 'vs/base/common/path';
|
||||
import { nfcall, Queue } from 'vs/base/common/async';
|
||||
import { join, dirname } from 'vs/base/common/path';
|
||||
import { Queue } from 'vs/base/common/async';
|
||||
import * as fs from 'fs';
|
||||
import * as os from 'os';
|
||||
import * as platform from 'vs/base/common/platform';
|
||||
import { Event } from 'vs/base/common/event';
|
||||
import { endsWith } from 'vs/base/common/strings';
|
||||
import { promisify } from 'util';
|
||||
import { CancellationToken } from 'vs/base/common/cancellation';
|
||||
import { isRootOrDriveLetter } from 'vs/base/common/extpath';
|
||||
import { generateUuid } from 'vs/base/common/uuid';
|
||||
import { normalizeNFC } from 'vs/base/common/normalization';
|
||||
import { encode, encodeStream } from 'vs/base/node/encoding';
|
||||
|
||||
export function readdir(path: string): Promise<string[]> {
|
||||
return nfcall(extfs.readdir, path);
|
||||
// Strategy selector for rimraf() below.
export enum RimRafMode {

	/**
	 * Slow version that unlinks each file and folder.
	 */
	UNLINK,

	/**
	 * Fast version that first moves the file/folder
	 * into a temp directory and then deletes that
	 * without waiting for it.
	 */
	MOVE
}
|
||||
|
||||
export async function rimraf(path: string, mode = RimRafMode.UNLINK): Promise<void> {
|
||||
if (isRootOrDriveLetter(path)) {
|
||||
throw new Error('rimraf - will refuse to recursively delete root');
|
||||
}
|
||||
|
||||
// delete: via unlink
|
||||
if (mode === RimRafMode.UNLINK) {
|
||||
return rimrafUnlink(path);
|
||||
}
|
||||
|
||||
// delete: via move
|
||||
return rimrafMove(path);
|
||||
}
|
||||
|
||||
// Recursively deletes the given path by unlinking each entry (depth-first).
// Missing paths (ENOENT) are silently ignored; other errors propagate.
async function rimrafUnlink(path: string): Promise<void> {
	try {
		const stat = await lstat(path);

		// Folder delete (recursive) - NOT for symbolic links though!
		if (stat.isDirectory() && !stat.isSymbolicLink()) {

			// Children (deleted in parallel; folder removal waits for all of them)
			const children = await readdir(path);
			await Promise.all(children.map(child => rimrafUnlink(join(path, child))));

			// Folder
			await promisify(fs.rmdir)(path);
		}

		// Single file delete
		else {

			// chmod as needed to allow for unlink (add owner-write bit)
			const mode = stat.mode;
			if (!(mode & 128)) { // 128 === 0200
				await chmod(path, mode | 128);
			}

			return unlink(path);
		}
	} catch (error) {
		// a path vanishing mid-delete is fine; anything else is a real failure
		if (error.code !== 'ENOENT') {
			throw error;
		}
	}
}
|
||||
|
||||
async function rimrafMove(path: string): Promise<void> {
|
||||
try {
|
||||
const pathInTemp = join(os.tmpdir(), generateUuid());
|
||||
try {
|
||||
await rename(path, pathInTemp);
|
||||
} catch (error) {
|
||||
return rimrafUnlink(path); // if rename fails, delete without tmp dir
|
||||
}
|
||||
|
||||
// Delete but do not return as promise
|
||||
rimrafUnlink(pathInTemp);
|
||||
} catch (error) {
|
||||
if (error.code !== 'ENOENT') {
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export function rimrafSync(path: string): void {
|
||||
if (isRootOrDriveLetter(path)) {
|
||||
throw new Error('rimraf - will refuse to recursively delete root');
|
||||
}
|
||||
|
||||
try {
|
||||
const stat = fs.lstatSync(path);
|
||||
|
||||
// Folder delete (recursive) - NOT for symbolic links though!
|
||||
if (stat.isDirectory() && !stat.isSymbolicLink()) {
|
||||
|
||||
// Children
|
||||
const children = readdirSync(path);
|
||||
children.map(child => rimrafSync(join(path, child)));
|
||||
|
||||
// Folder
|
||||
fs.rmdirSync(path);
|
||||
}
|
||||
|
||||
// Single file delete
|
||||
else {
|
||||
|
||||
// chmod as needed to allow for unlink
|
||||
const mode = stat.mode;
|
||||
if (!(mode & 128)) { // 128 === 0200
|
||||
fs.chmodSync(path, mode | 128);
|
||||
}
|
||||
|
||||
return fs.unlinkSync(path);
|
||||
}
|
||||
} catch (error) {
|
||||
if (error.code !== 'ENOENT') {
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export async function readdir(path: string): Promise<string[]> {
|
||||
return handleDirectoryChildren(await promisify(fs.readdir)(path));
|
||||
}
|
||||
|
||||
export function readdirSync(path: string): string[] {
|
||||
return handleDirectoryChildren(fs.readdirSync(path));
|
||||
}
|
||||
|
||||
function handleDirectoryChildren(children: string[]): string[] {
|
||||
// Mac: uses NFD unicode form on disk, but we want NFC
|
||||
// See also https://github.com/nodejs/node/issues/2165
|
||||
if (platform.isMacintosh) {
|
||||
return children.map(child => normalizeNFC(child));
|
||||
}
|
||||
|
||||
return children;
|
||||
}
|
||||
|
||||
export function exists(path: string): Promise<boolean> {
|
||||
return new Promise(c => fs.exists(path, c));
|
||||
return promisify(fs.exists)(path);
|
||||
}
|
||||
|
||||
export function chmod(path: string, mode: number): Promise<boolean> {
|
||||
return nfcall(fs.chmod, path, mode);
|
||||
}
|
||||
|
||||
export import mkdirp = extfs.mkdirp;
|
||||
|
||||
export function rimraf(path: string): Promise<void> {
|
||||
return lstat(path).then(stat => {
|
||||
if (stat.isDirectory() && !stat.isSymbolicLink()) {
|
||||
return readdir(path)
|
||||
.then(children => Promise.all(children.map(child => rimraf(join(path, child)))))
|
||||
.then(() => rmdir(path));
|
||||
} else {
|
||||
return unlink(path);
|
||||
}
|
||||
}, (err: NodeJS.ErrnoException) => {
|
||||
if (err.code === 'ENOENT') {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
return Promise.reject(err);
|
||||
});
|
||||
}
|
||||
|
||||
export function realpath(path: string): Promise<string> {
|
||||
return nfcall(extfs.realpath, path);
|
||||
export function chmod(path: string, mode: number): Promise<void> {
|
||||
return promisify(fs.chmod)(path, mode);
|
||||
}
|
||||
|
||||
export function stat(path: string): Promise<fs.Stats> {
|
||||
return nfcall(fs.stat, path);
|
||||
return promisify(fs.stat)(path);
|
||||
}
|
||||
|
||||
export function statLink(path: string): Promise<{ stat: fs.Stats, isSymbolicLink: boolean }> {
|
||||
return nfcall(extfs.statLink, path);
|
||||
export interface IStatAndLink {
|
||||
stat: fs.Stats;
|
||||
isSymbolicLink: boolean;
|
||||
}
|
||||
|
||||
// Stats the given path, reporting whether it is a symbolic link. For links
// (or when lstat fails), the returned stat describes the link TARGET via
// stat(); otherwise the lstat result is returned directly.
export async function statLink(path: string): Promise<IStatAndLink> {

	// First stat the link
	let linkStat: fs.Stats | undefined;
	let linkStatError: NodeJS.ErrnoException | undefined;
	try {
		linkStat = await lstat(path);
	} catch (error) {
		linkStatError = error;
	}

	// Then stat the target and return that
	const isLink = !!(linkStat && linkStat.isSymbolicLink());
	if (linkStatError || isLink) {
		const fileStat = await stat(path);

		return { stat: fileStat, isSymbolicLink: isLink };
	}

	// linkStat is guaranteed here: no error and not a link
	return { stat: linkStat!, isSymbolicLink: false };
}
|
||||
|
||||
export function lstat(path: string): Promise<fs.Stats> {
|
||||
return nfcall(fs.lstat, path);
|
||||
}
|
||||
|
||||
export function move(oldPath: string, newPath: string): Promise<void> {
|
||||
return nfcall(extfs.mv, oldPath, newPath);
|
||||
return promisify(fs.lstat)(path);
|
||||
}
|
||||
|
||||
export function rename(oldPath: string, newPath: string): Promise<void> {
|
||||
return nfcall(fs.rename, oldPath, newPath);
|
||||
return promisify(fs.rename)(oldPath, newPath);
|
||||
}
|
||||
|
||||
export function renameIgnoreError(oldPath: string, newPath: string): Promise<void> {
|
||||
|
@ -73,30 +205,26 @@ export function renameIgnoreError(oldPath: string, newPath: string): Promise<voi
|
|||
});
|
||||
}
|
||||
|
||||
export function rmdir(path: string): Promise<void> {
|
||||
return nfcall(fs.rmdir, path);
|
||||
}
|
||||
|
||||
export function unlink(path: string): Promise<void> {
|
||||
return nfcall(fs.unlink, path);
|
||||
return promisify(fs.unlink)(path);
|
||||
}
|
||||
|
||||
export function symlink(target: string, path: string, type?: string): Promise<void> {
|
||||
return nfcall<void>(fs.symlink, target, path, type);
|
||||
return promisify(fs.symlink)(target, path, type);
|
||||
}
|
||||
|
||||
export function readlink(path: string): Promise<string> {
|
||||
return nfcall<string>(fs.readlink, path);
|
||||
return promisify(fs.readlink)(path);
|
||||
}
|
||||
|
||||
export function truncate(path: string, len: number): Promise<void> {
|
||||
return nfcall(fs.truncate, path, len);
|
||||
return promisify(fs.truncate)(path, len);
|
||||
}
|
||||
|
||||
export function readFile(path: string): Promise<Buffer>;
|
||||
export function readFile(path: string, encoding: string): Promise<string>;
|
||||
export function readFile(path: string, encoding?: string): Promise<Buffer | string> {
|
||||
return nfcall(fs.readFile, path, encoding);
|
||||
return promisify(fs.readFile)(path, encoding);
|
||||
}
|
||||
|
||||
// According to node.js docs (https://nodejs.org/docs/v6.5.0/api/fs.html#fs_fs_writefile_file_data_options_callback)
|
||||
|
@ -104,15 +232,15 @@ export function readFile(path: string, encoding?: string): Promise<Buffer | stri
|
|||
// Therefor we use a Queue on the path that is given to us to sequentialize calls to the same path properly.
|
||||
const writeFilePathQueue: { [path: string]: Queue<void> } = Object.create(null);
|
||||
|
||||
export function writeFile(path: string, data: string, options?: extfs.IWriteFileOptions): Promise<void>;
|
||||
export function writeFile(path: string, data: Buffer, options?: extfs.IWriteFileOptions): Promise<void>;
|
||||
export function writeFile(path: string, data: Uint8Array, options?: extfs.IWriteFileOptions): Promise<void>;
|
||||
export function writeFile(path: string, data: NodeJS.ReadableStream, options?: extfs.IWriteFileOptions): Promise<void>;
|
||||
export function writeFile(path: string, data: any, options?: extfs.IWriteFileOptions): Promise<void>;
|
||||
export function writeFile(path: string, data: any, options?: extfs.IWriteFileOptions): any {
|
||||
export function writeFile(path: string, data: string, options?: IWriteFileOptions): Promise<void>;
|
||||
export function writeFile(path: string, data: Buffer, options?: IWriteFileOptions): Promise<void>;
|
||||
export function writeFile(path: string, data: Uint8Array, options?: IWriteFileOptions): Promise<void>;
|
||||
export function writeFile(path: string, data: NodeJS.ReadableStream, options?: IWriteFileOptions): Promise<void>;
|
||||
export function writeFile(path: string, data: string | Buffer | NodeJS.ReadableStream | Uint8Array, options?: IWriteFileOptions): Promise<void>;
|
||||
export function writeFile(path: string, data: string | Buffer | NodeJS.ReadableStream | Uint8Array, options?: IWriteFileOptions): Promise<void> {
|
||||
const queueKey = toQueueKey(path);
|
||||
|
||||
return ensureWriteFileQueue(queueKey).queue(() => nfcall(extfs.writeFileAndFlush, path, data, options));
|
||||
return ensureWriteFileQueue(queueKey).queue(() => writeFileAndFlush(path, data, options));
|
||||
}
|
||||
|
||||
function toQueueKey(path: string): string {
|
||||
|
@ -140,43 +268,235 @@ function ensureWriteFileQueue(queueKey: string): Queue<void> {
|
|||
return writeFileQueue;
|
||||
}
|
||||
|
||||
/**
|
||||
* Read a dir and return only subfolders
|
||||
*/
|
||||
export function readDirsInDir(dirPath: string): Promise<string[]> {
|
||||
return readdir(dirPath).then(children => {
|
||||
return Promise.all(children.map(c => dirExists(join(dirPath, c)))).then(exists => {
|
||||
return children.filter((_, i) => exists[i]);
|
||||
// Options accepted by writeFile(); every field is optional and defaulted by
// ensureWriteOptions().
export interface IWriteFileOptions {
	mode?: number;
	flag?: string;
	encoding?: {
		charset: string;
		addBOM: boolean;
	};
}

// Internal shape after defaulting: mode and flag are guaranteed present.
interface IEnsuredWriteFileOptions extends IWriteFileOptions {
	mode: number;
	flag: string;
}
|
||||
|
||||
let canFlush = true;
|
||||
// Writes the given data (buffer-like or stream) to the path and flushes it to
// disk, dispatching to the buffer or stream implementation as appropriate.
function writeFileAndFlush(path: string, data: string | Buffer | NodeJS.ReadableStream | Uint8Array, options: IWriteFileOptions | undefined): Promise<void> {
	const ensuredOptions = ensureWriteOptions(options);

	return new Promise((resolve, reject) => {
		if (typeof data === 'string' || Buffer.isBuffer(data) || data instanceof Uint8Array) {
			doWriteFileAndFlush(path, data, ensuredOptions, error => error ? reject(error) : resolve());
		} else {
			// anything else is treated as a readable stream
			doWriteFileStreamAndFlush(path, data, ensuredOptions, error => error ? reject(error) : resolve());
		}
	});
}
|
||||
|
||||
// Pipes the given readable stream into a write stream on 'path', optionally
// re-encoding it, and fdatasync()s the file descriptor before closing so the
// contents reach disk. The callback fires exactly once, with an error or not.
function doWriteFileStreamAndFlush(path: string, reader: NodeJS.ReadableStream, options: IEnsuredWriteFileOptions, callback: (error?: Error) => void): void {

	// finish only once
	let finished = false;
	const finish = (error?: Error) => {
		if (!finished) {
			finished = true;

			// in error cases we need to manually close streams
			// if the write stream was successfully opened
			if (error) {
				if (isOpen) {
					writer.once('close', () => callback(error));
					writer.destroy();
				} else {
					callback(error);
				}
			}

			// otherwise just return without error
			else {
				callback();
			}
		}
	};

	// create writer to target. we set autoClose: false because we want to use the streams
	// file descriptor to call fs.fdatasync to ensure the data is flushed to disk
	const writer = fs.createWriteStream(path, { mode: options.mode, flags: options.flag, autoClose: false });

	// Event: 'open'
	// Purpose: save the fd for later use and start piping
	// Notes: will not be called when there is an error opening the file descriptor!
	let fd: number;
	let isOpen: boolean;
	writer.once('open', descriptor => {
		fd = descriptor;
		isOpen = true;

		// if an encoding is provided, we need to pipe the stream through
		// an encoder stream and forward the encoding related options
		if (options.encoding) {
			reader = reader.pipe(encodeStream(options.encoding.charset, { addBOM: options.encoding.addBOM }));
		}

		// start data piping only when we got a successful open. this ensures that we do
		// not consume the stream when an error happens and helps to fix this issue:
		// https://github.com/Microsoft/vscode/issues/42542
		reader.pipe(writer);
	});

	// Event: 'error'
	// Purpose: to return the error to the outside and to close the write stream (does not happen automatically)
	reader.once('error', error => finish(error));
	writer.once('error', error => finish(error));

	// Event: 'finish'
	// Purpose: use fs.fdatasync to flush the contents to disk
	// Notes: event is called when the writer has finished writing to the underlying resource. we must call writer.close()
	// because we have created the WriteStream with autoClose: false
	writer.once('finish', () => {

		// flush to disk
		if (canFlush && isOpen) {
			fs.fdatasync(fd, (syncError: Error) => {

				// In some exotic setups it is well possible that node fails to sync
				// In that case we disable flushing and warn to the console
				if (syncError) {
					console.warn('[node.js fs] fdatasync is now disabled for this session because it failed: ', syncError);
					canFlush = false;
				}

				writer.destroy();
			});
		} else {
			writer.destroy();
		}
	});

	// Event: 'close'
	// Purpose: signal we are done to the outside
	// Notes: event is called when the writer's filedescriptor is closed
	writer.once('close', () => finish());
}
|
||||
|
||||
// Calls fs.writeFile() followed by a fs.sync() call to flush the changes to disk
|
||||
// We do this in cases where we want to make sure the data is really on disk and
|
||||
// not in some cache.
|
||||
//
|
||||
// See https://github.com/nodejs/node/blob/v5.10.0/lib/fs.js#L1194
|
||||
function doWriteFileAndFlush(path: string, data: string | Buffer | Uint8Array, options: IEnsuredWriteFileOptions, callback: (error?: Error) => void): void {
|
||||
if (options.encoding) {
|
||||
data = encode(data instanceof Uint8Array ? Buffer.from(data) : data, options.encoding.charset, { addBOM: options.encoding.addBOM });
|
||||
}
|
||||
|
||||
if (!canFlush) {
|
||||
return fs.writeFile(path, data, { mode: options.mode, flag: options.flag }, callback);
|
||||
}
|
||||
|
||||
// Open the file with same flags and mode as fs.writeFile()
|
||||
fs.open(path, options.flag, options.mode, (openError, fd) => {
|
||||
if (openError) {
|
||||
return callback(openError);
|
||||
}
|
||||
|
||||
// It is valid to pass a fd handle to fs.writeFile() and this will keep the handle open!
|
||||
fs.writeFile(fd, data, writeError => {
|
||||
if (writeError) {
|
||||
return fs.close(fd, () => callback(writeError)); // still need to close the handle on error!
|
||||
}
|
||||
|
||||
// Flush contents (not metadata) of the file to disk
|
||||
fs.fdatasync(fd, (syncError: Error) => {
|
||||
|
||||
// In some exotic setups it is well possible that node fails to sync
|
||||
// In that case we disable flushing and warn to the console
|
||||
if (syncError) {
|
||||
console.warn('[node.js fs] fdatasync is now disabled for this session because it failed: ', syncError);
|
||||
canFlush = false;
|
||||
}
|
||||
|
||||
return fs.close(fd, closeError => callback(closeError));
|
||||
});
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* `path` exists and is a directory
|
||||
*/
|
||||
export function dirExists(path: string): Promise<boolean> {
|
||||
return stat(path).then(stat => stat.isDirectory(), () => false);
|
||||
}
|
||||
export function writeFileSync(path: string, data: string | Buffer, options?: IWriteFileOptions): void {
|
||||
const ensuredOptions = ensureWriteOptions(options);
|
||||
|
||||
/**
|
||||
* `path` exists and is a file.
|
||||
*/
|
||||
export function fileExists(path: string): Promise<boolean> {
|
||||
return stat(path).then(stat => stat.isFile(), () => false);
|
||||
}
|
||||
|
||||
/**
|
||||
* Deletes a path from disk.
|
||||
*/
|
||||
let _tmpDir: string | null = null;
|
||||
function getTmpDir(): string {
|
||||
if (!_tmpDir) {
|
||||
_tmpDir = os.tmpdir();
|
||||
if (ensuredOptions.encoding) {
|
||||
data = encode(data, ensuredOptions.encoding.charset, { addBOM: ensuredOptions.encoding.addBOM });
|
||||
}
|
||||
|
||||
if (!canFlush) {
|
||||
return fs.writeFileSync(path, data, { mode: ensuredOptions.mode, flag: ensuredOptions.flag });
|
||||
}
|
||||
|
||||
// Open the file with same flags and mode as fs.writeFile()
|
||||
const fd = fs.openSync(path, ensuredOptions.flag, ensuredOptions.mode);
|
||||
|
||||
try {
|
||||
|
||||
// It is valid to pass a fd handle to fs.writeFile() and this will keep the handle open!
|
||||
fs.writeFileSync(fd, data);
|
||||
|
||||
// Flush contents (not metadata) of the file to disk
|
||||
try {
|
||||
fs.fdatasyncSync(fd);
|
||||
} catch (syncError) {
|
||||
console.warn('[node.js fs] fdatasyncSync is now disabled for this session because it failed: ', syncError);
|
||||
canFlush = false;
|
||||
}
|
||||
} finally {
|
||||
fs.closeSync(fd);
|
||||
}
|
||||
return _tmpDir;
|
||||
}
|
||||
export function del(path: string, tmp = getTmpDir()): Promise<void> {
|
||||
return nfcall(extfs.del, path, tmp);
|
||||
|
||||
function ensureWriteOptions(options?: IWriteFileOptions): IEnsuredWriteFileOptions {
|
||||
if (!options) {
|
||||
return { mode: 0o666, flag: 'w' };
|
||||
}
|
||||
|
||||
return {
|
||||
mode: typeof options.mode === 'number' ? options.mode : 0o666,
|
||||
flag: typeof options.flag === 'string' ? options.flag : 'w',
|
||||
encoding: options.encoding
|
||||
};
|
||||
}
|
||||
|
||||
export async function readDirsInDir(dirPath: string): Promise<string[]> {
|
||||
const children = await readdir(dirPath);
|
||||
const directories: string[] = [];
|
||||
|
||||
for (const child of children) {
|
||||
if (await dirExists(join(dirPath, child))) {
|
||||
directories.push(child);
|
||||
}
|
||||
}
|
||||
|
||||
return directories;
|
||||
}
|
||||
|
||||
export async function dirExists(path: string): Promise<boolean> {
|
||||
try {
|
||||
const fileStat = await stat(path);
|
||||
|
||||
return fileStat.isDirectory();
|
||||
} catch (error) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
export async function fileExists(path: string): Promise<boolean> {
|
||||
try {
|
||||
const fileStat = await stat(path);
|
||||
|
||||
return fileStat.isFile();
|
||||
} catch (error) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
export function whenDeleted(path: string): Promise<void> {
|
||||
|
@ -200,6 +520,154 @@ export function whenDeleted(path: string): Promise<void> {
|
|||
});
|
||||
}
|
||||
|
||||
export function copy(source: string, target: string): Promise<void> {
|
||||
return nfcall(extfs.copy, source, target);
|
||||
export async function move(source: string, target: string): Promise<void> {
|
||||
if (source === target) {
|
||||
return Promise.resolve();
|
||||
}
|
||||
|
||||
async function updateMtime(path: string): Promise<void> {
|
||||
const stat = await lstat(path);
|
||||
if (stat.isDirectory() || stat.isSymbolicLink()) {
|
||||
return Promise.resolve(); // only for files
|
||||
}
|
||||
|
||||
const fd = await promisify(fs.open)(path, 'a');
|
||||
try {
|
||||
await promisify(fs.futimes)(fd, stat.atime, new Date());
|
||||
} catch (error) {
|
||||
//ignore
|
||||
}
|
||||
|
||||
return promisify(fs.close)(fd);
|
||||
}
|
||||
|
||||
try {
|
||||
await rename(source, target);
|
||||
await updateMtime(target);
|
||||
} catch (error) {
|
||||
|
||||
// In two cases we fallback to classic copy and delete:
|
||||
//
|
||||
// 1.) The EXDEV error indicates that source and target are on different devices
|
||||
// In this case, fallback to using a copy() operation as there is no way to
|
||||
// rename() between different devices.
|
||||
//
|
||||
// 2.) The user tries to rename a file/folder that ends with a dot. This is not
|
||||
// really possible to move then, at least on UNC devices.
|
||||
if (source.toLowerCase() !== target.toLowerCase() && error.code === 'EXDEV' || endsWith(source, '.')) {
|
||||
await copy(source, target);
|
||||
await rimraf(source, RimRafMode.MOVE);
|
||||
await updateMtime(target);
|
||||
} else {
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export async function copy(source: string, target: string, copiedSourcesIn?: { [path: string]: boolean }): Promise<void> {
|
||||
const copiedSources = copiedSourcesIn ? copiedSourcesIn : Object.create(null);
|
||||
|
||||
const fileStat = await stat(source);
|
||||
if (!fileStat.isDirectory()) {
|
||||
return doCopyFile(source, target, fileStat.mode & 511);
|
||||
}
|
||||
|
||||
if (copiedSources[source]) {
|
||||
return Promise.resolve(); // escape when there are cycles (can happen with symlinks)
|
||||
}
|
||||
|
||||
copiedSources[source] = true; // remember as copied
|
||||
|
||||
// Create folder
|
||||
await mkdirp(target, fileStat.mode & 511);
|
||||
|
||||
// Copy each file recursively
|
||||
const files = await readdir(source);
|
||||
for (let i = 0; i < files.length; i++) {
|
||||
const file = files[i];
|
||||
await copy(join(source, file), join(target, file), copiedSources);
|
||||
}
|
||||
}
|
||||
|
||||
async function doCopyFile(source: string, target: string, mode: number): Promise<void> {
|
||||
return new Promise((resolve, reject) => {
|
||||
const reader = fs.createReadStream(source);
|
||||
const writer = fs.createWriteStream(target, { mode });
|
||||
|
||||
let finished = false;
|
||||
const finish = (error?: Error) => {
|
||||
if (!finished) {
|
||||
finished = true;
|
||||
|
||||
// in error cases, pass to callback
|
||||
if (error) {
|
||||
return reject(error);
|
||||
}
|
||||
|
||||
// we need to explicitly chmod because of https://github.com/nodejs/node/issues/1104
|
||||
fs.chmod(target, mode, error => error ? reject(error) : resolve());
|
||||
}
|
||||
};
|
||||
|
||||
// handle errors properly
|
||||
reader.once('error', error => finish(error));
|
||||
writer.once('error', error => finish(error));
|
||||
|
||||
// we are done (underlying fd has been closed)
|
||||
writer.once('close', () => finish());
|
||||
|
||||
// start piping
|
||||
reader.pipe(writer);
|
||||
});
|
||||
}
|
||||
|
||||
export async function mkdirp(path: string, mode?: number, token?: CancellationToken): Promise<void> {
|
||||
const mkdir = async () => {
|
||||
try {
|
||||
await promisify(fs.mkdir)(path, mode);
|
||||
} catch (error) {
|
||||
|
||||
// ENOENT: a parent folder does not exist yet
|
||||
if (error.code === 'ENOENT') {
|
||||
return Promise.reject(error);
|
||||
}
|
||||
|
||||
// Any other error: check if folder exists and
|
||||
// return normally in that case if its a folder
|
||||
try {
|
||||
const fileStat = await stat(path);
|
||||
if (!fileStat.isDirectory()) {
|
||||
return Promise.reject(new Error(`'${path}' exists and is not a directory.`));
|
||||
}
|
||||
} catch (statError) {
|
||||
throw error; // rethrow original error
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
// stop at root
|
||||
if (path === dirname(path)) {
|
||||
return Promise.resolve();
|
||||
}
|
||||
|
||||
try {
|
||||
await mkdir();
|
||||
} catch (error) {
|
||||
|
||||
// Respect cancellation
|
||||
if (token && token.isCancellationRequested) {
|
||||
return Promise.resolve();
|
||||
}
|
||||
|
||||
// ENOENT: a parent folder does not exist yet, continue
|
||||
// to create the parent folder and then try again.
|
||||
if (error.code === 'ENOENT') {
|
||||
await mkdirp(dirname(path), mode);
|
||||
|
||||
return mkdir();
|
||||
}
|
||||
|
||||
// Any other error
|
||||
return Promise.reject(error);
|
||||
}
|
||||
}
|
|
@ -4,20 +4,9 @@
|
|||
*--------------------------------------------------------------------------------------------*/
|
||||
|
||||
import { exec } from 'child_process';
|
||||
|
||||
import { ProcessItem } from 'vs/base/common/processes';
|
||||
import { getPathFromAmdModule } from 'vs/base/common/amd';
|
||||
|
||||
export interface ProcessItem {
|
||||
name: string;
|
||||
cmd: string;
|
||||
pid: number;
|
||||
ppid: number;
|
||||
load: number;
|
||||
mem: number;
|
||||
|
||||
children?: ProcessItem[];
|
||||
}
|
||||
|
||||
export function listProcesses(rootPid: number): Promise<ProcessItem> {
|
||||
|
||||
return new Promise((resolve, reject) => {
|
||||
|
@ -181,7 +170,7 @@ export function listProcesses(rootPid: number): Promise<ProcessItem> {
|
|||
exec(CMD, { maxBuffer: 1000 * 1024, env: { LC_NUMERIC: 'en_US.UTF-8' } }, (err, stdout, stderr) => {
|
||||
|
||||
if (err || stderr) {
|
||||
reject(err || stderr.toString());
|
||||
reject(err || new Error(stderr.toString()));
|
||||
} else {
|
||||
|
||||
const lines = stdout.toString().split('\n');
|
||||
|
@ -214,7 +203,7 @@ export function listProcesses(rootPid: number): Promise<ProcessItem> {
|
|||
|
||||
exec(cmd, {}, (err, stdout, stderr) => {
|
||||
if (err || stderr) {
|
||||
reject(err || stderr.toString());
|
||||
reject(err || new Error(stderr.toString()));
|
||||
} else {
|
||||
const cpuUsage = stdout.toString().split('\n');
|
||||
for (let i = 0; i < pids.length; i++) {
|
||||
|
|
|
@ -64,7 +64,7 @@ export interface IStorage extends IDisposable {
|
|||
getNumber(key: string, fallbackValue: number): number;
|
||||
getNumber(key: string, fallbackValue?: number): number | undefined;
|
||||
|
||||
set(key: string, value: string | boolean | number): Promise<void>;
|
||||
set(key: string, value: string | boolean | number | undefined | null): Promise<void>;
|
||||
delete(key: string): Promise<void>;
|
||||
|
||||
close(): Promise<void>;
|
||||
|
@ -152,7 +152,7 @@ export class Storage extends Disposable implements IStorage {
|
|||
return this.cache.size;
|
||||
}
|
||||
|
||||
init(): Promise<void> {
|
||||
async init(): Promise<void> {
|
||||
if (this.state !== StorageState.None) {
|
||||
return Promise.resolve(); // either closed or already initialized
|
||||
}
|
||||
|
@ -166,9 +166,7 @@ export class Storage extends Disposable implements IStorage {
|
|||
return Promise.resolve();
|
||||
}
|
||||
|
||||
return this.database.getItems().then(items => {
|
||||
this.cache = items;
|
||||
});
|
||||
this.cache = await this.database.getItems();
|
||||
}
|
||||
|
||||
get(key: string, fallbackValue: string): string;
|
||||
|
@ -207,7 +205,7 @@ export class Storage extends Disposable implements IStorage {
|
|||
return parseInt(value, 10);
|
||||
}
|
||||
|
||||
set(key: string, value: string | boolean | number): Promise<void> {
|
||||
set(key: string, value: string | boolean | number | null | undefined): Promise<void> {
|
||||
if (this.state === StorageState.Closed) {
|
||||
return Promise.resolve(); // Return early if we are already closed
|
||||
}
|
||||
|
@ -262,7 +260,7 @@ export class Storage extends Disposable implements IStorage {
|
|||
return this.flushDelayer.trigger(() => this.flushPending());
|
||||
}
|
||||
|
||||
close(): Promise<void> {
|
||||
async close(): Promise<void> {
|
||||
if (this.state === StorageState.Closed) {
|
||||
return Promise.resolve(); // return if already closed
|
||||
}
|
||||
|
@ -276,8 +274,13 @@ export class Storage extends Disposable implements IStorage {
|
|||
//
|
||||
// Recovery: we pass our cache over as recovery option in case
|
||||
// the DB is not healthy.
|
||||
const onDone = () => this.database.close(() => this.cache);
|
||||
return this.flushDelayer.trigger(() => this.flushPending(), 0 /* as soon as possible */).then(onDone, onDone);
|
||||
try {
|
||||
await this.flushDelayer.trigger(() => this.flushPending(), 0 /* as soon as possible */);
|
||||
} catch (error) {
|
||||
// Ignore
|
||||
}
|
||||
|
||||
await this.database.close(() => this.cache);
|
||||
}
|
||||
|
||||
private flushPending(): Promise<void> {
|
||||
|
@ -344,24 +347,25 @@ export class SQLiteStorageDatabase implements IStorageDatabase {
|
|||
this.whenConnected = this.connect(path);
|
||||
}
|
||||
|
||||
getItems(): Promise<Map<string, string>> {
|
||||
return this.whenConnected.then(connection => {
|
||||
const items = new Map<string, string>();
|
||||
async getItems(): Promise<Map<string, string>> {
|
||||
const connection = await this.whenConnected;
|
||||
|
||||
return this.all(connection, 'SELECT * FROM ItemTable').then(rows => {
|
||||
rows.forEach(row => items.set(row.key, row.value));
|
||||
const items = new Map<string, string>();
|
||||
|
||||
if (this.logger.isTracing) {
|
||||
this.logger.trace(`[storage ${this.name}] getItems(): ${items.size} rows`);
|
||||
}
|
||||
const rows = await this.all(connection, 'SELECT * FROM ItemTable');
|
||||
rows.forEach(row => items.set(row.key, row.value));
|
||||
|
||||
return items;
|
||||
});
|
||||
});
|
||||
if (this.logger.isTracing) {
|
||||
this.logger.trace(`[storage ${this.name}] getItems(): ${items.size} rows`);
|
||||
}
|
||||
|
||||
return items;
|
||||
}
|
||||
|
||||
updateItems(request: IUpdateRequest): Promise<void> {
|
||||
return this.whenConnected.then(connection => this.doUpdateItems(connection, request));
|
||||
async updateItems(request: IUpdateRequest): Promise<void> {
|
||||
const connection = await this.whenConnected;
|
||||
|
||||
return this.doUpdateItems(connection, request);
|
||||
}
|
||||
|
||||
private doUpdateItems(connection: IDatabaseConnection, request: IUpdateRequest): Promise<void> {
|
||||
|
@ -452,10 +456,12 @@ export class SQLiteStorageDatabase implements IStorageDatabase {
|
|||
});
|
||||
}
|
||||
|
||||
close(recovery?: () => Map<string, string>): Promise<void> {
|
||||
async close(recovery?: () => Map<string, string>): Promise<void> {
|
||||
this.logger.trace(`[storage ${this.name}] close()`);
|
||||
|
||||
return this.whenConnected.then(connection => this.doClose(connection, recovery));
|
||||
const connection = await this.whenConnected;
|
||||
|
||||
return this.doClose(connection, recovery);
|
||||
}
|
||||
|
||||
private doClose(connection: IDatabaseConnection, recovery?: () => Map<string, string>): Promise<void> {
|
||||
|
@ -529,24 +535,23 @@ export class SQLiteStorageDatabase implements IStorageDatabase {
|
|||
return `${path}.backup`;
|
||||
}
|
||||
|
||||
checkIntegrity(full: boolean): Promise<string> {
|
||||
async checkIntegrity(full: boolean): Promise<string> {
|
||||
this.logger.trace(`[storage ${this.name}] checkIntegrity(full: ${full})`);
|
||||
|
||||
return this.whenConnected.then(connection => {
|
||||
return this.get(connection, full ? 'PRAGMA integrity_check' : 'PRAGMA quick_check').then(row => {
|
||||
const integrity = full ? row['integrity_check'] : row['quick_check'];
|
||||
const connection = await this.whenConnected;
|
||||
const row = await this.get(connection, full ? 'PRAGMA integrity_check' : 'PRAGMA quick_check');
|
||||
|
||||
if (connection.isErroneous) {
|
||||
return `${integrity} (last error: ${connection.lastError})`;
|
||||
}
|
||||
const integrity = full ? row['integrity_check'] : row['quick_check'];
|
||||
|
||||
if (connection.isInMemory) {
|
||||
return `${integrity} (in-memory!)`;
|
||||
}
|
||||
if (connection.isErroneous) {
|
||||
return `${integrity} (last error: ${connection.lastError})`;
|
||||
}
|
||||
|
||||
return integrity;
|
||||
});
|
||||
});
|
||||
if (connection.isInMemory) {
|
||||
return `${integrity} (in-memory!)`;
|
||||
}
|
||||
|
||||
return integrity;
|
||||
}
|
||||
|
||||
private connect(path: string, retryOnBusy: boolean = true): Promise<IDatabaseConnection> {
|
||||
|
|
|
@ -0,0 +1,192 @@
|
|||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the MIT License. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
|
||||
import { join, basename } from 'vs/base/common/path';
|
||||
import { watch } from 'fs';
|
||||
import { isMacintosh } from 'vs/base/common/platform';
|
||||
import { normalizeNFC } from 'vs/base/common/normalization';
|
||||
import { toDisposable, IDisposable, dispose } from 'vs/base/common/lifecycle';
|
||||
import { exists, readdir } from 'vs/base/node/pfs';
|
||||
|
||||
export function watchFile(path: string, onChange: (type: 'changed' | 'deleted', path: string) => void, onError: (error: string) => void): IDisposable {
|
||||
return doWatchNonRecursive({ path, isDirectory: false }, onChange, onError);
|
||||
}
|
||||
|
||||
export function watchFolder(path: string, onChange: (type: 'added' | 'changed' | 'deleted', path: string) => void, onError: (error: string) => void): IDisposable {
|
||||
return doWatchNonRecursive({ path, isDirectory: true }, onChange, onError);
|
||||
}
|
||||
|
||||
export const CHANGE_BUFFER_DELAY = 100;
|
||||
|
||||
function doWatchNonRecursive(file: { path: string, isDirectory: boolean }, onChange: (type: 'added' | 'changed' | 'deleted', path: string) => void, onError: (error: string) => void): IDisposable {
|
||||
const originalFileName = basename(file.path);
|
||||
const mapPathToStatDisposable = new Map<string, IDisposable>();
|
||||
|
||||
let disposed = false;
|
||||
let watcherDisposables: IDisposable[] = [toDisposable(() => {
|
||||
mapPathToStatDisposable.forEach(disposable => dispose(disposable));
|
||||
mapPathToStatDisposable.clear();
|
||||
})];
|
||||
|
||||
try {
|
||||
|
||||
// Creating watcher can fail with an exception
|
||||
const watcher = watch(file.path);
|
||||
watcherDisposables.push(toDisposable(() => {
|
||||
watcher.removeAllListeners();
|
||||
watcher.close();
|
||||
}));
|
||||
|
||||
// Folder: resolve children to emit proper events
|
||||
const folderChildren: Set<string> = new Set<string>();
|
||||
if (file.isDirectory) {
|
||||
readdir(file.path).then(children => children.forEach(child => folderChildren.add(child)));
|
||||
}
|
||||
|
||||
watcher.on('error', (code: number, signal: string) => {
|
||||
if (!disposed) {
|
||||
onError(`Failed to watch ${file.path} for changes using fs.watch() (${code}, ${signal})`);
|
||||
}
|
||||
});
|
||||
|
||||
watcher.on('change', (type, raw) => {
|
||||
if (disposed) {
|
||||
return; // ignore if already disposed
|
||||
}
|
||||
|
||||
// Normalize file name
|
||||
let changedFileName: string = '';
|
||||
if (raw) { // https://github.com/Microsoft/vscode/issues/38191
|
||||
changedFileName = raw.toString();
|
||||
if (isMacintosh) {
|
||||
// Mac: uses NFD unicode form on disk, but we want NFC
|
||||
// See also https://github.com/nodejs/node/issues/2165
|
||||
changedFileName = normalizeNFC(changedFileName);
|
||||
}
|
||||
}
|
||||
|
||||
if (!changedFileName || (type !== 'change' && type !== 'rename')) {
|
||||
return; // ignore unexpected events
|
||||
}
|
||||
|
||||
// File path: use path directly for files and join with changed file name otherwise
|
||||
const changedFilePath = file.isDirectory ? join(file.path, changedFileName) : file.path;
|
||||
|
||||
// File
|
||||
if (!file.isDirectory) {
|
||||
if (type === 'rename' || changedFileName !== originalFileName) {
|
||||
// The file was either deleted or renamed. Many tools apply changes to files in an
|
||||
// atomic way ("Atomic Save") by first renaming the file to a temporary name and then
|
||||
// renaming it back to the original name. Our watcher will detect this as a rename
|
||||
// and then stops to work on Mac and Linux because the watcher is applied to the
|
||||
// inode and not the name. The fix is to detect this case and trying to watch the file
|
||||
// again after a certain delay.
|
||||
// In addition, we send out a delete event if after a timeout we detect that the file
|
||||
// does indeed not exist anymore.
|
||||
|
||||
const timeoutHandle = setTimeout(async () => {
|
||||
const fileExists = await exists(changedFilePath);
|
||||
|
||||
if (disposed) {
|
||||
return; // ignore if disposed by now
|
||||
}
|
||||
|
||||
// File still exists, so emit as change event and reapply the watcher
|
||||
if (fileExists) {
|
||||
onChange('changed', changedFilePath);
|
||||
|
||||
watcherDisposables = [doWatchNonRecursive(file, onChange, onError)];
|
||||
}
|
||||
|
||||
// File seems to be really gone, so emit a deleted event
|
||||
else {
|
||||
onChange('deleted', changedFilePath);
|
||||
}
|
||||
}, CHANGE_BUFFER_DELAY);
|
||||
|
||||
// Very important to dispose the watcher which now points to a stale inode
|
||||
// and wire in a new disposable that tracks our timeout that is installed
|
||||
dispose(watcherDisposables);
|
||||
watcherDisposables = [toDisposable(() => clearTimeout(timeoutHandle))];
|
||||
} else {
|
||||
onChange('changed', changedFilePath);
|
||||
}
|
||||
}
|
||||
|
||||
// Folder
|
||||
else {
|
||||
|
||||
// Children add/delete
|
||||
if (type === 'rename') {
|
||||
|
||||
// Cancel any previous stats for this file path if existing
|
||||
const statDisposable = mapPathToStatDisposable.get(changedFilePath);
|
||||
if (statDisposable) {
|
||||
dispose(statDisposable);
|
||||
}
|
||||
|
||||
// Wait a bit and try see if the file still exists on disk to decide on the resulting event
|
||||
const timeoutHandle = setTimeout(async () => {
|
||||
mapPathToStatDisposable.delete(changedFilePath);
|
||||
|
||||
const fileExists = await exists(changedFilePath);
|
||||
|
||||
if (disposed) {
|
||||
return; // ignore if disposed by now
|
||||
}
|
||||
|
||||
// Figure out the correct event type:
|
||||
// File Exists: either 'added' or 'changed' if known before
|
||||
// File Does not Exist: always 'deleted'
|
||||
let type: 'added' | 'deleted' | 'changed';
|
||||
if (fileExists) {
|
||||
if (folderChildren.has(changedFileName)) {
|
||||
type = 'changed';
|
||||
} else {
|
||||
type = 'added';
|
||||
folderChildren.add(changedFileName);
|
||||
}
|
||||
} else {
|
||||
folderChildren.delete(changedFileName);
|
||||
type = 'deleted';
|
||||
}
|
||||
|
||||
onChange(type, changedFilePath);
|
||||
}, CHANGE_BUFFER_DELAY);
|
||||
|
||||
mapPathToStatDisposable.set(changedFilePath, toDisposable(() => clearTimeout(timeoutHandle)));
|
||||
}
|
||||
|
||||
// Other events
|
||||
else {
|
||||
|
||||
// Figure out the correct event type: if this is the
|
||||
// first time we see this child, it can only be added
|
||||
let type: 'added' | 'changed';
|
||||
if (folderChildren.has(changedFileName)) {
|
||||
type = 'changed';
|
||||
} else {
|
||||
type = 'added';
|
||||
folderChildren.add(changedFileName);
|
||||
}
|
||||
|
||||
onChange(type, changedFilePath);
|
||||
}
|
||||
}
|
||||
});
|
||||
} catch (error) {
|
||||
exists(file.path).then(exists => {
|
||||
if (exists && !disposed) {
|
||||
onError(`Failed to watch ${file.path} for changes using fs.watch() (${error.toString()})`);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
return toDisposable(() => {
|
||||
disposed = true;
|
||||
|
||||
watcherDisposables = dispose(watcherDisposables);
|
||||
});
|
||||
}
|
|
@ -7,7 +7,7 @@ import * as nls from 'vs/nls';
|
|||
import * as path from 'vs/base/common/path';
|
||||
import { createWriteStream, WriteStream } from 'fs';
|
||||
import { Readable } from 'stream';
|
||||
import { nfcall, ninvoke, Sequencer, createCancelablePromise } from 'vs/base/common/async';
|
||||
import { Sequencer, createCancelablePromise } from 'vs/base/common/async';
|
||||
import { mkdirp, rimraf } from 'vs/base/node/pfs';
|
||||
import { open as _openZip, Entry, ZipFile } from 'yauzl';
|
||||
import * as yazl from 'yazl';
|
||||
|
@ -17,7 +17,7 @@ import { Event } from 'vs/base/common/event';
|
|||
export interface IExtractOptions {
|
||||
overwrite?: boolean;
|
||||
|
||||
/**
|
||||
/**
|
||||
* Source path within the ZIP archive. Only the files contained in this
|
||||
* path will be extracted.
|
||||
*/
|
||||
|
@ -153,7 +153,7 @@ function extractZip(zipfile: ZipFile, targetPath: string, options: IOptions, tok
|
|||
return;
|
||||
}
|
||||
|
||||
const stream = ninvoke(zipfile, zipfile.openReadStream, entry);
|
||||
const stream = openZipStream(zipfile, entry);
|
||||
const mode = modeFromEntry(entry);
|
||||
|
||||
last = createCancelablePromise(token => throttler.queue(() => stream.then(stream => extractEntry(stream, fileName, mode, targetPath, options, token).then(() => readNextEntry(token)))).then(null!, e));
|
||||
|
@ -162,8 +162,27 @@ function extractZip(zipfile: ZipFile, targetPath: string, options: IOptions, tok
|
|||
}
|
||||
|
||||
function openZip(zipFile: string, lazy: boolean = false): Promise<ZipFile> {
|
||||
return nfcall<ZipFile>(_openZip, zipFile, lazy ? { lazyEntries: true } : undefined)
|
||||
.then(undefined, err => Promise.reject(toExtractError(err)));
|
||||
return new Promise((resolve, reject) => {
|
||||
_openZip(zipFile, lazy ? { lazyEntries: true } : undefined, (error?: Error, zipfile?: ZipFile) => {
|
||||
if (error) {
|
||||
reject(toExtractError(error));
|
||||
} else {
|
||||
resolve(zipfile);
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
function openZipStream(zipFile: ZipFile, entry: Entry): Promise<Readable> {
|
||||
return new Promise((resolve, reject) => {
|
||||
zipFile.openReadStream(entry, (error?: Error, stream?: Readable) => {
|
||||
if (error) {
|
||||
reject(toExtractError(error));
|
||||
} else {
|
||||
resolve(stream);
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
export interface IFile {
|
||||
|
@ -210,7 +229,7 @@ function read(zipPath: string, filePath: string): Promise<Readable> {
|
|||
return new Promise<Readable>((c, e) => {
|
||||
zipfile.on('entry', (entry: Entry) => {
|
||||
if (entry.fileName === filePath) {
|
||||
ninvoke<Readable>(zipfile, zipfile.openReadStream, entry).then(stream => c(stream), err => e(err));
|
||||
openZipStream(zipfile, entry).then(stream => c(stream), err => e(err));
|
||||
}
|
||||
});
|
||||
|
||||
|
@ -224,7 +243,7 @@ export function buffer(zipPath: string, filePath: string): Promise<Buffer> {
|
|||
return new Promise<Buffer>((c, e) => {
|
||||
const buffers: Buffer[] = [];
|
||||
stream.once('error', e);
|
||||
stream.on('data', b => buffers.push(b as Buffer));
|
||||
stream.on('data', (b: Buffer) => buffers.push(b));
|
||||
stream.on('end', () => c(Buffer.concat(buffers)));
|
||||
});
|
||||
});
|
||||
|
|
|
@ -37,7 +37,7 @@ export function popup(items: IContextMenuItem[], options?: IPopupOptions): void
|
|||
}
|
||||
|
||||
function createItem(item: IContextMenuItem, processedItems: IContextMenuItem[]): ISerializableContextMenuItem {
|
||||
const serializableItem = {
|
||||
const serializableItem: ISerializableContextMenuItem = {
|
||||
id: processedItems.length,
|
||||
label: item.label,
|
||||
type: item.type,
|
||||
|
@ -45,7 +45,7 @@ function createItem(item: IContextMenuItem, processedItems: IContextMenuItem[]):
|
|||
checked: item.checked,
|
||||
enabled: typeof item.enabled === 'boolean' ? item.enabled : true,
|
||||
visible: typeof item.visible === 'boolean' ? item.visible : true
|
||||
} as ISerializableContextMenuItem;
|
||||
};
|
||||
|
||||
processedItems.push(item);
|
||||
|
||||
|
|
|
@ -17,6 +17,7 @@ export interface ISocket {
|
|||
onEnd(listener: () => void): IDisposable;
|
||||
write(buffer: VSBuffer): void;
|
||||
end(): void;
|
||||
dispose(): void;
|
||||
}
|
||||
|
||||
let emptyBuffer: VSBuffer | null = null;
|
||||
|
@ -27,7 +28,7 @@ function getEmptyBuffer(): VSBuffer {
|
|||
return emptyBuffer;
|
||||
}
|
||||
|
||||
class ChunkStream {
|
||||
export class ChunkStream {
|
||||
|
||||
private _chunks: VSBuffer[];
|
||||
private _totalLength: number;
|
||||
|
@ -47,6 +48,15 @@ class ChunkStream {
|
|||
}
|
||||
|
||||
public read(byteCount: number): VSBuffer {
|
||||
return this._read(byteCount, true);
|
||||
}
|
||||
|
||||
public peek(byteCount: number): VSBuffer {
|
||||
return this._read(byteCount, false);
|
||||
}
|
||||
|
||||
private _read(byteCount: number, advance: boolean): VSBuffer {
|
||||
|
||||
if (byteCount === 0) {
|
||||
return getEmptyBuffer();
|
||||
}
|
||||
|
@ -57,39 +67,53 @@ class ChunkStream {
|
|||
|
||||
if (this._chunks[0].byteLength === byteCount) {
|
||||
// super fast path, precisely first chunk must be returned
|
||||
const result = this._chunks.shift()!;
|
||||
this._totalLength -= byteCount;
|
||||
const result = this._chunks[0];
|
||||
if (advance) {
|
||||
this._chunks.shift();
|
||||
this._totalLength -= byteCount;
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
if (this._chunks[0].byteLength > byteCount) {
|
||||
// fast path, the reading is entirely within the first chunk
|
||||
const result = this._chunks[0].slice(0, byteCount);
|
||||
this._chunks[0] = this._chunks[0].slice(byteCount);
|
||||
this._totalLength -= byteCount;
|
||||
if (advance) {
|
||||
this._chunks[0] = this._chunks[0].slice(byteCount);
|
||||
this._totalLength -= byteCount;
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
let result = VSBuffer.alloc(byteCount);
|
||||
let resultOffset = 0;
|
||||
let chunkIndex = 0;
|
||||
while (byteCount > 0) {
|
||||
const chunk = this._chunks[0];
|
||||
const chunk = this._chunks[chunkIndex];
|
||||
if (chunk.byteLength > byteCount) {
|
||||
// this chunk will survive
|
||||
this._chunks[0] = chunk.slice(byteCount);
|
||||
|
||||
const chunkPart = chunk.slice(0, byteCount);
|
||||
result.set(chunkPart, resultOffset);
|
||||
resultOffset += byteCount;
|
||||
this._totalLength -= byteCount;
|
||||
|
||||
if (advance) {
|
||||
this._chunks[chunkIndex] = chunk.slice(byteCount);
|
||||
this._totalLength -= byteCount;
|
||||
}
|
||||
|
||||
byteCount -= byteCount;
|
||||
} else {
|
||||
// this chunk will be entirely read
|
||||
this._chunks.shift();
|
||||
|
||||
result.set(chunk, resultOffset);
|
||||
resultOffset += chunk.byteLength;
|
||||
this._totalLength -= chunk.byteLength;
|
||||
|
||||
if (advance) {
|
||||
this._chunks.shift();
|
||||
this._totalLength -= chunk.byteLength;
|
||||
} else {
|
||||
chunkIndex++;
|
||||
}
|
||||
|
||||
byteCount -= chunk.byteLength;
|
||||
}
|
||||
}
|
||||
|
@ -154,7 +178,7 @@ class ProtocolReader extends Disposable {
|
|||
private readonly _incomingData: ChunkStream;
|
||||
public lastReadTime: number;
|
||||
|
||||
private readonly _onMessage = new Emitter<ProtocolMessage>();
|
||||
private readonly _onMessage = this._register(new Emitter<ProtocolMessage>());
|
||||
public readonly onMessage: Event<ProtocolMessage> = this._onMessage.event;
|
||||
|
||||
private readonly _state = {
|
||||
|
|
|
@ -20,6 +20,10 @@ export class NodeSocket implements ISocket {
|
|||
this.socket = socket;
|
||||
}
|
||||
|
||||
public dispose(): void {
|
||||
this.socket.destroy();
|
||||
}
|
||||
|
||||
public onData(_listener: (e: VSBuffer) => void): IDisposable {
|
||||
const listener = (buff: Buffer) => _listener(VSBuffer.wrap(buff));
|
||||
this.socket.on('data', listener);
|
||||
|
|
|
@ -473,17 +473,16 @@ class Renderer implements IRenderer<QuickOpenEntry> {
|
|||
}
|
||||
|
||||
disposeTemplate(templateId: string, templateData: IQuickOpenEntryGroupTemplateData): void {
|
||||
const data = templateData as IQuickOpenEntryGroupTemplateData;
|
||||
data.actionBar.dispose();
|
||||
data.actionBar = null!;
|
||||
data.container = null!;
|
||||
data.entry = null!;
|
||||
data.keybinding = null!;
|
||||
data.detail = null!;
|
||||
data.group = null!;
|
||||
data.icon = null!;
|
||||
data.label.dispose();
|
||||
data.label = null!;
|
||||
templateData.actionBar.dispose();
|
||||
templateData.actionBar = null!;
|
||||
templateData.container = null!;
|
||||
templateData.entry = null!;
|
||||
templateData.keybinding = null!;
|
||||
templateData.detail = null!;
|
||||
templateData.group = null!;
|
||||
templateData.icon = null!;
|
||||
templateData.label.dispose();
|
||||
templateData.label = null!;
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -108,61 +108,17 @@ export interface ITree {
|
|||
*/
|
||||
toggleExpansion(element: any, recursive?: boolean): Promise<any>;
|
||||
|
||||
/**
|
||||
* Toggles several element's expansion state.
|
||||
*/
|
||||
toggleExpansionAll(elements: any[]): Promise<any>;
|
||||
|
||||
/**
|
||||
* Returns whether an element is expanded or not.
|
||||
*/
|
||||
isExpanded(element: any): boolean;
|
||||
|
||||
/**
|
||||
* Returns a list of the currently expanded elements.
|
||||
*/
|
||||
getExpandedElements(): any[];
|
||||
|
||||
/**
|
||||
* Reveals an element in the tree. The relativeTop is a value between 0 and 1. The closer to 0 the more the
|
||||
* element will scroll up to the top.
|
||||
*/
|
||||
reveal(element: any, relativeTop?: number): Promise<any>;
|
||||
|
||||
/**
|
||||
* Returns the relative top position of any given element, if visible.
|
||||
* If not visible, returns a negative number or a number > 1.
|
||||
* Useful when calling `reveal(element, relativeTop)`.
|
||||
*/
|
||||
getRelativeTop(element: any): number;
|
||||
|
||||
/**
|
||||
* Returns the top-most visible element.
|
||||
*/
|
||||
getFirstVisibleElement(): any;
|
||||
|
||||
/**
|
||||
* Returns a number between 0 and 1 representing how much the tree is scroll down. 0 means all the way
|
||||
* to the top; 1 means all the way down.
|
||||
*/
|
||||
getScrollPosition(): number;
|
||||
|
||||
/**
|
||||
* Sets the scroll position with a number between 0 and 1 representing how much the tree is scroll down. 0 means all the way
|
||||
* to the top; 1 means all the way down.
|
||||
*/
|
||||
setScrollPosition(pos: number): void;
|
||||
|
||||
/**
|
||||
* Returns the total height of the tree's content.
|
||||
*/
|
||||
getContentHeight(): number;
|
||||
|
||||
/**
|
||||
* Sets the tree's highlight to be the given element.
|
||||
* Provide no arguments and it clears the tree's highlight.
|
||||
*/
|
||||
setHighlight(element?: any, eventPayload?: any): void;
|
||||
|
||||
/**
|
||||
* Returns the currently highlighted element.
|
||||
|
|
|
@ -181,33 +181,17 @@ export class Tree implements _.ITree {
|
|||
return this.model.toggleExpansion(element, recursive);
|
||||
}
|
||||
|
||||
public toggleExpansionAll(elements: any[]): Promise<any> {
|
||||
return this.model.toggleExpansionAll(elements);
|
||||
}
|
||||
|
||||
public isExpanded(element: any): boolean {
|
||||
return this.model.isExpanded(element);
|
||||
}
|
||||
|
||||
public getExpandedElements(): any[] {
|
||||
return this.model.getExpandedElements();
|
||||
}
|
||||
|
||||
public reveal(element: any, relativeTop: number | null = null): Promise<any> {
|
||||
return this.model.reveal(element, relativeTop);
|
||||
}
|
||||
|
||||
public getRelativeTop(element: any): number {
|
||||
const item = this.model.getItem(element);
|
||||
return item ? this.view.getRelativeTop(item) : 0;
|
||||
}
|
||||
|
||||
public getFirstVisibleElement(): any {
|
||||
return this.view.getFirstVisibleElement();
|
||||
}
|
||||
|
||||
public getLastVisibleElement(): any {
|
||||
return this.view.getLastVisibleElement();
|
||||
// {{SQL CARBON EDIT }} - add back deleted VS Code tree methods
|
||||
public getExpandedElements(): any[] {
|
||||
return this.model.getExpandedElements();
|
||||
}
|
||||
|
||||
public getScrollPosition(): number {
|
||||
|
@ -221,10 +205,8 @@ export class Tree implements _.ITree {
|
|||
getContentHeight(): number {
|
||||
return this.view.getContentHeight();
|
||||
}
|
||||
// {{SQL CARBON EDIT }} - end block
|
||||
|
||||
public setHighlight(element?: any, eventPayload?: any): void {
|
||||
this.model.setHighlight(element, eventPayload);
|
||||
}
|
||||
|
||||
public getHighlight(): any {
|
||||
return this.model.getHighlight();
|
||||
|
|
|
@ -566,19 +566,6 @@ export class Item {
|
|||
return this.isAncestorOf(other) || other.isAncestorOf(this);
|
||||
}
|
||||
|
||||
public getHierarchy(): Item[] {
|
||||
let result: Item[] = [];
|
||||
let node: Item | null = this;
|
||||
|
||||
do {
|
||||
result.push(node);
|
||||
node = node.parent;
|
||||
} while (node);
|
||||
|
||||
result.reverse();
|
||||
return result;
|
||||
}
|
||||
|
||||
private isAncestorOf(startItem: Item): boolean {
|
||||
let item: Item | null = startItem;
|
||||
while (item) {
|
||||
|
|
|
@ -5,16 +5,6 @@
|
|||
|
||||
import * as _ from 'vs/base/parts/tree/browser/tree';
|
||||
|
||||
export function collapseAll(tree: _.ITree, except?: any): void {
|
||||
const nav = tree.getNavigator();
|
||||
let cur;
|
||||
while (cur = nav.next()) {
|
||||
if (!except || !isEqualOrParent(tree, except, cur)) {
|
||||
tree.collapse(cur);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export function isEqualOrParent(tree: _.ITree, element: any, candidateParent: any): boolean {
|
||||
const nav = tree.getNavigator(element);
|
||||
|
||||
|
@ -26,11 +16,3 @@ export function isEqualOrParent(tree: _.ITree, element: any, candidateParent: an
|
|||
|
||||
return false;
|
||||
}
|
||||
|
||||
export function expandAll(tree: _.ITree): void {
|
||||
const nav = tree.getNavigator();
|
||||
let cur;
|
||||
while (cur = nav.next()) {
|
||||
tree.expand(cur);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -658,27 +658,6 @@ export class TreeView extends HeightMap {
|
|||
}
|
||||
}
|
||||
|
||||
public getFirstVisibleElement(): any {
|
||||
const firstIndex = this.indexAt(this.lastRenderTop);
|
||||
let item = this.itemAtIndex(firstIndex);
|
||||
if (!item) {
|
||||
return item;
|
||||
}
|
||||
|
||||
const itemMidpoint = item.top + item.height / 2;
|
||||
if (itemMidpoint < this.scrollTop) {
|
||||
const nextItem = this.itemAtIndex(firstIndex + 1);
|
||||
item = nextItem || item;
|
||||
}
|
||||
|
||||
return item.model.getElement();
|
||||
}
|
||||
|
||||
public getLastVisibleElement(): any {
|
||||
const item = this.itemAtIndex(this.indexAt(this.lastRenderTop + this.lastRenderHeight - 1));
|
||||
return item && item.model.getElement();
|
||||
}
|
||||
|
||||
private render(scrollTop: number, viewHeight: number, scrollLeft: number, viewWidth: number, scrollWidth: number): void {
|
||||
let i: number;
|
||||
let stop: number;
|
||||
|
@ -1043,16 +1022,6 @@ export class TreeView extends HeightMap {
|
|||
}
|
||||
}
|
||||
|
||||
public getRelativeTop(item: Model.Item): number {
|
||||
if (item && item.isVisible()) {
|
||||
let viewItem = this.items[item.id];
|
||||
if (viewItem) {
|
||||
return (viewItem.top - this.scrollTop) / (this.viewHeight - viewItem.height);
|
||||
}
|
||||
}
|
||||
return -1;
|
||||
}
|
||||
|
||||
private onItemReveal(e: Model.IItemRevealEvent): void {
|
||||
let item = <Model.Item>e.item;
|
||||
let relativeTop = <number>e.relativeTop;
|
||||
|
|
|
@ -0,0 +1,16 @@
|
|||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the MIT License. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
import * as assert from 'assert';
|
||||
import { createSHA1 } from 'vs/base/browser/hash';
|
||||
|
||||
suite('Hash', () => {
|
||||
test('computeSHA1Hash', async () => {
|
||||
assert.equal(await createSHA1(''), 'da39a3ee5e6b4b0d3255bfef95601890afd80709');
|
||||
assert.equal(await createSHA1('hello world'), '2aae6c35c94fcfb415dbe95f408b9ce91ee846ed');
|
||||
assert.equal(await createSHA1('da39a3ee5e6b4b0d3255bfef95601890afd80709'), '10a34637ad661d98ba3344717656fcc76209c2f8');
|
||||
assert.equal(await createSHA1('2aae6c35c94fcfb415dbe95f408b9ce91ee846ed'), 'd6b0d82cea4269b51572b8fab43adcee9fc3cf9a');
|
||||
assert.equal(await createSHA1('öäü_?ß()<>ÖÄÜ'), 'b64beaeff9e317b0193c8e40a2431b210388eba9');
|
||||
});
|
||||
});
|
|
@ -5,7 +5,7 @@
|
|||
|
||||
import * as assert from 'assert';
|
||||
import { ITreeNode, ITreeRenderer } from 'vs/base/browser/ui/tree/tree';
|
||||
import { IListVirtualDelegate } from 'vs/base/browser/ui/list/list';
|
||||
import { IListVirtualDelegate, IIdentityProvider } from 'vs/base/browser/ui/list/list';
|
||||
import { ObjectTree } from 'vs/base/browser/ui/tree/objectTree';
|
||||
import { Iterator } from 'vs/base/common/iterator';
|
||||
|
||||
|
@ -186,4 +186,42 @@ suite('ObjectTree', function () {
|
|||
assert.equal(navigator.last(), 2);
|
||||
});
|
||||
});
|
||||
|
||||
test('traits are preserved according to string identity', function () {
|
||||
const container = document.createElement('div');
|
||||
container.style.width = '200px';
|
||||
container.style.height = '200px';
|
||||
|
||||
const delegate = new class implements IListVirtualDelegate<number> {
|
||||
getHeight() { return 20; }
|
||||
getTemplateId(): string { return 'default'; }
|
||||
};
|
||||
|
||||
const renderer = new class implements ITreeRenderer<number, void, HTMLElement> {
|
||||
readonly templateId = 'default';
|
||||
renderTemplate(container: HTMLElement): HTMLElement {
|
||||
return container;
|
||||
}
|
||||
renderElement(element: ITreeNode<number, void>, index: number, templateData: HTMLElement): void {
|
||||
templateData.textContent = `${element.element}`;
|
||||
}
|
||||
disposeTemplate(): void { }
|
||||
};
|
||||
|
||||
const identityProvider = new class implements IIdentityProvider<number> {
|
||||
getId(element: number): { toString(): string; } {
|
||||
return `${element % 100}`;
|
||||
}
|
||||
};
|
||||
|
||||
const tree = new ObjectTree<number>(container, delegate, [renderer], { identityProvider });
|
||||
tree.layout(200);
|
||||
|
||||
tree.setChildren(null, [{ element: 0 }, { element: 1 }, { element: 2 }, { element: 3 }]);
|
||||
tree.setFocus([1]);
|
||||
assert.deepStrictEqual(tree.getFocus(), [1]);
|
||||
|
||||
tree.setChildren(null, [{ element: 100 }, { element: 101 }, { element: 102 }, { element: 103 }]);
|
||||
assert.deepStrictEqual(tree.getFocus(), [101]);
|
||||
});
|
||||
});
|
|
@ -94,4 +94,19 @@ suite('CancellationToken', function () {
|
|||
source.cancel();
|
||||
assert.equal(count, 0);
|
||||
});
|
||||
|
||||
test('parent cancels child', function () {
|
||||
|
||||
let parent = new CancellationTokenSource();
|
||||
let child = new CancellationTokenSource(parent.token);
|
||||
|
||||
let count = 0;
|
||||
child.token.onCancellationRequested(() => count += 1);
|
||||
|
||||
parent.cancel();
|
||||
|
||||
assert.equal(count, 1);
|
||||
assert.equal(child.token.isCancellationRequested, true);
|
||||
assert.equal(parent.token.isCancellationRequested, true);
|
||||
});
|
||||
});
|
||||
|
|
|
@ -3,7 +3,7 @@
|
|||
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
import * as assert from 'assert';
|
||||
import { Event, Emitter, EventBufferer, EventMultiplexer, AsyncEmitter, IWaitUntil } from 'vs/base/common/event';
|
||||
import { Event, Emitter, EventBufferer, EventMultiplexer, AsyncEmitter, IWaitUntil, PauseableEmitter } from 'vs/base/common/event';
|
||||
import { IDisposable } from 'vs/base/common/lifecycle';
|
||||
import * as Errors from 'vs/base/common/errors';
|
||||
import { timeout } from 'vs/base/common/async';
|
||||
|
@ -331,6 +331,133 @@ suite('AsyncEmitter', function () {
|
|||
});
|
||||
});
|
||||
|
||||
suite('PausableEmitter', function () {
|
||||
|
||||
test('basic', function () {
|
||||
const data: number[] = [];
|
||||
const emitter = new PauseableEmitter<number>();
|
||||
|
||||
emitter.event(e => data.push(e));
|
||||
emitter.fire(1);
|
||||
emitter.fire(2);
|
||||
|
||||
assert.deepEqual(data, [1, 2]);
|
||||
});
|
||||
|
||||
test('pause/resume - no merge', function () {
|
||||
const data: number[] = [];
|
||||
const emitter = new PauseableEmitter<number>();
|
||||
|
||||
emitter.event(e => data.push(e));
|
||||
emitter.fire(1);
|
||||
emitter.fire(2);
|
||||
assert.deepEqual(data, [1, 2]);
|
||||
|
||||
emitter.pause();
|
||||
emitter.fire(3);
|
||||
emitter.fire(4);
|
||||
assert.deepEqual(data, [1, 2]);
|
||||
|
||||
emitter.resume();
|
||||
assert.deepEqual(data, [1, 2, 3, 4]);
|
||||
emitter.fire(5);
|
||||
assert.deepEqual(data, [1, 2, 3, 4, 5]);
|
||||
});
|
||||
|
||||
test('pause/resume - merge', function () {
|
||||
const data: number[] = [];
|
||||
const emitter = new PauseableEmitter<number>({ merge: (a) => a.reduce((p, c) => p + c, 0) });
|
||||
|
||||
emitter.event(e => data.push(e));
|
||||
emitter.fire(1);
|
||||
emitter.fire(2);
|
||||
assert.deepEqual(data, [1, 2]);
|
||||
|
||||
emitter.pause();
|
||||
emitter.fire(3);
|
||||
emitter.fire(4);
|
||||
assert.deepEqual(data, [1, 2]);
|
||||
|
||||
emitter.resume();
|
||||
assert.deepEqual(data, [1, 2, 7]);
|
||||
|
||||
emitter.fire(5);
|
||||
assert.deepEqual(data, [1, 2, 7, 5]);
|
||||
});
|
||||
|
||||
test('double pause/resume', function () {
|
||||
const data: number[] = [];
|
||||
const emitter = new PauseableEmitter<number>();
|
||||
|
||||
emitter.event(e => data.push(e));
|
||||
emitter.fire(1);
|
||||
emitter.fire(2);
|
||||
assert.deepEqual(data, [1, 2]);
|
||||
|
||||
emitter.pause();
|
||||
emitter.pause();
|
||||
emitter.fire(3);
|
||||
emitter.fire(4);
|
||||
assert.deepEqual(data, [1, 2]);
|
||||
|
||||
emitter.resume();
|
||||
assert.deepEqual(data, [1, 2]);
|
||||
|
||||
emitter.resume();
|
||||
assert.deepEqual(data, [1, 2, 3, 4]);
|
||||
|
||||
emitter.resume();
|
||||
assert.deepEqual(data, [1, 2, 3, 4]);
|
||||
});
|
||||
|
||||
test('resume, no pause', function () {
|
||||
const data: number[] = [];
|
||||
const emitter = new PauseableEmitter<number>();
|
||||
|
||||
emitter.event(e => data.push(e));
|
||||
emitter.fire(1);
|
||||
emitter.fire(2);
|
||||
assert.deepEqual(data, [1, 2]);
|
||||
|
||||
emitter.resume();
|
||||
emitter.fire(3);
|
||||
assert.deepEqual(data, [1, 2, 3]);
|
||||
});
|
||||
|
||||
test('nested pause', function () {
|
||||
const data: number[] = [];
|
||||
const emitter = new PauseableEmitter<number>();
|
||||
|
||||
let once = true;
|
||||
emitter.event(e => {
|
||||
data.push(e);
|
||||
|
||||
if (once) {
|
||||
emitter.pause();
|
||||
once = false;
|
||||
}
|
||||
});
|
||||
emitter.event(e => {
|
||||
data.push(e);
|
||||
});
|
||||
|
||||
emitter.pause();
|
||||
emitter.fire(1);
|
||||
emitter.fire(2);
|
||||
assert.deepEqual(data, []);
|
||||
|
||||
emitter.resume();
|
||||
assert.deepEqual(data, [1, 1]); // paused after first event
|
||||
|
||||
emitter.resume();
|
||||
assert.deepEqual(data, [1, 1, 2, 2]); // remaing event delivered
|
||||
|
||||
emitter.fire(3);
|
||||
assert.deepEqual(data, [1, 1, 2, 2, 3, 3]);
|
||||
|
||||
});
|
||||
});
|
||||
|
||||
suite('Event utils', () => {
|
||||
|
||||
suite('EventBufferer', () => {
|
||||
|
@ -777,4 +904,5 @@ suite('Event utils', () => {
|
|||
|
||||
listener.dispose();
|
||||
});
|
||||
|
||||
});
|
||||
|
|
|
@ -16,7 +16,6 @@ suite('Paths', () => {
|
|||
});
|
||||
|
||||
test('getRoot', () => {
|
||||
|
||||
assert.equal(extpath.getRoot('/user/far'), '/');
|
||||
assert.equal(extpath.getRoot('\\\\server\\share\\some\\path'), '//server/share/');
|
||||
assert.equal(extpath.getRoot('//server/share/some/path'), '//server/share/');
|
||||
|
@ -65,4 +64,54 @@ suite('Paths', () => {
|
|||
assert.ok(!extpath.isValidBasename('tes"t.txt'));
|
||||
}
|
||||
});
|
||||
|
||||
test('sanitizeFilePath', () => {
|
||||
if (platform.isWindows) {
|
||||
assert.equal(extpath.sanitizeFilePath('.', 'C:\\the\\cwd'), 'C:\\the\\cwd');
|
||||
assert.equal(extpath.sanitizeFilePath('', 'C:\\the\\cwd'), 'C:\\the\\cwd');
|
||||
|
||||
assert.equal(extpath.sanitizeFilePath('C:', 'C:\\the\\cwd'), 'C:\\');
|
||||
assert.equal(extpath.sanitizeFilePath('C:\\', 'C:\\the\\cwd'), 'C:\\');
|
||||
assert.equal(extpath.sanitizeFilePath('C:\\\\', 'C:\\the\\cwd'), 'C:\\');
|
||||
|
||||
assert.equal(extpath.sanitizeFilePath('C:\\folder\\my.txt', 'C:\\the\\cwd'), 'C:\\folder\\my.txt');
|
||||
assert.equal(extpath.sanitizeFilePath('C:\\folder\\my', 'C:\\the\\cwd'), 'C:\\folder\\my');
|
||||
assert.equal(extpath.sanitizeFilePath('C:\\folder\\..\\my', 'C:\\the\\cwd'), 'C:\\my');
|
||||
assert.equal(extpath.sanitizeFilePath('C:\\folder\\my\\', 'C:\\the\\cwd'), 'C:\\folder\\my');
|
||||
assert.equal(extpath.sanitizeFilePath('C:\\folder\\my\\\\\\', 'C:\\the\\cwd'), 'C:\\folder\\my');
|
||||
|
||||
assert.equal(extpath.sanitizeFilePath('my.txt', 'C:\\the\\cwd'), 'C:\\the\\cwd\\my.txt');
|
||||
assert.equal(extpath.sanitizeFilePath('my.txt\\', 'C:\\the\\cwd'), 'C:\\the\\cwd\\my.txt');
|
||||
|
||||
assert.equal(extpath.sanitizeFilePath('\\\\localhost\\folder\\my', 'C:\\the\\cwd'), '\\\\localhost\\folder\\my');
|
||||
assert.equal(extpath.sanitizeFilePath('\\\\localhost\\folder\\my\\', 'C:\\the\\cwd'), '\\\\localhost\\folder\\my');
|
||||
} else {
|
||||
assert.equal(extpath.sanitizeFilePath('.', '/the/cwd'), '/the/cwd');
|
||||
assert.equal(extpath.sanitizeFilePath('', '/the/cwd'), '/the/cwd');
|
||||
assert.equal(extpath.sanitizeFilePath('/', '/the/cwd'), '/');
|
||||
|
||||
assert.equal(extpath.sanitizeFilePath('/folder/my.txt', '/the/cwd'), '/folder/my.txt');
|
||||
assert.equal(extpath.sanitizeFilePath('/folder/my', '/the/cwd'), '/folder/my');
|
||||
assert.equal(extpath.sanitizeFilePath('/folder/../my', '/the/cwd'), '/my');
|
||||
assert.equal(extpath.sanitizeFilePath('/folder/my/', '/the/cwd'), '/folder/my');
|
||||
assert.equal(extpath.sanitizeFilePath('/folder/my///', '/the/cwd'), '/folder/my');
|
||||
|
||||
assert.equal(extpath.sanitizeFilePath('my.txt', '/the/cwd'), '/the/cwd/my.txt');
|
||||
assert.equal(extpath.sanitizeFilePath('my.txt/', '/the/cwd'), '/the/cwd/my.txt');
|
||||
}
|
||||
});
|
||||
|
||||
test('isRoot', () => {
|
||||
if (platform.isWindows) {
|
||||
assert.ok(extpath.isRootOrDriveLetter('c:'));
|
||||
assert.ok(extpath.isRootOrDriveLetter('D:'));
|
||||
assert.ok(extpath.isRootOrDriveLetter('D:/'));
|
||||
assert.ok(extpath.isRootOrDriveLetter('D:\\'));
|
||||
assert.ok(!extpath.isRootOrDriveLetter('D:\\path'));
|
||||
assert.ok(!extpath.isRootOrDriveLetter('D:/path'));
|
||||
} else {
|
||||
assert.ok(extpath.isRootOrDriveLetter('/'));
|
||||
assert.ok(!extpath.isRootOrDriveLetter('/path'));
|
||||
}
|
||||
});
|
||||
});
|
||||
|
|
|
@ -52,6 +52,14 @@ suite('LinkedList', function () {
|
|||
disp = list.push(2);
|
||||
disp();
|
||||
assertElements(list, 0, 1);
|
||||
|
||||
list = new LinkedList<number>();
|
||||
list.push(0);
|
||||
list.push(1);
|
||||
disp = list.push(2);
|
||||
disp();
|
||||
disp();
|
||||
assertElements(list, 0, 1);
|
||||
});
|
||||
|
||||
test('Push/toArray', () => {
|
||||
|
|
|
@ -189,29 +189,4 @@ suite('Types', () => {
|
|||
assert.throws(() => types.validateConstraints(['2'], [types.isNumber]));
|
||||
assert.throws(() => types.validateConstraints([1, 'test', true], [Number, String, Number]));
|
||||
});
|
||||
|
||||
test('create', () => {
|
||||
let zeroConstructor = function () { /**/ };
|
||||
|
||||
assert(types.create(zeroConstructor) instanceof zeroConstructor);
|
||||
assert(types.isObject(types.create(zeroConstructor)));
|
||||
|
||||
let manyArgConstructor = function (this: any, foo: any, bar: any) {
|
||||
this.foo = foo;
|
||||
this.bar = bar;
|
||||
};
|
||||
|
||||
let foo = {};
|
||||
let bar = 'foo';
|
||||
|
||||
assert(types.create(manyArgConstructor) instanceof manyArgConstructor);
|
||||
assert(types.isObject(types.create(manyArgConstructor)));
|
||||
|
||||
assert(types.create(manyArgConstructor, foo, bar) instanceof manyArgConstructor);
|
||||
assert(types.isObject(types.create(manyArgConstructor, foo, bar)));
|
||||
|
||||
let obj = types.create(manyArgConstructor, foo, bar);
|
||||
assert.strictEqual(obj.foo, foo);
|
||||
assert.strictEqual(obj.bar, bar);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
|
|
@ -1,615 +0,0 @@
|
|||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
|
||||
import * as assert from 'assert';
|
||||
import * as fs from 'fs';
|
||||
import * as os from 'os';
|
||||
import * as path from 'vs/base/common/path';
|
||||
import { Readable } from 'stream';
|
||||
import { canNormalize } from 'vs/base/common/normalization';
|
||||
import { isLinux, isWindows } from 'vs/base/common/platform';
|
||||
import * as uuid from 'vs/base/common/uuid';
|
||||
import * as extfs from 'vs/base/node/extfs';
|
||||
import { getPathFromAmdModule } from 'vs/base/common/amd';
|
||||
import { CancellationTokenSource } from 'vs/base/common/cancellation';
|
||||
|
||||
const ignore = () => { };
|
||||
|
||||
const mkdirp = (path: string, mode: number, callback: (error: any) => void) => {
|
||||
extfs.mkdirp(path, mode).then(() => callback(null), error => callback(error));
|
||||
};
|
||||
|
||||
const chunkSize = 64 * 1024;
|
||||
const readError = 'Error while reading';
|
||||
function toReadable(value: string, throwError?: boolean): Readable {
|
||||
const totalChunks = Math.ceil(value.length / chunkSize);
|
||||
const stringChunks: string[] = [];
|
||||
|
||||
for (let i = 0, j = 0; i < totalChunks; ++i, j += chunkSize) {
|
||||
stringChunks[i] = value.substr(j, chunkSize);
|
||||
}
|
||||
|
||||
let counter = 0;
|
||||
return new Readable({
|
||||
read: function () {
|
||||
if (throwError) {
|
||||
this.emit('error', new Error(readError));
|
||||
}
|
||||
|
||||
let res!: string;
|
||||
let canPush = true;
|
||||
while (canPush && (res = stringChunks[counter++])) {
|
||||
canPush = this.push(res);
|
||||
}
|
||||
|
||||
// EOS
|
||||
if (!res) {
|
||||
this.push(null);
|
||||
}
|
||||
},
|
||||
encoding: 'utf8'
|
||||
});
|
||||
}
|
||||
|
||||
suite('Extfs', () => {
|
||||
|
||||
test('mkdirp', function (done) {
|
||||
const id = uuid.generateUuid();
|
||||
const parentDir = path.join(os.tmpdir(), 'vsctests', id);
|
||||
const newDir = path.join(parentDir, 'extfs', id);
|
||||
|
||||
mkdirp(newDir, 493, error => {
|
||||
if (error) {
|
||||
return done(error);
|
||||
}
|
||||
|
||||
assert.ok(fs.existsSync(newDir));
|
||||
|
||||
extfs.del(parentDir, os.tmpdir(), done, ignore);
|
||||
}); // 493 = 0755
|
||||
});
|
||||
|
||||
test('stat link', function (done) {
|
||||
if (isWindows) {
|
||||
// Symlinks are not the same on win, and we can not create them programitically without admin privileges
|
||||
return done();
|
||||
}
|
||||
|
||||
const id1 = uuid.generateUuid();
|
||||
const parentDir = path.join(os.tmpdir(), 'vsctests', id1);
|
||||
const directory = path.join(parentDir, 'extfs', id1);
|
||||
|
||||
const id2 = uuid.generateUuid();
|
||||
const symbolicLink = path.join(parentDir, 'extfs', id2);
|
||||
|
||||
mkdirp(directory, 493, error => {
|
||||
if (error) {
|
||||
return done(error);
|
||||
}
|
||||
|
||||
fs.symlinkSync(directory, symbolicLink);
|
||||
|
||||
extfs.statLink(directory, (error, statAndIsLink) => {
|
||||
if (error) {
|
||||
return done(error);
|
||||
}
|
||||
|
||||
assert.ok(!statAndIsLink!.isSymbolicLink);
|
||||
|
||||
extfs.statLink(symbolicLink, (error, statAndIsLink) => {
|
||||
if (error) {
|
||||
return done(error);
|
||||
}
|
||||
|
||||
assert.ok(statAndIsLink!.isSymbolicLink);
|
||||
extfs.delSync(directory);
|
||||
done();
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
test('delSync - swallows file not found error', function () {
|
||||
const id = uuid.generateUuid();
|
||||
const parentDir = path.join(os.tmpdir(), 'vsctests', id);
|
||||
const newDir = path.join(parentDir, 'extfs', id);
|
||||
|
||||
extfs.delSync(newDir);
|
||||
|
||||
assert.ok(!fs.existsSync(newDir));
|
||||
});
|
||||
|
||||
test('delSync - simple', function (done) {
|
||||
const id = uuid.generateUuid();
|
||||
const parentDir = path.join(os.tmpdir(), 'vsctests', id);
|
||||
const newDir = path.join(parentDir, 'extfs', id);
|
||||
|
||||
mkdirp(newDir, 493, error => {
|
||||
if (error) {
|
||||
return done(error);
|
||||
}
|
||||
|
||||
fs.writeFileSync(path.join(newDir, 'somefile.txt'), 'Contents');
|
||||
fs.writeFileSync(path.join(newDir, 'someOtherFile.txt'), 'Contents');
|
||||
|
||||
extfs.delSync(newDir);
|
||||
|
||||
assert.ok(!fs.existsSync(newDir));
|
||||
done();
|
||||
}); // 493 = 0755
|
||||
});
|
||||
|
||||
test('delSync - recursive folder structure', function (done) {
|
||||
const id = uuid.generateUuid();
|
||||
const parentDir = path.join(os.tmpdir(), 'vsctests', id);
|
||||
const newDir = path.join(parentDir, 'extfs', id);
|
||||
|
||||
mkdirp(newDir, 493, error => {
|
||||
if (error) {
|
||||
return done(error);
|
||||
}
|
||||
|
||||
fs.writeFileSync(path.join(newDir, 'somefile.txt'), 'Contents');
|
||||
fs.writeFileSync(path.join(newDir, 'someOtherFile.txt'), 'Contents');
|
||||
|
||||
fs.mkdirSync(path.join(newDir, 'somefolder'));
|
||||
fs.writeFileSync(path.join(newDir, 'somefolder', 'somefile.txt'), 'Contents');
|
||||
|
||||
extfs.delSync(newDir);
|
||||
|
||||
assert.ok(!fs.existsSync(newDir));
|
||||
done();
|
||||
}); // 493 = 0755
|
||||
});
|
||||
|
||||
test('copy, move and delete', function (done) {
|
||||
const id = uuid.generateUuid();
|
||||
const id2 = uuid.generateUuid();
|
||||
const sourceDir = getPathFromAmdModule(require, './fixtures');
|
||||
const parentDir = path.join(os.tmpdir(), 'vsctests', 'extfs');
|
||||
const targetDir = path.join(parentDir, id);
|
||||
const targetDir2 = path.join(parentDir, id2);
|
||||
|
||||
extfs.copy(sourceDir, targetDir, error => {
|
||||
if (error) {
|
||||
return done(error);
|
||||
}
|
||||
|
||||
assert.ok(fs.existsSync(targetDir));
|
||||
assert.ok(fs.existsSync(path.join(targetDir, 'index.html')));
|
||||
assert.ok(fs.existsSync(path.join(targetDir, 'site.css')));
|
||||
assert.ok(fs.existsSync(path.join(targetDir, 'examples')));
|
||||
assert.ok(fs.statSync(path.join(targetDir, 'examples')).isDirectory());
|
||||
assert.ok(fs.existsSync(path.join(targetDir, 'examples', 'small.jxs')));
|
||||
|
||||
extfs.mv(targetDir, targetDir2, error => {
|
||||
if (error) {
|
||||
return done(error);
|
||||
}
|
||||
|
||||
assert.ok(!fs.existsSync(targetDir));
|
||||
assert.ok(fs.existsSync(targetDir2));
|
||||
assert.ok(fs.existsSync(path.join(targetDir2, 'index.html')));
|
||||
assert.ok(fs.existsSync(path.join(targetDir2, 'site.css')));
|
||||
assert.ok(fs.existsSync(path.join(targetDir2, 'examples')));
|
||||
assert.ok(fs.statSync(path.join(targetDir2, 'examples')).isDirectory());
|
||||
assert.ok(fs.existsSync(path.join(targetDir2, 'examples', 'small.jxs')));
|
||||
|
||||
extfs.mv(path.join(targetDir2, 'index.html'), path.join(targetDir2, 'index_moved.html'), error => {
|
||||
if (error) {
|
||||
return done(error);
|
||||
}
|
||||
|
||||
assert.ok(!fs.existsSync(path.join(targetDir2, 'index.html')));
|
||||
assert.ok(fs.existsSync(path.join(targetDir2, 'index_moved.html')));
|
||||
|
||||
extfs.del(parentDir, os.tmpdir(), error => {
|
||||
if (error) {
|
||||
return done(error);
|
||||
}
|
||||
}, error => {
|
||||
if (error) {
|
||||
return done(error);
|
||||
}
|
||||
assert.ok(!fs.existsSync(parentDir));
|
||||
done();
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
test('readdir', function (done) {
|
||||
if (canNormalize && typeof process.versions['electron'] !== 'undefined' /* needs electron */) {
|
||||
const id = uuid.generateUuid();
|
||||
const parentDir = path.join(os.tmpdir(), 'vsctests', id);
|
||||
const newDir = path.join(parentDir, 'extfs', id, 'öäü');
|
||||
|
||||
mkdirp(newDir, 493, error => {
|
||||
if (error) {
|
||||
return done(error);
|
||||
}
|
||||
|
||||
assert.ok(fs.existsSync(newDir));
|
||||
|
||||
extfs.readdir(path.join(parentDir, 'extfs', id), (error, children) => {
|
||||
assert.equal(children.some(n => n === 'öäü'), true); // Mac always converts to NFD, so
|
||||
|
||||
extfs.del(parentDir, os.tmpdir(), done, ignore);
|
||||
});
|
||||
}); // 493 = 0755
|
||||
} else {
|
||||
done();
|
||||
}
|
||||
});
|
||||
|
||||
test('writeFileAndFlush (string)', function (done) {
|
||||
const smallData = 'Hello World';
|
||||
const bigData = (new Array(100 * 1024)).join('Large String\n');
|
||||
|
||||
testWriteFileAndFlush(smallData, smallData, bigData, bigData, done);
|
||||
});
|
||||
|
||||
test('writeFileAndFlush (Buffer)', function (done) {
|
||||
const smallData = 'Hello World';
|
||||
const bigData = (new Array(100 * 1024)).join('Large String\n');
|
||||
|
||||
testWriteFileAndFlush(Buffer.from(smallData), smallData, Buffer.from(bigData), bigData, done);
|
||||
});
|
||||
|
||||
test('writeFileAndFlush (UInt8Array)', function (done) {
|
||||
const smallData = 'Hello World';
|
||||
const bigData = (new Array(100 * 1024)).join('Large String\n');
|
||||
|
||||
testWriteFileAndFlush(new TextEncoder().encode(smallData), smallData, new TextEncoder().encode(bigData), bigData, done);
|
||||
});
|
||||
|
||||
test('writeFileAndFlush (stream)', function (done) {
|
||||
const smallData = 'Hello World';
|
||||
const bigData = (new Array(100 * 1024)).join('Large String\n');
|
||||
|
||||
testWriteFileAndFlush(toReadable(smallData), smallData, toReadable(bigData), bigData, done);
|
||||
});
|
||||
|
||||
function testWriteFileAndFlush(
|
||||
smallData: string | Buffer | NodeJS.ReadableStream | Uint8Array,
|
||||
smallDataValue: string,
|
||||
bigData: string | Buffer | NodeJS.ReadableStream | Uint8Array,
|
||||
bigDataValue: string,
|
||||
done: (error: Error | null) => void
|
||||
): void {
|
||||
const id = uuid.generateUuid();
|
||||
const parentDir = path.join(os.tmpdir(), 'vsctests', id);
|
||||
const newDir = path.join(parentDir, 'extfs', id);
|
||||
const testFile = path.join(newDir, 'flushed.txt');
|
||||
|
||||
mkdirp(newDir, 493, error => {
|
||||
if (error) {
|
||||
return done(error);
|
||||
}
|
||||
|
||||
assert.ok(fs.existsSync(newDir));
|
||||
|
||||
extfs.writeFileAndFlush(testFile, smallData, null!, error => {
|
||||
if (error) {
|
||||
return done(error);
|
||||
}
|
||||
|
||||
assert.equal(fs.readFileSync(testFile), smallDataValue);
|
||||
|
||||
extfs.writeFileAndFlush(testFile, bigData, null!, error => {
|
||||
if (error) {
|
||||
return done(error);
|
||||
}
|
||||
|
||||
assert.equal(fs.readFileSync(testFile), bigDataValue);
|
||||
|
||||
extfs.del(parentDir, os.tmpdir(), done, ignore);
|
||||
});
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
test('writeFileAndFlush (file stream)', function (done) {
|
||||
const id = uuid.generateUuid();
|
||||
const parentDir = path.join(os.tmpdir(), 'vsctests', id);
|
||||
const sourceFile = getPathFromAmdModule(require, './fixtures/index.html');
|
||||
const newDir = path.join(parentDir, 'extfs', id);
|
||||
const testFile = path.join(newDir, 'flushed.txt');
|
||||
|
||||
mkdirp(newDir, 493, error => {
|
||||
if (error) {
|
||||
return done(error);
|
||||
}
|
||||
|
||||
assert.ok(fs.existsSync(newDir));
|
||||
|
||||
extfs.writeFileAndFlush(testFile, fs.createReadStream(sourceFile), null!, error => {
|
||||
if (error) {
|
||||
return done(error);
|
||||
}
|
||||
|
||||
assert.equal(fs.readFileSync(testFile).toString(), fs.readFileSync(sourceFile).toString());
|
||||
|
||||
extfs.del(parentDir, os.tmpdir(), done, ignore);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
test('writeFileAndFlush (string, error handling)', function (done) {
|
||||
const id = uuid.generateUuid();
|
||||
const parentDir = path.join(os.tmpdir(), 'vsctests', id);
|
||||
const newDir = path.join(parentDir, 'extfs', id);
|
||||
const testFile = path.join(newDir, 'flushed.txt');
|
||||
|
||||
mkdirp(newDir, 493, error => {
|
||||
if (error) {
|
||||
return done(error);
|
||||
}
|
||||
|
||||
assert.ok(fs.existsSync(newDir));
|
||||
|
||||
fs.mkdirSync(testFile); // this will trigger an error because testFile is now a directory!
|
||||
|
||||
extfs.writeFileAndFlush(testFile, 'Hello World', null!, error => {
|
||||
if (!error) {
|
||||
return done(new Error('Expected error for writing to readonly file'));
|
||||
}
|
||||
|
||||
extfs.del(parentDir, os.tmpdir(), done, ignore);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
test('writeFileAndFlush (stream, error handling EISDIR)', function (done) {
|
||||
const id = uuid.generateUuid();
|
||||
const parentDir = path.join(os.tmpdir(), 'vsctests', id);
|
||||
const newDir = path.join(parentDir, 'extfs', id);
|
||||
const testFile = path.join(newDir, 'flushed.txt');
|
||||
|
||||
mkdirp(newDir, 493, error => {
|
||||
if (error) {
|
||||
return done(error);
|
||||
}
|
||||
|
||||
assert.ok(fs.existsSync(newDir));
|
||||
|
||||
fs.mkdirSync(testFile); // this will trigger an error because testFile is now a directory!
|
||||
|
||||
const readable = toReadable('Hello World');
|
||||
extfs.writeFileAndFlush(testFile, readable, null!, error => {
|
||||
if (!error || (<any>error).code !== 'EISDIR') {
|
||||
return done(new Error('Expected EISDIR error for writing to folder but got: ' + (error ? (<any>error).code : 'no error')));
|
||||
}
|
||||
|
||||
// verify that the stream is still consumable (for https://github.com/Microsoft/vscode/issues/42542)
|
||||
assert.equal(readable.read(), 'Hello World');
|
||||
|
||||
extfs.del(parentDir, os.tmpdir(), done, ignore);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
test('writeFileAndFlush (stream, error handling READERROR)', function (done) {
|
||||
const id = uuid.generateUuid();
|
||||
const parentDir = path.join(os.tmpdir(), 'vsctests', id);
|
||||
const newDir = path.join(parentDir, 'extfs', id);
|
||||
const testFile = path.join(newDir, 'flushed.txt');
|
||||
|
||||
mkdirp(newDir, 493, error => {
|
||||
if (error) {
|
||||
return done(error);
|
||||
}
|
||||
|
||||
assert.ok(fs.existsSync(newDir));
|
||||
|
||||
extfs.writeFileAndFlush(testFile, toReadable('Hello World', true /* throw error */), null!, error => {
|
||||
if (!error || error.message !== readError) {
|
||||
return done(new Error('Expected error for writing to folder'));
|
||||
}
|
||||
|
||||
extfs.del(parentDir, os.tmpdir(), done, ignore);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
test('writeFileAndFlush (stream, error handling EACCES)', function (done) {
|
||||
if (isLinux) {
|
||||
return done(); // somehow this test fails on Linux in our TFS builds
|
||||
}
|
||||
|
||||
const id = uuid.generateUuid();
|
||||
const parentDir = path.join(os.tmpdir(), 'vsctests', id);
|
||||
const newDir = path.join(parentDir, 'extfs', id);
|
||||
const testFile = path.join(newDir, 'flushed.txt');
|
||||
|
||||
mkdirp(newDir, 493, error => {
|
||||
if (error) {
|
||||
return done(error);
|
||||
}
|
||||
|
||||
assert.ok(fs.existsSync(newDir));
|
||||
|
||||
fs.writeFileSync(testFile, '');
|
||||
fs.chmodSync(testFile, 33060); // make readonly
|
||||
|
||||
extfs.writeFileAndFlush(testFile, toReadable('Hello World'), null!, error => {
|
||||
if (!error || !((<any>error).code !== 'EACCES' || (<any>error).code !== 'EPERM')) {
|
||||
return done(new Error('Expected EACCES/EPERM error for writing to folder but got: ' + (error ? (<any>error).code : 'no error')));
|
||||
}
|
||||
|
||||
extfs.del(parentDir, os.tmpdir(), done, ignore);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
test('writeFileAndFlush (file stream, error handling)', function (done) {
|
||||
const id = uuid.generateUuid();
|
||||
const parentDir = path.join(os.tmpdir(), 'vsctests', id);
|
||||
const sourceFile = getPathFromAmdModule(require, './fixtures/index.html');
|
||||
const newDir = path.join(parentDir, 'extfs', id);
|
||||
const testFile = path.join(newDir, 'flushed.txt');
|
||||
|
||||
mkdirp(newDir, 493, error => {
|
||||
if (error) {
|
||||
return done(error);
|
||||
}
|
||||
|
||||
assert.ok(fs.existsSync(newDir));
|
||||
|
||||
fs.mkdirSync(testFile); // this will trigger an error because testFile is now a directory!
|
||||
|
||||
extfs.writeFileAndFlush(testFile, fs.createReadStream(sourceFile), null!, error => {
|
||||
if (!error) {
|
||||
return done(new Error('Expected error for writing to folder'));
|
||||
}
|
||||
|
||||
extfs.del(parentDir, os.tmpdir(), done, ignore);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
test('writeFileAndFlushSync', function (done) {
|
||||
const id = uuid.generateUuid();
|
||||
const parentDir = path.join(os.tmpdir(), 'vsctests', id);
|
||||
const newDir = path.join(parentDir, 'extfs', id);
|
||||
const testFile = path.join(newDir, 'flushed.txt');
|
||||
|
||||
mkdirp(newDir, 493, error => {
|
||||
if (error) {
|
||||
return done(error);
|
||||
}
|
||||
|
||||
assert.ok(fs.existsSync(newDir));
|
||||
|
||||
extfs.writeFileAndFlushSync(testFile, 'Hello World', null!);
|
||||
assert.equal(fs.readFileSync(testFile), 'Hello World');
|
||||
|
||||
const largeString = (new Array(100 * 1024)).join('Large String\n');
|
||||
|
||||
extfs.writeFileAndFlushSync(testFile, largeString, null!);
|
||||
assert.equal(fs.readFileSync(testFile), largeString);
|
||||
|
||||
extfs.del(parentDir, os.tmpdir(), done, ignore);
|
||||
});
|
||||
});
|
||||
|
||||
test('realcase', (done) => {
|
||||
const id = uuid.generateUuid();
|
||||
const parentDir = path.join(os.tmpdir(), 'vsctests', id);
|
||||
const newDir = path.join(parentDir, 'extfs', id);
|
||||
|
||||
mkdirp(newDir, 493, error => {
|
||||
|
||||
// {{SQL CARBON EDIT}} don't run this test case on Windows as this fails in VSO
|
||||
// assume case insensitive file system
|
||||
if (process.platform === 'darwin') {
|
||||
const upper = newDir.toUpperCase();
|
||||
const real = extfs.realcaseSync(upper);
|
||||
|
||||
if (real) { // can be null in case of permission errors
|
||||
assert.notEqual(real, upper);
|
||||
assert.equal(real.toUpperCase(), upper);
|
||||
assert.equal(real, newDir);
|
||||
}
|
||||
}
|
||||
|
||||
// linux, unix, etc. -> assume case sensitive file system
|
||||
else if (process.platform !== 'win32') {
|
||||
const real = extfs.realcaseSync(newDir);
|
||||
assert.equal(real, newDir);
|
||||
}
|
||||
|
||||
extfs.del(parentDir, os.tmpdir(), done, ignore);
|
||||
});
|
||||
});
|
||||
|
||||
test('realpath', (done) => {
|
||||
const id = uuid.generateUuid();
|
||||
const parentDir = path.join(os.tmpdir(), 'vsctests', id);
|
||||
const newDir = path.join(parentDir, 'extfs', id);
|
||||
|
||||
mkdirp(newDir, 493, error => {
|
||||
|
||||
extfs.realpath(newDir, (error, realpath) => {
|
||||
assert.ok(realpath);
|
||||
assert.ok(!error);
|
||||
|
||||
extfs.del(parentDir, os.tmpdir(), done, ignore);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
test('realpathSync', (done) => {
|
||||
const id = uuid.generateUuid();
|
||||
const parentDir = path.join(os.tmpdir(), 'vsctests', id);
|
||||
const newDir = path.join(parentDir, 'extfs', id);
|
||||
|
||||
mkdirp(newDir, 493, error => {
|
||||
let realpath!: string;
|
||||
try {
|
||||
realpath = extfs.realpathSync(newDir);
|
||||
} catch (error) {
|
||||
assert.ok(!error);
|
||||
}
|
||||
assert.ok(realpath!);
|
||||
|
||||
extfs.del(parentDir, os.tmpdir(), done, ignore);
|
||||
});
|
||||
});
|
||||
|
||||
test('mkdirp cancellation', (done) => {
|
||||
const id = uuid.generateUuid();
|
||||
const parentDir = path.join(os.tmpdir(), 'vsctests', id);
|
||||
const newDir = path.join(parentDir, 'extfs', id);
|
||||
|
||||
const source = new CancellationTokenSource();
|
||||
|
||||
const mkdirpPromise = extfs.mkdirp(newDir, 493, source.token);
|
||||
source.cancel();
|
||||
|
||||
return mkdirpPromise.then(res => {
|
||||
assert.equal(res, false);
|
||||
|
||||
extfs.del(parentDir, os.tmpdir(), done, ignore);
|
||||
});
|
||||
});
|
||||
|
||||
test('sanitizeFilePath', () => {
|
||||
if (isWindows) {
|
||||
assert.equal(extfs.sanitizeFilePath('.', 'C:\\the\\cwd'), 'C:\\the\\cwd');
|
||||
assert.equal(extfs.sanitizeFilePath('', 'C:\\the\\cwd'), 'C:\\the\\cwd');
|
||||
|
||||
assert.equal(extfs.sanitizeFilePath('C:', 'C:\\the\\cwd'), 'C:\\');
|
||||
assert.equal(extfs.sanitizeFilePath('C:\\', 'C:\\the\\cwd'), 'C:\\');
|
||||
assert.equal(extfs.sanitizeFilePath('C:\\\\', 'C:\\the\\cwd'), 'C:\\');
|
||||
|
||||
assert.equal(extfs.sanitizeFilePath('C:\\folder\\my.txt', 'C:\\the\\cwd'), 'C:\\folder\\my.txt');
|
||||
assert.equal(extfs.sanitizeFilePath('C:\\folder\\my', 'C:\\the\\cwd'), 'C:\\folder\\my');
|
||||
assert.equal(extfs.sanitizeFilePath('C:\\folder\\..\\my', 'C:\\the\\cwd'), 'C:\\my');
|
||||
assert.equal(extfs.sanitizeFilePath('C:\\folder\\my\\', 'C:\\the\\cwd'), 'C:\\folder\\my');
|
||||
assert.equal(extfs.sanitizeFilePath('C:\\folder\\my\\\\\\', 'C:\\the\\cwd'), 'C:\\folder\\my');
|
||||
|
||||
assert.equal(extfs.sanitizeFilePath('my.txt', 'C:\\the\\cwd'), 'C:\\the\\cwd\\my.txt');
|
||||
assert.equal(extfs.sanitizeFilePath('my.txt\\', 'C:\\the\\cwd'), 'C:\\the\\cwd\\my.txt');
|
||||
|
||||
assert.equal(extfs.sanitizeFilePath('\\\\localhost\\folder\\my', 'C:\\the\\cwd'), '\\\\localhost\\folder\\my');
|
||||
assert.equal(extfs.sanitizeFilePath('\\\\localhost\\folder\\my\\', 'C:\\the\\cwd'), '\\\\localhost\\folder\\my');
|
||||
} else {
|
||||
assert.equal(extfs.sanitizeFilePath('.', '/the/cwd'), '/the/cwd');
|
||||
assert.equal(extfs.sanitizeFilePath('', '/the/cwd'), '/the/cwd');
|
||||
assert.equal(extfs.sanitizeFilePath('/', '/the/cwd'), '/');
|
||||
|
||||
assert.equal(extfs.sanitizeFilePath('/folder/my.txt', '/the/cwd'), '/folder/my.txt');
|
||||
assert.equal(extfs.sanitizeFilePath('/folder/my', '/the/cwd'), '/folder/my');
|
||||
assert.equal(extfs.sanitizeFilePath('/folder/../my', '/the/cwd'), '/my');
|
||||
assert.equal(extfs.sanitizeFilePath('/folder/my/', '/the/cwd'), '/folder/my');
|
||||
assert.equal(extfs.sanitizeFilePath('/folder/my///', '/the/cwd'), '/folder/my');
|
||||
|
||||
assert.equal(extfs.sanitizeFilePath('my.txt', '/the/cwd'), '/the/cwd/my.txt');
|
||||
assert.equal(extfs.sanitizeFilePath('my.txt/', '/the/cwd'), '/the/cwd/my.txt');
|
||||
}
|
||||
});
|
||||
});
|
|
@ -0,0 +1,73 @@
|
|||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the MIT License. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
|
||||
import * as assert from 'assert';
|
||||
import * as os from 'os';
|
||||
import * as path from 'vs/base/common/path';
|
||||
import * as uuid from 'vs/base/common/uuid';
|
||||
import * as pfs from 'vs/base/node/pfs';
|
||||
import { realcaseSync, realpath, realpathSync } from 'vs/base/node/extpath';
|
||||
|
||||
suite('Extpath', () => {
|
||||
|
||||
test('realcase', async () => {
|
||||
const id = uuid.generateUuid();
|
||||
const parentDir = path.join(os.tmpdir(), 'vsctests', id);
|
||||
const newDir = path.join(parentDir, 'extpath', id);
|
||||
|
||||
await pfs.mkdirp(newDir, 493);
|
||||
|
||||
// assume case insensitive file system
|
||||
if (process.platform === 'win32' || process.platform === 'darwin') {
|
||||
const upper = newDir.toUpperCase();
|
||||
const real = realcaseSync(upper);
|
||||
|
||||
if (real) { // can be null in case of permission errors
|
||||
assert.notEqual(real, upper);
|
||||
assert.equal(real.toUpperCase(), upper);
|
||||
assert.equal(real, newDir);
|
||||
}
|
||||
}
|
||||
|
||||
// linux, unix, etc. -> assume case sensitive file system
|
||||
else {
|
||||
const real = realcaseSync(newDir);
|
||||
assert.equal(real, newDir);
|
||||
}
|
||||
|
||||
await pfs.rimraf(parentDir, pfs.RimRafMode.MOVE);
|
||||
});
|
||||
|
||||
test('realpath', async () => {
|
||||
const id = uuid.generateUuid();
|
||||
const parentDir = path.join(os.tmpdir(), 'vsctests', id);
|
||||
const newDir = path.join(parentDir, 'extpath', id);
|
||||
|
||||
await pfs.mkdirp(newDir, 493);
|
||||
|
||||
const realpathVal = await realpath(newDir);
|
||||
assert.ok(realpathVal);
|
||||
|
||||
await pfs.rimraf(parentDir, pfs.RimRafMode.MOVE);
|
||||
});
|
||||
|
||||
test('realpathSync', async () => {
|
||||
const id = uuid.generateUuid();
|
||||
const parentDir = path.join(os.tmpdir(), 'vsctests', id);
|
||||
const newDir = path.join(parentDir, 'extpath', id);
|
||||
|
||||
await pfs.mkdirp(newDir, 493);
|
||||
|
||||
let realpath!: string;
|
||||
try {
|
||||
realpath = realpathSync(newDir);
|
||||
} catch (error) {
|
||||
assert.ok(!error);
|
||||
}
|
||||
assert.ok(realpath!);
|
||||
|
||||
await pfs.rimraf(parentDir, pfs.RimRafMode.MOVE);
|
||||
});
|
||||
});
|
|
@ -1,488 +0,0 @@
|
|||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
|
||||
import * as assert from 'assert';
|
||||
import * as flow from 'vs/base/node/flow';
|
||||
|
||||
const loop = flow.loop;
|
||||
const sequence = flow.sequence;
|
||||
const parallel = flow.parallel;
|
||||
|
||||
suite('Flow', () => {
|
||||
function assertCounterEquals(counter: number, expected: number): void {
|
||||
assert.ok(counter === expected, 'Expected ' + expected + ' assertions, but got ' + counter);
|
||||
}
|
||||
|
||||
function syncThrowsError(callback: any): void {
|
||||
callback(new Error('foo'), null);
|
||||
}
|
||||
|
||||
function syncSequenceGetThrowsError(value: any, callback: any) {
|
||||
sequence(
|
||||
function onError(error) {
|
||||
callback(error, null);
|
||||
},
|
||||
|
||||
function getFirst(this: any) {
|
||||
syncThrowsError(this);
|
||||
},
|
||||
|
||||
function handleFirst(first: number) {
|
||||
//Foo
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
function syncGet(value: any, callback: any): void {
|
||||
callback(null, value);
|
||||
}
|
||||
|
||||
function syncGetError(value: any, callback: any): void {
|
||||
callback(new Error(''), null);
|
||||
}
|
||||
|
||||
function asyncGet(value: any, callback: any): void {
|
||||
process.nextTick(function () {
|
||||
callback(null, value);
|
||||
});
|
||||
}
|
||||
|
||||
function asyncGetError(value: any, callback: any): void {
|
||||
process.nextTick(function () {
|
||||
callback(new Error(''), null);
|
||||
});
|
||||
}
|
||||
|
||||
test('loopSync', function (done: () => void) {
|
||||
const elements = ['1', '2', '3'];
|
||||
loop(elements, function (element, callback, index, total) {
|
||||
assert.ok(index === 0 || index === 1 || index === 2);
|
||||
assert.deepEqual(3, total);
|
||||
callback(null, element);
|
||||
}, function (error, result) {
|
||||
assert.equal(error, null);
|
||||
assert.deepEqual(result, elements);
|
||||
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
test('loopByFunctionSync', function (done: () => void) {
|
||||
const elements = function (callback: Function) {
|
||||
callback(null, ['1', '2', '3']);
|
||||
};
|
||||
|
||||
loop(elements, function (element, callback) {
|
||||
callback(null, element);
|
||||
}, function (error, result) {
|
||||
assert.equal(error, null);
|
||||
assert.deepEqual(result, ['1', '2', '3']);
|
||||
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
test('loopByFunctionAsync', function (done: () => void) {
|
||||
const elements = function (callback: Function) {
|
||||
process.nextTick(function () {
|
||||
callback(null, ['1', '2', '3']);
|
||||
});
|
||||
};
|
||||
|
||||
loop(elements, function (element, callback) {
|
||||
callback(null, element);
|
||||
}, function (error, result) {
|
||||
assert.equal(error, null);
|
||||
assert.deepEqual(result, ['1', '2', '3']);
|
||||
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
test('loopSyncErrorByThrow', function (done: () => void) {
|
||||
const elements = ['1', '2', '3'];
|
||||
loop(elements, function (element, callback) {
|
||||
if (element === '2') {
|
||||
throw new Error('foo');
|
||||
} else {
|
||||
callback(null, element);
|
||||
}
|
||||
}, function (error, result) {
|
||||
assert.ok(error);
|
||||
assert.ok(!result);
|
||||
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
test('loopSyncErrorByCallback', function (done: () => void) {
|
||||
const elements = ['1', '2', '3'];
|
||||
loop(elements, function (element, callback) {
|
||||
if (element === '2') {
|
||||
callback(new Error('foo'), null);
|
||||
} else {
|
||||
callback(null, element);
|
||||
}
|
||||
}, function (error, result) {
|
||||
assert.ok(error);
|
||||
assert.ok(!result);
|
||||
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
test('loopAsync', function (done: () => void) {
|
||||
const elements = ['1', '2', '3'];
|
||||
loop(elements, function (element, callback) {
|
||||
process.nextTick(function () {
|
||||
callback(null, element);
|
||||
});
|
||||
}, function (error, result) {
|
||||
assert.equal(error, null);
|
||||
assert.deepEqual(result, elements);
|
||||
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
test('loopAsyncErrorByCallback', function (done: () => void) {
|
||||
const elements = ['1', '2', '3'];
|
||||
loop(elements, function (element, callback) {
|
||||
process.nextTick(function () {
|
||||
if (element === '2') {
|
||||
callback(new Error('foo'), null);
|
||||
} else {
|
||||
callback(null, element);
|
||||
}
|
||||
});
|
||||
}, function (error, result) {
|
||||
assert.ok(error);
|
||||
assert.ok(!result);
|
||||
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
test('sequenceSync', function (done: () => void) {
|
||||
let assertionCount = 0;
|
||||
let errorCount = 0;
|
||||
|
||||
sequence(
|
||||
function onError(error) {
|
||||
errorCount++;
|
||||
},
|
||||
|
||||
function getFirst(this: any) {
|
||||
syncGet('1', this);
|
||||
},
|
||||
|
||||
function handleFirst(this: any, first: number) {
|
||||
assert.deepEqual('1', first);
|
||||
assertionCount++;
|
||||
syncGet('2', this);
|
||||
},
|
||||
|
||||
function handleSecond(this: any, second: any) {
|
||||
assert.deepEqual('2', second);
|
||||
assertionCount++;
|
||||
syncGet(null, this);
|
||||
},
|
||||
|
||||
function handleThird(third: any) {
|
||||
assert.ok(!third);
|
||||
assertionCount++;
|
||||
|
||||
assertCounterEquals(assertionCount, 3);
|
||||
assertCounterEquals(errorCount, 0);
|
||||
done();
|
||||
}
|
||||
);
|
||||
});
|
||||
|
||||
test('sequenceAsync', function (done: () => void) {
|
||||
let assertionCount = 0;
|
||||
let errorCount = 0;
|
||||
|
||||
sequence(
|
||||
function onError(error) {
|
||||
errorCount++;
|
||||
},
|
||||
|
||||
function getFirst(this: any) {
|
||||
asyncGet('1', this);
|
||||
},
|
||||
|
||||
function handleFirst(this: any, first: number) {
|
||||
assert.deepEqual('1', first);
|
||||
assertionCount++;
|
||||
asyncGet('2', this);
|
||||
},
|
||||
|
||||
function handleSecond(this: any, second: number) {
|
||||
assert.deepEqual('2', second);
|
||||
assertionCount++;
|
||||
asyncGet(null, this);
|
||||
},
|
||||
|
||||
function handleThird(third: number) {
|
||||
assert.ok(!third);
|
||||
assertionCount++;
|
||||
|
||||
assertCounterEquals(assertionCount, 3);
|
||||
assertCounterEquals(errorCount, 0);
|
||||
done();
|
||||
}
|
||||
);
|
||||
});
|
||||
|
||||
test('sequenceSyncErrorByThrow', function (done: () => void) {
|
||||
let assertionCount = 0;
|
||||
let errorCount = 0;
|
||||
|
||||
sequence(
|
||||
function onError(error) {
|
||||
errorCount++;
|
||||
|
||||
assertCounterEquals(assertionCount, 1);
|
||||
assertCounterEquals(errorCount, 1);
|
||||
done();
|
||||
},
|
||||
|
||||
function getFirst(this: any) {
|
||||
syncGet('1', this);
|
||||
},
|
||||
|
||||
function handleFirst(this: any, first: number) {
|
||||
assert.deepEqual('1', first);
|
||||
assertionCount++;
|
||||
syncGet('2', this);
|
||||
},
|
||||
|
||||
function handleSecond(second: number) {
|
||||
if (true) {
|
||||
throw new Error('');
|
||||
}
|
||||
// assertionCount++;
|
||||
// syncGet(null, this);
|
||||
},
|
||||
|
||||
function handleThird(third: number) {
|
||||
throw new Error('We should not be here');
|
||||
}
|
||||
);
|
||||
});
|
||||
|
||||
test('sequenceSyncErrorByCallback', function (done: () => void) {
|
||||
let assertionCount = 0;
|
||||
let errorCount = 0;
|
||||
|
||||
sequence(
|
||||
function onError(error) {
|
||||
errorCount++;
|
||||
|
||||
assertCounterEquals(assertionCount, 1);
|
||||
assertCounterEquals(errorCount, 1);
|
||||
done();
|
||||
},
|
||||
|
||||
function getFirst(this: any) {
|
||||
syncGet('1', this);
|
||||
},
|
||||
|
||||
function handleFirst(this: any, first: number) {
|
||||
assert.deepEqual('1', first);
|
||||
assertionCount++;
|
||||
syncGetError('2', this);
|
||||
},
|
||||
|
||||
function handleSecond(second: number) {
|
||||
throw new Error('We should not be here');
|
||||
}
|
||||
);
|
||||
});
|
||||
|
||||
test('sequenceAsyncErrorByThrow', function (done: () => void) {
|
||||
let assertionCount = 0;
|
||||
let errorCount = 0;
|
||||
|
||||
sequence(
|
||||
function onError(error) {
|
||||
errorCount++;
|
||||
|
||||
assertCounterEquals(assertionCount, 1);
|
||||
assertCounterEquals(errorCount, 1);
|
||||
done();
|
||||
},
|
||||
|
||||
function getFirst(this: any) {
|
||||
asyncGet('1', this);
|
||||
},
|
||||
|
||||
function handleFirst(this: any, first: number) {
|
||||
assert.deepEqual('1', first);
|
||||
assertionCount++;
|
||||
asyncGet('2', this);
|
||||
},
|
||||
|
||||
function handleSecond(second: number) {
|
||||
if (true) {
|
||||
throw new Error('');
|
||||
}
|
||||
// assertionCount++;
|
||||
// asyncGet(null, this);
|
||||
},
|
||||
|
||||
function handleThird(third: number) {
|
||||
throw new Error('We should not be here');
|
||||
}
|
||||
);
|
||||
});
|
||||
|
||||
test('sequenceAsyncErrorByCallback', function (done: () => void) {
|
||||
let assertionCount = 0;
|
||||
let errorCount = 0;
|
||||
|
||||
sequence(
|
||||
function onError(error) {
|
||||
errorCount++;
|
||||
|
||||
assertCounterEquals(assertionCount, 1);
|
||||
assertCounterEquals(errorCount, 1);
|
||||
done();
|
||||
},
|
||||
|
||||
function getFirst(this: any) {
|
||||
asyncGet('1', this);
|
||||
},
|
||||
|
||||
function handleFirst(this: any, first: number) {
|
||||
assert.deepEqual('1', first);
|
||||
assertionCount++;
|
||||
asyncGetError('2', this);
|
||||
},
|
||||
|
||||
function handleSecond(second: number) {
|
||||
throw new Error('We should not be here');
|
||||
}
|
||||
);
|
||||
});
|
||||
|
||||
test('syncChainedSequenceError', function (done: () => void) {
|
||||
sequence(
|
||||
function onError(error) {
|
||||
done();
|
||||
},
|
||||
|
||||
function getFirst(this: any) {
|
||||
syncSequenceGetThrowsError('1', this);
|
||||
}
|
||||
);
|
||||
});
|
||||
|
||||
test('tolerateBooleanResults', function (done: () => void) {
|
||||
let assertionCount = 0;
|
||||
let errorCount = 0;
|
||||
|
||||
sequence(
|
||||
function onError(error) {
|
||||
errorCount++;
|
||||
},
|
||||
|
||||
function getFirst(this: any) {
|
||||
this(true);
|
||||
},
|
||||
|
||||
function getSecond(this: any, result: boolean) {
|
||||
assert.equal(result, true);
|
||||
this(false);
|
||||
},
|
||||
|
||||
function last(result: boolean) {
|
||||
assert.equal(result, false);
|
||||
assertionCount++;
|
||||
|
||||
assertCounterEquals(assertionCount, 1);
|
||||
assertCounterEquals(errorCount, 0);
|
||||
done();
|
||||
}
|
||||
);
|
||||
});
|
||||
|
||||
test('loopTolerateBooleanResults', function (done: () => void) {
|
||||
let elements = ['1', '2', '3'];
|
||||
loop(elements, function (element, callback) {
|
||||
process.nextTick(function () {
|
||||
(<any>callback)(true);
|
||||
});
|
||||
}, function (error, result) {
|
||||
assert.equal(error, null);
|
||||
assert.deepEqual(result, [true, true, true]);
|
||||
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
test('parallel', function (done: () => void) {
|
||||
let elements = [1, 2, 3, 4, 5];
|
||||
let sum = 0;
|
||||
|
||||
parallel(elements, function (element, callback) {
|
||||
sum += element;
|
||||
callback(null!, element * element);
|
||||
}, function (errors, result) {
|
||||
assert.ok(!errors);
|
||||
|
||||
assert.deepEqual(sum, 15);
|
||||
assert.deepEqual(result, [1, 4, 9, 16, 25]);
|
||||
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
test('parallel - setTimeout', function (done: () => void) {
|
||||
let elements = [1, 2, 3, 4, 5];
|
||||
let timeouts = [10, 30, 5, 0, 4];
|
||||
let sum = 0;
|
||||
|
||||
parallel(elements, function (element, callback) {
|
||||
setTimeout(function () {
|
||||
sum += element;
|
||||
callback(null!, element * element);
|
||||
}, timeouts.pop());
|
||||
}, function (errors, result) {
|
||||
assert.ok(!errors);
|
||||
|
||||
assert.deepEqual(sum, 15);
|
||||
assert.deepEqual(result, [1, 4, 9, 16, 25]);
|
||||
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
test('parallel - with error', function (done: () => void) {
|
||||
const elements = [1, 2, 3, 4, 5];
|
||||
const timeouts = [10, 30, 5, 0, 4];
|
||||
let sum = 0;
|
||||
|
||||
parallel(elements, function (element, callback) {
|
||||
setTimeout(function () {
|
||||
if (element === 4) {
|
||||
callback(new Error('error!'), null!);
|
||||
} else {
|
||||
sum += element;
|
||||
callback(null!, element * element);
|
||||
}
|
||||
}, timeouts.pop());
|
||||
}, function (errors, result) {
|
||||
assert.ok(errors);
|
||||
assert.deepEqual(errors, [null, null, null, new Error('error!'), null]);
|
||||
|
||||
assert.deepEqual(sum, 11);
|
||||
assert.deepEqual(result, [1, 4, 9, null, 25]);
|
||||
|
||||
done();
|
||||
});
|
||||
});
|
||||
});
|
|
@ -455,7 +455,7 @@ suite('Glob', () => {
|
|||
|
||||
expression = {
|
||||
'**/*.js': {
|
||||
}
|
||||
} as any
|
||||
};
|
||||
|
||||
assert.strictEqual('**/*.js', glob.match(expression, 'test.js', hasSibling));
|
||||
|
@ -474,7 +474,7 @@ suite('Glob', () => {
|
|||
'**/*.js': { when: '$(basename).ts' },
|
||||
'**/*.as': true,
|
||||
'**/*.foo': false,
|
||||
'**/*.bananas': { bananas: true }
|
||||
'**/*.bananas': { bananas: true } as any
|
||||
};
|
||||
|
||||
assert.strictEqual('**/*.js', glob.match(expression, 'test.js', hasSibling));
|
||||
|
@ -691,7 +691,7 @@ suite('Glob', () => {
|
|||
});
|
||||
|
||||
test('expression with other falsy value', function () {
|
||||
let expr = { '**/*.js': 0 };
|
||||
let expr = { '**/*.js': 0 } as any;
|
||||
|
||||
assert.strictEqual(glob.match(expr, 'foo.js'), '**/*.js');
|
||||
});
|
||||
|
|
|
@ -1,138 +0,0 @@
|
|||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
|
||||
import * as assert from 'assert';
|
||||
import * as os from 'os';
|
||||
|
||||
import * as path from 'vs/base/common/path';
|
||||
import * as fs from 'fs';
|
||||
|
||||
import * as uuid from 'vs/base/common/uuid';
|
||||
import * as pfs from 'vs/base/node/pfs';
|
||||
import { timeout } from 'vs/base/common/async';
|
||||
|
||||
suite('PFS', () => {
|
||||
|
||||
test('writeFile', () => {
|
||||
const id = uuid.generateUuid();
|
||||
const parentDir = path.join(os.tmpdir(), 'vsctests', id);
|
||||
const newDir = path.join(parentDir, 'pfs', id);
|
||||
const testFile = path.join(newDir, 'writefile.txt');
|
||||
|
||||
return pfs.mkdirp(newDir, 493).then(() => {
|
||||
assert.ok(fs.existsSync(newDir));
|
||||
|
||||
return pfs.writeFile(testFile, 'Hello World', null!).then(() => {
|
||||
assert.equal(fs.readFileSync(testFile), 'Hello World');
|
||||
|
||||
return pfs.del(parentDir, os.tmpdir());
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
test('writeFile - parallel write on different files works', function () {
|
||||
const id = uuid.generateUuid();
|
||||
const parentDir = path.join(os.tmpdir(), 'vsctests', id);
|
||||
const newDir = path.join(parentDir, 'pfs', id);
|
||||
const testFile1 = path.join(newDir, 'writefile1.txt');
|
||||
const testFile2 = path.join(newDir, 'writefile2.txt');
|
||||
const testFile3 = path.join(newDir, 'writefile3.txt');
|
||||
const testFile4 = path.join(newDir, 'writefile4.txt');
|
||||
const testFile5 = path.join(newDir, 'writefile5.txt');
|
||||
|
||||
return pfs.mkdirp(newDir, 493).then(() => {
|
||||
assert.ok(fs.existsSync(newDir));
|
||||
|
||||
return Promise.all([
|
||||
pfs.writeFile(testFile1, 'Hello World 1', null!),
|
||||
pfs.writeFile(testFile2, 'Hello World 2', null!),
|
||||
pfs.writeFile(testFile3, 'Hello World 3', null!),
|
||||
pfs.writeFile(testFile4, 'Hello World 4', null!),
|
||||
pfs.writeFile(testFile5, 'Hello World 5', null!)
|
||||
]).then(() => {
|
||||
assert.equal(fs.readFileSync(testFile1), 'Hello World 1');
|
||||
assert.equal(fs.readFileSync(testFile2), 'Hello World 2');
|
||||
assert.equal(fs.readFileSync(testFile3), 'Hello World 3');
|
||||
assert.equal(fs.readFileSync(testFile4), 'Hello World 4');
|
||||
assert.equal(fs.readFileSync(testFile5), 'Hello World 5');
|
||||
|
||||
return pfs.del(parentDir, os.tmpdir());
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
test('writeFile - parallel write on same files works and is sequentalized', function () {
|
||||
const id = uuid.generateUuid();
|
||||
const parentDir = path.join(os.tmpdir(), 'vsctests', id);
|
||||
const newDir = path.join(parentDir, 'pfs', id);
|
||||
const testFile = path.join(newDir, 'writefile.txt');
|
||||
|
||||
return pfs.mkdirp(newDir, 493).then(() => {
|
||||
assert.ok(fs.existsSync(newDir));
|
||||
|
||||
return Promise.all([
|
||||
pfs.writeFile(testFile, 'Hello World 1', undefined),
|
||||
pfs.writeFile(testFile, 'Hello World 2', undefined),
|
||||
timeout(10).then(() => pfs.writeFile(testFile, 'Hello World 3', undefined)),
|
||||
pfs.writeFile(testFile, 'Hello World 4', undefined),
|
||||
timeout(10).then(() => pfs.writeFile(testFile, 'Hello World 5', undefined))
|
||||
]).then(() => {
|
||||
assert.equal(fs.readFileSync(testFile), 'Hello World 5');
|
||||
|
||||
return pfs.del(parentDir, os.tmpdir());
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
test('rimraf - simple', function () {
|
||||
const id = uuid.generateUuid();
|
||||
const parentDir = path.join(os.tmpdir(), 'vsctests', id);
|
||||
const newDir = path.join(parentDir, 'extfs', id);
|
||||
|
||||
return pfs.mkdirp(newDir, 493).then(() => {
|
||||
fs.writeFileSync(path.join(newDir, 'somefile.txt'), 'Contents');
|
||||
fs.writeFileSync(path.join(newDir, 'someOtherFile.txt'), 'Contents');
|
||||
|
||||
return pfs.rimraf(newDir).then(() => {
|
||||
assert.ok(!fs.existsSync(newDir));
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
test('rimraf - recursive folder structure', function () {
|
||||
const id = uuid.generateUuid();
|
||||
const parentDir = path.join(os.tmpdir(), 'vsctests', id);
|
||||
const newDir = path.join(parentDir, 'extfs', id);
|
||||
|
||||
return pfs.mkdirp(newDir, 493).then(() => {
|
||||
fs.writeFileSync(path.join(newDir, 'somefile.txt'), 'Contents');
|
||||
fs.writeFileSync(path.join(newDir, 'someOtherFile.txt'), 'Contents');
|
||||
|
||||
fs.mkdirSync(path.join(newDir, 'somefolder'));
|
||||
fs.writeFileSync(path.join(newDir, 'somefolder', 'somefile.txt'), 'Contents');
|
||||
|
||||
return pfs.rimraf(newDir).then(() => {
|
||||
assert.ok(!fs.existsSync(newDir));
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
test('moveIgnoreError', function () {
|
||||
const id = uuid.generateUuid();
|
||||
const parentDir = path.join(os.tmpdir(), 'vsctests', id);
|
||||
const newDir = path.join(parentDir, 'extfs', id);
|
||||
|
||||
return pfs.mkdirp(newDir, 493).then(() => {
|
||||
return pfs.renameIgnoreError(path.join(newDir, 'foo'), path.join(newDir, 'bar')).then(() => {
|
||||
|
||||
return pfs.del(parentDir, os.tmpdir());
|
||||
}, error => {
|
||||
assert.fail(error);
|
||||
|
||||
return Promise.reject(error);
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
|
@ -1,6 +1,6 @@
|
|||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
||||
* Licensed under the MIT License. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
|
||||
/*----------------------------------------------------------
|
|
@ -0,0 +1,612 @@
|
|||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the MIT License. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
|
||||
import * as assert from 'assert';
|
||||
import * as os from 'os';
|
||||
import * as path from 'vs/base/common/path';
|
||||
import * as fs from 'fs';
|
||||
import { Readable } from 'stream';
|
||||
import * as uuid from 'vs/base/common/uuid';
|
||||
import * as pfs from 'vs/base/node/pfs';
|
||||
import { timeout } from 'vs/base/common/async';
|
||||
import { getPathFromAmdModule } from 'vs/base/common/amd';
|
||||
import { CancellationTokenSource } from 'vs/base/common/cancellation';
|
||||
import { isWindows, isLinux } from 'vs/base/common/platform';
|
||||
import { canNormalize } from 'vs/base/common/normalization';
|
||||
import { VSBuffer } from 'vs/base/common/buffer';
|
||||
|
||||
const chunkSize = 64 * 1024;
|
||||
const readError = 'Error while reading';
|
||||
function toReadable(value: string, throwError?: boolean): Readable {
|
||||
const totalChunks = Math.ceil(value.length / chunkSize);
|
||||
const stringChunks: string[] = [];
|
||||
|
||||
for (let i = 0, j = 0; i < totalChunks; ++i, j += chunkSize) {
|
||||
stringChunks[i] = value.substr(j, chunkSize);
|
||||
}
|
||||
|
||||
let counter = 0;
|
||||
return new Readable({
|
||||
read: function () {
|
||||
if (throwError) {
|
||||
this.emit('error', new Error(readError));
|
||||
}
|
||||
|
||||
let res!: string;
|
||||
let canPush = true;
|
||||
while (canPush && (res = stringChunks[counter++])) {
|
||||
canPush = this.push(res);
|
||||
}
|
||||
|
||||
// EOS
|
||||
if (!res) {
|
||||
this.push(null);
|
||||
}
|
||||
},
|
||||
encoding: 'utf8'
|
||||
});
|
||||
}
|
||||
|
||||
suite('PFS', () => {
|
||||
|
||||
test('writeFile', async () => {
|
||||
const id = uuid.generateUuid();
|
||||
const parentDir = path.join(os.tmpdir(), 'vsctests', id);
|
||||
const newDir = path.join(parentDir, 'pfs', id);
|
||||
const testFile = path.join(newDir, 'writefile.txt');
|
||||
|
||||
await pfs.mkdirp(newDir, 493);
|
||||
assert.ok(fs.existsSync(newDir));
|
||||
|
||||
await pfs.writeFile(testFile, 'Hello World', (null!));
|
||||
assert.equal(fs.readFileSync(testFile), 'Hello World');
|
||||
|
||||
await pfs.rimraf(parentDir, pfs.RimRafMode.MOVE);
|
||||
});
|
||||
|
||||
test('writeFile - parallel write on different files works', async () => {
|
||||
const id = uuid.generateUuid();
|
||||
const parentDir = path.join(os.tmpdir(), 'vsctests', id);
|
||||
const newDir = path.join(parentDir, 'pfs', id);
|
||||
const testFile1 = path.join(newDir, 'writefile1.txt');
|
||||
const testFile2 = path.join(newDir, 'writefile2.txt');
|
||||
const testFile3 = path.join(newDir, 'writefile3.txt');
|
||||
const testFile4 = path.join(newDir, 'writefile4.txt');
|
||||
const testFile5 = path.join(newDir, 'writefile5.txt');
|
||||
|
||||
await pfs.mkdirp(newDir, 493);
|
||||
assert.ok(fs.existsSync(newDir));
|
||||
|
||||
await Promise.all([
|
||||
pfs.writeFile(testFile1, 'Hello World 1', (null!)),
|
||||
pfs.writeFile(testFile2, 'Hello World 2', (null!)),
|
||||
pfs.writeFile(testFile3, 'Hello World 3', (null!)),
|
||||
pfs.writeFile(testFile4, 'Hello World 4', (null!)),
|
||||
pfs.writeFile(testFile5, 'Hello World 5', (null!))
|
||||
]);
|
||||
assert.equal(fs.readFileSync(testFile1), 'Hello World 1');
|
||||
assert.equal(fs.readFileSync(testFile2), 'Hello World 2');
|
||||
assert.equal(fs.readFileSync(testFile3), 'Hello World 3');
|
||||
assert.equal(fs.readFileSync(testFile4), 'Hello World 4');
|
||||
assert.equal(fs.readFileSync(testFile5), 'Hello World 5');
|
||||
|
||||
await pfs.rimraf(parentDir, pfs.RimRafMode.MOVE);
|
||||
});
|
||||
|
||||
test('writeFile - parallel write on same files works and is sequentalized', async () => {
|
||||
const id = uuid.generateUuid();
|
||||
const parentDir = path.join(os.tmpdir(), 'vsctests', id);
|
||||
const newDir = path.join(parentDir, 'pfs', id);
|
||||
const testFile = path.join(newDir, 'writefile.txt');
|
||||
|
||||
await pfs.mkdirp(newDir, 493);
|
||||
assert.ok(fs.existsSync(newDir));
|
||||
|
||||
await Promise.all([
|
||||
pfs.writeFile(testFile, 'Hello World 1', undefined),
|
||||
pfs.writeFile(testFile, 'Hello World 2', undefined),
|
||||
timeout(10).then(() => pfs.writeFile(testFile, 'Hello World 3', undefined)),
|
||||
pfs.writeFile(testFile, 'Hello World 4', undefined),
|
||||
timeout(10).then(() => pfs.writeFile(testFile, 'Hello World 5', undefined))
|
||||
]);
|
||||
assert.equal(fs.readFileSync(testFile), 'Hello World 5');
|
||||
|
||||
await pfs.rimraf(parentDir, pfs.RimRafMode.MOVE);
|
||||
});
|
||||
|
||||
test('rimraf - simple - unlink', async () => {
|
||||
const id = uuid.generateUuid();
|
||||
const parentDir = path.join(os.tmpdir(), 'vsctests', id);
|
||||
const newDir = path.join(parentDir, 'pfs', id);
|
||||
|
||||
await pfs.mkdirp(newDir, 493);
|
||||
fs.writeFileSync(path.join(newDir, 'somefile.txt'), 'Contents');
|
||||
fs.writeFileSync(path.join(newDir, 'someOtherFile.txt'), 'Contents');
|
||||
|
||||
await pfs.rimraf(newDir);
|
||||
assert.ok(!fs.existsSync(newDir));
|
||||
});
|
||||
|
||||
test('rimraf - simple - move', async () => {
|
||||
const id = uuid.generateUuid();
|
||||
const parentDir = path.join(os.tmpdir(), 'vsctests', id);
|
||||
const newDir = path.join(parentDir, 'pfs', id);
|
||||
|
||||
await pfs.mkdirp(newDir, 493);
|
||||
fs.writeFileSync(path.join(newDir, 'somefile.txt'), 'Contents');
|
||||
fs.writeFileSync(path.join(newDir, 'someOtherFile.txt'), 'Contents');
|
||||
|
||||
await pfs.rimraf(newDir, pfs.RimRafMode.MOVE);
|
||||
assert.ok(!fs.existsSync(newDir));
|
||||
});
|
||||
|
||||
test('rimraf - recursive folder structure - unlink', async () => {
|
||||
const id = uuid.generateUuid();
|
||||
const parentDir = path.join(os.tmpdir(), 'vsctests', id);
|
||||
const newDir = path.join(parentDir, 'pfs', id);
|
||||
|
||||
await pfs.mkdirp(newDir, 493);
|
||||
fs.writeFileSync(path.join(newDir, 'somefile.txt'), 'Contents');
|
||||
fs.writeFileSync(path.join(newDir, 'someOtherFile.txt'), 'Contents');
|
||||
fs.mkdirSync(path.join(newDir, 'somefolder'));
|
||||
fs.writeFileSync(path.join(newDir, 'somefolder', 'somefile.txt'), 'Contents');
|
||||
|
||||
await pfs.rimraf(newDir);
|
||||
assert.ok(!fs.existsSync(newDir));
|
||||
});
|
||||
|
||||
test('rimraf - recursive folder structure - move', async () => {
|
||||
const id = uuid.generateUuid();
|
||||
const parentDir = path.join(os.tmpdir(), 'vsctests', id);
|
||||
const newDir = path.join(parentDir, 'pfs', id);
|
||||
|
||||
await pfs.mkdirp(newDir, 493);
|
||||
fs.writeFileSync(path.join(newDir, 'somefile.txt'), 'Contents');
|
||||
fs.writeFileSync(path.join(newDir, 'someOtherFile.txt'), 'Contents');
|
||||
fs.mkdirSync(path.join(newDir, 'somefolder'));
|
||||
fs.writeFileSync(path.join(newDir, 'somefolder', 'somefile.txt'), 'Contents');
|
||||
|
||||
await pfs.rimraf(newDir, pfs.RimRafMode.MOVE);
|
||||
assert.ok(!fs.existsSync(newDir));
|
||||
});
|
||||
|
||||
test('rimraf - simple ends with dot - move', async () => {
|
||||
const id = `${uuid.generateUuid()}.`;
|
||||
const parentDir = path.join(os.tmpdir(), 'vsctests', id);
|
||||
const newDir = path.join(parentDir, 'pfs', id);
|
||||
|
||||
await pfs.mkdirp(newDir, 493);
|
||||
fs.writeFileSync(path.join(newDir, 'somefile.txt'), 'Contents');
|
||||
fs.writeFileSync(path.join(newDir, 'someOtherFile.txt'), 'Contents');
|
||||
|
||||
await pfs.rimraf(newDir, pfs.RimRafMode.MOVE);
|
||||
assert.ok(!fs.existsSync(newDir));
|
||||
});
|
||||
|
||||
test('rimraf - simple ends with dot slash/backslash - move', async () => {
|
||||
const id = `${uuid.generateUuid()}.`;
|
||||
const parentDir = path.join(os.tmpdir(), 'vsctests', id);
|
||||
const newDir = path.join(parentDir, 'pfs', id);
|
||||
|
||||
await pfs.mkdirp(newDir, 493);
|
||||
fs.writeFileSync(path.join(newDir, 'somefile.txt'), 'Contents');
|
||||
fs.writeFileSync(path.join(newDir, 'someOtherFile.txt'), 'Contents');
|
||||
|
||||
await pfs.rimraf(`${newDir}${path.sep}`, pfs.RimRafMode.MOVE);
|
||||
assert.ok(!fs.existsSync(newDir));
|
||||
});
|
||||
|
||||
test('rimrafSync - swallows file not found error', function () {
|
||||
const id = uuid.generateUuid();
|
||||
const parentDir = path.join(os.tmpdir(), 'vsctests', id);
|
||||
const newDir = path.join(parentDir, 'pfs', id);
|
||||
|
||||
pfs.rimrafSync(newDir);
|
||||
|
||||
assert.ok(!fs.existsSync(newDir));
|
||||
});
|
||||
|
||||
test('rimrafSync - simple', async () => {
|
||||
const id = uuid.generateUuid();
|
||||
const parentDir = path.join(os.tmpdir(), 'vsctests', id);
|
||||
const newDir = path.join(parentDir, 'pfs', id);
|
||||
|
||||
await pfs.mkdirp(newDir, 493);
|
||||
|
||||
fs.writeFileSync(path.join(newDir, 'somefile.txt'), 'Contents');
|
||||
fs.writeFileSync(path.join(newDir, 'someOtherFile.txt'), 'Contents');
|
||||
|
||||
pfs.rimrafSync(newDir);
|
||||
|
||||
assert.ok(!fs.existsSync(newDir));
|
||||
});
|
||||
|
||||
test('rimrafSync - recursive folder structure', async () => {
|
||||
const id = uuid.generateUuid();
|
||||
const parentDir = path.join(os.tmpdir(), 'vsctests', id);
|
||||
const newDir = path.join(parentDir, 'pfs', id);
|
||||
|
||||
await pfs.mkdirp(newDir, 493);
|
||||
fs.writeFileSync(path.join(newDir, 'somefile.txt'), 'Contents');
|
||||
fs.writeFileSync(path.join(newDir, 'someOtherFile.txt'), 'Contents');
|
||||
|
||||
fs.mkdirSync(path.join(newDir, 'somefolder'));
|
||||
fs.writeFileSync(path.join(newDir, 'somefolder', 'somefile.txt'), 'Contents');
|
||||
|
||||
pfs.rimrafSync(newDir);
|
||||
|
||||
assert.ok(!fs.existsSync(newDir));
|
||||
});
|
||||
|
||||
test('moveIgnoreError', async () => {
|
||||
const id = uuid.generateUuid();
|
||||
const parentDir = path.join(os.tmpdir(), 'vsctests', id);
|
||||
const newDir = path.join(parentDir, 'pfs', id);
|
||||
|
||||
await pfs.mkdirp(newDir, 493);
|
||||
try {
|
||||
await pfs.renameIgnoreError(path.join(newDir, 'foo'), path.join(newDir, 'bar'));
|
||||
return pfs.rimraf(parentDir, pfs.RimRafMode.MOVE);
|
||||
}
|
||||
catch (error) {
|
||||
assert.fail(error);
|
||||
return Promise.reject(error);
|
||||
}
|
||||
});
|
||||
|
||||
test('copy, move and delete', async () => {
|
||||
const id = uuid.generateUuid();
|
||||
const id2 = uuid.generateUuid();
|
||||
const sourceDir = getPathFromAmdModule(require, './fixtures');
|
||||
const parentDir = path.join(os.tmpdir(), 'vsctests', 'pfs');
|
||||
const targetDir = path.join(parentDir, id);
|
||||
const targetDir2 = path.join(parentDir, id2);
|
||||
|
||||
await pfs.copy(sourceDir, targetDir);
|
||||
|
||||
assert.ok(fs.existsSync(targetDir));
|
||||
assert.ok(fs.existsSync(path.join(targetDir, 'index.html')));
|
||||
assert.ok(fs.existsSync(path.join(targetDir, 'site.css')));
|
||||
assert.ok(fs.existsSync(path.join(targetDir, 'examples')));
|
||||
assert.ok(fs.statSync(path.join(targetDir, 'examples')).isDirectory());
|
||||
assert.ok(fs.existsSync(path.join(targetDir, 'examples', 'small.jxs')));
|
||||
|
||||
await pfs.move(targetDir, targetDir2);
|
||||
|
||||
assert.ok(!fs.existsSync(targetDir));
|
||||
assert.ok(fs.existsSync(targetDir2));
|
||||
assert.ok(fs.existsSync(path.join(targetDir2, 'index.html')));
|
||||
assert.ok(fs.existsSync(path.join(targetDir2, 'site.css')));
|
||||
assert.ok(fs.existsSync(path.join(targetDir2, 'examples')));
|
||||
assert.ok(fs.statSync(path.join(targetDir2, 'examples')).isDirectory());
|
||||
assert.ok(fs.existsSync(path.join(targetDir2, 'examples', 'small.jxs')));
|
||||
|
||||
await pfs.move(path.join(targetDir2, 'index.html'), path.join(targetDir2, 'index_moved.html'));
|
||||
|
||||
assert.ok(!fs.existsSync(path.join(targetDir2, 'index.html')));
|
||||
assert.ok(fs.existsSync(path.join(targetDir2, 'index_moved.html')));
|
||||
|
||||
await pfs.rimraf(parentDir, pfs.RimRafMode.MOVE);
|
||||
|
||||
assert.ok(!fs.existsSync(parentDir));
|
||||
});
|
||||
|
||||
test('mkdirp', async () => {
|
||||
const id = uuid.generateUuid();
|
||||
const parentDir = path.join(os.tmpdir(), 'vsctests', id);
|
||||
const newDir = path.join(parentDir, 'pfs', id);
|
||||
|
||||
await pfs.mkdirp(newDir, 493);
|
||||
|
||||
assert.ok(fs.existsSync(newDir));
|
||||
|
||||
return pfs.rimraf(parentDir, pfs.RimRafMode.MOVE);
|
||||
});
|
||||
|
||||
test('mkdirp cancellation', async () => {
|
||||
const id = uuid.generateUuid();
|
||||
const parentDir = path.join(os.tmpdir(), 'vsctests', id);
|
||||
const newDir = path.join(parentDir, 'pfs', id);
|
||||
|
||||
const source = new CancellationTokenSource();
|
||||
|
||||
const mkdirpPromise = pfs.mkdirp(newDir, 493, source.token);
|
||||
source.cancel();
|
||||
|
||||
await mkdirpPromise;
|
||||
|
||||
assert.ok(!fs.existsSync(newDir));
|
||||
|
||||
return pfs.rimraf(parentDir, pfs.RimRafMode.MOVE);
|
||||
});
|
||||
|
||||
test('readDirsInDir', async () => {
|
||||
const id = uuid.generateUuid();
|
||||
const parentDir = path.join(os.tmpdir(), 'vsctests', id);
|
||||
const newDir = path.join(parentDir, 'pfs', id);
|
||||
|
||||
await pfs.mkdirp(newDir, 493);
|
||||
|
||||
fs.mkdirSync(path.join(newDir, 'somefolder1'));
|
||||
fs.mkdirSync(path.join(newDir, 'somefolder2'));
|
||||
fs.mkdirSync(path.join(newDir, 'somefolder3'));
|
||||
fs.writeFileSync(path.join(newDir, 'somefile.txt'), 'Contents');
|
||||
fs.writeFileSync(path.join(newDir, 'someOtherFile.txt'), 'Contents');
|
||||
|
||||
const result = await pfs.readDirsInDir(newDir);
|
||||
assert.equal(result.length, 3);
|
||||
assert.ok(result.indexOf('somefolder1') !== -1);
|
||||
assert.ok(result.indexOf('somefolder2') !== -1);
|
||||
assert.ok(result.indexOf('somefolder3') !== -1);
|
||||
|
||||
await pfs.rimraf(newDir);
|
||||
});
|
||||
|
||||
test('stat link', async () => {
|
||||
if (isWindows) {
|
||||
return Promise.resolve(); // Symlinks are not the same on win, and we can not create them programitically without admin privileges
|
||||
}
|
||||
|
||||
const id1 = uuid.generateUuid();
|
||||
const parentDir = path.join(os.tmpdir(), 'vsctests', id1);
|
||||
const directory = path.join(parentDir, 'pfs', id1);
|
||||
|
||||
const id2 = uuid.generateUuid();
|
||||
const symbolicLink = path.join(parentDir, 'pfs', id2);
|
||||
|
||||
await pfs.mkdirp(directory, 493);
|
||||
|
||||
fs.symlinkSync(directory, symbolicLink);
|
||||
|
||||
let statAndIsLink = await pfs.statLink(directory);
|
||||
assert.ok(!statAndIsLink!.isSymbolicLink);
|
||||
|
||||
statAndIsLink = await pfs.statLink(symbolicLink);
|
||||
assert.ok(statAndIsLink!.isSymbolicLink);
|
||||
|
||||
pfs.rimrafSync(directory);
|
||||
});
|
||||
|
||||
test('readdir', async () => {
|
||||
if (canNormalize && typeof process.versions['electron'] !== 'undefined' /* needs electron */) {
|
||||
const id = uuid.generateUuid();
|
||||
const parentDir = path.join(os.tmpdir(), 'vsctests', id);
|
||||
const newDir = path.join(parentDir, 'pfs', id, 'öäü');
|
||||
|
||||
await pfs.mkdirp(newDir, 493);
|
||||
|
||||
assert.ok(fs.existsSync(newDir));
|
||||
|
||||
const children = await pfs.readdir(path.join(parentDir, 'pfs', id));
|
||||
assert.equal(children.some(n => n === 'öäü'), true); // Mac always converts to NFD, so
|
||||
|
||||
await pfs.rimraf(parentDir);
|
||||
}
|
||||
});
|
||||
|
||||
test('writeFile (string)', async () => {
|
||||
const smallData = 'Hello World';
|
||||
const bigData = (new Array(100 * 1024)).join('Large String\n');
|
||||
|
||||
return testWriteFileAndFlush(smallData, smallData, bigData, bigData);
|
||||
});
|
||||
|
||||
test('writeFile (Buffer)', async () => {
|
||||
const smallData = 'Hello World';
|
||||
const bigData = (new Array(100 * 1024)).join('Large String\n');
|
||||
|
||||
return testWriteFileAndFlush(Buffer.from(smallData), smallData, Buffer.from(bigData), bigData);
|
||||
});
|
||||
|
||||
test('writeFile (UInt8Array)', async () => {
|
||||
const smallData = 'Hello World';
|
||||
const bigData = (new Array(100 * 1024)).join('Large String\n');
|
||||
|
||||
return testWriteFileAndFlush(VSBuffer.fromString(smallData).buffer, smallData, VSBuffer.fromString(bigData).buffer, bigData);
|
||||
});
|
||||
|
||||
test('writeFile (stream)', async () => {
|
||||
const smallData = 'Hello World';
|
||||
const bigData = (new Array(100 * 1024)).join('Large String\n');
|
||||
|
||||
return testWriteFileAndFlush(toReadable(smallData), smallData, toReadable(bigData), bigData);
|
||||
});
|
||||
|
||||
async function testWriteFileAndFlush(
|
||||
smallData: string | Buffer | NodeJS.ReadableStream | Uint8Array,
|
||||
smallDataValue: string,
|
||||
bigData: string | Buffer | NodeJS.ReadableStream | Uint8Array,
|
||||
bigDataValue: string
|
||||
): Promise<void> {
|
||||
const id = uuid.generateUuid();
|
||||
const parentDir = path.join(os.tmpdir(), 'vsctests', id);
|
||||
const newDir = path.join(parentDir, 'pfs', id);
|
||||
const testFile = path.join(newDir, 'flushed.txt');
|
||||
|
||||
await pfs.mkdirp(newDir, 493);
|
||||
assert.ok(fs.existsSync(newDir));
|
||||
|
||||
await pfs.writeFile(testFile, smallData);
|
||||
assert.equal(fs.readFileSync(testFile), smallDataValue);
|
||||
|
||||
await pfs.writeFile(testFile, bigData);
|
||||
assert.equal(fs.readFileSync(testFile), bigDataValue);
|
||||
|
||||
await pfs.rimraf(parentDir);
|
||||
}
|
||||
|
||||
test('writeFile (file stream)', async () => {
|
||||
const id = uuid.generateUuid();
|
||||
const parentDir = path.join(os.tmpdir(), 'vsctests', id);
|
||||
const sourceFile = getPathFromAmdModule(require, './fixtures/index.html');
|
||||
const newDir = path.join(parentDir, 'pfs', id);
|
||||
const testFile = path.join(newDir, 'flushed.txt');
|
||||
|
||||
await pfs.mkdirp(newDir, 493);
|
||||
assert.ok(fs.existsSync(newDir));
|
||||
|
||||
await pfs.writeFile(testFile, fs.createReadStream(sourceFile));
|
||||
assert.equal(fs.readFileSync(testFile).toString(), fs.readFileSync(sourceFile).toString());
|
||||
|
||||
await pfs.rimraf(parentDir);
|
||||
});
|
||||
|
||||
test('writeFile (string, error handling)', async () => {
|
||||
const id = uuid.generateUuid();
|
||||
const parentDir = path.join(os.tmpdir(), 'vsctests', id);
|
||||
const newDir = path.join(parentDir, 'pfs', id);
|
||||
const testFile = path.join(newDir, 'flushed.txt');
|
||||
|
||||
await pfs.mkdirp(newDir, 493);
|
||||
|
||||
assert.ok(fs.existsSync(newDir));
|
||||
|
||||
fs.mkdirSync(testFile); // this will trigger an error because testFile is now a directory!
|
||||
|
||||
let expectedError: Error | undefined;
|
||||
try {
|
||||
await pfs.writeFile(testFile, 'Hello World');
|
||||
} catch (error) {
|
||||
expectedError = error;
|
||||
}
|
||||
|
||||
assert.ok(expectedError);
|
||||
|
||||
await pfs.rimraf(parentDir);
|
||||
});
|
||||
|
||||
test('writeFile (stream, error handling EISDIR)', async () => {
|
||||
const id = uuid.generateUuid();
|
||||
const parentDir = path.join(os.tmpdir(), 'vsctests', id);
|
||||
const newDir = path.join(parentDir, 'pfs', id);
|
||||
const testFile = path.join(newDir, 'flushed.txt');
|
||||
|
||||
await pfs.mkdirp(newDir, 493);
|
||||
|
||||
assert.ok(fs.existsSync(newDir));
|
||||
|
||||
fs.mkdirSync(testFile); // this will trigger an error because testFile is now a directory!
|
||||
|
||||
const readable = toReadable('Hello World');
|
||||
|
||||
let expectedError: Error | undefined;
|
||||
try {
|
||||
await pfs.writeFile(testFile, readable);
|
||||
} catch (error) {
|
||||
expectedError = error;
|
||||
}
|
||||
|
||||
if (!expectedError || (<any>expectedError).code !== 'EISDIR') {
|
||||
return Promise.reject(new Error('Expected EISDIR error for writing to folder but got: ' + (expectedError ? (<any>expectedError).code : 'no error')));
|
||||
}
|
||||
|
||||
// verify that the stream is still consumable (for https://github.com/Microsoft/vscode/issues/42542)
|
||||
assert.equal(readable.read(), 'Hello World');
|
||||
|
||||
await pfs.rimraf(parentDir);
|
||||
});
|
||||
|
||||
test('writeFile (stream, error handling READERROR)', async () => {
|
||||
const id = uuid.generateUuid();
|
||||
const parentDir = path.join(os.tmpdir(), 'vsctests', id);
|
||||
const newDir = path.join(parentDir, 'pfs', id);
|
||||
const testFile = path.join(newDir, 'flushed.txt');
|
||||
|
||||
await pfs.mkdirp(newDir, 493);
|
||||
assert.ok(fs.existsSync(newDir));
|
||||
|
||||
let expectedError: Error | undefined;
|
||||
try {
|
||||
await pfs.writeFile(testFile, toReadable('Hello World', true /* throw error */));
|
||||
} catch (error) {
|
||||
expectedError = error;
|
||||
}
|
||||
|
||||
if (!expectedError || expectedError.message !== readError) {
|
||||
return Promise.reject(new Error('Expected error for writing to folder'));
|
||||
}
|
||||
|
||||
await pfs.rimraf(parentDir);
|
||||
});
|
||||
|
||||
test('writeFile (stream, error handling EACCES)', async () => {
|
||||
if (isLinux) {
|
||||
return Promise.resolve(); // somehow this test fails on Linux in our TFS builds
|
||||
}
|
||||
|
||||
const id = uuid.generateUuid();
|
||||
const parentDir = path.join(os.tmpdir(), 'vsctests', id);
|
||||
const newDir = path.join(parentDir, 'pfs', id);
|
||||
const testFile = path.join(newDir, 'flushed.txt');
|
||||
|
||||
await pfs.mkdirp(newDir, 493);
|
||||
|
||||
assert.ok(fs.existsSync(newDir));
|
||||
|
||||
fs.writeFileSync(testFile, '');
|
||||
fs.chmodSync(testFile, 33060); // make readonly
|
||||
|
||||
let expectedError: Error | undefined;
|
||||
try {
|
||||
await pfs.writeFile(testFile, toReadable('Hello World'));
|
||||
} catch (error) {
|
||||
expectedError = error;
|
||||
}
|
||||
|
||||
if (!expectedError || !((<any>expectedError).code !== 'EACCES' || (<any>expectedError).code !== 'EPERM')) {
|
||||
return Promise.reject(new Error('Expected EACCES/EPERM error for writing to folder but got: ' + (expectedError ? (<any>expectedError).code : 'no error')));
|
||||
}
|
||||
|
||||
await pfs.rimraf(parentDir);
|
||||
});
|
||||
|
||||
test('writeFile (file stream, error handling)', async () => {
|
||||
const id = uuid.generateUuid();
|
||||
const parentDir = path.join(os.tmpdir(), 'vsctests', id);
|
||||
const sourceFile = getPathFromAmdModule(require, './fixtures/index.html');
|
||||
const newDir = path.join(parentDir, 'pfs', id);
|
||||
const testFile = path.join(newDir, 'flushed.txt');
|
||||
|
||||
await pfs.mkdirp(newDir, 493);
|
||||
|
||||
assert.ok(fs.existsSync(newDir));
|
||||
|
||||
fs.mkdirSync(testFile); // this will trigger an error because testFile is now a directory!
|
||||
|
||||
let expectedError: Error | undefined;
|
||||
try {
|
||||
await pfs.writeFile(testFile, fs.createReadStream(sourceFile));
|
||||
} catch (error) {
|
||||
expectedError = error;
|
||||
}
|
||||
|
||||
if (!expectedError) {
|
||||
return Promise.reject(new Error('Expected error for writing to folder'));
|
||||
}
|
||||
|
||||
await pfs.rimraf(parentDir);
|
||||
});
|
||||
|
||||
test('writeFileSync', async () => {
|
||||
const id = uuid.generateUuid();
|
||||
const parentDir = path.join(os.tmpdir(), 'vsctests', id);
|
||||
const newDir = path.join(parentDir, 'pfs', id);
|
||||
const testFile = path.join(newDir, 'flushed.txt');
|
||||
|
||||
await pfs.mkdirp(newDir, 493);
|
||||
|
||||
assert.ok(fs.existsSync(newDir));
|
||||
|
||||
pfs.writeFileSync(testFile, 'Hello World');
|
||||
assert.equal(fs.readFileSync(testFile), 'Hello World');
|
||||
|
||||
const largeString = (new Array(100 * 1024)).join('Large String\n');
|
||||
|
||||
pfs.writeFileSync(testFile, largeString);
|
||||
assert.equal(fs.readFileSync(testFile), largeString);
|
||||
|
||||
await pfs.rimraf(parentDir);
|
||||
});
|
||||
});
|
|
@ -8,7 +8,7 @@ import { generateUuid } from 'vs/base/common/uuid';
|
|||
import { join } from 'vs/base/common/path';
|
||||
import { tmpdir } from 'os';
|
||||
import { equal, ok } from 'assert';
|
||||
import { mkdirp, del, writeFile, exists, unlink } from 'vs/base/node/pfs';
|
||||
import { mkdirp, writeFile, exists, unlink, rimraf, RimRafMode } from 'vs/base/node/pfs';
|
||||
import { timeout } from 'vs/base/common/async';
|
||||
import { Event, Emitter } from 'vs/base/common/event';
|
||||
import { isWindows } from 'vs/base/common/platform';
|
||||
|
@ -92,7 +92,7 @@ suite('Storage Library', () => {
|
|||
equal(deletePromiseResolved, true);
|
||||
|
||||
await storage.close();
|
||||
await del(storageDir, tmpdir());
|
||||
await rimraf(storageDir, RimRafMode.MOVE);
|
||||
});
|
||||
|
||||
test('external changes', async () => {
|
||||
|
@ -148,7 +148,7 @@ suite('Storage Library', () => {
|
|||
equal(changes.size, 0);
|
||||
|
||||
await storage.close();
|
||||
await del(storageDir, tmpdir());
|
||||
await rimraf(storageDir, RimRafMode.MOVE);
|
||||
});
|
||||
|
||||
test('close flushes data', async () => {
|
||||
|
@ -202,7 +202,7 @@ suite('Storage Library', () => {
|
|||
ok(!storage.get('bar'));
|
||||
|
||||
await storage.close();
|
||||
await del(storageDir, tmpdir());
|
||||
await rimraf(storageDir, RimRafMode.MOVE);
|
||||
});
|
||||
|
||||
test('conflicting updates', async () => {
|
||||
|
@ -244,7 +244,7 @@ suite('Storage Library', () => {
|
|||
ok(setAndDeletePromiseResolved);
|
||||
|
||||
await storage.close();
|
||||
await del(storageDir, tmpdir());
|
||||
await rimraf(storageDir, RimRafMode.MOVE);
|
||||
});
|
||||
|
||||
test('corrupt DB recovers', async () => {
|
||||
|
@ -274,7 +274,7 @@ suite('Storage Library', () => {
|
|||
equal(storage.get('foo'), 'bar');
|
||||
|
||||
await storage.close();
|
||||
await del(storageDir, tmpdir());
|
||||
await rimraf(storageDir, RimRafMode.MOVE);
|
||||
});
|
||||
});
|
||||
|
||||
|
@ -372,7 +372,7 @@ suite('SQLite Storage Library', () => {
|
|||
|
||||
await testDBBasics(join(storageDir, 'storage.db'));
|
||||
|
||||
await del(storageDir, tmpdir());
|
||||
await rimraf(storageDir, RimRafMode.MOVE);
|
||||
});
|
||||
|
||||
test('basics (open multiple times)', async () => {
|
||||
|
@ -383,7 +383,7 @@ suite('SQLite Storage Library', () => {
|
|||
await testDBBasics(join(storageDir, 'storage.db'));
|
||||
await testDBBasics(join(storageDir, 'storage.db'));
|
||||
|
||||
await del(storageDir, tmpdir());
|
||||
await rimraf(storageDir, RimRafMode.MOVE);
|
||||
});
|
||||
|
||||
test('basics (corrupt DB falls back to empty DB)', async () => {
|
||||
|
@ -401,7 +401,7 @@ suite('SQLite Storage Library', () => {
|
|||
|
||||
ok(expectedError);
|
||||
|
||||
await del(storageDir, tmpdir());
|
||||
await rimraf(storageDir, RimRafMode.MOVE);
|
||||
});
|
||||
|
||||
test('basics (corrupt DB restores from previous backup)', async () => {
|
||||
|
@ -439,7 +439,7 @@ suite('SQLite Storage Library', () => {
|
|||
|
||||
equal(recoveryCalled, false);
|
||||
|
||||
await del(storageDir, tmpdir());
|
||||
await rimraf(storageDir, RimRafMode.MOVE);
|
||||
});
|
||||
|
||||
test('basics (corrupt DB falls back to empty DB if backup is corrupt)', async () => {
|
||||
|
@ -468,7 +468,7 @@ suite('SQLite Storage Library', () => {
|
|||
|
||||
await testDBBasics(storagePath);
|
||||
|
||||
await del(storageDir, tmpdir());
|
||||
await rimraf(storageDir, RimRafMode.MOVE);
|
||||
});
|
||||
|
||||
test('basics (DB that becomes corrupt during runtime stores all state from cache on close)', async () => {
|
||||
|
@ -536,7 +536,7 @@ suite('SQLite Storage Library', () => {
|
|||
|
||||
equal(recoveryCalled, false);
|
||||
|
||||
await del(storageDir, tmpdir());
|
||||
await rimraf(storageDir, RimRafMode.MOVE);
|
||||
});
|
||||
|
||||
test('real world example', async function () {
|
||||
|
@ -627,7 +627,7 @@ suite('SQLite Storage Library', () => {
|
|||
|
||||
await storage.close();
|
||||
|
||||
await del(storageDir, tmpdir());
|
||||
await rimraf(storageDir, RimRafMode.MOVE);
|
||||
});
|
||||
|
||||
test('very large item value', async function () {
|
||||
|
@ -682,7 +682,7 @@ suite('SQLite Storage Library', () => {
|
|||
|
||||
await storage.close();
|
||||
|
||||
await del(storageDir, tmpdir());
|
||||
await rimraf(storageDir, RimRafMode.MOVE);
|
||||
});
|
||||
|
||||
test('multiple concurrent writes execute in sequence', async () => {
|
||||
|
@ -739,7 +739,7 @@ suite('SQLite Storage Library', () => {
|
|||
|
||||
await storage.close();
|
||||
|
||||
await del(storageDir, tmpdir());
|
||||
await rimraf(storageDir, RimRafMode.MOVE);
|
||||
});
|
||||
|
||||
test('lots of INSERT & DELETE (below inline max)', async () => {
|
||||
|
@ -771,7 +771,7 @@ suite('SQLite Storage Library', () => {
|
|||
|
||||
await storage.close();
|
||||
|
||||
await del(storageDir, tmpdir());
|
||||
await rimraf(storageDir, RimRafMode.MOVE);
|
||||
});
|
||||
|
||||
test('lots of INSERT & DELETE (above inline max)', async () => {
|
||||
|
@ -803,6 +803,6 @@ suite('SQLite Storage Library', () => {
|
|||
|
||||
await storage.close();
|
||||
|
||||
await del(storageDir, tmpdir());
|
||||
await rimraf(storageDir, RimRafMode.MOVE);
|
||||
});
|
||||
});
|
||||
|
|
|
@ -6,7 +6,7 @@
|
|||
import { generateUuid } from 'vs/base/common/uuid';
|
||||
import { join } from 'vs/base/common/path';
|
||||
import { tmpdir } from 'os';
|
||||
import { mkdirp, del } from 'vs/base/node/pfs';
|
||||
import { mkdirp, rimraf, RimRafMode } from 'vs/base/node/pfs';
|
||||
|
||||
export interface ITestFileResult {
|
||||
testFile: string;
|
||||
|
@ -22,7 +22,7 @@ export function testFile(folder: string, file: string): Promise<ITestFileResult>
|
|||
return mkdirp(newDir, 493).then(() => {
|
||||
return {
|
||||
testFile,
|
||||
cleanUp: () => del(parentDir, tmpdir())
|
||||
} as ITestFileResult;
|
||||
cleanUp: () => rimraf(parentDir, RimRafMode.MOVE)
|
||||
};
|
||||
});
|
||||
}
|
||||
|
|
|
@ -40,6 +40,7 @@ import { OcticonLabel } from 'vs/base/browser/ui/octiconLabel/octiconLabel';
|
|||
import { normalizeGitHubUrl } from 'vs/code/electron-browser/issue/issueReporterUtil';
|
||||
import { Button } from 'vs/base/browser/ui/button/button';
|
||||
import { withUndefinedAsNull } from 'vs/base/common/types';
|
||||
import { SystemInfo } from 'vs/platform/diagnostics/common/diagnosticsService';
|
||||
|
||||
const MAX_URL_LENGTH = platform.isWindows ? 2081 : 5400;
|
||||
|
||||
|
@ -79,11 +80,12 @@ export class IssueReporter extends Disposable {
|
|||
|
||||
this.initServices(configuration);
|
||||
|
||||
const isSnap = process.platform === 'linux' && process.env.SNAP && process.env.SNAP_REVISION;
|
||||
this.issueReporterModel = new IssueReporterModel({
|
||||
issueType: configuration.data.issueType || IssueType.Bug,
|
||||
versionInfo: {
|
||||
vscodeVersion: `${pkg.name} ${pkg.version} (${product.commit || 'Commit unknown'}, ${product.date || 'Date unknown'})`,
|
||||
os: `${os.type()} ${os.arch()} ${os.release()}`
|
||||
os: `${os.type()} ${os.arch()} ${os.release()}${isSnap ? ' snap' : ''}`
|
||||
},
|
||||
extensionsDisabled: !!this.environmentService.disableExtensions,
|
||||
});
|
||||
|
@ -104,7 +106,7 @@ export class IssueReporter extends Disposable {
|
|||
this.updatePreviewButtonState();
|
||||
});
|
||||
|
||||
ipcRenderer.on('vscode:issueSystemInfoResponse', (_: unknown, info: any) => {
|
||||
ipcRenderer.on('vscode:issueSystemInfoResponse', (_: unknown, info: SystemInfo) => {
|
||||
this.logService.trace('issueReporter: Received system data');
|
||||
this.issueReporterModel.update({ systemInfo: info });
|
||||
this.receivedSystemInfo = true;
|
||||
|
@ -903,19 +905,19 @@ export class IssueReporter extends Disposable {
|
|||
private updateSystemInfo(state: IssueReporterModelData) {
|
||||
const target = document.querySelector('.block-system .block-info');
|
||||
if (target) {
|
||||
let tableHtml = '';
|
||||
Object.keys(state.systemInfo).forEach(k => {
|
||||
const data = typeof state.systemInfo[k] === 'object'
|
||||
? Object.keys(state.systemInfo[k]).map(key => `${key}: ${state.systemInfo[k][key]}`).join('<br>')
|
||||
: state.systemInfo[k];
|
||||
const systemInfo = state.systemInfo!;
|
||||
let renderedData = `
|
||||
<table>
|
||||
<tr><td>CPUs</td><td>${systemInfo.cpus}</td></tr>
|
||||
<tr><td>GPU Status</td><td>${Object.keys(systemInfo.gpuStatus).map(key => `${key}: ${systemInfo.gpuStatus[key]}`).join('<br>')}</td></tr>
|
||||
<tr><td>Load (avg)</td><td>${systemInfo.load}</td></tr>
|
||||
<tr><td>Memory (System)</td><td>${systemInfo.memory}</td></tr>
|
||||
<tr><td>Process Argv</td><td>${systemInfo.processArgs}</td></tr>
|
||||
<tr><td>Screen Reader</td><td>${systemInfo.screenReader}</td></tr>
|
||||
<tr><td>VM</td><td>${systemInfo.vmHint}</td></tr>
|
||||
</table>`;
|
||||
|
||||
tableHtml += `
|
||||
<tr>
|
||||
<td>${k}</td>
|
||||
<td>${data}</td>
|
||||
</tr>`;
|
||||
});
|
||||
target.innerHTML = `<table>${tableHtml}</table>`;
|
||||
target.innerHTML = renderedData;
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -5,13 +5,14 @@
|
|||
|
||||
import { assign } from 'vs/base/common/objects';
|
||||
import { IssueType, ISettingSearchResult, IssueReporterExtensionData } from 'vs/platform/issue/common/issue';
|
||||
import { SystemInfo } from 'vs/platform/diagnostics/common/diagnosticsService';
|
||||
|
||||
export interface IssueReporterData {
|
||||
issueType: IssueType;
|
||||
issueDescription?: string;
|
||||
|
||||
versionInfo?: any;
|
||||
systemInfo?: any;
|
||||
systemInfo?: SystemInfo;
|
||||
processInfo?: any;
|
||||
workspaceInfo?: any;
|
||||
|
||||
|
@ -149,13 +150,16 @@ ${this.getInfos()}
|
|||
|---|---|
|
||||
`;
|
||||
|
||||
Object.keys(this._data.systemInfo).forEach(k => {
|
||||
const data = typeof this._data.systemInfo[k] === 'object'
|
||||
? Object.keys(this._data.systemInfo[k]).map(key => `${key}: ${this._data.systemInfo[k][key]}`).join('<br>')
|
||||
: this._data.systemInfo[k];
|
||||
if (this._data.systemInfo) {
|
||||
|
||||
md += `|${k}|${data}|\n`;
|
||||
});
|
||||
md += `|CPUs|${this._data.systemInfo.cpus}|
|
||||
|GPU Status|${Object.keys(this._data.systemInfo.gpuStatus).map(key => `${key}: ${this._data.systemInfo!.gpuStatus[key]}`).join('<br>')}|
|
||||
|Load (avg)|${this._data.systemInfo.load}|
|
||||
|Memory (System)|${this._data.systemInfo.memory}|
|
||||
|Process Argv|${this._data.systemInfo.processArgs}|
|
||||
|Screen Reader|${this._data.systemInfo.screenReader}|
|
||||
|VM|${this._data.systemInfo.vmHint}|`;
|
||||
}
|
||||
|
||||
md += '\n</details>';
|
||||
|
||||
|
|
|
@ -44,7 +44,13 @@ Extensions: none
|
|||
const issueReporterModel = new IssueReporterModel({
|
||||
issueType: 0,
|
||||
systemInfo: {
|
||||
'GPU Status': {
|
||||
os: 'Darwin',
|
||||
cpus: 'Intel(R) Core(TM) i7-7700HQ CPU @ 2.80GHz (8 x 2800)',
|
||||
memory: '16.00GB',
|
||||
vmHint: '0%',
|
||||
processArgs: '',
|
||||
screenReader: 'no',
|
||||
gpuStatus: {
|
||||
'2d_canvas': 'enabled',
|
||||
'checker_imaging': 'disabled_off'
|
||||
}
|
||||
|
@ -65,8 +71,13 @@ OS version: undefined
|
|||
|
||||
|Item|Value|
|
||||
|---|---|
|
||||
|CPUs|Intel(R) Core(TM) i7-7700HQ CPU @ 2.80GHz (8 x 2800)|
|
||||
|GPU Status|2d_canvas: enabled<br>checker_imaging: disabled_off|
|
||||
|
||||
|Load (avg)|undefined|
|
||||
|Memory (System)|16.00GB|
|
||||
|Process Argv||
|
||||
|Screen Reader|no|
|
||||
|VM|0%|
|
||||
</details>Extensions: none
|
||||
<!-- generated by issue reporter -->`);
|
||||
});
|
||||
|
|
Некоторые файлы не были показаны из-за слишком большого количества измененных файлов Показать больше
Загрузка…
Ссылка в новой задаче