Update prettier config to format trailing commas to `all` (Default) (#4457)
Fixes #2159. This was a long-standing backlog item that was split out of a prettier upgrade because it produced too large a diff. Finally doing it.
This commit is contained in:
Parent: 9a65543efd
Commit: a336c93709
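For context, this is what the setting change means for formatted output. With the previous `"trailingComma": "es5"` setting, Prettier only kept trailing commas where ES5 allows them (array and object literals); with `"all"` (the Prettier 3 default) it also adds them after the last item of multi-line argument and parameter lists, which is what produces the mechanical one-line changes throughout this diff. A minimal sketch (hypothetical code, not taken from this PR):

```ts
// Hypothetical example of the reformatting, not part of this PR.
// Before (trailingComma: "es5"): no comma after the last parameter.
function createLabelEs5(
  name: string,
  color: string,
  description: string
) {
  return { name, color, description };
}

// After (trailingComma: "all", the Prettier 3 default): a trailing comma is
// added, so appending another parameter later touches only one line in a diff.
function createLabelAll(
  name: string,
  color: string,
  description: string,
) {
  return { name, color, description };
}
```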
@@ -0,0 +1,26 @@
+---
+# Change versionKind to one of: internal, fix, dependencies, feature, deprecation, breaking
+changeKind: internal
+packages:
+  - "@typespec/bundler"
+  - "@typespec/compiler"
+  - "@typespec/eslint-plugin"
+  - "@typespec/html-program-viewer"
+  - "@typespec/http-server-csharp"
+  - "@typespec/http-server-javascript"
+  - "@typespec/http"
+  - "@typespec/internal-build-utils"
+  - "@typespec/json-schema"
+  - "@typespec/library-linter"
+  - "@typespec/openapi"
+  - "@typespec/openapi3"
+  - "@typespec/playground"
+  - "@typespec/protobuf"
+  - "@typespec/rest"
+  - typespec-vs
+  - typespec-vscode
+  - "@typespec/versioning"
+  - "@typespec/xml"
+---
+
+Update prettier config to format trailing commas to `all` (Default)

@@ -1,6 +1,5 @@
 {
 "arrowParens": "always",
-"trailingComma": "es5",
 "bracketSpacing": true,
 "endOfLine": "lf",
 "printWidth": 100,
@@ -67,7 +67,7 @@ describe("CodeFix: change-identifier", () => {
 (node) => {
 strictEqual(node.kind, SyntaxKind.Identifier);
 return createChangeIdentifierCodeFix(node, "int32");
-}
+},
 ).toChangeTo(`
 model Foo {
 a: int32;

@@ -107,7 +107,7 @@ function getRoutes(): [Route, readonly Diagnostic] {
 createDiagnostic(program, {
 code: "no-array",
 target: diagnosticTarget,
-})
+}),
 );
 const result = diagnostic.pipe(getParameters()); // to pipe diagnostics returned by `getParameters`
 return diagnostics.wrap(routes);

@@ -125,7 +125,7 @@ function getRoutes(): [Route, readonly Diagnostic] {
 createDiagnostic(program, {
 code: "no-array",
 target: diagnosticTarget,
-})
+}),
 );
 return [routes, diagnostics];
 }

@@ -150,7 +150,7 @@ class MyCodeEmitter extends CodeTypeEmitter {

 modelPropertyLiteral(property: ModelProperty): EmitterOutput<string> {
 return code`a property named ${property.name} and a type of ${this.emitter.emitType(
-property.type
+property.type,
 )}`;
 }

@@ -243,7 +243,7 @@ class MyCodeEmitter extends CodeTypeEmitter {
 targetDeclaration: Declaration<string>,
 pathUp: Scope<string>[],
 pathDown: Scope<string>[],
-commonScope: Scope<string> | null
+commonScope: Scope<string> | null,
 ): string | EmitEntity<string> {
 const segments = pathDown.map((s) => s.name);
 segments.push(targetDeclaration.name);

@@ -196,7 +196,7 @@ await tester
 .expect(
 `
 model Foo {}
-`
+`,
 )
 .applyCodeFix("add-model-suffix").toEqual(`
 model FooModel {}

@@ -81,7 +81,7 @@ async function testBasicLatest(packages) {
 ["compile", ".", "--emit", "@typespec/openapi3"],
 {
 cwd: basicLatestDir,
-}
+},
 );
 console.log("Completed tsp compile .");

@@ -121,7 +121,7 @@ async function testBasicCurrentTgz(packages) {
 ["compile", ".", "--emit", "@typespec/openapi3"],
 {
 cwd: basicCurrentDir,
-}
+},
 );
 console.log("Completed tsp compile .");
@@ -26,7 +26,7 @@ async function findMarkdownFiles(folder) {
 } else {
 return [];
 }
-})
+}),
 )
 ).flat();
 }

@@ -54,7 +54,7 @@ async function main() {
 "---",
 "title: xyz",
 "---",
-].join("\n")
+].join("\n"),
 );
 process.exit(1);
 } else {

@@ -18,13 +18,13 @@ if (proc.stdout || proc.stderr) {
 if (process.argv[2] !== "publish") {
 console.error(
 `ERROR: Files above were changed during PR validation, but not included in the PR.
-Include any automated changes such as sample output, spec.html, and ThirdPartyNotices.txt in your PR.`
+Include any automated changes such as sample output, spec.html, and ThirdPartyNotices.txt in your PR.`,
 );
 } else {
 console.error(
 `ERROR: Changes have been made since this publish PR was prepared.
 In the future, remember to alert coworkers to avoid merging additional changes while publish PRs are in progress.
-Close this PR, run prepare-publish again.`
+Close this PR, run prepare-publish again.`,
 );
 }
 process.exit(1);
@@ -5,7 +5,7 @@ export function defineConfig(config: RepoConfig) {
 }

 export function defineLabels<const T extends string>(
-labels: Record<T, { color: string; description: string }>
+labels: Record<T, { color: string; description: string }>,
 ) {
 return labels;
 }

@@ -112,11 +112,11 @@ function prettyLabel(label: Label, padEnd: number = 0) {
 async function syncGithubLabels(config: RepoConfig, labels: Label[], options: ActionOptions = {}) {
 if (!options.dryRun && !process.env.GITHUB_TOKEN && !options.check) {
 throw new Error(
-"GITHUB_TOKEN environment variable is required when not running in dry-run mode or check mode."
+"GITHUB_TOKEN environment variable is required when not running in dry-run mode or check mode.",
 );
 }
 const octokit = new Octokit(
-process.env.GITHUB_TOKEN ? { auth: `token ${process.env.GITHUB_TOKEN}` } : {}
+process.env.GITHUB_TOKEN ? { auth: `token ${process.env.GITHUB_TOKEN}` } : {},
 );

 const existingLabels = await fetchAllLabels(octokit, config.repo);

@@ -161,8 +161,8 @@ async function checkLabelsToDelete(labels: GithubLabel[]) {
 if (label.issues.totalCount > 0) {
 console.log(
 pc.red(
-`Label ${label.name} has ${label.issues.totalCount} issues assigned to it, make sure to rename the label manually first to not lose assignment.`
-)
+`Label ${label.name} has ${label.issues.totalCount} issues assigned to it, make sure to rename the label manually first to not lose assignment.`,
+),
 );
 hasError = true;
 }

@@ -203,7 +203,7 @@ async function fetchAllLabels(octokit: Octokit, repo: RepoConfig["repo"]): Promi
 {
 owner: repo.owner,
 repo: repo.repo,
-}
+},
 );

 return repository.labels.nodes;

@@ -224,13 +224,13 @@ async function createLabels(
 config: RepoConfig,
 octokit: Octokit,
 labels: Label[],
-options: ActionOptions
+options: ActionOptions,
 ) {
 for (const label of labels) {
 await doAction(
 () => octokit.rest.issues.createLabel({ ...config.repo, ...label }),
 `Created label ${label.name}, color: ${label.color}, description: ${label.description}`,
-options
+options,
 );
 }
 }

@@ -238,13 +238,13 @@ async function updateLabels(
 config: RepoConfig,
 octokit: Octokit,
 labels: Label[],
-options: ActionOptions
+options: ActionOptions,
 ) {
 for (const label of labels) {
 await doAction(
 () => octokit.rest.issues.updateLabel({ ...config.repo, ...label }),
 `Updated label ${label.name}, color: ${label.color}, description: ${label.description}`,
-options
+options,
 );
 }
 }

@@ -252,7 +252,7 @@ async function deleteLabels(
 config: RepoConfig,
 octokit: Octokit,
 labels: GithubLabel[],
-options: ActionOptions
+options: ActionOptions,
 ) {
 checkLabelsToDelete(labels);

@@ -260,7 +260,7 @@ async function deleteLabels(
 await doAction(
 () => octokit.rest.issues.deleteLabel({ ...config.repo, name: label.name }),
 `Deleted label ${label.name}`,
-options
+options,
 );
 console.log(`Deleted label ${label.name}`);
 }

@@ -302,8 +302,8 @@ async function updateContributingFile(labels: LabelsResolvedConfig, options: Act
 } else {
 console.log(
 pc.red(
-"CONTRIBUTING.md file label section is not up to date, run pnpm sync-labels to update it"
-)
+"CONTRIBUTING.md file label section is not up to date, run pnpm sync-labels to update it",
+),
 );
 process.exit(1);
 }

@@ -311,7 +311,7 @@ async function updateContributingFile(labels: LabelsResolvedConfig, options: Act
 await doAction(
 () => writeFile(contributingFile, formatted),
 "Updated contributing file",
-options
+options,
 );
 }
 }
@@ -21,8 +21,8 @@ export async function syncFile(filename: string, newContent: string, options: Ch
 } else {
 console.error(
 pc.red(
-`${filename} file label section is not up to date, run pnpm sync-labels to update it`
-)
+`${filename} file label section is not up to date, run pnpm sync-labels to update it`,
+),
 );
 process.exit(1);
 }

@@ -9,7 +9,7 @@ export interface ExecResult {
 export function execAsync(
 cmd: string,
 args: string[],
-opts: SpawnOptions = {}
+opts: SpawnOptions = {},
 ): Promise<ExecResult> {
 return new Promise((resolve, reject) => {
 const child = spawn(cmd, args, opts);

@@ -2,14 +2,14 @@ import { execAsync, type ExecResult } from "./exec-async.js";

 export async function listChangedFilesSince(
 ref: string,
-{ repositoryPath }: { repositoryPath: string }
+{ repositoryPath }: { repositoryPath: string },
 ) {
 return splitStdoutLines(await execGit(["diff", "--name-only", `${ref}...`], { repositoryPath }));
 }

 async function execGit(
 args: string[],
-{ repositoryPath }: { repositoryPath: string }
+{ repositoryPath }: { repositoryPath: string },
 ): Promise<ExecResult> {
 const result = await execAsync("git", args, { cwd: repositoryPath });

@@ -43,7 +43,7 @@ export function run(command, args, options) {
 } else if (options.throwOnNonZeroExit && proc.status !== undefined && proc.status !== 0) {
 throw new CommandFailedError(
 `Command \`${command} ${args.join(" ")}\` failed with exit code ${proc.status}`,
-proc
+proc,
 );
 }
@@ -147,5 +147,5 @@ export default tsEslint.config(
 ],
 },
 ...TypeSpecCommonEslintConfigs,
-...getTypeScriptProjectRules(import.meta.dirname)
+...getTypeScriptProjectRules(import.meta.dirname),
 );

@@ -41,7 +41,7 @@ export async function getPackageVersion(repoRoot: string, pkgName: string) {
 const project = projects.find((x) => x.manifest.name === pkgName);
 if (project === undefined) {
 throw new Error(
-`Cannot get version for package: "${pkgName}", pnpm couldn't find a package with that name in the workspace`
+`Cannot get version for package: "${pkgName}", pnpm couldn't find a package with that name in the workspace`,
 );
 }
 const version = parse(project.manifest.version);
@@ -40,7 +40,7 @@ export class TypeSpecBundledPackageUploader {
 key,
 this.#container.url + "/" + normalizePath(join(manifest.name, manifest.version, value)),
 ];
-})
+}),
 );
 const created = await this.#uploadManifest(manifest);
 if (!created) {

@@ -78,7 +78,7 @@ export class TypeSpecBundledPackageUploader {
 async #uploadManifest(manifest: BundleManifest) {
 try {
 const blob = this.#container.getBlockBlobClient(
-normalizePath(join(manifest.name, manifest.version, "manifest.json"))
+normalizePath(join(manifest.name, manifest.version, "manifest.json")),
 );
 const content = JSON.stringify(manifest);
 await blob.upload(content, content.length, {

@@ -100,7 +100,7 @@ export class TypeSpecBundledPackageUploader {

 async #uploadJsFile(pkgName: string, version: string, file: TypeSpecBundleFile) {
 const blob = this.#container.getBlockBlobClient(
-normalizePath(join(pkgName, version, file.filename))
+normalizePath(join(pkgName, version, file.filename)),
 );
 await blob.uploadData(Buffer.from(file.content), {
 blobHTTPHeaders: {

@@ -115,11 +115,11 @@ export class TypeSpecBundledPackageUploader {

 function getCoverageContainer(
 storageAccountName: string,
-credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential
+credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential,
 ): ContainerClient {
 const blobSvc = new BlobServiceClient(
 `https://${storageAccountName}.blob.core.windows.net`,
-credential
+credential,
 );
 const containerClient = blobSvc.getContainerClient(pkgsContainer);
 return containerClient;
@@ -73,7 +73,7 @@ export async function createTypeSpecBundle(libraryPath: string): Promise<TypeSpe

 export async function watchTypeSpecBundle(
 libraryPath: string,
-onBundle: (bundle: TypeSpecBundle) => void
+onBundle: (bundle: TypeSpecBundle) => void,
 ) {
 const definition = await resolveTypeSpecBundleDefinition(libraryPath);
 const rollupOptions = await createRollupConfig(definition);

@@ -115,14 +115,14 @@ export async function bundleTypeSpecLibrary(libraryPath: string, outputDir: stri
 }

 async function resolveTypeSpecBundleDefinition(
-libraryPath: string
+libraryPath: string,
 ): Promise<TypeSpecBundleDefinition> {
 libraryPath = normalizePath(await realpath(libraryPath));
 const pkg = await readLibraryPackageJson(libraryPath);

 const exports = pkg.exports
 ? Object.fromEntries(
-Object.entries(pkg.exports).filter(([k, v]) => k !== "." && k !== "./testing")
+Object.entries(pkg.exports).filter(([k, v]) => k !== "." && k !== "./testing"),
 )
 : {};

@@ -165,7 +165,7 @@ async function createRollupConfig(definition: TypeSpecBundleDefinition): Promise
 key.replace("./", ""),
 normalizePath(resolve(libraryPath, getExportEntryPoint(value))),
 ];
-})
+}),
 );
 return {
 input: {

@@ -200,7 +200,7 @@ async function createRollupConfig(definition: TypeSpecBundleDefinition): Promise

 async function generateTypeSpecBundle(
 definition: TypeSpecBundleDefinition,
-bundle: RollupBuild
+bundle: RollupBuild,
 ): Promise<TypeSpecBundle> {
 const { output } = await bundle.generate({
 dir: "virtual",
@@ -45,9 +45,9 @@ async function main() {
 const resolvedRoot = resolvePath(process.cwd(), args.entrypoint);
 await bundleTypeSpecLibrary(
 resolvedRoot,
-args["output-dir"] ?? resolvePath(resolvedRoot, "out/browser")
+args["output-dir"] ?? resolvePath(resolvedRoot, "out/browser"),
 );
-}
+},
 )
 .version(typespecVersion)
 .demandCommand(1, "You must use one of the supported commands.").argv;

@@ -106,7 +106,7 @@ export function typespecBundlePlugin(options: TypeSpecBundlePluginOptions): Plug
 const importMapTag = `<script type="importmap">\n${JSON.stringify(
 createImportMap(options.folderName, definitions),
 null,
-2
+2,
 )}\n</script>`;
 return html.replace("<html", importMapTag + "\n<html");
 },

@@ -116,7 +116,7 @@ export function typespecBundlePlugin(options: TypeSpecBundlePluginOptions): Plug

 function createImportMap(
 folderName: string,
-definitions: Record<string, TypeSpecBundleDefinition>
+definitions: Record<string, TypeSpecBundleDefinition>,
 ) {
 const imports: Record<string, string> = {};
 for (const [library, definition] of Object.entries(definitions)) {

@@ -138,7 +138,7 @@ async function bundleLibrary(projectRoot: string, name: string) {
 async function watchBundleLibrary(
 projectRoot: string,
 name: string,
-onChange: (bundle: TypeSpecBundle) => void
+onChange: (bundle: TypeSpecBundle) => void,
 ) {
 return await watchTypeSpecBundle(resolve(projectRoot, "node_modules", name), onChange);
 }
@@ -4,7 +4,7 @@ import type { InitTemplate } from "../src/init/init-template.js";
 import { localDir, packageRoot } from "./helpers.js";

 const pkgJson = JSON.parse(
-(await readFile(resolve(packageRoot, "package.json"))).toString("utf-8")
+(await readFile(resolve(packageRoot, "package.json"))).toString("utf-8"),
 );
 const minCompilerVersion = pkgJson.version;

@@ -52,5 +52,5 @@ const distDir = resolve(packageRoot, "dist");
 await mkdir(distDir, { recursive: true });
 await writeFile(
 resolve(packageRoot, "templates", "scaffolding.json"),
-JSON.stringify(builtInTemplates, null, 2)
+JSON.stringify(builtInTemplates, null, 2),
 );

@@ -20,6 +20,6 @@ for (const [name, content] of Object.entries(files)) {

 await NodeHost.writeFile(
 resolvePath(outDir, name),
-await format(updatedContent, { ...prettierConfig, parser: "typescript" })
+await format(updatedContent, { ...prettierConfig, parser: "typescript" }),
 );
 }
@@ -53,7 +53,7 @@ export type EncodeDecorator = (
 context: DecoratorContext,
 target: Scalar | ModelProperty,
 encodingOrEncodeAs: Scalar | string | EnumValue,
-encodedAs?: Scalar
+encodedAs?: Scalar,
 ) => void;

 /**

@@ -71,7 +71,7 @@ export type DocDecorator = (
 context: DecoratorContext,
 target: Type,
 doc: string,
-formatArgs?: Type
+formatArgs?: Type,
 ) => void;

 /**

@@ -92,7 +92,7 @@ export type WithUpdateablePropertiesDecorator = (context: DecoratorContext, targ
 export type WithoutOmittedPropertiesDecorator = (
 context: DecoratorContext,
 target: Model,
-omit: Type
+omit: Type,
 ) => void;

 /**

@@ -103,7 +103,7 @@ export type WithoutOmittedPropertiesDecorator = (
 export type WithPickedPropertiesDecorator = (
 context: DecoratorContext,
 target: Model,
-pick: Type
+pick: Type,
 ) => void;

 /**

@@ -119,7 +119,7 @@ export type WithoutDefaultValuesDecorator = (context: DecoratorContext, target:
 export type WithDefaultKeyVisibilityDecorator = (
 context: DecoratorContext,
 target: Model,
-visibility: string
+visibility: string,
 ) => void;

 /**

@@ -148,7 +148,7 @@ export type SummaryDecorator = (context: DecoratorContext, target: Type, summary
 export type ReturnsDocDecorator = (
 context: DecoratorContext,
 target: Operation,
-doc: string
+doc: string,
 ) => void;

 /**

@@ -165,7 +165,7 @@ export type ReturnsDocDecorator = (
 export type ErrorsDocDecorator = (
 context: DecoratorContext,
 target: Operation,
-doc: string
+doc: string,
 ) => void;

 /**

@@ -186,7 +186,7 @@ export type ErrorsDocDecorator = (
 export type DeprecatedDecorator = (
 context: DecoratorContext,
 target: Type,
-message: string
+message: string,
 ) => void;

 /**

@@ -212,7 +212,7 @@ export type DeprecatedDecorator = (
 export type ServiceDecorator = (
 context: DecoratorContext,
 target: Namespace,
-options?: Type
+options?: Type,
 ) => void;

 /**

@@ -244,7 +244,7 @@ export type ErrorDecorator = (context: DecoratorContext, target: Model) => void;
 export type FormatDecorator = (
 context: DecoratorContext,
 target: Scalar | ModelProperty,
-format: string
+format: string,
 ) => void;

 /**

@@ -269,7 +269,7 @@ export type PatternDecorator = (
 context: DecoratorContext,
 target: Scalar | ModelProperty,
 pattern: string,
-validationMessage?: string
+validationMessage?: string,
 ) => void;

 /**

@@ -285,7 +285,7 @@ export type PatternDecorator = (
 export type MinLengthDecorator = (
 context: DecoratorContext,
 target: Scalar | ModelProperty,
-value: Numeric
+value: Numeric,
 ) => void;

 /**

@@ -301,7 +301,7 @@ export type MinLengthDecorator = (
 export type MaxLengthDecorator = (
 context: DecoratorContext,
 target: Scalar | ModelProperty,
-value: Numeric
+value: Numeric,
 ) => void;

 /**

@@ -317,7 +317,7 @@ export type MaxLengthDecorator = (
 export type MinItemsDecorator = (
 context: DecoratorContext,
 target: Type | ModelProperty,
-value: Numeric
+value: Numeric,
 ) => void;

 /**

@@ -333,7 +333,7 @@ export type MinItemsDecorator = (
 export type MaxItemsDecorator = (
 context: DecoratorContext,
 target: Type | ModelProperty,
-value: Numeric
+value: Numeric,
 ) => void;

 /**

@@ -349,7 +349,7 @@ export type MaxItemsDecorator = (
 export type MinValueDecorator = (
 context: DecoratorContext,
 target: Scalar | ModelProperty,
-value: Numeric
+value: Numeric,
 ) => void;

 /**

@@ -365,7 +365,7 @@ export type MinValueDecorator = (
 export type MaxValueDecorator = (
 context: DecoratorContext,
 target: Scalar | ModelProperty,
-value: Numeric
+value: Numeric,
 ) => void;

 /**

@@ -382,7 +382,7 @@ export type MaxValueDecorator = (
 export type MinValueExclusiveDecorator = (
 context: DecoratorContext,
 target: Scalar | ModelProperty,
-value: Numeric
+value: Numeric,
 ) => void;

 /**

@@ -399,7 +399,7 @@ export type MinValueExclusiveDecorator = (
 export type MaxValueExclusiveDecorator = (
 context: DecoratorContext,
 target: Scalar | ModelProperty,
-value: Numeric
+value: Numeric,
 ) => void;

 /**

@@ -422,7 +422,7 @@ export type SecretDecorator = (context: DecoratorContext, target: Scalar | Model
 export type ListDecorator = (
 context: DecoratorContext,
 target: Operation,
-listedType?: Model
+listedType?: Model,
 ) => void;

 /**

@@ -433,7 +433,7 @@ export type ListDecorator = (
 export type TagDecorator = (
 context: DecoratorContext,
 target: Namespace | Interface | Operation,
-tag: string
+tag: string,
 ) => void;

 /**

@@ -454,7 +454,7 @@ export type FriendlyNameDecorator = (
 context: DecoratorContext,
 target: Type,
 name: string,
-formatArgs?: Type
+formatArgs?: Type,
 ) => void;

 /**

@@ -475,7 +475,7 @@ export type FriendlyNameDecorator = (
 export type KnownValuesDecorator = (
 context: DecoratorContext,
 target: Scalar | ModelProperty,
-values: Enum
+values: Enum,
 ) => void;

 /**

@@ -492,7 +492,7 @@ export type KnownValuesDecorator = (
 export type KeyDecorator = (
 context: DecoratorContext,
 target: ModelProperty,
-altName?: string
+altName?: string,
 ) => void;

 /**

@@ -511,7 +511,7 @@ export type KeyDecorator = (
 export type OverloadDecorator = (
 context: DecoratorContext,
 target: Operation,
-overloadbase: Operation
+overloadbase: Operation,
 ) => void;

 /**

@@ -533,7 +533,7 @@ export type ProjectedNameDecorator = (
 context: DecoratorContext,
 target: Type,
 targetName: string,
-projectedName: string
+projectedName: string,
 ) => void;

 /**

@@ -560,7 +560,7 @@ export type EncodedNameDecorator = (
 context: DecoratorContext,
 target: Type,
 mimeType: string,
-name: string
+name: string,
 ) => void;

 /**

@@ -587,7 +587,7 @@ export type EncodedNameDecorator = (
 export type DiscriminatorDecorator = (
 context: DecoratorContext,
 target: Model | Union,
-propertyName: string
+propertyName: string,
 ) => void;

 /**

@@ -608,7 +608,7 @@ export type ExampleDecorator = (
 context: DecoratorContext,
 target: Model | Enum | Scalar | Union | ModelProperty | UnionVariant,
 example: unknown,
-options?: ExampleOptions
+options?: ExampleOptions,
 ) => void;

 /**

@@ -626,7 +626,7 @@ export type OpExampleDecorator = (
 context: DecoratorContext,
 target: Operation,
 example: OperationExample,
-options?: ExampleOptions
+options?: ExampleOptions,
 ) => void;

 /**

@@ -725,7 +725,7 @@ export type InspectTypeDecorator = (context: DecoratorContext, target: Type, tex
 export type InspectTypeNameDecorator = (
 context: DecoratorContext,
 target: Type,
-text: string
+text: string,
 ) => void;

 /**
@@ -23,7 +23,7 @@ const MAX_UNICODE_CODEPOINT = 0x10ffff;

 const disallowedRegex = new RegExp(
 `[${disallowedProperties.map((p) => `\\p{${p}}`).join("")}]`,
-"u"
+"u",
 );

 const map = computeMap();
@@ -17,7 +17,7 @@ export interface ExpandConfigOptions {

 export function expandConfigVariables(
 config: TypeSpecConfig,
-expandOptions: ExpandConfigOptions
+expandOptions: ExpandConfigOptions,
 ): [TypeSpecConfig, readonly Diagnostic[]] {
 const diagnostics = createDiagnosticCollector();
 const builtInVars = {

@@ -29,11 +29,11 @@ export function expandConfigVariables(
 ...builtInVars,
 ...diagnostics.pipe(resolveArgs(config.parameters, expandOptions.args, builtInVars)),
 env: diagnostics.pipe(
-resolveArgs(config.environmentVariables, expandOptions.env, builtInVars, true)
+resolveArgs(config.environmentVariables, expandOptions.env, builtInVars, true),
 ),
 };
 const outputDir = diagnostics.pipe(
-resolveValue(expandOptions.outputDir ?? config.outputDir, commonVars)
+resolveValue(expandOptions.outputDir ?? config.outputDir, commonVars),
 );

 const result = { ...config, outputDir };

@@ -53,7 +53,7 @@ function resolveArgs(
 declarations: Record<string, ConfigParameter | ConfigEnvironmentVariable> | undefined,
 args: Record<string, string | undefined> | undefined,
 predefinedVariables: Record<string, string | Record<string, string>>,
-allowUnspecified = false
+allowUnspecified = false,
 ): [Record<string, string>, readonly Diagnostic[]] {
 const unmatchedArgs = new Set(Object.keys(args ?? {}));
 const result: Record<string, string> = {};

@@ -61,7 +61,7 @@ function resolveArgs(
 for (const [name, definition] of Object.entries(declarations)) {
 unmatchedArgs.delete(name);
 result[name] = ignoreDiagnostics(
-resolveValue(args?.[name] ?? definition.default, predefinedVariables)
+resolveValue(args?.[name] ?? definition.default, predefinedVariables),
 );
 }
 }

@@ -83,7 +83,7 @@ const VariableInterpolationRegex = /{([a-zA-Z-_.]+)}/g;

 function resolveValue(
 value: string,
-predefinedVariables: Record<string, string | Record<string, string>>
+predefinedVariables: Record<string, string | Record<string, string>>,
 ): [string, readonly Diagnostic[]] {
 const [result, diagnostics] = resolveValues({ value }, predefinedVariables);
 return [result.value, diagnostics];

@@ -91,7 +91,7 @@ function resolveValue(

 export function resolveValues<T extends Record<string, unknown>>(
 values: T,
-predefinedVariables: Record<string, string | Record<string, string>> = {}
+predefinedVariables: Record<string, string | Record<string, string>> = {},
 ): [T, readonly Diagnostic[]] {
 const diagnostics: Diagnostic[] = [];
 const resolvedValues: Record<string, unknown> = {};

@@ -119,7 +119,7 @@ export function resolveValues<T extends Record<string, unknown>>(
 code: "config-circular-variable",
 target: NoTarget,
 format: { name: expression },
-})
+}),
 );
 return undefined;
 }
@@ -25,14 +25,14 @@ export const defaultConfig = deepFreeze({
 export async function findTypeSpecConfigPath(
 host: CompilerHost,
 path: string,
-lookup: boolean = true
+lookup: boolean = true,
 ): Promise<string | undefined> {
 // if the path is a file, return immediately
 const stats = await doIO(
 () => host.stat(path),
 path,
 () => {},
-{ allowFileNotFound: true }
+{ allowFileNotFound: true },
 );
 if (!stats) {
 return undefined;

@@ -49,7 +49,7 @@ export async function findTypeSpecConfigPath(
 () => host.stat(current),
 current,
 () => {},
-{ allowFileNotFound: true }
+{ allowFileNotFound: true },
 );
 if (stats?.isFile()) {
 return current;

@@ -83,7 +83,7 @@ export async function loadTypeSpecConfigForPath(
 host: CompilerHost,
 path: string,
 errorIfNotFound: boolean = false,
-lookup: boolean = true
+lookup: boolean = true,
 ): Promise<TypeSpecConfig> {
 const typespecConfigPath = await findTypeSpecConfigPath(host, path, lookup);
 if (typespecConfigPath === undefined) {

@@ -97,7 +97,7 @@ export async function loadTypeSpecConfigForPath(
 path: path,
 },
 target: NoTarget,
-})
+}),
 );
 }
 return tsConfig;

@@ -113,7 +113,7 @@ export async function loadTypeSpecConfigForPath(
 message: "`cadl-project.yaml` is deprecated. Please rename to `tspconfig.yaml`.",
 },
 target: NoTarget,
-})
+}),
 );
 }
 return tsConfig;

@@ -124,7 +124,7 @@ export async function loadTypeSpecConfigForPath(
 */
 export async function loadTypeSpecConfigFile(
 host: CompilerHost,
-filePath: string
+filePath: string,
 ): Promise<TypeSpecConfig> {
 const config = await loadConfigFile(host, filePath, parseYaml);
 if (config.diagnostics.length === 0 && config.extends) {

@@ -140,7 +140,7 @@ export async function loadTypeSpecConfigFile(
 message: "`cadl-project.yaml` is deprecated. Please rename to `tspconfig.yaml`.",
 },
 target: NoTarget,
-})
+}),
 );
 }

@@ -168,13 +168,13 @@ const configValidator = createJSONSchemaValidator(TypeSpecConfigJsonSchema);
 async function searchConfigFile(
 host: CompilerHost,
 path: string,
-filename: string
+filename: string,
 ): Promise<string | undefined> {
 const pkgPath = joinPaths(path, filename);
 const stat = await doIO(
 () => host.stat(pkgPath),
 pkgPath,
-() => {}
+() => {},
 );

 return stat?.isFile() === true ? pkgPath : undefined;

@@ -183,7 +183,7 @@ async function searchConfigFile(
 async function loadConfigFile(
 host: CompilerHost,
 filename: string,
-loadData: (content: SourceFile) => [YamlScript, readonly Diagnostic[]]
+loadData: (content: SourceFile) => [YamlScript, readonly Diagnostic[]],
 ): Promise<TypeSpecConfig> {
 let diagnostics: Diagnostic[] = [];
 const reportDiagnostic = (d: Diagnostic) => diagnostics.push(d);

@@ -235,7 +235,7 @@ export function validateConfigPathsAbsolute(config: TypeSpecConfig): readonly Di
 }
 const diagnostic = validatePathAbsolute(
 value,
-config.file ? { file: config.file, path } : NoTarget
+config.file ? { file: config.file, path } : NoTarget,
 );
 if (diagnostic) {
 diagnostics.push(diagnostic);

@@ -251,7 +251,7 @@ export function validateConfigPathsAbsolute(config: TypeSpecConfig): readonly Di

 function validatePathAbsolute(
 path: string,
-target: { file: YamlScript; path: string[] } | typeof NoTarget
+target: { file: YamlScript; path: string[] } | typeof NoTarget,
 ): Diagnostic | undefined {
 if (path.startsWith(".") || !isPathAbsolute(path)) {
 return createDiagnostic({
@@ -41,7 +41,7 @@ export interface ConfigToOptionsOptions {
 */
 export async function resolveCompilerOptions(
 host: CompilerHost,
-options: ResolveCompilerOptionsOptions
+options: ResolveCompilerOptionsOptions,
 ): Promise<[CompilerOptions, readonly Diagnostic[]]> {
 const diagnostics = createDiagnosticCollector();

@@ -49,7 +49,7 @@ export async function resolveCompilerOptions(
 host.stat,
 options.entrypoint,
 (diag) => diagnostics.add(diag),
-{ allowFileNotFound: true }
+{ allowFileNotFound: true },
 );
 const configPath =
 options.configPath ??

@@ -58,7 +58,7 @@ export async function resolveCompilerOptions(
 host,
 configPath,
 options.configPath !== undefined,
-options.configPath === undefined
+options.configPath === undefined,
 );
 config.diagnostics.forEach((x) => diagnostics.add(x));

@@ -87,7 +87,7 @@ export function resolveOptionsFromConfig(config: TypeSpecConfig, options: Config
 outputDir: options.overrides?.outputDir,
 env: options.env ?? {},
 args: options.args,
-})
+}),
 );
 validateConfigPathsAbsolute(expandedConfig).forEach((x) => diagnostics.add(x));

@@ -107,7 +107,7 @@ export function resolveOptionsFromConfig(config: TypeSpecConfig, options: Config

 function mergeOptions(
 base: Record<string, Record<string, unknown>> | undefined,
-overrides: Record<string, Record<string, unknown>> | undefined
+overrides: Record<string, Record<string, unknown>> | undefined,
 ): Record<string, EmitterOptions> {
 const configuredEmitters: Record<string, Record<string, unknown>> = deepClone(base ?? {});
@@ -126,7 +126,7 @@ export function createBinder(program: Program): Binder {
 mutate(sourceFile).symbol = createSymbol(
 sourceFile,
 sourceFile.file.path,
-SymbolFlags.SourceFile
+SymbolFlags.SourceFile,
 );
 const rootNs = sourceFile.esmExports["namespace"];

@@ -147,7 +147,7 @@ export function createBinder(program: Program): Binder {
 "decorator",
 decoratorName,
 decorator,
-sourceFile
+sourceFile,
 );
 }
 }

@@ -184,7 +184,7 @@ export function createBinder(program: Program): Binder {
 kind: "decorator" | "function",
 name: string,
 fn: (...args: any[]) => any,
-sourceFile: JsSourceFileNode
+sourceFile: JsSourceFileNode,
 ) {
 let containerSymbol = sourceFile.symbol;

@@ -232,7 +232,7 @@ export function createBinder(program: Program): Binder {
 sourceFile,
 "@" + name,
 SymbolFlags.Decorator | SymbolFlags.Implementation,
-containerSymbol
+containerSymbol,
 );
 } else {
 tracer.trace("function", `Bound function "${name}" in namespace "${nsParts.join(".")}".`);

@@ -240,7 +240,7 @@ export function createBinder(program: Program): Binder {
 sourceFile,
 name,
 SymbolFlags.Function | SymbolFlags.Implementation,
-containerSymbol
+containerSymbol,
 );
 }
 mutate(sym).value = fn;

@@ -459,7 +459,7 @@ export function createBinder(program: Program): Binder {
 }

 function bindProjectionLambdaParameterDeclaration(
-node: ProjectionLambdaParameterDeclarationNode
+node: ProjectionLambdaParameterDeclarationNode,
 ) {
 declareSymbol(node, SymbolFlags.FunctionParameter);
 }

@@ -658,7 +658,7 @@ export function createSymbol(
 name: string,
 flags: SymbolFlags,
 parent?: Sym,
-value?: any
+value?: any,
 ): Sym {
 let exports: SymbolTable | undefined;
 if (flags & SymbolFlags.ExportContainer) {
@@ -249,7 +249,7 @@ export function isNonAsciiIdentifierCharacter(codePoint: number) {

 export function codePointBefore(
 text: string,
-pos: number
+pos: number,
 ): { char: number | undefined; size: number } {
 if (pos <= 0 || pos > text.length) {
 return { char: undefined, size: 0 };
The diff for one file is not shown here because it is too large.
@@ -27,7 +27,7 @@ export async function getCompilerOptions(
 entrypoint: string,
 cwd: string,
 args: CompileCliArgs,
-env: Record<string, string | undefined>
+env: Record<string, string | undefined>,
 ): Promise<[CompilerOptions | undefined, readonly Diagnostic[]]> {
 const diagnostics = createDiagnosticCollector();

@@ -54,7 +54,7 @@ export async function getCompilerOptions(
 emit: args.emit,
 options: cliOptions.options,
 }),
-})
+}),
 );
 if (args["no-emit"]) {
 resolvedOptions.noEmit = true;

@@ -64,7 +64,7 @@ export async function getCompilerOptions(
 omitUndefined({
 ...resolvedOptions,
 miscOptions: cliOptions.miscOptions,
-})
+}),
 );
 }

@@ -91,7 +91,7 @@ function resolveCliOptions(args: CompileCliArgs): {
 const optionParts = option.split("=");
 if (optionParts.length !== 2) {
 throw new Error(
-`The --option parameter value "${option}" must be in the format: <emitterName>.some-options=value`
+`The --option parameter value "${option}" must be in the format: <emitterName>.some-options=value`,
 );
 }
 let optionKeyParts = optionParts[0].split(".");
@@ -16,13 +16,13 @@ import { ProjectWatcher, WatchHost, createWatchHost, createWatcher } from "./wat

 export async function compileAction(
 host: CliCompilerHost,
-args: CompileCliArgs & { path: string; pretty?: boolean }
+args: CompileCliArgs & { path: string; pretty?: boolean },
 ) {
 const diagnostics: Diagnostic[] = [];
 const entrypoint = await resolveTypeSpecEntrypoint(
 host,
 resolvePath(process.cwd(), args.path),
-(diag) => diagnostics.push(diag)
+(diag) => diagnostics.push(diag),
 );
 if (entrypoint === undefined || diagnostics.length > 0) {
 logDiagnostics(diagnostics, host.logSink);

@@ -40,14 +40,14 @@ export async function compileAction(
 async function getCompilerOptionsOrExit(
 host: CompilerHost,
 entrypoint: string,
-args: CompileCliArgs
+args: CompileCliArgs,
 ): Promise<CompilerOptions> {
 const [options, diagnostics] = await getCompilerOptions(
 host,
 entrypoint,
 process.cwd(),
 args,
-process.env
+process.env,
 );
 if (diagnostics.length > 0) {
 logDiagnostics(diagnostics, host.logSink);

@@ -63,7 +63,7 @@ async function getCompilerOptionsOrExit(
 async function compileOnce(
 host: CompilerHost,
 path: string,
-compilerOptions: CompilerOptions
+compilerOptions: CompilerOptions,
 ): Promise<void> {
 try {
 const program = await compileProgram(host, resolve(path), compilerOptions);

@@ -79,7 +79,7 @@ async function compileOnce(
 function compileWatch(
 cliHost: CliCompilerHost,
 path: string,
-compilerOptions: CompilerOptions
+compilerOptions: CompilerOptions,
 ): Promise<void> {
 const entrypoint = resolve(path);
 const watchHost: WatchHost = createWatchHost(cliHost);

@@ -142,7 +142,7 @@ function compileWatch(
 function logProgramResult(
 host: CompilerHost,
 program: Program,
-{ showTimestamp }: { showTimestamp?: boolean } = {}
+{ showTimestamp }: { showTimestamp?: boolean } = {},
 ) {
 const log = (message?: any, ...optionalParams: any[]) => {
 const timestamp = showTimestamp ? `[${new Date().toLocaleTimeString()}] ` : "";

@@ -162,7 +162,7 @@ function logProgramResult(

 if (program.emitters.length === 0 && !program.compilerOptions.noEmit) {
 log(
-"No emitter was configured, no output was generated. Use `--emit <emitterName>` to pick emitter or specify it in the TypeSpec config."
+"No emitter was configured, no output was generated. Use `--emit <emitterName>` to pick emitter or specify it in the TypeSpec config.",
 );
 }
 }
|
|||
}
|
||||
|
||||
export function createWatcher(
|
||||
onFileChanged: (event: WatchEventType, name: string) => void
|
||||
onFileChanged: (event: WatchEventType, name: string) => void,
|
||||
): ProjectWatcher {
|
||||
const current = new Map<string, FSWatcher>();
|
||||
const dupFilter = createDupsFilter();
|
||||
|
@ -27,7 +27,7 @@ export function createWatcher(
|
|||
file,
|
||||
dupFilter((event: WatchEventType, _name: string | null) => {
|
||||
onFileChanged(event, file);
|
||||
})
|
||||
}),
|
||||
);
|
||||
return watcher;
|
||||
}
|
||||
|
|
|
@@ -9,7 +9,7 @@ export interface InitArgs {

 export async function initAction(
 host: CliCompilerHost,
-args: InitArgs
+args: InitArgs,
 ): Promise<readonly Diagnostic[]> {
 try {
 await initTypeSpecProject(host, process.cwd(), args);
@@ -53,7 +53,7 @@ export async function uninstallVSExtension(host: CliCompilerHost): Promise<reado

 function getVsixInstallerPath(): [string | undefined, readonly Diagnostic[]] {
 return getVSInstallerPath(
-"resources/app/ServiceHub/Services/Microsoft.VisualStudio.Setup.Service/VSIXInstaller.exe"
+"resources/app/ServiceHub/Services/Microsoft.VisualStudio.Setup.Service/VSIXInstaller.exe",
 );
 }

@@ -70,7 +70,7 @@ function getVSInstallerPath(relativePath: string): [string | undefined, readonly
 joinPaths(
 process.env["ProgramFiles(x86)"] ?? "",
 "Microsoft Visual Studio/Installer",
-relativePath
+relativePath,
 ),
 [],
 ];

@@ -88,7 +88,7 @@ function isVSInstalled(host: CliCompilerHost, versionRange: string) {
 {
 stdio: [null, "pipe", "inherit"],
 allowNotFound: true,
-}
+},
 );
 return proc.status === 0 && proc.stdout;
 }
@@ -9,10 +9,10 @@ export interface InstallVSCodeExtensionOptions {
 }
 export async function installVSCodeExtension(
 host: CliCompilerHost,
-options: InstallVSCodeExtensionOptions
+options: InstallVSCodeExtensionOptions,
 ) {
 return await installVsix(host, "typespec-vscode", (vsixPaths) =>
-runCode(host, ["--install-extension", vsixPaths[0]], options.insiders)
+runCode(host, ["--install-extension", vsixPaths[0]], options.insiders),
 );
 }

@@ -22,7 +22,7 @@ export interface UninstallVSCodeExtensionOptions {

 export async function uninstallVSCodeExtension(
 host: CliCompilerHost,
-options: UninstallVSCodeExtensionOptions
+options: UninstallVSCodeExtensionOptions,
 ) {
 return runCode(host, ["--uninstall-extension", "microsoft.typespec-vscode"], options.insiders);
 }

@@ -30,7 +30,7 @@ export async function uninstallVSCodeExtension(
 function runCode(
 host: CliCompilerHost,
 codeArgs: string[],
-insiders: boolean
+insiders: boolean,
 ): readonly Diagnostic[] {
 try {
 run(host, insiders ? "code-insiders" : "code", codeArgs, {
@@ -128,7 +128,7 @@ async function main() {
 describe: "Key/value of arguments that are used in the configuration.",
 });
 },
-withCliHost((host, args) => compileAction(host, args))
+withCliHost((host, args) => compileAction(host, args)),
 )
 .command("code", "Manage VS Code Extension.", (cmd) => {
 return cmd

@@ -143,16 +143,16 @@ async function main() {
 "Install VS Code Extension",
 () => {},
 withCliHostAndDiagnostics<CliHostArgs & InstallVSCodeExtensionOptions>((host, args) =>
-installVSCodeExtension(host, args)
-)
+installVSCodeExtension(host, args),
+),
 )
 .command(
 "uninstall",
 "Uninstall VS Code Extension",
 () => {},
 withCliHostAndDiagnostics<CliHostArgs & UninstallVSCodeExtensionOptions>((host, args) =>
-uninstallVSCodeExtension(host, args)
-)
+uninstallVSCodeExtension(host, args),
+),
 );
 })
 .command("vs", "Manage Visual Studio Extension.", (cmd) => {

@@ -162,13 +162,13 @@ async function main() {
 "install",
 "Install Visual Studio Extension.",
 () => {},
-withCliHostAndDiagnostics((host) => installVSExtension(host))
+withCliHostAndDiagnostics((host) => installVSExtension(host)),
 )
 .command(
 "uninstall",
 "Uninstall VS Extension",
 () => {},
-withCliHostAndDiagnostics((host) => uninstallVSExtension(host))
+withCliHostAndDiagnostics((host) => uninstallVSExtension(host)),
 );
 })
 .command(

@@ -194,7 +194,7 @@ async function main() {
 describe: "Verify the files are formatted.",
 });
 },
-withCliHost((host, args) => formatAction(host, args))
+withCliHost((host, args) => formatAction(host, args)),
 )
 .command(
 "init [templatesUrl]",

@@ -209,19 +209,19 @@ async function main() {
 type: "string",
 description: "Name of the template to use",
 }),
-withCliHostAndDiagnostics((host, args) => initAction(host, args))
+withCliHostAndDiagnostics((host, args) => initAction(host, args)),
 )
 .command(
 "install",
 "Install TypeSpec dependencies",
 () => {},
-withCliHost((host) => installTypeSpecDependencies(host, process.cwd()))
+withCliHost((host) => installTypeSpecDependencies(host, process.cwd())),
 )
 .command(
 "info",
 "Show information about the current TypeSpec compiler.",
 () => {},
-withCliHostAndDiagnostics((host) => printInfoAction(host))
+withCliHostAndDiagnostics((host) => printInfoAction(host)),
 )
 .version(typespecVersion)
 .demandCommand(1, "You must use one of the supported commands.").argv;
@@ -8,7 +8,7 @@ import { run } from "./utils.js";
 export async function installVsix<T = void>(
 host: CliCompilerHost,
 pkg: string,
-install: (vsixPaths: string[]) => T
+install: (vsixPaths: string[]) => T,
 ): Promise<T> {
 // download npm package to temporary directory
 const temp = await mkdtemp(joinPaths(os.tmpdir(), "typespec"));

@@ -45,7 +45,7 @@ export async function installVsix<T = void>(

 compilerAssert(
 vsixPaths.length > 0,
-`Installed ${pkg} from npm, but didn't find any .vsix files in it.`
+`Installed ${pkg} from npm, but didn't find any .vsix files in it.`,
 );

 // install extension
@@ -28,7 +28,7 @@ export interface CliHostArgs {
 }

 export function withCliHost<T extends CliHostArgs>(
-fn: (host: CliCompilerHost, args: T) => void | Promise<void>
+fn: (host: CliCompilerHost, args: T) => void | Promise<void>,
 ): (args: T) => void | Promise<void> {
 return (args: T) => {
 const host = createCLICompilerHost(args);

@@ -40,7 +40,7 @@ export function withCliHost<T extends CliHostArgs>(
 * Resolve Cli host automatically using cli args and handle diagnostics returned by the action.
 */
 export function withCliHostAndDiagnostics<T extends CliHostArgs>(
-fn: (host: CliCompilerHost, args: T) => readonly Diagnostic[] | Promise<readonly Diagnostic[]>
+fn: (host: CliCompilerHost, args: T) => readonly Diagnostic[] | Promise<readonly Diagnostic[]>,
 ): (args: T) => void | Promise<void> {
 return async (args: T) => {
 const host = createCLICompilerHost(args);

@@ -63,7 +63,7 @@ export function run(
 host: CliCompilerHost,
 command: string,
 commandArgs: string[],
-options?: RunOptions
+options?: RunOptions,
 ) {
 const logger = host.logger;
 if (options) {

@@ -118,7 +118,7 @@ export function run(
 logger.error(
 `error: Command '${baseCommandName} ${commandArgs.join(" ")}' failed with exit code ${
 proc.status
-}.`
+}.`,
 );
 process.exit(proc.status || 1);
 }
@@ -45,7 +45,7 @@ export function validateDecoratorTarget<K extends TypeKind>(
 context: DecoratorContext,
 target: Type,
 decoratorName: string,
-expectedType: K | readonly K[]
+expectedType: K | readonly K[],
 ): target is K extends "Any" ? Type : Type & { kind: K } {
 const isCorrectType = isTypeSpecValueTypeOf(target, expectedType);
 if (!isCorrectType) {

@@ -66,14 +66,14 @@ export function validateDecoratorTarget<K extends TypeKind>(
 export function isIntrinsicType(
 program: Program,
 type: Scalar,
-kind: IntrinsicScalarName
+kind: IntrinsicScalarName,
 ): boolean {
 return ignoreDiagnostics(
 program.checker.isTypeAssignableTo(
 type.projectionBase ?? type,
 program.checker.getStdType(kind),
-type
-)
+type,
+),
 );
 }

@@ -84,13 +84,13 @@ export function validateDecoratorTargetIntrinsic(
 context: DecoratorContext,
 target: Scalar | ModelProperty,
 decoratorName: string,
-expectedType: IntrinsicScalarName | IntrinsicScalarName[]
+expectedType: IntrinsicScalarName | IntrinsicScalarName[],
 ): boolean {
 const expectedTypeStrs = typeof expectedType === "string" ? [expectedType] : expectedType;
 const expectedTypes = expectedTypeStrs.map((x) => context.program.checker.getStdType(x));
 const type = getPropertyType(target);
 const isCorrect = expectedTypes.some(
-(x) => context.program.checker.isTypeAssignableTo(type, x, type)[0]
+(x) => context.program.checker.isTypeAssignableTo(type, x, type)[0],
 );
 if (!isCorrect) {
 context.program.reportDiagnostic(

@@ -101,7 +101,7 @@ export function validateDecoratorTargetIntrinsic(
 to: `type it is not one of: ${expectedTypeStrs.join(", ")}`,
 },
 target: context.decoratorTarget,
-})
+}),
 );
 return false;
 }

@@ -119,7 +119,7 @@ export const isCadlValueTypeOf = isTypeSpecValueTypeOf;
 */
 export function isTypeSpecValueTypeOf<K extends TypeKind>(
 target: TypeSpecValue,
-expectedType: K | readonly K[]
+expectedType: K | readonly K[],
 ): target is InferredTypeSpecValue<K> {
 const kind = getTypeKind(target);
 if (kind === undefined) {

@@ -159,7 +159,7 @@ export function validateDecoratorParamType<K extends Type["kind"]>(
 program: Program,
 target: Type,
 value: TypeSpecValue,
-expectedType: K | K[]
+expectedType: K | K[],
 ): value is InferredTypeSpecValue<K> {
 if (!isTypeSpecValueTypeOf(value, expectedType)) {
 reportDiagnostic(program, {

@@ -244,7 +244,7 @@ export interface DecoratorValidator<
 validate(
 context: DecoratorContext,
 target: InferredTypeSpecValue<T>,
-parameters: InferParameters<P, S>
+parameters: InferParameters<P, S>,
 ): boolean;
 }

@@ -314,7 +314,7 @@ export function validateDecoratorParamCount(
 context: DecoratorContext,
 min: number,
 max: number | undefined,
-parameters: unknown[]
+parameters: unknown[],
 ): boolean {
 let missing = 0;
 for (let i = parameters.length - 1; i >= 0; i--) {

@@ -368,7 +368,7 @@ export const cadlTypeToJson = typespecTypeToJson;
 */
 export function typespecTypeToJson<T>(
 typespecType: TypeSpecValue,
-target: DiagnosticTarget
+target: DiagnosticTarget,
 ): [T | undefined, Diagnostic[]] {
 if (typeof typespecType !== "object") {
 return [typespecType as any, []];

@@ -379,7 +379,7 @@ export function typespecTypeToJson<T>(
 function typespecTypeToJsonInternal(
 typespecType: Type,
 target: DiagnosticTarget,
-path: string[]
+path: string[],
 ): [any | undefined, Diagnostic[]] {
 switch (typespecType.kind) {
 case "String":

@@ -442,7 +442,7 @@ function typespecTypeToJsonInternal(
 export function validateDecoratorUniqueOnNode(
 context: DecoratorContext,
 type: Type,
-decorator: DecoratorFunction
+decorator: DecoratorFunction,
 ) {
 compilerAssert("decorators" in type, "Type should have decorators");

@@ -450,7 +450,7 @@ export function validateDecoratorUniqueOnNode(
 (x) =>
 x.decorator === decorator &&
 x.node?.kind === SyntaxKind.DecoratorExpression &&
-x.node?.parent === type.node
+x.node?.parent === type.node,
 );

 if (sameDecorators.length > 1) {

@@ -478,7 +478,7 @@ export function validateDecoratorNotOnType(
 context: DecoratorContext,
 type: Type,
 badDecorator: DecoratorFunction,
-givenDecorator: DecoratorFunction
+givenDecorator: DecoratorFunction,
 ) {
 compilerAssert("decorators" in type, "Type should have decorators");
 const decAppsToCheck = [];
@@ -33,7 +33,7 @@ export function isDeprecated(program: Program, type: Type): boolean {
 */
 export function getDeprecationDetails(
 program: Program,
-typeOrNode: Type | Node
+typeOrNode: Type | Node,
 ): DeprecationDetails | undefined {
 function isType(maybeType: Type | Node): maybeType is Type {
 return typeof maybeType.kind === "string";

@@ -45,7 +45,7 @@ export function getDeprecationDetails(
 } else {
 // Look at the node for a deprecation directive
 const deprecatedDirective = ((typeOrNode as BaseNode).directives ?? []).find(
-(directive) => directive.target.sv === "deprecated"
+(directive) => directive.target.sv === "deprecated",
 );

 if (deprecatedDirective?.arguments[0].kind === SyntaxKind.StringLiteral) {
@@ -15,14 +15,14 @@ import type {
 */
 export function createDiagnosticCreator<T extends { [code: string]: DiagnosticMessages }>(
 diagnostics: DiagnosticMap<T>,
-libraryName?: string
+libraryName?: string,
 ): DiagnosticCreator<T> {
 const errorMessage = libraryName
 ? `It must match one of the code defined in the library '${libraryName}'`
 : "It must match one of the code defined in the compiler.";

 function createDiagnostic<C extends keyof T, M extends keyof T[C] = "default">(
-diagnostic: DiagnosticReport<T, C, M>
+diagnostic: DiagnosticReport<T, C, M>,
 ): Diagnostic {
 const diagnosticDef = diagnostics[diagnostic.code];

@@ -32,7 +32,7 @@ export function createDiagnosticCreator<T extends { [code: string]: DiagnosticMe
 .join("\n");
 const code = String(diagnostic.code);
 throw new Error(
-`Unexpected diagnostic code '${code}'. ${errorMessage}. Defined codes:\n${codeStr}`
+`Unexpected diagnostic code '${code}'. ${errorMessage}. Defined codes:\n${codeStr}`,
 );
 }

@@ -44,7 +44,7 @@ export function createDiagnosticCreator<T extends { [code: string]: DiagnosticMe
 const messageId = String(diagnostic.messageId);
 const code = String(diagnostic.code);
 throw new Error(
-`Unexpected message id '${messageId}'. ${errorMessage} for code '${code}'. Defined codes:\n${codeStr}`
+`Unexpected message id '${messageId}'. ${errorMessage} for code '${code}'. Defined codes:\n${codeStr}`,
 );
 }

@@ -64,7 +64,7 @@ export function createDiagnosticCreator<T extends { [code: string]: DiagnosticMe

 function reportDiagnostic<C extends keyof T, M extends keyof T[C] = "default">(
 program: Program,
-diagnostic: DiagnosticReport<T, C, M>
+diagnostic: DiagnosticReport<T, C, M>,
 ) {
 const diag = createDiagnostic(diagnostic);
 program.reportDiagnostic(diag);
@ -50,7 +50,7 @@ export function formatDiagnostic(diagnostic: Diagnostic) {
url: diagnostic.url,
sourceLocation: getSourceLocation(diagnostic.target, { locateId: true }),
},
{ pretty: false }
{ pretty: false },
);
}
@ -63,19 +63,19 @@ export interface SourceLocationOptions {
}
export function getSourceLocation(
target: DiagnosticTarget,
options?: SourceLocationOptions
options?: SourceLocationOptions,
): SourceLocation;
export function getSourceLocation(
target: typeof NoTarget | undefined,
options?: SourceLocationOptions
options?: SourceLocationOptions,
): undefined;
export function getSourceLocation(
target: DiagnosticTarget | typeof NoTarget | undefined,
options?: SourceLocationOptions
options?: SourceLocationOptions,
): SourceLocation | undefined;
export function getSourceLocation(
target: DiagnosticTarget | typeof NoTarget | undefined,
options: SourceLocationOptions = {}
options: SourceLocationOptions = {},
): SourceLocation | undefined {
if (target === NoTarget || target === undefined) {
return undefined;
@ -131,7 +131,7 @@ function getSourceLocationOfNode(node: Node, options: SourceLocationOptions): So
return createSyntheticSourceLocation(
node.flags & NodeFlags.Synthetic
? undefined
: "<unknown location - cannot obtain source location of unbound node - file bug at https://github.com/microsoft/typespec>"
: "<unknown location - cannot obtain source location of unbound node - file bug at https://github.com/microsoft/typespec>",
);
}
@ -156,7 +156,7 @@ function getSourceLocationOfNode(node: Node, options: SourceLocationOptions): So
* when verbose output is disabled.
*/
export function logVerboseTestOutput(
messageOrCallback: string | ((log: (message: string) => void) => void)
messageOrCallback: string | ((log: (message: string) => void) => void),
) {
if (process.env.TYPESPEC_VERBOSE_TEST_OUTPUT) {
if (typeof messageOrCallback === "string") {
@ -183,7 +183,7 @@ export function logVerboseTestOutput(
export function compilerAssert(
condition: any,
message: string,
target?: DiagnosticTarget
target?: DiagnosticTarget,
): asserts condition {
if (condition) {
return;
@ -229,7 +229,7 @@ export function assertType<TKind extends Type["kind"][]>(
export function reportDeprecated(
program: Program,
message: string,
target: DiagnosticTarget | typeof NoTarget
target: DiagnosticTarget | typeof NoTarget,
): void {
program.reportDiagnostic({
severity: "warning",
@ -11,7 +11,7 @@ import { CompilerHost } from "./types.js";
export async function resolveTypeSpecEntrypoint(
host: CompilerHost,
path: string,
reportDiagnostic: DiagnosticHandler
reportDiagnostic: DiagnosticHandler,
): Promise<string | undefined> {
const resolvedPath = resolvePath(path);
const mainStat = await doIO(host.stat, resolvedPath, reportDiagnostic);
@ -29,7 +29,7 @@ export async function resolveTypeSpecEntrypoint(
export async function resolveTypeSpecEntrypointForDir(
host: CompilerHost,
dir: string,
reportDiagnostic: DiagnosticHandler
reportDiagnostic: DiagnosticHandler,
): Promise<string> {
const pkgJsonPath = resolvePath(dir, "package.json");
const [pkg] = await loadFile(host, pkgJsonPath, JSON.parse, reportDiagnostic, {
@ -45,7 +45,7 @@ export async function resolveTypeSpecEntrypointForDir(
const stat = await doIO(
() => host.stat(mainFile),
mainFile,
() => {}
() => {},
);
// if not found, use the normal resolution.
if (stat?.isFile() !== true) {
@ -49,7 +49,7 @@ export class ExternalError extends Error {
function renderExternalErrorInfo(
info: ExternalErrorInfo,
color: (text: string, color: Colors) => string = (x) => x
color: (text: string, color: Colors) => string = (x) => x,
): string {
const { metadata, kind } = info;
const msg = [
@ -57,7 +57,7 @@ function renderExternalErrorInfo(
kind === "emitter"
? `Emitter "${metadata.name}" crashed! This is a bug.`
: `Library "${metadata.name}" $onValidate crashed! This is a bug.`,
"red"
"red",
),
];
if (metadata.bugs?.url) {
@ -26,7 +26,7 @@ export interface TypeSpecFormatResult {
*/
export async function formatTypeSpecFiles(
patterns: string[],
{ exclude, debug }: TypeSpecFormatOptions
{ exclude, debug }: TypeSpecFormatOptions,
): Promise<[TypeSpecFormatResult, readonly Diagnostic[]]> {
const files = await findFiles(patterns, exclude);
const diagnostics: Diagnostic[] = [];
@ -43,7 +43,7 @@ export async function formatTypeSpecFiles(
code: "format-failed",
format: { file, details },
target: NoTarget,
})
}),
);
} else {
throw e;
@ -60,7 +60,7 @@ export async function formatTypeSpecFiles(
*/
export async function findUnformattedTypeSpecFiles(
patterns: string[],
{ exclude, debug }: TypeSpecFormatOptions
{ exclude, debug }: TypeSpecFormatOptions,
): Promise<string[]> {
const files = await findFiles(patterns, exclude);
const unformatted = [];
@ -21,7 +21,7 @@ export const checkFormatCadl = checkFormatTypeSpec;
*/
export async function checkFormatTypeSpec(
code: string,
prettierConfig?: Options
prettierConfig?: Options,
): Promise<boolean> {
return check(code, {
...prettierConfig,
@ -13,7 +13,7 @@ export interface DiscriminatedUnion {
export function getDiscriminatedUnion(
type: Model | Union,
discriminator: Discriminator
discriminator: Discriminator,
): [DiscriminatedUnion, readonly Diagnostic[]] {
switch (type.kind) {
case "Model":
@ -39,7 +39,7 @@ export function validateInheritanceDiscriminatedUnions(program: Program) {
function getDiscriminatedUnionForUnion(
type: Union,
discriminator: Discriminator
discriminator: Discriminator,
): [DiscriminatedUnion, readonly Diagnostic[]] {
const variants = new Map<string, Model>();
const diagnostics: Diagnostic[] = [];
@ -52,7 +52,7 @@ function getDiscriminatedUnionForUnion(
code: "invalid-discriminated-union-variant",
format: { name: variant.name.toString() },
target: variant,
})
}),
);
continue;
}
@ -65,7 +65,7 @@ function getDiscriminatedUnionForUnion(
messageId: "noDiscriminant",
format: { name: variant.name.toString(), discriminant: discriminator.propertyName },
target: variant,
})
}),
);
continue;
}
@ -81,7 +81,7 @@ function getDiscriminatedUnionForUnion(
messageId: "wrongDiscriminantType",
format: { name: variant.name.toString(), discriminant: discriminator.propertyName },
target: variant.type,
})
}),
);
}
}
@ -96,7 +96,7 @@ function getDiscriminatedUnionForUnion(
function getDiscriminatedUnionForModel(
type: Model,
discriminator: Discriminator
discriminator: Discriminator,
): [DiscriminatedUnion, readonly Diagnostic[]] {
const variants = new Map<string, Model>();
const diagnostics: Diagnostic[] = [];
@ -115,7 +115,7 @@ function getDiscriminatedUnionForModel(
code: "missing-discriminator-property",
format: { discriminator: discriminator.propertyName },
target: derivedModel,
})
}),
);
} else {
checkForVariantsIn(derivedModel);
@ -141,7 +141,7 @@ function getDiscriminatedUnionForModel(
function reportDuplicateDiscriminatorValues(
duplicates: DuplicateTracker<string, Model>,
diagnostics: Diagnostic[]
diagnostics: Diagnostic[],
) {
for (const [duplicateKey, models] of duplicates.entries()) {
for (const model of models) {
@ -151,7 +151,7 @@ function reportDuplicateDiscriminatorValues(
messageId: "duplicate",
format: { discriminator: duplicateKey },
target: model,
})
}),
);
}
}
@ -160,7 +160,7 @@ function reportDuplicateDiscriminatorValues(
function getDiscriminatorProperty(
model: Model,
discriminator: Discriminator,
diagnostics: Diagnostic[]
diagnostics: Diagnostic[],
) {
const prop = model.properties.get(discriminator.propertyName);
if (prop && prop.optional) {
@ -169,7 +169,7 @@ function getDiscriminatorProperty(
code: "invalid-discriminator-value",
messageId: "required",
target: prop,
})
}),
);
}
return prop;
@ -178,7 +178,7 @@ function getDiscriminatorProperty(
function getDiscriminatorValues(
model: Model,
discriminator: Discriminator,
diagnostics: Diagnostic[]
diagnostics: Diagnostic[],
): string[] | undefined {
const prop = getDiscriminatorProperty(model, discriminator, diagnostics);
if (!prop) return undefined;
@ -190,7 +190,7 @@ function getDiscriminatorValues(
code: "invalid-discriminator-value",
format: { kind: prop.type.kind },
target: prop,
})
}),
);
}
return keys;
@ -16,7 +16,7 @@ export interface ListOperationOptions {
*/
export function listOperationsIn(
container: Namespace | Interface,
options: ListOperationOptions = {}
options: ListOperationOptions = {},
): Operation[] {
const operations: Operation[] = [];
@ -8,7 +8,7 @@ const VariableInterpolationRegex = /{([a-zA-Z-_.]+)}(\/|\.?)/g;
*/
export function interpolatePath(
pathTemplate: string,
predefinedVariables: Record<string, string | undefined>
predefinedVariables: Record<string, string | undefined>,
): string {
return pathTemplate.replace(VariableInterpolationRegex, (match, expression, suffix) => {
const isPathSegment = suffix === "/" || suffix === ".";
@ -22,7 +22,7 @@ export function interpolatePath(
function resolveExpression(
predefinedVariables: Record<string, string | undefined>,
expression: string
expression: string,
): string | undefined {
const segments = expression.split(".");
let resolved: any = predefinedVariables;
@ -6,7 +6,7 @@ import type { Diagnostic, StringTemplate } from "../types.js";
* @deprecated use `{@link StringTemplate["stringValue"]} property on {@link StringTemplate} instead.
*/
export function stringTemplateToString(
stringTemplate: StringTemplate
stringTemplate: StringTemplate,
): [string, readonly Diagnostic[]] {
if (stringTemplate.stringValue !== undefined) {
return [stringTemplate.stringValue, []];
@ -16,7 +16,7 @@ export function stringTemplateToString(
}
export function isStringTemplateSerializable(
stringTemplate: StringTemplate
stringTemplate: StringTemplate,
): [boolean, readonly Diagnostic[]] {
if (stringTemplate.stringValue !== undefined) {
return [true, []];
@ -29,7 +29,7 @@ export function isStringTemplateSerializable(
* get a list of diagnostic explaining why this string template cannot be converted to a string.
*/
export function explainStringTemplateNotSerializable(
stringTemplate: StringTemplate
stringTemplate: StringTemplate,
): readonly Diagnostic[] {
const diagnostics = createDiagnosticCollector();
for (const span of stringTemplate.spans) {
@ -52,7 +52,7 @@ export function explainStringTemplateNotSerializable(
createDiagnostic({
code: "non-literal-string-template",
target: span.node,
})
}),
);
}
}
@ -149,7 +149,7 @@ function getEnumName(e: Enum, options: TypeNameOptions | undefined): string {
function getScalarName(scalar: Scalar, options: TypeNameOptions | undefined): string {
return `${getNamespacePrefix(scalar.namespace, options)}${getIdentifierName(
scalar.name,
options
options,
)}`;
}
@ -178,7 +178,7 @@ function getModelName(model: Model, options: TypeNameOptions | undefined) {
} else if ((model.node as ModelStatementNode)?.templateParameters?.length > 0) {
// template
const params = (model.node as ModelStatementNode).templateParameters.map((t) =>
getIdentifierName(t.id.sv, options)
getIdentifierName(t.id.sv, options),
);
return `${modelName}<${params.join(", ")}>`;
} else {
@ -199,7 +199,7 @@ function getUnionName(type: Union, options: TypeNameOptions | undefined): string
* Check if the given namespace is the standard library `TypeSpec` namespace.
*/
function isTypeSpecNamespace(
namespace: Namespace
namespace: Namespace,
): namespace is Namespace & { name: "TypeSpec"; namespace: Namespace } {
return namespace.name === "TypeSpec" && namespace.namespace?.name === "";
}
@ -61,7 +61,7 @@ function addUsagesInContainer(type: OperationContainer, usages: Map<TrackableTyp
function trackUsage(
usages: Map<TrackableType, UsageFlags>,
type: TrackableType,
usage: UsageFlags
usage: UsageFlags,
) {
const existingFlag = usages.get(type) ?? UsageFlags.None;
usages.set(type, existingFlag | usage);
@ -87,17 +87,17 @@ function addUsagesInInterface(Interface: Interface, usages: Map<TrackableType, U
function addUsagesInOperation(operation: Operation, usages: Map<TrackableType, UsageFlags>): void {
navigateReferencedTypes(operation.parameters, (type) =>
trackUsage(usages, type, UsageFlags.Input)
trackUsage(usages, type, UsageFlags.Input),
);
navigateReferencedTypes(operation.returnType, (type) =>
trackUsage(usages, type, UsageFlags.Output)
trackUsage(usages, type, UsageFlags.Output),
);
}
function navigateReferencedTypes(
type: Type,
callback: (type: TrackableType) => void,
visited: Set<Type> = new Set()
visited: Set<Type> = new Set(),
) {
if (visited.has(type)) {
return;
@ -130,7 +130,7 @@ function navigateReferencedTypes(
function navigateIterable<T extends Type>(
map: Map<string | symbol, T> | T[],
callback: (type: TrackableType) => void,
visited: Set<Type> = new Set()
visited: Set<Type> = new Set(),
) {
for (const type of map.values()) {
navigateReferencedTypes(type, callback, visited);
@ -11,7 +11,7 @@ interface SpawnError {
export async function installTypeSpecDependencies(
host: CliCompilerHost,
directory: string
directory: string,
): Promise<void> {
const child = spawn("npm", ["install"], {
shell: process.platform === "win32",
@ -24,7 +24,7 @@ export async function installTypeSpecDependencies(
child.on("error", (error: SpawnError) => {
if (error.code === "ENOENT") {
host.logger.error(
"Cannot find `npm` executable. Make sure to have npm installed in your path."
"Cannot find `npm` executable. Make sure to have npm installed in your path.",
);
} else {
host.logger.error(error.toString());
@ -195,7 +195,7 @@ function getDocKey(target: DocTarget): symbol {
export function getDocDataInternal(
program: Program,
target: Type,
key: DocTarget
key: DocTarget,
): DocData | undefined {
return program.stateMap(getDocKey(key)).get(target);
}
@ -20,7 +20,7 @@ import type {
*/
export function legacyMarshallTypeForJS(
checker: Checker,
value: Value
value: Value,
): Type | Value | Record<string, unknown> | unknown[] | string | number | boolean {
switch (value.valueKind) {
case "BooleanValue":
@ -43,7 +43,7 @@ export function legacyMarshallTypeForJS(
export function marshallTypeForJS<T extends Value>(
value: T,
valueConstraint: Type | undefined
valueConstraint: Type | undefined,
): MarshalledValue<T> {
switch (value.valueKind) {
case "BooleanValue":
@ -82,7 +82,7 @@ function numericValueToJs(type: NumericValue, valueConstraint: Type | undefined)
const asNumber = type.value.asNumber();
compilerAssert(
asNumber !== null,
`Numeric value '${type.value.toString()}' is not a able to convert to a number without loosing precision.`
`Numeric value '${type.value.toString()}' is not a able to convert to a number without loosing precision.`,
);
return asNumber;
}
@ -34,7 +34,7 @@ export const createCadlLibrary = createTypeSpecLibrary;
function createStateKeys<T extends string>(
libName: string,
state: Record<T, StateDef> | undefined
state: Record<T, StateDef> | undefined,
): Record<T, symbol> {
const result: Record<string, symbol> = {};
@ -113,7 +113,7 @@ export function defineLinter(def: LinterDefinition): LinterDefinition {
/** Create a new linter rule. */
export function createLinterRule<const N extends string, const T extends DiagnosticMessages>(
definition: LinterRuleDefinition<N, T>
definition: LinterRuleDefinition<N, T>,
) {
compilerAssert(!definition.name.includes("/"), "Rule name cannot contain a '/'.");
return definition;
@ -28,7 +28,7 @@ export interface Linter {
*/
export function resolveLinterDefinition(
libName: string,
linter: LinterDefinition
linter: LinterDefinition,
): LinterResolvedDefinition {
const rules: LinterRule<string, any>[] = linter.rules.map((rule) => {
return { ...rule, id: `${libName}/${rule.name}` };
@ -53,7 +53,7 @@ export function resolveLinterDefinition(
export function createLinter(
program: Program,
loadLibrary: (name: string) => Promise<LibraryInstance | undefined>
loadLibrary: (name: string) => Promise<LibraryInstance | undefined>,
): Linter {
const tracer = program.tracer.sub("linter");
@ -84,7 +84,7 @@ export function createLinter(
code: "unknown-rule-set",
format: { libraryName: ref.libraryName, ruleSetName: ref.name },
target: NoTarget,
})
}),
);
}
}
@ -110,7 +110,7 @@ export function createLinter(
code: "unknown-rule",
format: { libraryName: ref.libraryName, ruleName: ref.name },
target: NoTarget,
})
}),
);
}
}
@ -125,7 +125,7 @@ export function createLinter(
code: "rule-enabled-disabled",
format: { ruleName },
target: NoTarget,
})
}),
);
}
enabledRules.delete(ruleName);
@ -133,7 +133,7 @@ export function createLinter(
}
tracer.trace(
"extend-rule-set.end",
"Rules enabled: \n" + [...enabledRules.keys()].map((x) => ` - ${x}`).join("\n")
"Rules enabled: \n" + [...enabledRules.keys()].map((x) => ` - ${x}`).join("\n"),
);
return diagnostics.diagnostics;
@ -145,7 +145,7 @@ export function createLinter(
tracer.trace(
"lint",
`Running linter with following rules:\n` +
[...enabledRules.keys()].map((x) => ` - ${x}`).join("\n")
[...enabledRules.keys()].map((x) => ` - ${x}`).join("\n"),
);
for (const rule of enabledRules.values()) {
@ -175,7 +175,7 @@ export function createLinter(
for (const rule of linter.rules) {
tracer.trace(
"register-library.rule",
`Registering rule "${rule.id}" for library "${name}".`
`Registering rule "${rule.id}" for library "${name}".`,
);
if (ruleMap.has(rule.id)) {
compilerAssert(false, `Unexpected duplicate linter rule: "${rule.id}"`);
@ -190,7 +190,7 @@ export function createLinter(
}
function parseRuleReference(
ref: RuleRef
ref: RuleRef,
): [{ libraryName: string; name: string } | undefined, readonly Diagnostic[]] {
const segments = ref.split("/");
const name = segments.pop();
@ -208,7 +208,7 @@ export function createLinter(
export function createLinterRuleContext<N extends string, DM extends DiagnosticMessages>(
program: Program,
rule: LinterRule<N, DM>,
diagnosticCollector: DiagnosticCollector
diagnosticCollector: DiagnosticCollector,
): LinterRuleContext<DM> {
return {
program,
@ -216,7 +216,7 @@ export function createLinterRuleContext<N extends string, DM extends DiagnosticM
};
function createDiagnostic<M extends keyof DM>(
diag: LinterRuleDiagnosticReport<DM, M>
diag: LinterRuleDiagnosticReport<DM, M>,
): Diagnostic {
const message = rule.messages[diag.messageId ?? "default"];
if (!message) {
@ -225,7 +225,7 @@ export function createLinterRuleContext<N extends string, DM extends DiagnosticM
.join("\n");
const messageId = String(diag.messageId);
throw new Error(
`Unexpected message id '${messageId}' for rule '${rule.name}'. Defined messages:\n${messageString}`
`Unexpected message id '${messageId}' for rule '${rule.name}'. Defined messages:\n${messageString}`,
);
}
@ -58,7 +58,7 @@ type ResolveModuleErrorCode = "MODULE_NOT_FOUND" | "INVALID_MAIN";
export class ResolveModuleError extends Error {
public constructor(
public code: ResolveModuleErrorCode,
message: string
message: string,
) {
super(message);
}
@ -102,7 +102,7 @@ export interface ResolvedModule {
export async function resolveModule(
host: ResolveModuleHost,
name: string,
options: ResolveModuleOptions
options: ResolveModuleOptions,
): Promise<ModuleResolutionResult> {
const realpath = async (x: string) => resolvePath(await host.realpath(x));
@ -127,7 +127,7 @@ export async function resolveModule(
throw new ResolveModuleError(
"MODULE_NOT_FOUND",
`Cannot find module '${name}' from '${baseDir}'`
`Cannot find module '${name}' from '${baseDir}'`,
);
/**
@ -146,7 +146,7 @@ export async function resolveModule(
function getPackageCandidates(
name: string,
baseDir: string
baseDir: string,
): Array<{ path: string; type: "node_modules" | "self" }> {
const dirs = listAllParentDirs(baseDir);
return dirs.flatMap((x) => [
@ -157,7 +157,7 @@ export async function resolveModule(
async function findAsNodeModule(
name: string,
baseDir: string
baseDir: string,
): Promise<ResolvedModule | undefined> {
const dirs = getPackageCandidates(name, baseDir);
for (const { type, path } of dirs) {
@ -184,11 +184,11 @@ export async function resolveModule(
async function loadAsDirectory(directory: string): Promise<ModuleResolutionResult | undefined>;
async function loadAsDirectory(
directory: string,
mustBePackage: true
mustBePackage: true,
): Promise<ResolvedModule | undefined>;
async function loadAsDirectory(
directory: string,
mustBePackage?: boolean
mustBePackage?: boolean,
): Promise<ModuleResolutionResult | undefined> {
const pkgFile = resolvePath(directory, "package.json");
if (await isFile(host, pkgFile)) {
@ -210,7 +210,7 @@ export async function resolveModule(
async function loadPackage(
directory: string,
pkg: NodePackage
pkg: NodePackage,
): Promise<ResolvedModule | undefined> {
const mainFile = options.resolveMain ? options.resolveMain(pkg) : pkg.main;
if (typeof mainFile !== "string") {
@ -223,7 +223,7 @@ export async function resolveModule(
loaded = (await loadAsFile(mainFullPath)) ?? (await loadAsDirectory(mainFullPath));
} catch (e) {
throw new Error(
`Cannot find module '${mainFullPath}'. Please verify that the package.json has a valid "main" entry`
`Cannot find module '${mainFullPath}'. Please verify that the package.json has a valid "main" entry`,
);
}
@ -240,7 +240,7 @@ export async function resolveModule(
} else {
throw new ResolveModuleError(
"INVALID_MAIN",
`Package ${pkg.name} main file "${mainFile}" is invalid.`
`Package ${pkg.name} main file "${mainFile}" is invalid.`,
);
}
}
@ -348,7 +348,7 @@ export function parse(code: string | SourceFile, options: ParseOptions = {}): Ty
|
|||
}
|
||||
|
||||
export function parseStandaloneTypeReference(
|
||||
code: string | SourceFile
|
||||
code: string | SourceFile,
|
||||
): [TypeReferenceNode, readonly Diagnostic[]] {
|
||||
const parser = createParser(code);
|
||||
const node = parser.parseStandaloneReferenceExpression();
|
||||
|
@ -665,7 +665,7 @@ function createParser(code: string | SourceFile, options: ParseOptions = {}): Pa
|
|||
pos: number,
|
||||
decorators: DecoratorExpressionNode[],
|
||||
docs: DocNode[],
|
||||
directives: DirectiveExpressionNode[]
|
||||
directives: DirectiveExpressionNode[],
|
||||
): NamespaceStatementNode {
|
||||
parseExpected(Token.NamespaceKeyword);
|
||||
let currentName = parseIdentifierOrMemberExpression();
|
||||
|
@ -712,7 +712,7 @@ function createParser(code: string | SourceFile, options: ParseOptions = {}): Pa
|
|||
|
||||
function parseInterfaceStatement(
|
||||
pos: number,
|
||||
decorators: DecoratorExpressionNode[]
|
||||
decorators: DecoratorExpressionNode[],
|
||||
): InterfaceStatementNode {
|
||||
parseExpected(Token.InterfaceKeyword);
|
||||
const id = parseIdentifier();
|
||||
|
@ -730,7 +730,7 @@ function createParser(code: string | SourceFile, options: ParseOptions = {}): Pa
|
|||
|
||||
const { items: operations, range: bodyRange } = parseList(
|
||||
ListKind.InterfaceMembers,
|
||||
(pos, decorators) => parseOperationStatement(pos, decorators, true)
|
||||
(pos, decorators) => parseOperationStatement(pos, decorators, true),
|
||||
);
|
||||
|
||||
return {
|
||||
|
@ -765,7 +765,7 @@ function createParser(code: string | SourceFile, options: ParseOptions = {}): Pa
|
|||
|
||||
function parseUnionStatement(
|
||||
pos: number,
|
||||
decorators: DecoratorExpressionNode[]
|
||||
decorators: DecoratorExpressionNode[],
|
||||
): UnionStatementNode {
|
||||
parseExpected(Token.UnionKeyword);
|
||||
const id = parseIdentifier();
|
||||
|
@ -846,7 +846,7 @@ function createParser(code: string | SourceFile, options: ParseOptions = {}): Pa
|
|||
function parseOperationStatement(
|
||||
pos: number,
|
||||
decorators: DecoratorExpressionNode[],
|
||||
inInterface?: boolean
|
||||
inInterface?: boolean,
|
||||
): OperationStatementNode {
|
||||
if (inInterface) {
|
||||
parseOptional(Token.OpKeyword);
|
||||
|
@ -906,7 +906,7 @@ function createParser(code: string | SourceFile, options: ParseOptions = {}): Pa
|
|||
const pos = tokenPos();
|
||||
const { items: properties, range: bodyRange } = parseList(
|
||||
ListKind.OperationParameters,
|
||||
parseModelPropertyOrSpread
|
||||
parseModelPropertyOrSpread,
|
||||
);
|
||||
const parameters: ModelExpressionNode = {
|
||||
kind: SyntaxKind.ModelExpression,
|
||||
|
@ -919,7 +919,7 @@ function createParser(code: string | SourceFile, options: ParseOptions = {}): Pa
|
|||
|
||||
function parseModelStatement(
|
||||
pos: number,
|
||||
decorators: DecoratorExpressionNode[]
|
||||
decorators: DecoratorExpressionNode[],
|
||||
): ModelStatementNode {
|
||||
parseExpected(Token.ModelKeyword);
|
||||
const id = parseIdentifier();
|
||||
|
@ -1035,7 +1035,7 @@ function createParser(code: string | SourceFile, options: ParseOptions = {}): Pa
|
|||
|
||||
function parseModelSpreadProperty(
|
||||
pos: number,
|
||||
decorators: DecoratorExpressionNode[]
|
||||
decorators: DecoratorExpressionNode[],
|
||||
): ModelSpreadPropertyNode {
|
||||
parseExpected(Token.Ellipsis);
|
||||
|
||||
|
@ -1053,7 +1053,7 @@ function createParser(code: string | SourceFile, options: ParseOptions = {}): Pa
|
|||
|
||||
function parseModelProperty(
|
||||
pos: number,
|
||||
decorators: DecoratorExpressionNode[]
|
||||
decorators: DecoratorExpressionNode[],
|
||||
): ModelPropertyNode {
|
||||
const id = parseIdentifier({
|
||||
message: "property",
|
||||
|
@ -1079,7 +1079,7 @@ function createParser(code: string | SourceFile, options: ParseOptions = {}): Pa
|
|||
|
||||
function parseObjectLiteralPropertyOrSpread(
|
||||
pos: number,
|
||||
decorators: DecoratorExpressionNode[]
|
||||
decorators: DecoratorExpressionNode[],
|
||||
): ObjectLiteralPropertyNode | ObjectLiteralSpreadPropertyNode {
|
||||
reportInvalidDecorators(decorators, "object literal property");
|
||||
|
||||
|
@ -1119,7 +1119,7 @@ function createParser(code: string | SourceFile, options: ParseOptions = {}): Pa
|
|||
|
||||
function parseScalarStatement(
|
||||
pos: number,
|
||||
decorators: DecoratorExpressionNode[]
|
||||
decorators: DecoratorExpressionNode[],
|
||||
): ScalarStatementNode {
|
||||
parseExpected(Token.ScalarKeyword);
|
||||
const id = parseIdentifier();
|
||||
|
@ -1160,7 +1160,7 @@ function createParser(code: string | SourceFile, options: ParseOptions = {}): Pa
|
|||
|
||||
function parseScalarMember(
|
||||
pos: number,
|
||||
decorators: DecoratorExpressionNode[]
|
||||
decorators: DecoratorExpressionNode[],
|
||||
): ScalarConstructorNode {
|
||||
reportInvalidDecorators(decorators, "scalar member");
|
||||
|
||||
|
@ -1177,7 +1177,7 @@ function createParser(code: string | SourceFile, options: ParseOptions = {}): Pa
|
|||
|
||||
function parseEnumStatement(
|
||||
pos: number,
|
||||
decorators: DecoratorExpressionNode[]
|
||||
decorators: DecoratorExpressionNode[],
|
||||
): EnumStatementNode {
|
||||
parseExpected(Token.EnumKeyword);
|
||||
const id = parseIdentifier();
|
||||
|
@ -1199,7 +1199,7 @@ function createParser(code: string | SourceFile, options: ParseOptions = {}): Pa
|
|||
|
||||
function parseEnumSpreadMember(
|
||||
pos: number,
|
||||
decorators: DecoratorExpressionNode[]
|
||||
decorators: DecoratorExpressionNode[],
|
||||
): EnumSpreadMemberNode {
|
||||
parseExpected(Token.Ellipsis);
|
||||
|
||||
|
@ -1411,7 +1411,7 @@ function createParser(code: string | SourceFile, options: ParseOptions = {}): Pa
|
|||
}
|
||||
|
||||
function parseReferenceExpression(
|
||||
message?: keyof CompilerDiagnostics["token-expected"]
|
||||
message?: keyof CompilerDiagnostics["token-expected"],
|
||||
): TypeReferenceNode {
|
||||
const pos = tokenPos();
|
||||
const target = parseIdentifierOrMemberExpression(message);
|
||||
|
@ -1419,7 +1419,7 @@ function createParser(code: string | SourceFile, options: ParseOptions = {}): Pa
|
|||
}
|
||||
|
||||
function parseCallOrReferenceExpression(
|
||||
message?: keyof CompilerDiagnostics["token-expected"]
|
||||
message?: keyof CompilerDiagnostics["token-expected"],
|
||||
): TypeReferenceNode | CallExpressionNode {
|
||||
const pos = tokenPos();
|
||||
const target = parseIdentifierOrMemberExpression(message);
|
||||
|
@ -1438,7 +1438,7 @@ function createParser(code: string | SourceFile, options: ParseOptions = {}): Pa
|
|||
|
||||
function parseReferenceExpressionInternal(
|
||||
target: IdentifierNode | MemberExpressionNode,
|
||||
pos: number
|
||||
pos: number,
|
||||
): TypeReferenceNode {
|
||||
const { items: args } = parseOptionalList(ListKind.TemplateArguments, parseTemplateArgument);
|
||||
|
||||
|
@ -1631,7 +1631,7 @@ function createParser(code: string | SourceFile, options: ParseOptions = {}): Pa
|
|||
|
||||
function parseIdentifierOrMemberExpression(
|
||||
message?: keyof CompilerDiagnostics["token-expected"],
|
||||
recoverFromKeyword = true
|
||||
recoverFromKeyword = true,
|
||||
): IdentifierNode | MemberExpressionNode {
|
||||
const pos = tokenPos();
|
||||
let base: IdentifierNode | MemberExpressionNode = parseIdentifier({
|
||||
|
@ -1774,7 +1774,7 @@ function createParser(code: string | SourceFile, options: ParseOptions = {}): Pa
|
|||
const pos = tokenPos();
|
||||
const { items: properties, range: bodyRange } = parseList(
|
||||
ListKind.ModelProperties,
|
||||
parseModelPropertyOrSpread
|
||||
parseModelPropertyOrSpread,
|
||||
);
|
||||
return {
|
||||
kind: SyntaxKind.ModelExpression,
|
||||
|
@ -1788,7 +1788,7 @@ function createParser(code: string | SourceFile, options: ParseOptions = {}): Pa
|
|||
const pos = tokenPos();
|
||||
const { items: properties, range: bodyRange } = parseList(
|
||||
ListKind.ObjectLiteralProperties,
|
||||
parseObjectLiteralPropertyOrSpread
|
||||
parseObjectLiteralPropertyOrSpread,
|
||||
);
|
||||
return {
|
||||
kind: SyntaxKind.ObjectLiteral,
|
||||
|
@ -1828,7 +1828,7 @@ function createParser(code: string | SourceFile, options: ParseOptions = {}): Pa
|
|||
if (head.tokenFlags & TokenFlags.TripleQuoted) {
|
||||
const [indentationsStart, indentationEnd] = scanner.findTripleQuotedStringIndent(
|
||||
last.literal.pos,
|
||||
last.literal.end
|
||||
last.literal.end,
|
||||
);
|
||||
mutate(head).value = scanner.unindentAndUnescapeTripleQuotedString(
|
||||
head.pos,
|
||||
|
@ -1836,7 +1836,7 @@ function createParser(code: string | SourceFile, options: ParseOptions = {}): Pa
|
|||
indentationsStart,
|
||||
indentationEnd,
|
||||
Token.StringTemplateHead,
|
||||
head.tokenFlags
|
||||
head.tokenFlags,
|
||||
);
|
||||
for (const span of spans) {
|
||||
mutate(span.literal).value = scanner.unindentAndUnescapeTripleQuotedString(
|
||||
|
@ -1845,7 +1845,7 @@ function createParser(code: string | SourceFile, options: ParseOptions = {}): Pa
|
|||
indentationsStart,
|
||||
indentationEnd,
|
||||
span === last ? Token.StringTemplateTail : Token.StringTemplateMiddle,
|
||||
head.tokenFlags
|
||||
head.tokenFlags,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
@ -1894,7 +1894,7 @@ function createParser(code: string | SourceFile, options: ParseOptions = {}): Pa
|
|||
};
|
||||
}
|
||||
function parseLiteralOfTemplateSpan(
|
||||
headTokenFlags: TokenFlags
|
||||
headTokenFlags: TokenFlags,
|
||||
): StringTemplateMiddleNode | StringTemplateTailNode {
|
||||
const pos = tokenPos();
|
||||
const flags = tokenFlags();
|
||||
|
@ -1986,7 +1986,7 @@ function createParser(code: string | SourceFile, options: ParseOptions = {}): Pa
|
|||
}
|
||||
|
||||
function parseDeclaration(
|
||||
pos: number
|
||||
pos: number,
|
||||
): DecoratorDeclarationStatementNode | FunctionDeclarationStatementNode | InvalidStatementNode {
|
||||
const modifiers = parseModifiers();
|
||||
switch (token()) {
|
||||
|
@ -2018,7 +2018,7 @@ function createParser(code: string | SourceFile, options: ParseOptions = {}): Pa
|
|||
|
||||
function parseDecoratorDeclarationStatement(
|
||||
pos: number,
|
||||
modifiers: Modifier[]
|
||||
modifiers: Modifier[],
|
||||
): DecoratorDeclarationStatementNode {
|
||||
const modifierFlags = modifiersToFlags(modifiers);
|
||||
parseExpected(Token.DecKeyword);
|
||||
|
@ -2053,7 +2053,7 @@ function createParser(code: string | SourceFile, options: ParseOptions = {}): Pa
|
|||
|
||||
function parseFunctionDeclarationStatement(
|
||||
pos: number,
|
||||
modifiers: Modifier[]
|
||||
modifiers: Modifier[],
|
||||
): FunctionDeclarationStatementNode {
|
||||
const modifierFlags = modifiersToFlags(modifiers);
|
||||
parseExpected(Token.FnKeyword);
|
||||
|
@ -2078,7 +2078,7 @@ function createParser(code: string | SourceFile, options: ParseOptions = {}): Pa
|
|||
function parseFunctionParameters(): ListDetail<FunctionParameterNode> {
|
||||
const parameters = parseList<typeof ListKind.FunctionParameters, FunctionParameterNode>(
|
||||
ListKind.FunctionParameters,
|
||||
parseFunctionParameter
|
||||
parseFunctionParameter,
|
||||
);
|
||||
|
||||
let foundOptional = false;
|
||||
|
@ -2474,7 +2474,7 @@ function createParser(code: string | SourceFile, options: ParseOptions = {}): Pa
|
|||
|
||||
function parseProjectionMemberExpressionRest(
|
||||
expr: ProjectionExpression,
|
||||
pos: number
|
||||
pos: number,
|
||||
): ProjectionExpression {
|
||||
while (token() !== Token.EndOfFile) {
|
||||
if (parseOptional(Token.Dot)) {
|
||||
|
@ -2544,7 +2544,7 @@ function createParser(code: string | SourceFile, options: ParseOptions = {}): Pa
|
|||
pos: expr.pos,
|
||||
end: expr.end,
|
||||
flags: NodeFlags.None,
|
||||
})
|
||||
}),
|
||||
);
|
||||
} else {
|
||||
error({ code: "token-expected", messageId: "identifier", target: expr });
|
||||
|
@ -2575,7 +2575,7 @@ function createParser(code: string | SourceFile, options: ParseOptions = {}): Pa
|
|||
|
||||
function parseProjectionLambdaExpressionRest(
|
||||
pos: number,
|
||||
parameters: ProjectionLambdaParameterDeclarationNode[]
|
||||
parameters: ProjectionLambdaParameterDeclarationNode[],
|
||||
): ProjectionLambdaExpressionNode {
|
||||
parseExpected(Token.EqualsGreaterThan);
|
||||
const body = parseProjectionBlockExpression();
|
||||
|
@ -2591,7 +2591,7 @@ function createParser(code: string | SourceFile, options: ParseOptions = {}): Pa
|
|||
const pos = tokenPos();
|
||||
const { items: properties } = parseList(
|
||||
ListKind.ModelProperties,
|
||||
parseProjectionModelPropertyOrSpread
|
||||
parseProjectionModelPropertyOrSpread,
|
||||
);
|
||||
return {
|
||||
kind: SyntaxKind.ProjectionModelExpression,
|
||||
|
@ -2602,7 +2602,7 @@ function createParser(code: string | SourceFile, options: ParseOptions = {}): Pa
|
|||
|
||||
function parseProjectionModelPropertyOrSpread(
|
||||
pos: number,
|
||||
decorators: DecoratorExpressionNode[]
|
||||
decorators: DecoratorExpressionNode[],
|
||||
) {
|
||||
return token() === Token.Ellipsis
|
||||
? parseProjectionModelSpreadProperty(pos, decorators)
|
||||
|
@ -2611,7 +2611,7 @@ function createParser(code: string | SourceFile, options: ParseOptions = {}): Pa
|
|||
|
||||
function parseProjectionModelSpreadProperty(
|
||||
pos: number,
|
||||
decorators: DecoratorExpressionNode[]
|
||||
decorators: DecoratorExpressionNode[],
|
||||
): ProjectionModelSpreadPropertyNode {
|
||||
parseExpected(Token.Ellipsis);
|
||||
|
||||
|
@ -2628,7 +2628,7 @@ function createParser(code: string | SourceFile, options: ParseOptions = {}): Pa
|
|||
|
||||
function parseProjectionModelProperty(
|
||||
pos: number,
|
||||
decorators: DecoratorExpressionNode[]
|
||||
decorators: DecoratorExpressionNode[],
|
||||
): ProjectionModelPropertyNode | ProjectionModelSpreadPropertyNode {
|
||||
const id = parseIdentifier({ message: "property", allowStringLiteral: true });
|
||||
|
||||
|
@ -2713,7 +2713,7 @@ function createParser(code: string | SourceFile, options: ParseOptions = {}): Pa
|
|||
Token.InterfaceKeyword,
|
||||
Token.UnionKeyword,
|
||||
Token.EnumKeyword,
|
||||
Token.ScalarKeyword
|
||||
Token.ScalarKeyword,
|
||||
);
|
||||
|
||||
switch (selectorTok) {
|
||||
|
@ -2961,7 +2961,7 @@ function createParser(code: string | SourceFile, options: ParseOptions = {}): Pa
|
|||
pos: number,
|
||||
tagName: IdentifierNode,
|
||||
kind: ParamLikeTag["kind"],
|
||||
messageId: keyof CompilerDiagnostics["doc-invalid-identifier"]
|
||||
messageId: keyof CompilerDiagnostics["doc-invalid-identifier"],
|
||||
): ParamLikeTag {
|
||||
const { name, content } = parseDocParamLikeTagInternal(messageId);
|
||||
|
||||
|
@ -2987,7 +2987,7 @@ function createParser(code: string | SourceFile, options: ParseOptions = {}): Pa
|
|||
}
|
||||
|
||||
function parseDocParamLikeTagInternal(
|
||||
messageId: keyof CompilerDiagnostics["doc-invalid-identifier"]
|
||||
messageId: keyof CompilerDiagnostics["doc-invalid-identifier"],
|
||||
): { name: IdentifierNode; content: DocTextNode[] } {
|
||||
const name = parseDocIdentifier(messageId);
|
||||
parseOptionalHyphenDocParamLikeTag();
|
||||
|
@ -3015,7 +3015,7 @@ function createParser(code: string | SourceFile, options: ParseOptions = {}): Pa
|
|||
function parseDocSimpleTag(
|
||||
pos: number,
|
||||
tagName: IdentifierNode,
|
||||
kind: SimpleTag["kind"]
|
||||
kind: SimpleTag["kind"],
|
||||
): SimpleTag {
|
||||
const content = parseDocContent();
|
||||
return {
|
||||
|
@ -3027,7 +3027,7 @@ function createParser(code: string | SourceFile, options: ParseOptions = {}): Pa
|
|||
}
|
||||
|
||||
function parseDocIdentifier(
|
||||
messageId: keyof CompilerDiagnostics["doc-invalid-identifier"]
|
||||
messageId: keyof CompilerDiagnostics["doc-invalid-identifier"],
|
||||
): IdentifierNode {
|
||||
// We don't allow whitespace between @ and tag name, but allow
|
||||
// whitespace before all other identifiers.
|
||||
|
@ -3184,7 +3184,7 @@ function createParser(code: string | SourceFile, options: ParseOptions = {}): Pa
|
|||
*/
|
||||
function parseList<K extends ListKind, T extends Node>(
|
||||
kind: K,
|
||||
parseItem: ParseListItem<K, T>
|
||||
parseItem: ParseListItem<K, T>,
|
||||
): ListDetail<T> {
|
||||
const r: ListDetail<T> = createEmptyList<T>();
|
||||
if (kind.open !== Token.None) {
|
||||
|
@ -3308,7 +3308,7 @@ function createParser(code: string | SourceFile, options: ParseOptions = {}): Pa
|
|||
*/
|
||||
function parseOptionalList<K extends SurroundedListKind, T extends Node>(
|
||||
kind: K,
|
||||
parseItem: ParseListItem<K, T>
|
||||
parseItem: ParseListItem<K, T>,
|
||||
): ListDetail<T> {
|
||||
return token() === kind.open ? parseList(kind, parseItem) : createEmptyList<T>();
|
||||
}
|
||||
|
@ -3344,7 +3344,7 @@ function createParser(code: string | SourceFile, options: ParseOptions = {}): Pa
|
|||
|
||||
function parseInvalidStatement(
|
||||
pos: number,
|
||||
decorators: DecoratorExpressionNode[]
|
||||
decorators: DecoratorExpressionNode[],
|
||||
): InvalidStatementNode {
|
||||
// Error recovery: avoid an avalanche of errors when we get cornered into
|
||||
// parsing statements where none exist. Skip until we find a statement
|
||||
|
@ -3374,7 +3374,7 @@ function createParser(code: string | SourceFile, options: ParseOptions = {}): Pa
|
|||
report: DiagnosticReportWithoutTarget<CompilerDiagnostics, C, M> & {
|
||||
target?: Partial<TextRange> & { realPos?: number };
|
||||
printable?: boolean;
|
||||
}
|
||||
},
|
||||
) {
|
||||
parseErrorInNextFinishedNode = true;
|
||||
|
||||
|
@ -3405,7 +3405,7 @@ function createParser(code: string | SourceFile, options: ParseOptions = {}): Pa
|
|||
|
||||
assert(
|
||||
diagnostic.severity === "error",
|
||||
"This function is for reporting errors. Use warning() for warnings."
|
||||
"This function is for reporting errors. Use warning() for warnings.",
|
||||
);
|
||||
|
||||
parseDiagnostics.push(diagnostic);
|
||||
|
@ -3417,7 +3417,7 @@ function createParser(code: string | SourceFile, options: ParseOptions = {}): Pa
|
|||
>(
|
||||
report: DiagnosticReportWithoutTarget<CompilerDiagnostics, C, M> & {
|
||||
target?: Partial<TextRange>;
|
||||
}
|
||||
},
|
||||
) {
|
||||
const location = {
|
||||
file: scanner.file,
|
||||
|
@ -3432,7 +3432,7 @@ function createParser(code: string | SourceFile, options: ParseOptions = {}): Pa
|
|||
|
||||
assert(
|
||||
diagnostic.severity === "warning",
|
||||
"This function is for reporting warnings only. Use error() for errors."
|
||||
"This function is for reporting warnings only. Use error() for errors.",
|
||||
);
|
||||
|
||||
parseDiagnostics.push(diagnostic);
|
||||
|
@ -3538,7 +3538,7 @@ function createParser(code: string | SourceFile, options: ParseOptions = {}): Pa
|
|||
export type NodeCallback<T> = (c: Node) => T;
|
||||
|
||||
export function exprIsBareIdentifier(
|
||||
expr: Expression
|
||||
expr: Expression,
|
||||
): expr is TypeReferenceNode & { target: IdentifierNode; arguments: [] } {
|
||||
return (
|
||||
expr.kind === SyntaxKind.TypeReference &&
|
||||
|
@ -3848,7 +3848,7 @@ export function positionInRange(position: number, range: TextRange) {
|
|||
export function getNodeAtPositionDetail(
|
||||
script: TypeSpecScriptNode,
|
||||
position: number,
|
||||
filter: (node: Node, flag: "cur" | "pre" | "post") => boolean = () => true
|
||||
filter: (node: Node, flag: "cur" | "pre" | "post") => boolean = () => true,
|
||||
): PositionDetail {
|
||||
const cur = getNodeAtPosition(script, position, (n) => filter(n, "cur"));
|
||||
|
||||
|
@ -3908,17 +3908,17 @@ export function getNodeAtPositionDetail(
|
|||
export function getNodeAtPosition(
|
||||
script: TypeSpecScriptNode,
|
||||
position: number,
|
||||
filter?: (node: Node) => boolean
|
||||
filter?: (node: Node) => boolean,
|
||||
): Node | undefined;
|
||||
export function getNodeAtPosition<T extends Node>(
|
||||
script: TypeSpecScriptNode,
|
||||
position: number,
|
||||
filter: (node: Node) => node is T
|
||||
filter: (node: Node) => node is T,
|
||||
): T | undefined;
|
||||
export function getNodeAtPosition(
|
||||
script: TypeSpecScriptNode,
|
||||
position: number,
|
||||
filter = (node: Node) => true
|
||||
filter = (node: Node) => true,
|
||||
): Node | undefined {
|
||||
return visit(script);
|
||||
|
||||
|
@ -4003,7 +4003,7 @@ function isBlocklessNamespace(node: Node) {
|
|||
export function getFirstAncestor(
|
||||
node: Node,
|
||||
test: NodeCallback<boolean>,
|
||||
includeSelf: boolean = false
|
||||
includeSelf: boolean = false,
|
||||
): Node | undefined {
|
||||
if (includeSelf && test(node)) {
|
||||
return node;
|
||||
|
|
|
@ -108,7 +108,7 @@ function getEncodedRootLength(path: string): number {
|
|||
|
||||
const p1 = path.indexOf(
|
||||
ch0 === CharacterCodes.slash ? directorySeparator : altDirectorySeparator,
|
||||
2
|
||||
2,
|
||||
);
|
||||
if (p1 < 0) return path.length; // UNC: "//server" or "\\server"
|
||||
|
||||
|
@ -352,7 +352,7 @@ export function joinPaths(path: string, ...paths: (string | undefined)[]): strin
|
|||
*/
|
||||
export function resolvePath(path: string, ...paths: (string | undefined)[]): string {
|
||||
return normalizePath(
|
||||
paths.some((x) => x !== undefined) ? joinPaths(path, ...paths) : normalizeSlashes(path)
|
||||
paths.some((x) => x !== undefined) ? joinPaths(path, ...paths) : normalizeSlashes(path),
|
||||
);
|
||||
}
|
||||
|
||||
|
@ -404,7 +404,7 @@ function getPathWithoutRoot(pathComponents: readonly string[]) {
|
|||
|
||||
export function getNormalizedAbsolutePathWithoutRoot(
|
||||
fileName: string,
|
||||
currentDirectory: string | undefined
|
||||
currentDirectory: string | undefined,
|
||||
) {
|
||||
return getPathWithoutRoot(getNormalizedPathComponents(fileName, currentDirectory));
|
||||
}
|
||||
|
@ -565,7 +565,7 @@ function getPathComponentsRelativeTo(
|
|||
from: string,
|
||||
to: string,
|
||||
stringEqualityComparer: (a: string, b: string) => boolean,
|
||||
getCanonicalFileName: GetCanonicalFileName
|
||||
getCanonicalFileName: GetCanonicalFileName,
|
||||
) {
|
||||
const fromComponents = reducePathComponents(getPathComponents(from));
|
||||
const toComponents = reducePathComponents(getPathComponents(to));
|
||||
|
@ -600,12 +600,12 @@ export function getRelativePathFromDirectory(from: string, to: string, ignoreCas
|
|||
export function getRelativePathFromDirectory(
|
||||
fromDirectory: string,
|
||||
to: string,
|
||||
getCanonicalFileName: GetCanonicalFileName
|
||||
getCanonicalFileName: GetCanonicalFileName,
|
||||
): string;
|
||||
export function getRelativePathFromDirectory(
|
||||
fromDirectory: string,
|
||||
to: string,
|
||||
getCanonicalFileNameOrIgnoreCase: GetCanonicalFileName | boolean
|
||||
getCanonicalFileNameOrIgnoreCase: GetCanonicalFileName | boolean,
|
||||
) {
|
||||
if (getRootLength(fromDirectory) > 0 !== getRootLength(to) > 0) {
|
||||
throw new Error("Paths must either both be absolute or both be relative");
|
||||
|
@ -622,7 +622,7 @@ export function getRelativePathFromDirectory(
|
|||
fromDirectory,
|
||||
to,
|
||||
ignoreCase ? equateStringsCaseInsensitive : equateStringsCaseSensitive,
|
||||
getCanonicalFileName
|
||||
getCanonicalFileName,
|
||||
);
|
||||
return getPathFromPathComponents(pathComponents);
|
||||
}
|
||||
|
|
|
@ -64,7 +64,7 @@ export interface ProjectedProgram extends Program {
|
|||
export function projectProgram(
|
||||
program: Program,
|
||||
projections: ProjectionApplication[],
|
||||
startNode?: Type
|
||||
startNode?: Type,
|
||||
): ProjectedProgram {
|
||||
return createProjector(program, projections, startNode);
|
||||
}
|
||||
|
@ -85,7 +85,7 @@ export interface Program {
|
|||
loadTypeSpecScript(typespecScript: SourceFile): Promise<TypeSpecScriptNode>;
|
||||
onValidate(
|
||||
cb: (program: Program) => void | Promise<void>,
|
||||
LibraryMetadata: LibraryMetadata
|
||||
LibraryMetadata: LibraryMetadata,
|
||||
): void;
|
||||
getOption(key: string): string | undefined;
|
||||
stateSet(key: symbol): Set<Type>;
|
||||
|
@ -131,7 +131,7 @@ export async function compile(
|
|||
host: CompilerHost,
|
||||
mainFile: string,
|
||||
options: CompilerOptions = {},
|
||||
oldProgram?: Program // NOTE: deliberately separate from options to avoid memory leak by chaining all old programs together.
|
||||
oldProgram?: Program, // NOTE: deliberately separate from options to avoid memory leak by chaining all old programs together.
|
||||
): Promise<Program> {
|
||||
const validateCbs: Validator[] = [];
|
||||
const stateMaps = new Map<symbol, StateMap>();
|
||||
|
@ -271,7 +271,7 @@ export async function compile(
|
|||
const found = libraries.get(packageJson.name);
|
||||
if (found && found.path !== root && found.manifest.version !== packageJson.version) {
|
||||
let incompatibleIndex: TypeSpecLibraryReference[] | undefined = incompatibleLibraries.get(
|
||||
packageJson.name
|
||||
packageJson.name,
|
||||
);
|
||||
if (incompatibleIndex === undefined) {
|
||||
incompatibleIndex = [found];
|
||||
|
@ -293,7 +293,7 @@ export async function compile(
|
|||
.join("\n"),
|
||||
},
|
||||
target: NoTarget,
|
||||
})
|
||||
}),
|
||||
);
|
||||
}
|
||||
}
|
||||
|
@ -342,7 +342,7 @@ export async function compile(
|
|||
const locationContext: LocationContext = { type: "compiler" };
|
||||
return loader.importFile(
|
||||
resolvePath(host.getExecutionRoot(), "lib/intrinsics.tsp"),
|
||||
locationContext
|
||||
locationContext,
|
||||
);
|
||||
}
|
||||
|
||||
|
@ -386,7 +386,7 @@ export async function compile(
|
|||
async function loadEmitters(
|
||||
basedir: string,
|
||||
emitterNameOrPaths: string[],
|
||||
emitterOptions: Record<string, EmitterOptions>
|
||||
emitterOptions: Record<string, EmitterOptions>,
|
||||
) {
|
||||
for (const emitterNameOrPath of emitterNameOrPaths) {
|
||||
const emitter = await loadEmitter(basedir, emitterNameOrPath, emitterOptions);
|
||||
|
@ -398,7 +398,7 @@ export async function compile(
|
|||
|
||||
async function resolveEmitterModuleAndEntrypoint(
|
||||
basedir: string,
|
||||
emitterNameOrPath: string
|
||||
emitterNameOrPath: string,
|
||||
): Promise<
|
||||
[
|
||||
{ module: ModuleResolutionResult; entrypoint: JsSourceFileNode | undefined } | undefined,
@@ -410,7 +410,7 @@ export async function compile(
const [module, diagnostics] = await resolveJSLibrary(
emitterNameOrPath,
basedir,
locationContext
locationContext,
);
if (!module) {
return [undefined, diagnostics];
@@ -424,11 +424,11 @@ export async function compile(

async function loadLibrary(
basedir: string,
libraryNameOrPath: string
libraryNameOrPath: string,
): Promise<LibraryInstance | undefined> {
const [resolution, diagnostics] = await resolveEmitterModuleAndEntrypoint(
basedir,
libraryNameOrPath
libraryNameOrPath,
);

if (resolution === undefined) {
@@ -452,7 +452,7 @@ export async function compile(
async function loadEmitter(
basedir: string,
emitterNameOrPath: string,
emittersOptions: Record<string, EmitterOptions>
emittersOptions: Record<string, EmitterOptions>,
): Promise<EmitterRef | undefined> {
const library = await loadLibrary(basedir, emitterNameOrPath);

@@ -467,7 +467,7 @@ export async function compile(
code: "invalid-emitter",
format: { emitterPackage: emitterNameOrPath },
target: NoTarget,
})
}),
);
return undefined;
}
@@ -495,7 +495,7 @@ export async function compile(
path: ["options", emitterNameOrPath],
script: options.configFile.file,
}
: NoTarget
: NoTarget,
);
if (diagnostics && diagnostics.length > 0) {
program.reportDiagnostics(diagnostics);
@@ -515,7 +515,7 @@ export async function compile(
code: "invalid-emitter",
format: { emitterPackage: emitterNameOrPath },
target: NoTarget,
})
}),
);
return undefined;
}
@@ -523,7 +523,7 @@ export async function compile(

function computeLibraryMetadata(
module: ModuleResolutionResult,
libDefinition: TypeSpecLibrary<any> | undefined
libDefinition: TypeSpecLibrary<any> | undefined,
): LibraryMetadata {
if (module.type === "file") {
return {
@@ -590,7 +590,7 @@ export async function compile(
code: "on-validate-fail",
format: { error: error.stack },
target: NoTarget,
})
}),
);
} else {
throw new ExternalError({ kind: "validator", metadata: validator.metadata, error });
@@ -611,7 +611,7 @@ export async function compile(
code: "missing-import",
format: { requiredImport, emitterName },
target: NoTarget,
})
}),
);
}
}
@@ -624,7 +624,7 @@ export async function compile(
async function resolveJSLibrary(
specifier: string,
baseDir: string,
locationContext: LocationContext
locationContext: LocationContext,
): Promise<[ModuleResolutionResult | undefined, readonly Diagnostic[]]> {
try {
return [await resolveModule(getResolveModuleHost(), specifier, { baseDir }), []];
@@ -688,11 +688,11 @@ export async function compile(
},
},
"@typespec/compiler",
{ baseDir }
{ baseDir },
);
compilerAssert(
resolved.type === "module",
`Expected to have resolved "@typespec/compiler" to a node module.`
`Expected to have resolved "@typespec/compiler" to a node module.`,
);
actual = resolved;
} catch (err: any) {
@@ -711,7 +711,7 @@ export async function compile(
code: "compiler-version-mismatch",
format: { basedir: baseDir, betterTypeSpecServerPath, actual: actual.path, expected },
target: NoTarget,
})
}),
);
return false;
}
@@ -814,7 +814,7 @@ export async function compile(
*/
function findDirectiveSuppressingCode(
code: string,
directives: readonly DirectiveExpressionNode[]
directives: readonly DirectiveExpressionNode[],
): Directive | undefined {
for (const directive of directives.map((x) => parseDirective(x))) {
if (directive.name === "suppress") {
@@ -871,7 +871,7 @@ export async function compile(
code: "duplicate-symbol",
format: { name },
target: symbol,
})
}),
);
}
}

@@ -1,7 +1,7 @@
import type { Program, ProjectedProgram } from "./index.js";

export function isProjectedProgram(
program: Program | ProjectedProgram
program: Program | ProjectedProgram,
): program is ProjectedProgram {
return "projector" in program;
}

@@ -96,7 +96,7 @@ export function createProjectionMembers(checker: Checker): {
node: undefined!,
default: defaultT,
type,
})
}),
);

return voidType;
@@ -285,7 +285,7 @@ export function createProjectionMembers(checker: Checker): {
parameters,
returnType,
decorators: [],
})
}),
);
return voidType;
});
@@ -348,7 +348,7 @@ export function createProjectionMembers(checker: Checker): {
decorators: [],
node: undefined!,
value: type ? type.value : undefined,
})
}),
);

return voidType;

@@ -52,7 +52,7 @@ import type {
export function createProjector(
program: Program,
projections: ProjectionApplication[],
startNode?: Type
startNode?: Type,
): ProjectedProgram {
const projectedTypes = new Map<Type, Type>();
const checker = program.checker;
@@ -103,10 +103,10 @@ export function createProjector(
function projectType(type: IndeterminateEntity): IndeterminateEntity;
function projectType(type: Type | Value): Type | Value;
function projectType(
type: Type | Value | IndeterminateEntity
type: Type | Value | IndeterminateEntity,
): Type | Value | IndeterminateEntity;
function projectType(
type: Type | Value | IndeterminateEntity
type: Type | Value | IndeterminateEntity,
): Type | Value | IndeterminateEntity {
if (isValue(type)) {
return type;
@@ -132,7 +132,7 @@ export function createProjector(
case "Namespace":
compilerAssert(
projectingNamespaces,
`Namespace ${type.name} should have already been projected.`
`Namespace ${type.name} should have already been projected.`,
);
projected = projectNamespace(type, false);
break;
@@ -631,7 +631,7 @@ export function createProjector(
const projected = checker.project(
projectedType,
targetNode,
projectionApplication.arguments
projectionApplication.arguments,
);
if (projected !== projectedType) {
// override the projected type cache with the returned type

@@ -347,7 +347,7 @@ export interface Scanner {
indentationStart: number,
indentationEnd: number,
token: Token.StringLiteral | StringTemplateToken,
tokenFlags: TokenFlags
tokenFlags: TokenFlags,
): string;

/** Reset the scanner to the given start and end positions, invoke the callback, and then restore scanner state. */
@@ -405,7 +405,7 @@ export function isStatementKeyword(token: Token) {

export function createScanner(
source: string | SourceFile,
diagnosticHandler: DiagnosticHandler
diagnosticHandler: DiagnosticHandler,
): Scanner {
const file = typeof source === "string" ? createSourceFile(source, "<anonymous file>") : source;
const input = file.text;
@@ -791,7 +791,7 @@ export function createScanner(
>(
report: Omit<DiagnosticReport<CompilerDiagnostics, C, M>, "target">,
pos?: number,
end?: number
end?: number,
) {
const diagnostic = createDiagnostic({
...report,
@@ -916,7 +916,7 @@ export function createScanner(
}

function scanStringTemplateSpan(
tokenFlags: TokenFlags
tokenFlags: TokenFlags,
): Token.StringTemplateMiddle | Token.StringTemplateTail {
position++; // consume '{'

@@ -926,7 +926,7 @@ export function createScanner(
function scanStringLiteralLike<M extends Token, T extends Token>(
requestedTokenFlags: TokenFlags,
template: M,
tail: T
tail: T,
): M | T {
const multiLine = requestedTokenFlags & TokenFlags.TripleQuoted;
tokenFlags = requestedTokenFlags;
@@ -976,7 +976,7 @@ export function createScanner(

function getStringLiteralOffsetStart(
token: Token.StringLiteral | StringTemplateToken,
tokenFlags: TokenFlags
tokenFlags: TokenFlags,
) {
switch (token) {
case Token.StringLiteral:
@@ -989,7 +989,7 @@ export function createScanner(

function getStringLiteralOffsetEnd(
token: Token.StringLiteral | StringTemplateToken,
tokenFlags: TokenFlags
tokenFlags: TokenFlags,
) {
switch (token) {
case Token.StringLiteral:
@@ -1002,7 +1002,7 @@ export function createScanner(

function getStringTokenValue(
token: Token.StringLiteral | StringTemplateToken,
tokenFlags: TokenFlags
tokenFlags: TokenFlags,
): string {
if (tokenFlags & TokenFlags.TripleQuoted) {
const start = tokenPosition;
@@ -1014,7 +1014,7 @@ export function createScanner(
indentationStart,
indentationEnd,
token,
tokenFlags
tokenFlags,
);
}

@@ -1114,7 +1114,7 @@ export function createScanner(
indentationStart: number,
indentationEnd: number,
token: Token.StringLiteral | StringTemplateToken,
tokenFlags: TokenFlags
tokenFlags: TokenFlags,
): string {
const startOffset = getStringLiteralOffsetStart(token, tokenFlags);
const endOffset = getStringLiteralOffsetEnd(token, tokenFlags);
@@ -1217,7 +1217,7 @@ export function createScanner(
pos: number,
end: number,
indentationStart: number,
indentationEnd: number
indentationEnd: number,
): number {
let indentationPos = indentationStart;
end = Math.min(end, pos + (indentationEnd - indentationStart));
@@ -1433,7 +1433,7 @@ export function createScanner(
export function skipTriviaBackward(
script: TypeSpecScriptNode,
position: number,
endPosition = -1
endPosition = -1,
): number {
endPosition = endPosition < -1 ? -1 : endPosition;
const input = script.file.text;
@@ -1499,7 +1499,7 @@ export function skipTrivia(input: string, position: number, endPosition = input.
export function skipWhiteSpace(
input: string,
position: number,
endPosition = input.length
endPosition = input.length,
): number {
while (position < endPosition) {
const ch = input.charCodeAt(position);
@@ -1516,7 +1516,7 @@ export function skipWhiteSpace(
function skipSingleLineComment(
input: string,
position: number,
endPosition = input.length
endPosition = input.length,
): number {
position += 2; // consume '//'

@@ -1532,7 +1532,7 @@ function skipSingleLineComment(
function skipMultiLineComment(
input: string,
position: number,
endPosition = input.length
endPosition = input.length,
): [position: number, terminated: boolean] {
position += 2; // consume '/*'

@@ -1588,11 +1588,11 @@ function getTokenDisplayTable(entries: [Token, string][]): readonly string[] {
for (const [token, display] of entries) {
compilerAssert(
token >= 0 && token < Token.__Count,
`Invalid entry in token display table, ${token}, ${Token[token]}, ${display}`
`Invalid entry in token display table, ${token}, ${Token[token]}, ${display}`,
);
compilerAssert(
!table[token],
`Duplicate entry in token display table for: ${token}, ${Token[token]}, ${display}`
`Duplicate entry in token display table for: ${token}, ${Token[token]}, ${display}`,
);
table[token] = display;
}

@@ -30,7 +30,7 @@ function absolutePathStatus(path: string): "valid" | "not-absolute" | "windows-s

export function createJSONSchemaValidator<T>(
schema: JSONSchemaType<T>,
options: JSONSchemaValidatorOptions = { strict: true }
options: JSONSchemaValidatorOptions = { strict: true },
): JSONSchemaValidator {
const ajv = new Ajv({
strict: options.strict,
@@ -46,13 +46,13 @@ export function createJSONSchemaValidator<T>(

function validate(
config: unknown,
target: YamlScript | YamlPathTarget | SourceFile | typeof NoTarget
target: YamlScript | YamlPathTarget | SourceFile | typeof NoTarget,
): Diagnostic[] {
const validate = ajv.compile(schema);
const valid = validate(config);
compilerAssert(
!valid || !validate.errors,
"There should be errors reported if the schema is not valid."
"There should be errors reported if the schema is not valid.",
);

const diagnostics = [];
@@ -70,7 +70,7 @@ const IGNORED_AJV_PARAMS = new Set(["type", "errors"]);
function ajvErrorToDiagnostic(
obj: unknown,
error: ErrorObject<string, Record<string, any>, unknown>,
target: YamlScript | YamlPathTarget | SourceFile | typeof NoTarget
target: YamlScript | YamlPathTarget | SourceFile | typeof NoTarget,
): Diagnostic {
const tspTarget = resolveTarget(error, target);
if (error.params.format === "absolute-path") {
@@ -84,7 +84,7 @@ function ajvErrorToDiagnostic(

const messageLines = [`Schema violation: ${error.message} (${error.instancePath || "/"})`];
for (const [name, value] of Object.entries(error.params).filter(
([name]) => !IGNORED_AJV_PARAMS.has(name)
([name]) => !IGNORED_AJV_PARAMS.has(name),
)) {
const formattedValue = Array.isArray(value) ? [...new Set(value)].join(", ") : value;
messageLines.push(` ${name}: ${formattedValue}`);
@@ -101,7 +101,7 @@ function ajvErrorToDiagnostic(

function resolveTarget(
error: ErrorObject<string, Record<string, any>, unknown>,
target: YamlScript | YamlPathTarget | SourceFile | typeof NoTarget
target: YamlScript | YamlPathTarget | SourceFile | typeof NoTarget,
): DiagnosticTarget | typeof NoTarget {
if (target === NoTarget) {
return NoTarget;
@@ -116,7 +116,7 @@ function resolveTarget(
return getLocationInYamlScript(
target.script,
[...target.path, ...getErrorPath(error)],
"key"
"key",
);
}
}
@@ -132,7 +132,7 @@ function getErrorPath(error: ErrorObject<string, Record<string, any>, unknown>):
}
function getErrorValue(
obj: any,
error: ErrorObject<string, Record<string, any>, unknown>
error: ErrorObject<string, Record<string, any>, unknown>,
): unknown {
const path = getErrorPath(error);
let current = obj;

@ -50,7 +50,7 @@ const defaultOptions = {
|
|||
export function navigateProgram(
|
||||
program: Program,
|
||||
listeners: SemanticNodeListener,
|
||||
options: NavigationOptions = {}
|
||||
options: NavigationOptions = {},
|
||||
) {
|
||||
const context = createNavigationContext(listeners, options);
|
||||
context.emit("root", program);
|
||||
|
@ -67,7 +67,7 @@ export function navigateProgram(
|
|||
export function navigateType(
|
||||
type: Type,
|
||||
listeners: SemanticNodeListener,
|
||||
options: NavigationOptions
|
||||
options: NavigationOptions,
|
||||
) {
|
||||
const context = createNavigationContext(listeners, options);
|
||||
navigateTypeInternal(type, context);
|
||||
|
@ -83,7 +83,7 @@ export function navigateType(
|
|||
export function scopeNavigationToNamespace<T extends TypeListeners>(
|
||||
namespace: Namespace,
|
||||
listeners: T,
|
||||
options: NamespaceNavigationOptions = {}
|
||||
options: NamespaceNavigationOptions = {},
|
||||
): T {
|
||||
const wrappedListeners: TypeListeners = {};
|
||||
for (const [name, callback] of Object.entries(listeners)) {
|
||||
|
@ -105,7 +105,7 @@ export function scopeNavigationToNamespace<T extends TypeListeners>(
|
|||
export function navigateTypesInNamespace(
|
||||
namespace: Namespace,
|
||||
listeners: TypeListeners,
|
||||
options: NamespaceNavigationOptions & NavigationOptions = {}
|
||||
options: NamespaceNavigationOptions & NavigationOptions = {},
|
||||
) {
|
||||
navigateType(namespace, scopeNavigationToNamespace(namespace, listeners, options), options);
|
||||
}
|
||||
|
@ -116,7 +116,7 @@ export function navigateTypesInNamespace(
|
|||
* @returns Semantic node listener.
|
||||
*/
|
||||
export function mapEventEmitterToNodeListener(
|
||||
eventEmitter: EventEmitter<SemanticNodeListener>
|
||||
eventEmitter: EventEmitter<SemanticNodeListener>,
|
||||
): SemanticNodeListener {
|
||||
const listener: SemanticNodeListener = {};
|
||||
for (const eventName of eventNames) {
|
||||
|
@ -140,7 +140,7 @@ function isSubNamespace(subNamespace: Namespace, namespace: Namespace): boolean
|
|||
}
|
||||
function createNavigationContext(
|
||||
listeners: SemanticNodeListener,
|
||||
options: NavigationOptions = {}
|
||||
options: NavigationOptions = {},
|
||||
): NavigationContext {
|
||||
return {
|
||||
visited: new Set(),
|
||||
|
@ -206,7 +206,7 @@ function checkVisited(visited: Set<any>, item: Type) {
|
|||
|
||||
function shouldNavigateTemplatableType(
|
||||
context: NavigationContext,
|
||||
type: Operation | Interface | Model | Union
|
||||
type: Operation | Interface | Model | Union,
|
||||
) {
|
||||
if (context.options.includeTemplateDeclaration) {
|
||||
return type.isFinished || isTemplateDeclaration(type);
|
||||
|
|
|
@ -56,13 +56,13 @@ export interface SourceLoader {
|
|||
importFile(
|
||||
path: string,
|
||||
locationContext?: LocationContext,
|
||||
kind?: "import" | "entrypoint"
|
||||
kind?: "import" | "entrypoint",
|
||||
): Promise<void>;
|
||||
importPath(
|
||||
path: string,
|
||||
target: DiagnosticTarget | typeof NoTarget,
|
||||
relativeTo: string,
|
||||
locationContext?: LocationContext
|
||||
locationContext?: LocationContext,
|
||||
): Promise<void>;
|
||||
readonly resolution: SourceResolution;
|
||||
}
|
||||
|
@ -74,7 +74,7 @@ export interface SourceLoader {
|
|||
*/
|
||||
export async function createSourceLoader(
|
||||
host: CompilerHost,
|
||||
options?: LoadSourceOptions
|
||||
options?: LoadSourceOptions,
|
||||
): Promise<SourceLoader> {
|
||||
const diagnostics = createDiagnosticCollector();
|
||||
const tracer = options?.tracer;
|
||||
|
@ -87,7 +87,7 @@ export async function createSourceLoader(
|
|||
async function importFile(
|
||||
path: string,
|
||||
locationContext: LocationContext,
|
||||
kind: "import" | "entrypoint" = "import"
|
||||
kind: "import" | "entrypoint" = "import",
|
||||
) {
|
||||
const sourceFileKind = host.getSourceFileKind(path);
|
||||
|
||||
|
@ -103,7 +103,7 @@ export async function createSourceLoader(
|
|||
createDiagnostic({
|
||||
code: kind === "import" ? "invalid-import" : "invalid-main",
|
||||
target: NoTarget,
|
||||
})
|
||||
}),
|
||||
);
|
||||
}
|
||||
}
|
||||
|
@ -123,7 +123,7 @@ export async function createSourceLoader(
|
|||
async function loadTypeSpecFile(
|
||||
path: string,
|
||||
locationContext: LocationContext,
|
||||
diagnosticTarget: DiagnosticTarget | typeof NoTarget
|
||||
diagnosticTarget: DiagnosticTarget | typeof NoTarget,
|
||||
) {
|
||||
if (seenSourceFiles.has(path)) {
|
||||
return;
|
||||
|
@ -175,7 +175,7 @@ export async function createSourceLoader(
|
|||
await loadImports(
|
||||
file.statements.filter(isImportStatement).map((x) => ({ path: x.path.value, target: x })),
|
||||
basedir,
|
||||
getSourceFileLocationContext(file.file)
|
||||
getSourceFileLocationContext(file.file),
|
||||
);
|
||||
}
|
||||
|
||||
|
@ -188,7 +188,7 @@ export async function createSourceLoader(
|
|||
async function loadImports(
|
||||
imports: Array<{ path: string; target: DiagnosticTarget | typeof NoTarget }>,
|
||||
relativeTo: string,
|
||||
locationContext: LocationContext
|
||||
locationContext: LocationContext,
|
||||
) {
|
||||
// collect imports
|
||||
for (const { path, target } of imports) {
|
||||
|
@ -200,7 +200,7 @@ export async function createSourceLoader(
|
|||
path: string,
|
||||
target: DiagnosticTarget | typeof NoTarget,
|
||||
relativeTo: string,
|
||||
locationContext: LocationContext = { type: "project" }
|
||||
locationContext: LocationContext = { type: "project" },
|
||||
) {
|
||||
const library = await resolveTypeSpecLibrary(path, relativeTo, target);
|
||||
if (library === undefined) {
|
||||
|
@ -213,7 +213,7 @@ export async function createSourceLoader(
|
|||
});
|
||||
tracer?.trace(
|
||||
"import-resolution.library",
|
||||
`Loading library "${path}" from "${library.mainFile}"`
|
||||
`Loading library "${path}" from "${library.mainFile}"`,
|
||||
);
|
||||
|
||||
const metadata = computeModuleMetadata(library);
|
||||
|
@ -240,7 +240,7 @@ export async function createSourceLoader(
|
|||
async function resolveTypeSpecLibrary(
|
||||
specifier: string,
|
||||
baseDir: string,
|
||||
target: DiagnosticTarget | typeof NoTarget
|
||||
target: DiagnosticTarget | typeof NoTarget,
|
||||
): Promise<ModuleResolutionResult | undefined> {
|
||||
try {
|
||||
return await resolveModule(getResolveModuleHost(), specifier, {
|
||||
|
@ -255,7 +255,7 @@ export async function createSourceLoader(
|
|||
} catch (e: any) {
|
||||
if (e.code === "MODULE_NOT_FOUND") {
|
||||
diagnostics.add(
|
||||
createDiagnostic({ code: "import-not-found", format: { path: specifier }, target })
|
||||
createDiagnostic({ code: "import-not-found", format: { path: specifier }, target }),
|
||||
);
|
||||
return undefined;
|
||||
} else if (e.code === "INVALID_MAIN") {
|
||||
|
@ -265,7 +265,7 @@ export async function createSourceLoader(
|
|||
format: { path: specifier },
|
||||
messageId: "tspMain",
|
||||
target,
|
||||
})
|
||||
}),
|
||||
);
|
||||
return undefined;
|
||||
} else {
|
||||
|
@ -277,7 +277,7 @@ export async function createSourceLoader(
|
|||
async function loadDirectory(
|
||||
dir: string,
|
||||
locationContext: LocationContext,
|
||||
diagnosticTarget: DiagnosticTarget | typeof NoTarget
|
||||
diagnosticTarget: DiagnosticTarget | typeof NoTarget,
|
||||
): Promise<string> {
|
||||
const mainFile = await resolveTypeSpecEntrypointForDir(host, dir, (x) => diagnostics.add(x));
|
||||
await loadTypeSpecFile(mainFile, locationContext, diagnosticTarget);
|
||||
|
@ -290,7 +290,7 @@ export async function createSourceLoader(
|
|||
async function importJsFile(
|
||||
path: string,
|
||||
locationContext: LocationContext,
|
||||
diagnosticTarget: DiagnosticTarget | typeof NoTarget
|
||||
diagnosticTarget: DiagnosticTarget | typeof NoTarget,
|
||||
) {
|
||||
const sourceFile = jsSourceFiles.get(path);
|
||||
if (sourceFile !== undefined) {
|
||||
|
@ -339,7 +339,7 @@ function computeModuleMetadata(module: ResolvedModule): ModuleLibraryMetadata {
|
|||
export async function loadJsFile(
|
||||
host: CompilerHost,
|
||||
path: string,
|
||||
diagnosticTarget: DiagnosticTarget | typeof NoTarget
|
||||
diagnosticTarget: DiagnosticTarget | typeof NoTarget,
|
||||
): Promise<[JsSourceFileNode | undefined, readonly Diagnostic[]]> {
|
||||
const file = createSourceFile("", path);
|
||||
const diagnostics: Diagnostic[] = [];
|
||||
@@ -6,7 +6,7 @@ export class StateSet extends Map<undefined | Projector, Set<Type>> {}
class StateMapView<V> implements Map<Type, V> {
public constructor(
private state: StateMap,
private projector?: Projector
private projector?: Projector,
) {}

has(t: Type) {
@@ -70,7 +70,7 @@ class StateMapView<V> implements Map<Type, V> {
class StateSetView implements Set<Type> {
public constructor(
private state: StateSet,
private projector?: Projector
private projector?: Projector,
) {}

has(t: Type) {
@@ -130,7 +130,7 @@ class StateSetView implements Set<Type> {
export function createStateAccessors(
stateMaps: Map<symbol, StateMap>,
stateSets: Map<symbol, StateSet>,
projector?: Projector
projector?: Projector,
) {
function stateMap<T>(key: symbol): StateMapView<T> {
let m = stateMaps.get(key);

@ -47,13 +47,13 @@ export interface TypeRelation {
|
|||
isTypeAssignableTo(
|
||||
source: Entity | IndeterminateEntity,
|
||||
target: Entity,
|
||||
diagnosticTarget: Entity | Node
|
||||
diagnosticTarget: Entity | Node,
|
||||
): [boolean, readonly Diagnostic[]];
|
||||
|
||||
isValueOfType(
|
||||
source: Value,
|
||||
target: Type,
|
||||
diagnosticTarget: Entity | Node
|
||||
diagnosticTarget: Entity | Node,
|
||||
): [boolean, readonly Diagnostic[]];
|
||||
|
||||
isReflectionType(type: Type): type is Model & { name: ReflectionTypeName };
|
||||
|
@ -121,13 +121,13 @@ export function createTypeRelationChecker(program: Program, checker: Checker): T
|
|||
function isTypeAssignableTo(
|
||||
source: Entity | IndeterminateEntity,
|
||||
target: Entity,
|
||||
diagnosticTarget: Entity | Node
|
||||
diagnosticTarget: Entity | Node,
|
||||
): [boolean, readonly Diagnostic[]] {
|
||||
const [related, errors] = isTypeAssignableToInternal(
|
||||
source,
|
||||
target,
|
||||
diagnosticTarget,
|
||||
new MultiKeyMap<[Entity, Entity], Related>()
|
||||
new MultiKeyMap<[Entity, Entity], Related>(),
|
||||
);
|
||||
return [related === Related.true, convertErrorsToDiagnostics(errors, diagnosticTarget)];
|
||||
}
|
||||
|
@ -149,7 +149,7 @@ export function createTypeRelationChecker(program: Program, checker: Checker): T
|
|||
|
||||
function convertErrorsToDiagnostics(
|
||||
errors: readonly TypeRelationError[],
|
||||
diagnosticBase: Entity | Node
|
||||
diagnosticBase: Entity | Node,
|
||||
): readonly Diagnostic[] {
|
||||
return errors.flatMap((x) => convertErrorToDiagnostic(x, diagnosticBase));
|
||||
}
|
||||
|
@ -171,7 +171,7 @@ export function createTypeRelationChecker(program: Program, checker: Checker): T
|
|||
|
||||
function flattenErrors(
|
||||
error: TypeRelationError,
|
||||
diagnosticBase: Entity | Node
|
||||
diagnosticBase: Entity | Node,
|
||||
): TypeRelationError[] {
|
||||
if (!isTargetChildOf(error.target, diagnosticBase)) {
|
||||
return [{ ...error, target: diagnosticBase }];
|
||||
|
@ -183,7 +183,7 @@ export function createTypeRelationChecker(program: Program, checker: Checker): T
|
|||
}
|
||||
function convertErrorToDiagnostic(
|
||||
error: TypeRelationError,
|
||||
diagnosticBase: Entity | Node
|
||||
diagnosticBase: Entity | Node,
|
||||
): Diagnostic[] {
|
||||
const flattened = flattenErrors(error, diagnosticBase);
|
||||
return flattened.map((error) => {
|
||||
|
@ -209,13 +209,13 @@ export function createTypeRelationChecker(program: Program, checker: Checker): T
|
|||
function isValueOfType(
|
||||
source: Value,
|
||||
target: Type,
|
||||
diagnosticTarget: Entity | Node
|
||||
diagnosticTarget: Entity | Node,
|
||||
): [boolean, readonly Diagnostic[]] {
|
||||
const [related, errors] = isValueOfTypeInternal(
|
||||
source,
|
||||
target,
|
||||
diagnosticTarget,
|
||||
new MultiKeyMap<[Entity, Entity], Related>()
|
||||
new MultiKeyMap<[Entity, Entity], Related>(),
|
||||
);
|
||||
return [related === Related.true, convertErrorsToDiagnostics(errors, diagnosticTarget)];
|
||||
}
|
||||
|
@ -224,7 +224,7 @@ export function createTypeRelationChecker(program: Program, checker: Checker): T
|
|||
source: Entity | IndeterminateEntity,
|
||||
target: Entity,
|
||||
diagnosticTarget: Entity | Node,
|
||||
relationCache: MultiKeyMap<[Entity | IndeterminateEntity, Entity], Related>
|
||||
relationCache: MultiKeyMap<[Entity | IndeterminateEntity, Entity], Related>,
|
||||
): [Related, readonly TypeRelationError[]] {
|
||||
const cached = relationCache.get([source, target]);
|
||||
if (cached !== undefined) {
|
||||
|
@ -234,7 +234,7 @@ export function createTypeRelationChecker(program: Program, checker: Checker): T
|
|||
source,
|
||||
target,
|
||||
diagnosticTarget,
|
||||
new MultiKeyMap<[Entity, Entity], Related>()
|
||||
new MultiKeyMap<[Entity, Entity], Related>(),
|
||||
);
|
||||
relationCache.set([source, target], result);
|
||||
return [result, diagnostics];
|
||||
|
@ -244,7 +244,7 @@ export function createTypeRelationChecker(program: Program, checker: Checker): T
|
|||
source: Entity | IndeterminateEntity,
|
||||
target: Entity,
|
||||
diagnosticTarget: Entity | Node,
|
||||
relationCache: MultiKeyMap<[Entity, Entity], Related>
|
||||
relationCache: MultiKeyMap<[Entity, Entity], Related>,
|
||||
): [Related, readonly TypeRelationError[]] {
|
||||
// BACKCOMPAT: Allow certain type to be accepted as values
|
||||
if (
|
||||
|
@ -260,16 +260,16 @@ export function createTypeRelationChecker(program: Program, checker: Checker): T
|
|||
source.constraint.type,
|
||||
target.valueType,
|
||||
diagnosticTarget,
|
||||
relationCache
|
||||
relationCache,
|
||||
);
|
||||
if (assignable) {
|
||||
const constraint = getEntityName(source.constraint);
|
||||
reportDeprecated(
|
||||
program,
|
||||
`Template constrainted to '${constraint}' will not be assignable to '${getEntityName(
|
||||
target
|
||||
target,
|
||||
)}' in the future. Update the constraint to be 'valueof ${constraint}'`,
|
||||
diagnosticTarget
|
||||
diagnosticTarget,
|
||||
);
|
||||
return [Related.true, []];
|
||||
}
|
||||
|
@ -296,7 +296,7 @@ export function createTypeRelationChecker(program: Program, checker: Checker): T
|
|||
source,
|
||||
target,
|
||||
diagnosticTarget,
|
||||
relationCache
|
||||
relationCache,
|
||||
);
|
||||
}
|
||||
|
||||
|
@ -320,7 +320,7 @@ export function createTypeRelationChecker(program: Program, checker: Checker): T
|
|||
variant.type,
|
||||
target,
|
||||
diagnosticTarget,
|
||||
relationCache
|
||||
relationCache,
|
||||
);
|
||||
if (!variantAssignable) {
|
||||
return [Related.false, [createUnassignableDiagnostic(source, target, diagnosticTarget)]];
|
||||
|
@ -360,7 +360,7 @@ export function createTypeRelationChecker(program: Program, checker: Checker): T
|
|||
source,
|
||||
target as Model & { indexer: ModelIndexer },
|
||||
diagnosticTarget,
|
||||
relationCache
|
||||
relationCache,
|
||||
);
|
||||
} else {
|
||||
// For other models just fallback to unassignable
|
||||
|
@ -389,13 +389,13 @@ export function createTypeRelationChecker(program: Program, checker: Checker): T
|
|||
indeterminate: IndeterminateEntity,
|
||||
target: Type | MixedParameterConstraint,
|
||||
diagnosticTarget: Entity | Node,
|
||||
relationCache: MultiKeyMap<[Entity, Entity], Related>
|
||||
relationCache: MultiKeyMap<[Entity, Entity], Related>,
|
||||
): [Related, readonly TypeRelationError[]] {
|
||||
const [typeRelated, typeDiagnostics] = isTypeAssignableToInternal(
|
||||
indeterminate.type,
|
||||
target,
|
||||
diagnosticTarget,
|
||||
relationCache
|
||||
relationCache,
|
||||
);
|
||||
if (typeRelated) {
|
||||
return [Related.true, []];
|
||||
|
@ -406,7 +406,7 @@ export function createTypeRelationChecker(program: Program, checker: Checker): T
|
|||
indeterminate.type,
|
||||
target.valueType,
|
||||
diagnosticTarget,
|
||||
relationCache
|
||||
relationCache,
|
||||
);
|
||||
|
||||
if (valueRelated) {
|
||||
|
@ -421,7 +421,7 @@ export function createTypeRelationChecker(program: Program, checker: Checker): T
|
|||
source: Entity,
|
||||
target: Type,
|
||||
diagnosticTarget: Entity | Node,
|
||||
relationCache: MultiKeyMap<[Entity, Entity], Related>
|
||||
relationCache: MultiKeyMap<[Entity, Entity], Related>,
|
||||
): [Related, readonly TypeRelationError[]] {
|
||||
if (!isValue(source)) {
|
||||
return [Related.false, [createUnassignableDiagnostic(source, target, diagnosticTarget)]];
|
||||
|
@ -434,7 +434,7 @@ export function createTypeRelationChecker(program: Program, checker: Checker): T
|
|||
source: Entity,
|
||||
target: MixedParameterConstraint,
|
||||
diagnosticTarget: Entity | Node,
|
||||
relationCache: MultiKeyMap<[Entity, Entity], Related>
|
||||
relationCache: MultiKeyMap<[Entity, Entity], Related>,
|
||||
): [Related, readonly TypeRelationError[]] {
|
||||
if ("entityKind" in source && source.entityKind === "MixedParameterConstraint") {
|
||||
if (source.type && target.type) {
|
||||
|
@ -442,7 +442,7 @@ export function createTypeRelationChecker(program: Program, checker: Checker): T
|
|||
source.type,
|
||||
target.type,
|
||||
diagnosticTarget,
|
||||
relationCache
|
||||
relationCache,
|
||||
);
|
||||
if (variantAssignable === Related.false) {
|
||||
return [Related.false, diagnostics];
|
||||
|
@ -454,7 +454,7 @@ export function createTypeRelationChecker(program: Program, checker: Checker): T
|
|||
source.valueType,
|
||||
target.valueType,
|
||||
diagnosticTarget,
|
||||
relationCache
|
||||
relationCache,
|
||||
);
|
||||
if (variantAssignable === Related.false) {
|
||||
return [Related.false, diagnostics];
|
||||
|
@ -469,7 +469,7 @@ export function createTypeRelationChecker(program: Program, checker: Checker): T
|
|||
source,
|
||||
target.type,
|
||||
diagnosticTarget,
|
||||
relationCache
|
||||
relationCache,
|
||||
);
|
||||
if (related) {
|
||||
return [Related.true, []];
|
||||
|
@ -480,7 +480,7 @@ export function createTypeRelationChecker(program: Program, checker: Checker): T
|
|||
source,
|
||||
target.valueType,
|
||||
diagnosticTarget,
|
||||
relationCache
|
||||
relationCache,
|
||||
);
|
||||
if (related) {
|
||||
return [Related.true, []];
|
||||
|
@ -494,7 +494,7 @@ export function createTypeRelationChecker(program: Program, checker: Checker): T
|
|||
source: Value,
|
||||
target: Type,
|
||||
diagnosticTarget: Entity | Node,
|
||||
relationCache: MultiKeyMap<[Entity, Entity], Related>
|
||||
relationCache: MultiKeyMap<[Entity, Entity], Related>,
|
||||
): [Related, readonly TypeRelationError[]] {
|
||||
return isTypeAssignableToInternal(source.type, target, diagnosticTarget, relationCache);
|
||||
}
|
||||
|
@ -608,7 +608,7 @@ export function createTypeRelationChecker(program: Program, checker: Checker): T
|
|||
while (!target.namespace || !isTypeSpecNamespace(target.namespace)) {
|
||||
compilerAssert(
|
||||
target.baseScalar,
|
||||
"Should not be possible to be derived from TypeSpec.numeric and not have a base when not in TypeSpec namespace."
|
||||
"Should not be possible to be derived from TypeSpec.numeric and not have a base when not in TypeSpec namespace.",
|
||||
);
|
||||
target = target.baseScalar;
|
||||
}
|
||||
|
@ -652,7 +652,7 @@ export function createTypeRelationChecker(program: Program, checker: Checker): T
|
|||
source: Model,
|
||||
target: Model,
|
||||
diagnosticTarget: Entity | Node,
|
||||
relationCache: MultiKeyMap<[Entity, Entity], Related>
|
||||
relationCache: MultiKeyMap<[Entity, Entity], Related>,
|
||||
): [Related, readonly TypeRelationError[]] {
|
||||
relationCache.set([source, target], Related.maybe);
|
||||
const errors: TypeRelationError[] = [];
|
||||
|
@ -671,7 +671,7 @@ export function createTypeRelationChecker(program: Program, checker: Checker): T
|
|||
targetType: getTypeName(target),
|
||||
},
|
||||
diagnosticTarget: source,
|
||||
})
|
||||
}),
|
||||
);
|
||||
}
|
||||
} else {
|
||||
|
@ -686,14 +686,14 @@ export function createTypeRelationChecker(program: Program, checker: Checker): T
|
|||
targetType: getTypeName(target),
|
||||
},
|
||||
diagnosticTarget,
|
||||
})
|
||||
}),
|
||||
);
|
||||
}
|
||||
const [related, propErrors] = isTypeAssignableToInternal(
|
||||
sourceProperty.type,
|
||||
prop.type,
|
||||
diagnosticTarget,
|
||||
relationCache
|
||||
relationCache,
|
||||
);
|
||||
if (!related) {
|
||||
errors.push(...wrapUnassignablePropertyErrors(sourceProperty, propErrors));
|
||||
|
@ -706,7 +706,7 @@ export function createTypeRelationChecker(program: Program, checker: Checker): T
|
|||
remainingProperties,
|
||||
target.indexer.value,
|
||||
diagnosticTarget,
|
||||
relationCache
|
||||
relationCache,
|
||||
);
|
||||
errors.push(...indexerDiagnostics);
|
||||
|
||||
|
@ -716,7 +716,7 @@ export function createTypeRelationChecker(program: Program, checker: Checker): T
|
|||
source,
|
||||
target as any,
|
||||
diagnosticTarget,
|
||||
relationCache
|
||||
relationCache,
|
||||
);
|
||||
if (!related) {
|
||||
errors.push(...indexDiagnostics);
|
||||
|
@ -733,7 +733,7 @@ export function createTypeRelationChecker(program: Program, checker: Checker): T
|
|||
type: getEntityName(target),
|
||||
},
|
||||
diagnosticTarget: prop,
|
||||
})
|
||||
}),
|
||||
);
|
||||
}
|
||||
}
|
||||
|
@ -767,14 +767,14 @@ export function createTypeRelationChecker(program: Program, checker: Checker): T
|
|||
properties: Map<string, ModelProperty>,
|
||||
indexerConstaint: Type,
|
||||
diagnosticTarget: Entity | Node,
|
||||
relationCache: MultiKeyMap<[Type, Type], Related>
|
||||
relationCache: MultiKeyMap<[Type, Type], Related>,
|
||||
): [Related, readonly TypeRelationError[]] {
|
||||
for (const prop of properties.values()) {
|
||||
const [related, diagnostics] = isTypeAssignableToInternal(
|
||||
prop.type,
|
||||
indexerConstaint,
|
||||
diagnosticTarget,
|
||||
relationCache
|
||||
relationCache,
|
||||
);
|
||||
if (!related) {
|
||||
return [Related.false, diagnostics];
|
||||
|
@ -789,7 +789,7 @@ export function createTypeRelationChecker(program: Program, checker: Checker): T
|
|||
source: Model,
|
||||
target: Model & { indexer: ModelIndexer },
|
||||
diagnosticTarget: Entity | Node,
|
||||
relationCache: MultiKeyMap<[Entity, Entity], Related>
|
||||
relationCache: MultiKeyMap<[Entity, Entity], Related>,
|
||||
): [Related, readonly TypeRelationError[]] {
|
||||
if (source.indexer === undefined || source.indexer.key !== target.indexer.key) {
|
||||
return [
|
||||
|
@ -810,7 +810,7 @@ export function createTypeRelationChecker(program: Program, checker: Checker): T
|
|||
source.indexer.value!,
|
||||
target.indexer.value,
|
||||
diagnosticTarget,
|
||||
relationCache
|
||||
relationCache,
|
||||
);
|
||||
}
|
||||
|
||||
|
@ -818,7 +818,7 @@ export function createTypeRelationChecker(program: Program, checker: Checker): T
|
|||
source: Tuple,
|
||||
target: ArrayModelType,
|
||||
diagnosticTarget: Entity | Node,
|
||||
relationCache: MultiKeyMap<[Entity, Entity], Related>
|
||||
relationCache: MultiKeyMap<[Entity, Entity], Related>,
|
||||
): [Related, readonly TypeRelationError[]] {
|
||||
const minItems = getMinItems(program, target);
|
||||
const maxItems = getMaxItems(program, target);
|
||||
|
@ -830,7 +830,7 @@ export function createTypeRelationChecker(program: Program, checker: Checker): T
|
|||
source,
|
||||
target,
|
||||
diagnosticTarget,
|
||||
`Source has ${source.values.length} element(s) but target requires ${minItems}.`
|
||||
`Source has ${source.values.length} element(s) but target requires ${minItems}.`,
|
||||
),
|
||||
],
|
||||
];
|
||||
|
@ -843,7 +843,7 @@ export function createTypeRelationChecker(program: Program, checker: Checker): T
|
|||
source,
|
||||
target,
|
||||
diagnosticTarget,
|
||||
`Source has ${source.values.length} element(s) but target only allows ${maxItems}.`
|
||||
`Source has ${source.values.length} element(s) but target only allows ${maxItems}.`,
|
||||
),
|
||||
],
|
||||
];
|
||||
|
@ -853,7 +853,7 @@ export function createTypeRelationChecker(program: Program, checker: Checker): T
|
|||
item,
|
||||
target.indexer.value!,
|
||||
diagnosticTarget,
|
||||
relationCache
|
||||
relationCache,
|
||||
);
|
||||
if (!related) {
|
||||
return [Related.false, diagnostics];
|
||||
|
@ -866,7 +866,7 @@ export function createTypeRelationChecker(program: Program, checker: Checker): T
|
|||
source: Tuple | ArrayValue,
|
||||
target: Tuple,
|
||||
diagnosticTarget: Entity | Node,
|
||||
relationCache: MultiKeyMap<[Entity, Entity], Related>
|
||||
relationCache: MultiKeyMap<[Entity, Entity], Related>,
|
||||
): [Related, readonly TypeRelationError[]] {
|
||||
if (source.values.length !== target.values.length) {
|
||||
return [
|
||||
|
@ -876,7 +876,7 @@ export function createTypeRelationChecker(program: Program, checker: Checker): T
|
|||
source,
|
||||
target,
|
||||
diagnosticTarget,
|
||||
`Source has ${source.values.length} element(s) but target requires ${target.values.length}.`
|
||||
`Source has ${source.values.length} element(s) but target requires ${target.values.length}.`,
|
||||
),
|
||||
],
|
||||
];
|
||||
|
@ -887,7 +887,7 @@ export function createTypeRelationChecker(program: Program, checker: Checker): T
|
|||
sourceItem,
|
||||
targetItem,
|
||||
diagnosticTarget,
|
||||
relationCache
|
||||
relationCache,
|
||||
);
|
||||
if (!related) {
|
||||
return [Related.false, diagnostics];
|
||||
|
@ -900,7 +900,7 @@ export function createTypeRelationChecker(program: Program, checker: Checker): T
|
|||
source: Type,
|
||||
target: Union,
|
||||
diagnosticTarget: Entity | Node,
|
||||
relationCache: MultiKeyMap<[Entity, Entity], Related>
|
||||
relationCache: MultiKeyMap<[Entity, Entity], Related>,
|
||||
): [Related, TypeRelationError[]] {
|
||||
if (source.kind === "UnionVariant" && source.union === target) {
|
||||
return [Related.true, []];
|
||||
|
@ -910,7 +910,7 @@ export function createTypeRelationChecker(program: Program, checker: Checker): T
|
|||
source,
|
||||
option.type,
|
||||
diagnosticTarget,
|
||||
relationCache
|
||||
relationCache,
|
||||
);
|
||||
if (related) {
|
||||
return [Related.true, []];
|
||||
|
@ -922,7 +922,7 @@ export function createTypeRelationChecker(program: Program, checker: Checker): T
|
|||
function isAssignableToEnum(
|
||||
source: Type,
|
||||
target: Enum,
|
||||
diagnosticTarget: Entity | Node
|
||||
diagnosticTarget: Entity | Node,
|
||||
): [Related, TypeRelationError[]] {
|
||||
switch (source.kind) {
|
||||
case "Enum":
|
||||
|
@ -943,7 +943,7 @@ export function createTypeRelationChecker(program: Program, checker: Checker): T
|
|||
}
|
||||
|
||||
function isTypeSpecNamespace(
|
||||
namespace: Namespace
|
||||
namespace: Namespace,
|
||||
): namespace is Namespace & { name: "TypeSpec"; namespace: Namespace } {
|
||||
return (
|
||||
namespace.name === "TypeSpec" &&
|
||||
|
@ -965,7 +965,7 @@ interface TypeRelationeErrorInit<C extends TypeRelationError["code"]> {
|
|||
function wrapUnassignableErrors(
|
||||
source: Entity,
|
||||
target: Entity,
|
||||
errors: readonly TypeRelationError[]
|
||||
errors: readonly TypeRelationError[],
|
||||
): readonly TypeRelationError[] {
|
||||
const error = createUnassignableDiagnostic(source, target, source);
|
||||
error.children = errors;
|
||||
|
@ -973,7 +973,7 @@ function wrapUnassignableErrors(
|
|||
}
|
||||
function wrapUnassignablePropertyErrors(
|
||||
source: ModelProperty,
|
||||
errors: readonly TypeRelationError[]
|
||||
errors: readonly TypeRelationError[],
|
||||
): readonly TypeRelationError[] {
|
||||
const error = createTypeRelationError({
|
||||
code: "property-unassignable",
|
||||
|
@ -1012,7 +1012,7 @@ function createUnassignableDiagnostic(
|
|||
source: Entity,
|
||||
target: Entity,
|
||||
diagnosticTarget: Entity | Node,
|
||||
details?: string
|
||||
details?: string,
|
||||
): TypeRelationError {
|
||||
return createTypeRelationError({
|
||||
code: "unassignable",
|
||||
|
|
|
@ -91,7 +91,7 @@ export function getParentTemplateNode(node: Node): (Node & TemplateDeclarationNo
|
|||
* Check the given type is a finished template instance.
|
||||
*/
|
||||
export function isTemplateInstance(
|
||||
type: Type
|
||||
type: Type,
|
||||
): type is TemplatedType & { templateArguments: Type[]; templateMapper: TypeMapper } {
|
||||
const maybeTemplateType = type as TemplatedType;
|
||||
return (
|
||||
|
@ -120,7 +120,7 @@ export function isDeclaredType(type: Type): boolean {
|
|||
* Resolve if the type is a template type declaration(Non initialized template type).
|
||||
*/
|
||||
export function isTemplateDeclaration(
|
||||
type: TemplatedType
|
||||
type: TemplatedType,
|
||||
): type is TemplatedType & { node: TemplateDeclarationNode } {
|
||||
if (type.node === undefined) {
|
||||
return false;
|
||||
|
@ -152,7 +152,7 @@ export function isTemplateDeclarationOrInstance(type: TemplatedType): boolean {
|
|||
*/
|
||||
export function isGlobalNamespace(
|
||||
program: Program,
|
||||
namespace: Namespace
|
||||
namespace: Namespace,
|
||||
): namespace is Namespace & { name: ""; namespace: undefined } {
|
||||
return program.getGlobalNamespaceType() === namespace;
|
||||
}
|
||||
|
@ -166,7 +166,7 @@ export function isGlobalNamespace(
|
|||
export function isDeclaredInNamespace(
|
||||
type: Model | Operation | Interface | Namespace | Enum,
|
||||
namespace: Namespace,
|
||||
options: { recursive?: boolean } = { recursive: true }
|
||||
options: { recursive?: boolean } = { recursive: true },
|
||||
) {
|
||||
let candidateNs = type.namespace;
|
||||
while (candidateNs) {
|
||||
|
@ -189,7 +189,7 @@ export function isDeclaredInNamespace(
|
|||
|
||||
export function getFullyQualifiedSymbolName(
|
||||
sym: Sym | undefined,
|
||||
options?: { useGlobalPrefixAtTopLevel?: boolean }
|
||||
options?: { useGlobalPrefixAtTopLevel?: boolean },
|
||||
): string {
|
||||
if (!sym) return "";
|
||||
if (sym.symbolSource) sym = sym.symbolSource;
|
||||
|
|
|
@ -2374,11 +2374,11 @@ export interface DiagnosticCreator<T extends { [code: string]: DiagnosticMessage
|
|||
readonly type: T;
|
||||
readonly diagnostics: DiagnosticMap<T>;
|
||||
createDiagnostic<C extends keyof T, M extends keyof T[C] = "default">(
|
||||
diag: DiagnosticReport<T, C, M>
|
||||
diag: DiagnosticReport<T, C, M>,
|
||||
): Diagnostic;
|
||||
reportDiagnostic<C extends keyof T, M extends keyof T[C] = "default">(
|
||||
program: Program,
|
||||
diag: DiagnosticReport<T, C, M>
|
||||
diag: DiagnosticReport<T, C, M>,
|
||||
): void;
|
||||
}
|
||||
|
||||
|
@ -2400,7 +2400,7 @@ export interface JSONSchemaValidator {
|
|||
*/
|
||||
validate(
|
||||
config: unknown,
|
||||
target: YamlScript | YamlPathTarget | SourceFile | typeof NoTarget
|
||||
target: YamlScript | YamlPathTarget | SourceFile | typeof NoTarget,
|
||||
): Diagnostic[];
|
||||
}
|
||||
|
||||
|
@ -2586,10 +2586,10 @@ export interface TypeSpecLibrary<
|
|||
|
||||
reportDiagnostic<C extends keyof T, M extends keyof T[C]>(
|
||||
program: Program,
|
||||
diag: DiagnosticReport<T, C, M>
|
||||
diag: DiagnosticReport<T, C, M>,
|
||||
): void;
|
||||
createDiagnostic<C extends keyof T, M extends keyof T[C]>(
|
||||
diag: DiagnosticReport<T, C, M>
|
||||
diag: DiagnosticReport<T, C, M>,
|
||||
): Diagnostic;
|
||||
|
||||
/**
|
||||
|
|
|
@ -40,7 +40,7 @@ interface ReferenceChainEntry {
|
|||
export function createAssetEmitter<T, TOptions extends object>(
|
||||
program: Program,
|
||||
TypeEmitterClass: typeof TypeEmitter<T, TOptions>,
|
||||
emitContext: EmitContext<TOptions>
|
||||
emitContext: EmitContext<TOptions>,
|
||||
): AssetEmitter<T, TOptions> {
|
||||
const sourceFiles: SourceFile<T>[] = [];
|
||||
|
||||
|
@ -66,7 +66,7 @@ export function createAssetEmitter<T, TOptions extends object>(
|
|||
const typeToEmitEntity = new CustomKeyMap<[string, Type, ContextState], EmitEntity<T>>(
|
||||
([method, type, context]) => {
|
||||
return `${method}-${typeId.getKey(type)}-${contextId.getKey(context)}`;
|
||||
}
|
||||
},
|
||||
);
|
||||
|
||||
// When we encounter a circular reference, this map will hold a callback
|
||||
|
@ -91,7 +91,7 @@ export function createAssetEmitter<T, TOptions extends object>(
|
|||
const knownContexts = new CustomKeyMap<[LexicalTypeStackEntry, ContextState], ContextState>(
|
||||
([entry, context]) => {
|
||||
return `${entryId.getKey(entry)}-${contextId.getKey(context)}`;
|
||||
}
|
||||
},
|
||||
);
|
||||
|
||||
// The stack of types that the currently emitted type is lexically
|
||||
|
@ -149,7 +149,7 @@ export function createAssetEmitter<T, TOptions extends object>(
|
|||
const scope = currentScope();
|
||||
compilerAssert(
|
||||
scope,
|
||||
"Emit context must have a scope set in order to create declarations. Consider setting scope to a new source file's global scope in the `programContext` method of `TypeEmitter`."
|
||||
"Emit context must have a scope set in order to create declarations. Consider setting scope to a new source file's global scope in the `programContext` method of `TypeEmitter`.",
|
||||
);
|
||||
return new Declaration(name, scope, value);
|
||||
},
|
||||
|
@ -246,7 +246,7 @@ export function createAssetEmitter<T, TOptions extends object>(
|
|||
this,
|
||||
resolvedEntity,
|
||||
true,
|
||||
resolveReferenceCycle(typeChainSnapshot, entity, typeToEmitEntity as any)
|
||||
resolveReferenceCycle(typeChainSnapshot, entity, typeToEmitEntity as any),
|
||||
),
|
||||
});
|
||||
|
||||
|
@ -260,7 +260,7 @@ export function createAssetEmitter<T, TOptions extends object>(
|
|||
assetEmitter: AssetEmitter<T, TOptions>,
|
||||
entity: EmitEntity<T>,
|
||||
circular: boolean,
|
||||
cycle?: ReferenceCycle
|
||||
cycle?: ReferenceCycle,
|
||||
): EmitEntity<T> {
|
||||
let ref;
|
||||
const scope = currentScope();
|
||||
|
@ -273,11 +273,11 @@ export function createAssetEmitter<T, TOptions extends object>(
|
|||
}
|
||||
compilerAssert(
|
||||
scope,
|
||||
"Emit context must have a scope set in order to create references to declarations."
|
||||
"Emit context must have a scope set in order to create references to declarations.",
|
||||
);
|
||||
const { pathUp, pathDown, commonScope } = resolveDeclarationReferenceScope(
|
||||
entity,
|
||||
scope
|
||||
scope,
|
||||
);
|
||||
ref = typeEmitter.reference(entity, pathUp, pathDown, commonScope);
|
||||
}
|
||||
|
@ -291,13 +291,13 @@ export function createAssetEmitter<T, TOptions extends object>(
|
|||
// the target declaration is finished being emitted.
|
||||
compilerAssert(
|
||||
ref.kind !== "circular",
|
||||
"TypeEmitter `reference` returned circular emit"
|
||||
"TypeEmitter `reference` returned circular emit",
|
||||
);
|
||||
|
||||
// this could presumably be allowed if we want.
|
||||
compilerAssert(
|
||||
ref.kind === "none" || !(ref.value instanceof Placeholder),
|
||||
"TypeEmitter's `reference` method cannot return a placeholder."
|
||||
"TypeEmitter's `reference` method cannot return a placeholder.",
|
||||
);
|
||||
|
||||
switch (ref.kind) {
|
||||
|
@ -546,7 +546,7 @@ export function createAssetEmitter<T, TOptions extends object>(
|
|||
}
|
||||
|
||||
function isInternalMethod(
|
||||
method: TypeEmitterMethod
|
||||
method: TypeEmitterMethod,
|
||||
): method is Exclude<
|
||||
TypeEmitterMethod,
|
||||
| "interfaceDeclarationOperations"
|
||||
|
@ -577,7 +577,7 @@ export function createAssetEmitter<T, TOptions extends object>(
|
|||
*/
|
||||
function setContextForType<TMethod extends TypeEmitterMethod>(
|
||||
method: TMethod,
|
||||
args: Parameters<TypeEmitter<T, TOptions>[TMethod]>
|
||||
args: Parameters<TypeEmitter<T, TOptions>[TMethod]>,
|
||||
) {
|
||||
const type = args[0];
|
||||
let newTypeStack: LexicalTypeStackEntry[];
|
||||
|
@ -590,7 +590,7 @@ export function createAssetEmitter<T, TOptions extends object>(
|
|||
while (ns) {
|
||||
if (ns.name === "") break;
|
||||
newTypeStack.unshift(
|
||||
stackEntryInterner.intern({ method: "namespace", args: stackEntryInterner.intern([ns]) })
|
||||
stackEntryInterner.intern({ method: "namespace", args: stackEntryInterner.intern([ns]) }),
|
||||
);
|
||||
ns = ns.namespace;
|
||||
}
|
||||
|
@ -638,14 +638,14 @@ export function createAssetEmitter<T, TOptions extends object>(
|
|||
if (keyHasContext(entry.method)) {
|
||||
compilerAssert(
|
||||
(typeEmitter as any)[lexicalKey],
|
||||
`TypeEmitter doesn't have a method named ${lexicalKey}`
|
||||
`TypeEmitter doesn't have a method named ${lexicalKey}`,
|
||||
);
|
||||
}
|
||||
|
||||
if (keyHasReferenceContext(entry.method)) {
|
||||
compilerAssert(
|
||||
(typeEmitter as any)[referenceKey],
|
||||
`TypeEmitter doesn't have a method named ${referenceKey}`
|
||||
`TypeEmitter doesn't have a method named ${referenceKey}`,
|
||||
);
|
||||
}
|
||||
|
||||
|
@ -691,7 +691,7 @@ export function createAssetEmitter<T, TOptions extends object>(
|
|||
function withTypeContext<TMethod extends TypeEmitterMethod>(
|
||||
method: TMethod,
|
||||
args: Parameters<TypeEmitter<T, TOptions>[TMethod]>,
|
||||
cb: () => void
|
||||
cb: () => void,
|
||||
) {
|
||||
const oldContext = context;
|
||||
const oldTypeStack = lexicalTypeStack;
|
||||
|
@ -708,7 +708,7 @@ export function createAssetEmitter<T, TOptions extends object>(
|
|||
|
||||
function withPatchedReferenceContext<T>(
|
||||
referenceContext: Record<string, any> | undefined,
|
||||
cb: () => T
|
||||
cb: () => T,
|
||||
): T {
|
||||
if (referenceContext !== undefined) {
|
||||
const oldContext = context;
|
||||
|
@ -915,7 +915,7 @@ function keyHasReferenceContext(key: keyof TypeEmitter<any, any>): boolean {
|
|||
function resolveReferenceCycle(
|
||||
stack: ReferenceChainEntry[],
|
||||
entity: CircularEmit,
|
||||
typeToEmitEntity: CustomKeyMap<[string, Type, ContextState], EmitEntity<unknown>>
|
||||
typeToEmitEntity: CustomKeyMap<[string, Type, ContextState], EmitEntity<unknown>>,
|
||||
): ReferenceCycle {
|
||||
for (let i = stack.length - 1; i >= 0; i--) {
|
||||
if (stack[i].type === entity.emitEntityKey[1]) {
|
||||
|
@ -925,11 +925,11 @@ function resolveReferenceCycle(
|
|||
type: x.type,
|
||||
entity: typeToEmitEntity.get([x.method, x.type, x.context])!,
|
||||
};
|
||||
})
|
||||
}),
|
||||
);
|
||||
}
|
||||
}
|
||||
throw new Error(
|
||||
`Couldn't resolve the circular reference stack for ${getTypeName(entity.emitEntityKey[1])}`
|
||||
`Couldn't resolve the circular reference stack for ${getTypeName(entity.emitEntityKey[1])}`,
|
||||
);
|
||||
}
|
||||
@@ -14,7 +14,7 @@ export class ObjectBuilder<T> {
initializer:
| Record<string, unknown>
| Placeholder<Record<string, unknown>>
| ObjectBuilder<T> = {}
| ObjectBuilder<T> = {},
) {
const copyProperties = (source: Record<string, unknown>) => {
for (const [key, value] of Object.entries(source)) {

@@ -18,7 +18,7 @@ export function scopeChain<T>(scope: Scope<T> | null) {
*/
export function resolveDeclarationReferenceScope<T>(
target: Declaration<T>,
currentScope: Scope<T>
currentScope: Scope<T>,
) {
const targetScope = target.scope;
const targetChain = scopeChain(targetScope);

@ -724,7 +724,7 @@ export class TypeEmitter<T, TOptions extends object = Record<string, never>> {
|
|||
targetDeclaration: Declaration<T>,
|
||||
pathUp: Scope<T>[],
|
||||
pathDown: Scope<T>[],
|
||||
commonScope: Scope<T> | null
|
||||
commonScope: Scope<T> | null,
|
||||
): EmitEntity<T> | T {
|
||||
return this.emitter.result.none();
|
||||
}
|
||||
|
@ -739,11 +739,11 @@ export class TypeEmitter<T, TOptions extends object = Record<string, never>> {
|
|||
circularReference(
|
||||
target: EmitEntity<T>,
|
||||
scope: Scope<T> | undefined,
|
||||
cycle: ReferenceCycle
|
||||
cycle: ReferenceCycle,
|
||||
): EmitEntity<T> | T {
|
||||
if (!cycle.containsDeclaration) {
|
||||
throw new Error(
|
||||
`Circular references to non-declarations are not supported by this emitter. Cycle:\n${cycle}`
|
||||
`Circular references to non-declarations are not supported by this emitter. Cycle:\n${cycle}`,
|
||||
);
|
||||
}
|
||||
if (target.kind !== "declaration") {
|
||||
|
@ -751,7 +751,7 @@ export class TypeEmitter<T, TOptions extends object = Record<string, never>> {
|
|||
}
|
||||
compilerAssert(
|
||||
scope,
|
||||
"Emit context must have a scope set in order to create references to declarations."
|
||||
"Emit context must have a scope set in order to create references to declarations.",
|
||||
);
|
||||
const { pathUp, pathDown, commonScope } = resolveDeclarationReferenceScope(target, scope);
|
||||
return this.reference(target, pathUp, pathDown, commonScope);
|
||||
|
@ -760,7 +760,7 @@ export class TypeEmitter<T, TOptions extends object = Record<string, never>> {
|
|||
declarationName(declarationType: TypeSpecDeclaration): string | undefined {
|
||||
compilerAssert(
|
||||
declarationType.name !== undefined,
|
||||
"Can't emit a declaration that doesn't have a name."
|
||||
"Can't emit a declaration that doesn't have a name.",
|
||||
);
|
||||
|
||||
if (declarationType.kind === "Enum" || declarationType.kind === "Intrinsic") {
|
||||
|
@ -846,7 +846,7 @@ export class CodeTypeEmitter<TOptions extends object = Record<string, never>> ex
|
|||
for (const op of iface.operations.values()) {
|
||||
i++;
|
||||
builder.push(
|
||||
code`${this.emitter.emitInterfaceOperation(op)}${i < iface.operations.size ? "," : ""}`
|
||||
code`${this.emitter.emitInterfaceOperation(op)}${i < iface.operations.size ? "," : ""}`,
|
||||
);
|
||||
}
|
||||
return builder.reduce();
|
||||
|
@ -886,7 +886,7 @@ export class CodeTypeEmitter<TOptions extends object = Record<string, never>> ex
|
|||
targetDeclaration: Declaration<string>,
|
||||
pathUp: Scope<string>[],
|
||||
pathDown: Scope<string>[],
|
||||
commonScope: Scope<string> | null
|
||||
commonScope: Scope<string> | null,
|
||||
): string | EmitEntity<string> {
|
||||
const basePath = pathDown.map((s) => s.name).join(".");
|
||||
return basePath
|
||||
|
|
|
@ -114,7 +114,7 @@ export class Declaration<T> extends EmitterResult {
|
|||
constructor(
|
||||
public name: string,
|
||||
public scope: Scope<T>,
|
||||
public value: T | Placeholder<T>
|
||||
public value: T | Placeholder<T>,
|
||||
) {
|
||||
if (value instanceof Placeholder) {
|
||||
value.onValue((v) => (this.value = v));
|
||||
|
|
|
@@ -15,7 +15,7 @@ export function parse(text: string, options: ParserOptions<any>): TypeSpecScript
}
// Remove doc comments as those are handled directly.
mutate(result).comments = result.comments.filter(
(x) => !(x.kind === SyntaxKind.BlockComment && x.parsedAsDocs)
(x) => !(x.kind === SyntaxKind.BlockComment && x.parsedAsDocs),
);
return result;
}

@@ -23,11 +23,11 @@ export const commentHandler: Printer<Node>["handleComments"] = {
].some((x) => x({ comment, text, options, ast: ast as TypeSpecScriptNode, isLastComment })),
remaining: (comment, text, options, ast, isLastComment) =>
[handleOnlyComments].some((x) =>
x({ comment, text, options, ast: ast as TypeSpecScriptNode, isLastComment })
x({ comment, text, options, ast: ast as TypeSpecScriptNode, isLastComment }),
),
endOfLine: (comment, text, options, ast, isLastComment) =>
[handleOnlyComments].some((x) =>
x({ comment, text, options, ast: ast as TypeSpecScriptNode, isLastComment })
x({ comment, text, options, ast: ast as TypeSpecScriptNode, isLastComment }),
),
};

@@ -124,7 +124,7 @@ export function printTypeSpec(
print: PrettierChildPrint
print: PrettierChildPrint,

@@ -150,7 +150,7 @@ export function printNode(
print: PrettierChildPrint
print: PrettierChildPrint,

@@ -169,19 +169,19 @@ export function printNode(
return printOperationSignatureDeclaration(
print
print,
return printOperationSignatureReference(
print
print,
return printNamespaceStatement(
print
print,

@@ -244,7 +244,7 @@ export function printNode(
return printTemplateParameterDeclaration(
print
print,

@@ -252,19 +252,19 @@ export function printNode(
return printDecoratorDeclarationStatement(
print
print,
return printFunctionDeclarationStatement(
print
print,
return printFunctionParameterDeclaration(
print
print,

@@ -300,31 +300,31 @@ export function printNode(
return printProjectionParameterDeclaration(
print
print,
return printProjectionExpressionStatement(
print
print,
return printProjectionIfExpressionNode(
print
print,
return printProjectionBlockExpressionNode(
print
print,
return printProjectionMemberExpression(
print
print,

@@ -333,31 +333,31 @@ export function printNode(
return printProjectionLeftRightExpression(
print
print,
return printProjectionUnaryExpression(
print
print,
return printProjectionCallExpression(
print
print,
return printProjectionLambdaExpression(
print
print,
return printProjectionLambdaParameterDeclaration(
print
print,

@@ -383,7 +383,7 @@ export function printNode(
compilerAssert(
"Currently, doc comments are only handled as regular comments and we do not opt in to parsing them so we shouldn't reach here."
"Currently, doc comments are only handled as regular comments and we do not opt in to parsing them so we shouldn't reach here.",

@@ -392,7 +392,7 @@ export function printNode(
return printStringTemplateExpression(
print
print,

@@ -402,7 +402,7 @@ export function printNode(
return printObjectLiteralSpreadProperty(
print
print,

@@ -430,7 +430,7 @@ export function printNode(
export function printTypeSpecScript(
print: PrettierChildPrint
print: PrettierChildPrint,

@@ -445,7 +445,7 @@ export function printTypeSpecScript(
export function printAliasStatement(
print: PrettierChildPrint
print: PrettierChildPrint,

@@ -455,7 +455,7 @@ export function printAliasStatement(
export function printConstStatement(
print: PrettierChildPrint
print: PrettierChildPrint,

@@ -466,7 +466,7 @@ export function printConstStatement(
export function printCallExpression(
print: PrettierChildPrint
print: PrettierChildPrint,

@@ -476,7 +476,7 @@ function printTemplateParameters<T extends Node>(
propertyName: keyof T
propertyName: keyof T,

@@ -505,13 +505,13 @@ export function canAttachComment(node: Node): boolean {
!(node.flags & NodeFlags.Synthetic)
!(node.flags & NodeFlags.Synthetic),
export function printComment(
options: TypeSpecPrettierOptions
options: TypeSpecPrettierOptions,

@@ -554,8 +554,8 @@ function printIndentableBlockCommentContent(rawComment: string): Doc {
: " " + (index < lines.length - 1 ? line.trim() : line.trimStart())
)
: " " + (index < lines.length - 1 ? line.trim() : line.trimStart()),
),

@@ -564,7 +564,7 @@ function printIndentableBlockCommentContent(rawComment: string): Doc {
function printDoc(
print: PrettierChildPrint
print: PrettierChildPrint,

@@ -581,7 +581,7 @@ export function printDecorators(
{ tryInline }: { tryInline: boolean }
{ tryInline }: { tryInline: boolean },

@@ -601,7 +601,7 @@ export function printDecorators(
function shouldDecoratorBreakLine(
{ tryInline }: { tryInline: boolean }
{ tryInline }: { tryInline: boolean },

@@ -620,7 +620,7 @@ function hasNewlineBetweenOrAfterDecorators(node: DecorableNode, options: any) {
export function printDecorator(
print: PrettierChildPrint
print: PrettierChildPrint,

@@ -629,7 +629,7 @@ export function printDecorator(
export function printAugmentDecorator(
print: PrettierChildPrint
print: PrettierChildPrint,

@@ -638,7 +638,7 @@ export function printAugmentDecorator(
function printAugmentDecoratorArgs(
print: PrettierChildPrint
print: PrettierChildPrint,

@@ -647,7 +647,7 @@ function printAugmentDecoratorArgs(
])
]),

@@ -679,7 +679,7 @@ export function printDirectives(path: AstPath<Node>, options: object, print: Pre
export function printDirective(
print: PrettierChildPrint
print: PrettierChildPrint,

@@ -688,7 +688,7 @@ export function printDirective(
function printDecoratorArgs(
print: PrettierChildPrint
print: PrettierChildPrint,

@@ -701,7 +701,7 @@ function printDecoratorArgs(
function printCallLikeArgs(
print: PrettierChildPrint
print: PrettierChildPrint,

@@ -722,7 +722,7 @@ function printCallLikeArgs(
path.map((arg) => [print(arg)], "arguments")
path.map((arg) => [print(arg)], "arguments"),

@@ -734,8 +734,8 @@ function printCallLikeArgs(
path.map((arg) => [softline, print(arg)], "arguments")
)
path.map((arg) => [softline, print(arg)], "arguments"),
),

@@ -746,7 +746,7 @@ function printCallLikeArgs(
export function printDirectiveArgs(
print: PrettierChildPrint
print: PrettierChildPrint,

@@ -756,14 +756,14 @@ export function printDirectiveArgs(
path.map((arg) => [print(arg)], "arguments")
path.map((arg) => [print(arg)], "arguments"),
export function printEnumStatement(
print: PrettierChildPrint
print: PrettierChildPrint,

@@ -773,7 +773,7 @@ export function printEnumStatement(
function printEnumBlock(
print: PrettierChildPrint
print: PrettierChildPrint,

@@ -787,7 +787,7 @@ function printEnumBlock(
export function printEnumMember(
print: PrettierChildPrint
print: PrettierChildPrint,

@@ -801,7 +801,7 @@ export function printEnumMember(
function printEnumSpreadMember(
print: PrettierChildPrint
print: PrettierChildPrint,

@@ -809,7 +809,7 @@ function printEnumSpreadMember(
export function printUnionStatement(
print: PrettierChildPrint
print: PrettierChildPrint,

@@ -820,7 +820,7 @@ export function printUnionStatement(
export function printUnionVariantsBlock(
print: PrettierChildPrint
print: PrettierChildPrint,

@@ -834,7 +834,7 @@ export function printUnionVariantsBlock(
export function printUnionVariant(
print: PrettierChildPrint
print: PrettierChildPrint,

@@ -846,7 +846,7 @@ export function printUnionVariant(
export function printInterfaceStatement(
print: PrettierChildPrint
print: PrettierChildPrint,

@@ -867,7 +867,7 @@ export function printInterfaceStatement(
function printInterfaceExtends(
print: PrettierChildPrint
print: PrettierChildPrint,

@@ -881,7 +881,7 @@ function printInterfaceExtends(
export function printInterfaceMembers(
print: PrettierChildPrint
print: PrettierChildPrint,

@@ -919,7 +919,7 @@ export function printInterfaceMembers(
function printDanglingComments(
{ sameIndent }: { sameIndent: boolean }
{ sameIndent }: { sameIndent: boolean },

@@ -955,7 +955,7 @@ function printDanglingComments(
export function printIntersection(
print: PrettierChildPrint
print: PrettierChildPrint,

@@ -988,7 +988,7 @@ function isModelNode(node: Node) {
export function printArray(
print: PrettierChildPrint
print: PrettierChildPrint,

@@ -996,15 +996,15 @@ export function printArray(
export function printTuple(
print: PrettierChildPrint
print: PrettierChildPrint,
path.map((arg) => [softline, print(arg)], "values")
)
path.map((arg) => [softline, print(arg)], "values"),
),

@@ -1014,7 +1014,7 @@ export function printTuple(
export function printMemberExpression(
print: PrettierChildPrint
print: PrettierChildPrint,

@@ -1024,7 +1024,7 @@ export function printMemberExpression(
export function printModelExpression(
print: PrettierChildPrint
print: PrettierChildPrint,

@@ -1035,7 +1035,7 @@ export function printModelExpression(
joinMembersInBlock(path, "properties", options, print, ifBreak(",", ", "), softline)
joinMembersInBlock(path, "properties", options, print, ifBreak(",", ", "), softline),

@@ -1044,7 +1044,7 @@ export function printModelExpression(
export function printObjectLiteral(
print: PrettierChildPrint
print: PrettierChildPrint,

@@ -1065,7 +1065,7 @@ export function printObjectLiteral(
export function printObjectLiteralProperty(
print: PrettierChildPrint
print: PrettierChildPrint,

@@ -1075,7 +1075,7 @@ export function printObjectLiteralProperty(
export function printObjectLiteralSpreadProperty(
print: PrettierChildPrint
print: PrettierChildPrint,

@@ -1083,15 +1083,15 @@ export function printObjectLiteralSpreadProperty(
export function printArrayLiteral(
print: PrettierChildPrint
print: PrettierChildPrint,
path.map((arg) => [softline, print(arg)], "values")
)
path.map((arg) => [softline, print(arg)], "values"),
),

@@ -1101,7 +1101,7 @@ export function printArrayLiteral(
export function printModelStatement(
print: PrettierChildPrint
print: PrettierChildPrint,

@@ -1135,7 +1135,7 @@ function printModelPropertiesBlock(
print: PrettierChildPrint
print: PrettierChildPrint,

@@ -1169,7 +1169,7 @@ function joinMembersInBlock<T extends Node>(
regularLine: Doc = hardline
regularLine: Doc = hardline,

@@ -1217,7 +1217,7 @@ function shouldWrapMemberInNewLines(
options: any
options: any,

@@ -1258,7 +1258,7 @@ function isModelAValue(path: AstPath<Node>): boolean {
export function printModelProperty(
print: PrettierChildPrint
print: PrettierChildPrint,

@@ -1280,7 +1280,7 @@ function printIdentifier(id: IdentifierNode, options: TypeSpecPrettierOptions) {
function isModelExpressionInBlock(
path: AstPath<ModelExpressionNode | ProjectionModelExpressionNode>
path: AstPath<ModelExpressionNode | ProjectionModelExpressionNode>,

@@ -1295,7 +1295,7 @@ function isModelExpressionInBlock(
function printScalarStatement(
print: PrettierChildPrint
print: PrettierChildPrint,

@@ -1321,7 +1321,7 @@ function printScalarStatement(
function printScalarBody(
print: PrettierChildPrint
print: PrettierChildPrint,

@@ -1339,7 +1339,7 @@ function printScalarBody(
function printScalarConstructor(
print: PrettierChildPrint
print: PrettierChildPrint,

@@ -1347,8 +1347,8 @@ function printScalarConstructor(
path.map((arg) => [softline, print(arg)], "parameters")
)
path.map((arg) => [softline, print(arg)], "parameters"),
),

@@ -1359,7 +1359,7 @@ function printScalarConstructor(
export function printNamespaceStatement(
print: PrettierChildPrint
print: PrettierChildPrint,

@@ -1381,7 +1381,7 @@ export function printNamespaceStatement(
export function printOperationSignatureDeclaration(
print: PrettierChildPrint
print: PrettierChildPrint,

@@ -1389,7 +1389,7 @@ export function printOperationSignatureDeclaration(
export function printOperationSignatureReference(
print: PrettierChildPrint
print: PrettierChildPrint,

@@ -1397,7 +1397,7 @@ export function printOperationSignatureReference(
export function printOperationStatement(
print: PrettierChildPrint
print: PrettierChildPrint,

@@ -1419,7 +1419,7 @@ export function printStatementSequence<T extends Node>(
property: keyof T
property: keyof T,

@@ -1460,7 +1460,7 @@ function getLastStatement(statements: Statement[]): Statement | undefined {
export function printUnion(
print: PrettierChildPrint
print: PrettierChildPrint,

@@ -1492,7 +1492,7 @@ function shouldHugType(node: Node) {
export function printTypeReference(
print: PrettierChildPrint
print: PrettierChildPrint,

@@ -1502,7 +1502,7 @@ export function printTypeReference(
export function printTemplateArgument(
print: PrettierChildPrint
print: PrettierChildPrint,

@@ -1517,7 +1517,7 @@ export function printTemplateArgument(
export function printValueOfExpression(
print: PrettierChildPrint
print: PrettierChildPrint,

@@ -1525,7 +1525,7 @@ export function printValueOfExpression(
export function printTypeOfExpression(
print: PrettierChildPrint
print: PrettierChildPrint,

@@ -1534,7 +1534,7 @@ export function printTypeOfExpression(
function printTemplateParameterDeclaration(
print: PrettierChildPrint
print: PrettierChildPrint,

@@ -1547,7 +1547,7 @@ function printTemplateParameterDeclaration(
function printModelSpread(
print: PrettierChildPrint
print: PrettierChildPrint,

@@ -1555,7 +1555,7 @@ function printModelSpread(
function printDecoratorDeclarationStatement(
print: PrettierChildPrint
print: PrettierChildPrint,

@@ -1564,7 +1564,7 @@ function printDecoratorDeclarationStatement(
])
]),

@@ -1575,7 +1575,7 @@ function printDecoratorDeclarationStatement(
function printFunctionDeclarationStatement(
print: PrettierChildPrint
print: PrettierChildPrint,

@@ -1584,8 +1584,8 @@ function printFunctionDeclarationStatement(
path.map((arg) => [softline, print(arg)], "parameters")
)
path.map((arg) => [softline, print(arg)], "parameters"),
),

@@ -1597,7 +1597,7 @@ function printFunctionDeclarationStatement(
function printFunctionParameterDeclaration(
print: PrettierChildPrint
print: PrettierChildPrint,

@@ -1616,7 +1616,7 @@ function printFunctionParameterDeclaration(
export function printModifiers(
print: PrettierChildPrint
print: PrettierChildPrint,

@@ -1628,7 +1628,7 @@ export function printModifiers(
function printStringLiteral(
options: TypeSpecPrettierOptions
options: TypeSpecPrettierOptions,

@@ -1646,7 +1646,7 @@ function printStringLiteral(
function isMultiline(
options: TypeSpecPrettierOptions
options: TypeSpecPrettierOptions,

@@ -1657,7 +1657,7 @@ function isMultiline(
function printNumberLiteral(
options: TypeSpecPrettierOptions
options: TypeSpecPrettierOptions,

@@ -1665,7 +1665,7 @@ function printNumberLiteral(
function printBooleanLiteral(
options: TypeSpecPrettierOptions
options: TypeSpecPrettierOptions,

@@ -1674,7 +1674,7 @@ function printBooleanLiteral(
function printProjectionStatement(
print: PrettierChildPrint
print: PrettierChildPrint,

@@ -1694,7 +1694,7 @@ function printProjectionStatement(
function printProjection(
print: PrettierChildPrint
print: PrettierChildPrint,

@@ -1713,7 +1713,7 @@ function printProjection(
function printProjectionParameters(
print: PrettierChildPrint
print: PrettierChildPrint,

@@ -1734,7 +1734,7 @@ function printProjectionExpressionStatements<T extends Node>(
key: keyof T
key: keyof T,

@@ -1762,7 +1762,7 @@ function printProjectionExpressionStatements<T extends Node>(
function printProjectionParameterDeclaration(
print: PrettierChildPrint
print: PrettierChildPrint,

@@ -1770,14 +1770,14 @@ function printProjectionParameterDeclaration(
function printProjectionExpressionStatement(
print: PrettierChildPrint
print: PrettierChildPrint,
function printProjectionIfExpressionNode(
print: PrettierChildPrint
print: PrettierChildPrint,

@@ -1789,7 +1789,7 @@ function printProjectionIfExpressionNode(
export function printProjectionBlockExpressionNode(
print: PrettierChildPrint
print: PrettierChildPrint,

@@ -1806,7 +1806,7 @@ export function printProjectionBlockExpressionNode(
export function printProjectionMemberExpression(
print: PrettierChildPrint
print: PrettierChildPrint,

@@ -1820,7 +1820,7 @@ export function printProjectionLeftRightExpression(
print: PrettierChildPrint
print: PrettierChildPrint,

@@ -1829,7 +1829,7 @@ export function printProjectionLeftRightExpression(
export function printProjectionUnaryExpression(
print: PrettierChildPrint
print: PrettierChildPrint,

@@ -1837,7 +1837,7 @@ export function printProjectionUnaryExpression(
export function printProjectionCallExpression(
print: PrettierChildPrint
print: PrettierChildPrint,

@@ -1853,7 +1853,7 @@ export function printProjectionCallExpression(
export function printProjectionLambdaExpression(
print: PrettierChildPrint
print: PrettierChildPrint,

@@ -1867,7 +1867,7 @@ export function printProjectionLambdaExpression(
export function printProjectionLambdaParameterDeclaration(
print: PrettierChildPrint
print: PrettierChildPrint,

@@ -1875,7 +1875,7 @@ export function printProjectionLambdaParameterDeclaration(
export function printReturnExpression(
print: PrettierChildPrint
print: PrettierChildPrint,

@@ -1883,7 +1883,7 @@ export function printReturnExpression(
export function printStringTemplateExpression(
print: PrettierChildPrint
print: PrettierChildPrint,

@@ -1965,7 +1965,7 @@ function printItemList<T extends Node>(
key: keyof T
key: keyof T,
@@ -20,7 +20,7 @@ export interface InitTypeSpecProjectOptions {
export async function initTypeSpecProject(
host: CompilerHost,
directory: string,
options: InitTypeSpecProjectOptions = {}
options: InitTypeSpecProjectOptions = {},
) {
if (!(await confirmDirectoryEmpty(directory))) {
return;

@@ -118,7 +118,7 @@ async function confirmDirectoryEmpty(directory: string) {
}

return confirm(
`Folder '${directory}' is not empty. Are you sure you want to initialize a new project here?`
`Folder '${directory}' is not empty. Are you sure you want to initialize a new project here?`,
);
}

@@ -218,7 +218,7 @@ async function validateTemplate(template: any, loaded: LoadedTemplate): Promise<
const confirmationMessage = `The template you selected is designed for tsp version ${template.compilerVersion}. You are currently using tsp version ${currentCompilerVersion}.`;
if (
await confirm(
`${confirmationMessage} The project created may not be correct. Do you want to continue?`
`${confirmationMessage} The project created may not be correct. Do you want to continue?`,
)
) {
// 2.1 If user choose to continue, proceed with relaxed validation

@@ -233,7 +233,7 @@ async function validateTemplate(template: any, loaded: LoadedTemplate): Promise<
logDiagnostics(validationResult.diagnostics);

return await confirm(
"Template schema failed. The project created may not be correct. Do you want to continue?"
"Template schema failed. The project created may not be correct. Do you want to continue?",
);
}
return true;

@@ -281,7 +281,7 @@ export class InitTemplateError extends Error {
function validateTemplateDefinitions(
template: unknown,
templateName: SourceFile,
strictValidation: boolean
strictValidation: boolean,
): ValidationResult {
const validator = createJSONSchemaValidator(InitTemplateSchema, {
strict: strictValidation,
@@ -62,7 +62,7 @@ export function normalizeLibrary(library: InitTemplateLibrary): InitTemplateLibr

export function makeScaffoldingConfig(
template: InitTemplate,
config: Partial<ScaffoldingConfig>
config: Partial<ScaffoldingConfig>,
): ScaffoldingConfig {
return {
template,

@@ -128,7 +128,7 @@ async function writePackageJson(host: CompilerHost, config: ScaffoldingConfig) {

return host.writeFile(
joinPaths(config.directory, "package.json"),
JSON.stringify(packageJson, null, 2)
JSON.stringify(packageJson, null, 2),
);
}

@@ -209,7 +209,7 @@ async function writeFile(
host: CompilerHost,
config: ScaffoldingConfig,
context: FileTemplatingContext,
file: InitTemplateFile
file: InitTemplateFile,
) {
const baseDir = config.baseUri + "/";
const template = await readUrlOrPath(host, resolveRelativeUrlOrPath(baseDir, file.path));
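The same comma is appended to every multiline parameter list in the remaining hunks. A minimal sketch of reproducing the effect with Prettier's Node API (hypothetical snippet, not part of this repository; assumes Prettier 3.x is installed, where trailing commas in all positions are the default):

import * as prettier from "prettier";

// Hypothetical input: a multiline parameter list without a trailing comma.
const source = `function demo(
  firstParameterWithALongName: string,
  secondParameterWithALongName: string
) {}
`;

async function main() {
  // Reprinting with trailingComma: "all" adds a comma after the last
  // parameter whenever the list stays multiline, matching this diff.
  const formatted = await prettier.format(source, {
    parser: "typescript",
    trailingComma: "all",
  });
  console.log(formatted);
}

main().catch(console.error);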
@@ -143,7 +143,7 @@ export const $summary: SummaryDecorator = (
sourceObject?: Type
sourceObject?: Type,

@@ -168,7 +168,7 @@ export const $doc: DocDecorator = (
sourceObject?: Type
sourceObject?: Type,

@@ -190,7 +190,7 @@ export function getDoc(program: Program, target: Type): string | undefined {
export const $returnsDoc: ReturnsDocDecorator = (
text: string
text: string,

@@ -219,7 +219,7 @@ export function getReturnsDoc(program: Program, target: Operation): string | und
export const $errorsDoc: ErrorsDocDecorator = (
text: string
text: string,

@@ -292,7 +292,7 @@ function isTypeIn(type: Type, condition: (type: Type) => boolean): boolean {
function validateTargetingANumeric(
decoratorName: string
decoratorName: string,

@@ -314,7 +314,7 @@ function validateTargetingANumeric(
function validateTargetingAString(
decoratorName: string
decoratorName: string,

@@ -380,7 +380,7 @@ const formatValuesKey = createStateSymbol("formatValues");
export const $format: FormatDecorator = (
format: string
format: string,

@@ -392,7 +392,7 @@ export const $format: FormatDecorator = (
reportDeprecated(
target
target,

@@ -416,7 +416,7 @@ export const $pattern: PatternDecorator = (
validationMessage?: string
validationMessage?: string,

@@ -462,7 +462,7 @@ export function getPatternData(program: Program, target: Type): PatternData | un
export const $minLength: MinLengthDecorator = (
minLength: Numeric
minLength: Numeric,

@@ -480,7 +480,7 @@ export const $minLength: MinLengthDecorator = (
export const $maxLength: MaxLengthDecorator = (
maxLength: Numeric
maxLength: Numeric,

@@ -499,7 +499,7 @@ export const $maxLength: MaxLengthDecorator = (
export const $minItems: MinItemsDecorator = (
minItems: Numeric
minItems: Numeric,

@@ -526,7 +526,7 @@ export const $minItems: MinItemsDecorator = (
export const $maxItems: MaxItemsDecorator = (
maxItems: Numeric
maxItems: Numeric,

@@ -552,7 +552,7 @@ export const $maxItems: MaxItemsDecorator = (
export const $minValue: MinValueDecorator = (
minValue: Numeric
minValue: Numeric,

@@ -567,7 +567,7 @@ export const $minValue: MinValueDecorator = (
getMaxValueExclusiveAsNumeric(context.program, target)
getMaxValueExclusiveAsNumeric(context.program, target),

@@ -580,7 +580,7 @@ export const $minValue: MinValueDecorator = (
export const $maxValue: MaxValueDecorator = (
maxValue: Numeric
maxValue: Numeric,

@@ -594,7 +594,7 @@ export const $maxValue: MaxValueDecorator = (
maxValue
maxValue,

@@ -607,7 +607,7 @@ export const $maxValue: MaxValueDecorator = (
export const $minValueExclusive: MinValueExclusiveDecorator = (
minValueExclusive: Numeric
minValueExclusive: Numeric,

@@ -622,7 +622,7 @@ export const $minValueExclusive: MinValueExclusiveDecorator = (
getMaxValueExclusiveAsNumeric(context.program, target)
getMaxValueExclusiveAsNumeric(context.program, target),

@@ -635,7 +635,7 @@ export const $minValueExclusive: MinValueExclusiveDecorator = (
export const $maxValueExclusive: MaxValueExclusiveDecorator = (
maxValueExclusive: Numeric
maxValueExclusive: Numeric,

@@ -649,7 +649,7 @@ export const $maxValueExclusive: MaxValueExclusiveDecorator = (
maxValueExclusive
maxValueExclusive,

@@ -667,7 +667,7 @@ const secretTypesKey = createStateSymbol("secretTypes");
export const $secret: SecretDecorator = (
target: Scalar | ModelProperty
target: Scalar | ModelProperty,

@@ -699,7 +699,7 @@ export const $encode: EncodeDecorator = (
encodeAs?: Scalar
encodeAs?: Scalar,

@@ -715,7 +715,7 @@ export const $encode: EncodeDecorator = (
function computeEncoding(
encodeAs: Scalar | undefined
encodeAs: Scalar | undefined,

@@ -749,9 +749,9 @@ function validateEncodeData(context: DecoratorContext, target: Type, encodeData:
checker.isTypeAssignableTo(type, checker.getStdType(validTarget), target)
checker.isTypeAssignableTo(type, checker.getStdType(validTarget), target),
})
}),

@@ -771,8 +771,8 @@ function validateEncodeData(context: DecoratorContext, target: Type, encodeData:
target
)
target,
),

@@ -814,7 +814,7 @@ function validateEncodeData(context: DecoratorContext, target: Type, encodeData:
export function getEncode(
target: Scalar | ModelProperty
target: Scalar | ModelProperty,

@@ -853,7 +853,7 @@ export const $withVisibility: WithVisibilityDecorator = (
export function isVisible(
visibilities: readonly string[]
visibilities: readonly string[],

@@ -871,7 +871,7 @@ function filterModelPropertiesInPlace(model: Model, filter: (prop: ModelProperty
export const $withOptionalProperties: WithOptionalPropertiesDecorator = (
target: Model
target: Model,

@@ -881,7 +881,7 @@ export const $withOptionalProperties: WithOptionalPropertiesDecorator = (
export const $withUpdateableProperties: WithUpdateablePropertiesDecorator = (
target: Type
target: Type,

@@ -895,7 +895,7 @@ export const $withUpdateableProperties: WithUpdateablePropertiesDecorator = (
export const $withoutOmittedProperties: WithoutOmittedPropertiesDecorator = (
omitProperties: Type
omitProperties: Type,

@@ -918,7 +918,7 @@ export const $withoutOmittedProperties: WithoutOmittedPropertiesDecorator = (
export const $withPickedProperties: WithPickedPropertiesDecorator = (
pickedProperties: Type
pickedProperties: Type,

@@ -940,7 +940,7 @@ export const $withPickedProperties: WithPickedPropertiesDecorator = (
export const $withoutDefaultValues: WithoutDefaultValuesDecorator = (
target: Model
target: Model,

@@ -960,7 +960,7 @@ const listPropertiesKey = createStateSymbol("listProperties");
export const $list: ListDecorator = (
listedType?: Type
listedType?: Type,

@@ -1000,7 +1000,7 @@ const tagPropertiesKey = createStateSymbol("tagProperties");
export const $tag: TagDecorator = (
tag: string
tag: string,

@@ -1019,7 +1019,7 @@ export function getTags(program: Program, target: Type): string[] {
export function getAllTags(
target: Namespace | Interface | Operation
target: Namespace | Interface | Operation,

@@ -1049,7 +1049,7 @@ export const $friendlyName: FriendlyNameDecorator = (
sourceObject: Type | undefined
sourceObject: Type | undefined,

@@ -1058,8 +1058,8 @@ export const $friendlyName: FriendlyNameDecorator = (
(context.decoratorTarget as AugmentDecoratorStatementNode).targetType
)
(context.decoratorTarget as AugmentDecoratorStatementNode).targetType,
),

@@ -1099,7 +1099,7 @@ const knownValuesKey = createStateSymbol("knownValues");
export const $knownValues = (
knownValues: Enum
knownValues: Enum,

@@ -1108,7 +1108,7 @@ export const $knownValues = (
})
}),

@@ -1159,7 +1159,7 @@ const keyKey = createStateSymbol("key");
export const $key: KeyDecorator = (
altName?: string
altName?: string,

@@ -1187,7 +1187,7 @@ export function getKeyName(program: Program, property: ModelProperty): string {
export const $withDefaultKeyVisibility: WithDefaultKeyVisibilityDecorator = (
visibility: string
visibility: string,

@@ -1212,7 +1212,7 @@ export const $withDefaultKeyVisibility: WithDefaultKeyVisibilityDecorator = (
})
}),

@@ -1231,7 +1231,7 @@ export const $withDefaultKeyVisibility: WithDefaultKeyVisibilityDecorator = (
export const $deprecated: DeprecatedDecorator = (
message: string
message: string,

@@ -1257,20 +1257,20 @@ const overloadsOperationKey = createStateSymbol("overloadsOperation");
export const $overload: OverloadDecorator = (
overloadBase: Operation
overloadBase: Operation,
const [paramValid, paramDiagnostics] = context.program.checker.isTypeAssignableTo(
target
target,
const [returnTypeValid, returnTypeDiagnostics] = context.program.checker.isTypeAssignableTo(
target
target,

@@ -1298,7 +1298,7 @@ function areOperationsInSameContainer(op1: Operation, op2: Operation): boolean {
function equalsWithoutProjection(
interface2: Interface | undefined
interface2: Interface | undefined,

@@ -1328,7 +1328,7 @@ export function getOverloads(program: Program, operation: Operation): Operation[
export function getOverloadedOperation(
operation: Operation
operation: Operation,

@@ -1346,7 +1346,7 @@ export const $projectedName: ProjectedNameDecorator = (
projectedName: string
projectedName: string,

@@ -1363,7 +1363,7 @@ export const $projectedName: ProjectedNameDecorator = (
export function getProjectedNames(
target: Type
target: Type,

@@ -1377,7 +1377,7 @@ export function getProjectedNames(
export function getProjectedName(
projectionName: string
projectionName: string,

@@ -1395,7 +1395,7 @@ export function hasProjectedName(program: Program, target: Type, projectionName:
function validateRange(
max: Numeric | undefined
max: Numeric | undefined,

@@ -1414,7 +1414,7 @@ function validateRange(
export const $discriminator: DiscriminatorDecorator = (
|
||||
context: DecoratorContext,
|
||||
entity: Model | Union,
|
||||
propertyName: string
|
||||
propertyName: string,
|
||||
) => {
|
||||
const discriminator: Discriminator = { propertyName };
|
||||
|
||||
|
@ -1482,10 +1482,10 @@ export const $example: ExampleDecorator = (
|
|||
context: DecoratorContext,
|
||||
target: Model | Scalar | Enum | Union | ModelProperty | UnionVariant,
|
||||
_example: unknown,
|
||||
options?: ExampleOptions
|
||||
options?: ExampleOptions,
|
||||
) => {
|
||||
const decorator = target.decorators.find(
|
||||
(d) => d.decorator === $example && d.node === context.decoratorTarget
|
||||
(d) => d.decorator === $example && d.node === context.decoratorTarget,
|
||||
);
|
||||
compilerAssert(decorator, `Couldn't find @example decorator`, context.decoratorTarget);
|
||||
const rawExample = decorator.args[0].value as Value;
|
||||
|
@ -1496,7 +1496,7 @@ export const $example: ExampleDecorator = (
|
|||
context.program,
|
||||
rawExample,
|
||||
target.kind === "ModelProperty" ? target.type : target,
|
||||
context.getArgumentTarget(0)!
|
||||
context.getArgumentTarget(0)!,
|
||||
)
|
||||
) {
|
||||
return;
|
||||
|
@ -1513,7 +1513,7 @@ export const $example: ExampleDecorator = (
|
|||
|
||||
export function getExamples(
|
||||
program: Program,
|
||||
target: Model | Scalar | Enum | Union | ModelProperty
|
||||
target: Model | Scalar | Enum | Union | ModelProperty,
|
||||
): readonly Example[] {
|
||||
return program.stateMap(exampleKey).get(target) ?? [];
|
||||
}
|
||||
|
@ -1523,10 +1523,10 @@ export const $opExample: OpExampleDecorator = (
|
|||
context: DecoratorContext,
|
||||
target: Operation,
|
||||
_example: unknown,
|
||||
options?: unknown // TODO: change `options?: ExampleOptions` when tspd supports it
|
||||
options?: unknown, // TODO: change `options?: ExampleOptions` when tspd supports it
|
||||
) => {
|
||||
const decorator = target.decorators.find(
|
||||
(d) => d.decorator === $opExample && d.node === context.decoratorTarget
|
||||
(d) => d.decorator === $opExample && d.node === context.decoratorTarget,
|
||||
);
|
||||
compilerAssert(decorator, `Couldn't find @opExample decorator`, context.decoratorTarget);
|
||||
const rawExampleConfig = decorator.args[0].value as ObjectValue;
|
||||
|
@ -1541,7 +1541,7 @@ export const $opExample: OpExampleDecorator = (
|
|||
context.program,
|
||||
parameters,
|
||||
target.parameters,
|
||||
context.getArgumentTarget(0)!
|
||||
context.getArgumentTarget(0)!,
|
||||
)
|
||||
) {
|
||||
return;
|
||||
|
@ -1552,7 +1552,7 @@ export const $opExample: OpExampleDecorator = (
|
|||
context.program,
|
||||
returnType,
|
||||
target.returnType,
|
||||
context.getArgumentTarget(0)!
|
||||
context.getArgumentTarget(0)!,
|
||||
)
|
||||
) {
|
||||
return;
|
||||
|
@ -1571,13 +1571,13 @@ function checkExampleValid(
|
|||
program: Program,
|
||||
value: Value,
|
||||
target: Type,
|
||||
diagnosticTarget: DiagnosticTarget
|
||||
diagnosticTarget: DiagnosticTarget,
|
||||
): boolean {
|
||||
const exactType = program.checker.getValueExactType(value);
|
||||
const [assignable, diagnostics] = program.checker.isTypeAssignableTo(
|
||||
exactType ?? value.type,
|
||||
target,
|
||||
diagnosticTarget
|
||||
diagnosticTarget,
|
||||
);
|
||||
if (!assignable) {
|
||||
program.reportDiagnostics(diagnostics);
|
||||
|
|
|
@ -14,7 +14,7 @@ export function $encodedName(
|
|||
context: DecoratorContext,
|
||||
target: Type,
|
||||
mimeType: string,
|
||||
name: string
|
||||
name: string,
|
||||
) {
|
||||
let existing = context.program.stateMap(encodedNameKey).get(target);
|
||||
if (existing === undefined) {
|
||||
|
@ -76,7 +76,7 @@ function getEncodedName(program: Program, target: Type, mimeType: string): strin
|
|||
export function resolveEncodedName(
|
||||
program: Program,
|
||||
target: Type & { name: string },
|
||||
mimeType: string
|
||||
mimeType: string,
|
||||
): string {
|
||||
return getEncodedName(program, target, mimeType) ?? target.name;
|
||||
}
|
||||
|
|
|
@ -20,7 +20,7 @@ export function serializeValueAsJson(
|
|||
program: Program,
|
||||
value: Value,
|
||||
type: Type,
|
||||
encodeAs?: EncodeData
|
||||
encodeAs?: EncodeData,
|
||||
): unknown {
|
||||
if (type.kind === "ModelProperty") {
|
||||
return serializeValueAsJson(program, value, type.type, encodeAs ?? getEncode(program, type));
|
||||
|
@ -42,8 +42,8 @@ export function serializeValueAsJson(
|
|||
v,
|
||||
type.kind === "Model" && isArrayModelType(program, type)
|
||||
? type.indexer.value
|
||||
: program.checker.anyType
|
||||
)
|
||||
: program.checker.anyType,
|
||||
),
|
||||
);
|
||||
case "ObjectValue":
|
||||
return serializeObjectValueAsJson(program, value, type);
|
||||
|
@ -79,8 +79,8 @@ function resolveUnions(program: Program, value: ObjectValue, type: Type): Type |
|
|||
program.checker.isTypeAssignableTo(
|
||||
value,
|
||||
{ entityKind: "MixedParameterConstraint", valueType: variant.type },
|
||||
value
|
||||
)
|
||||
value,
|
||||
),
|
||||
)
|
||||
) {
|
||||
return variant.type;
|
||||
|
@ -92,7 +92,7 @@ function resolveUnions(program: Program, value: ObjectValue, type: Type): Type |
|
|||
function serializeObjectValueAsJson(
|
||||
program: Program,
|
||||
value: ObjectValue,
|
||||
type: Type
|
||||
type: Type,
|
||||
): Record<string, unknown> {
|
||||
type = resolveUnions(program, value, type) ?? type;
|
||||
const obj: Record<string, unknown> = {};
|
||||
|
@ -107,7 +107,7 @@ function serializeObjectValueAsJson(
|
|||
|
||||
function resolveKnownScalar(
|
||||
program: Program,
|
||||
scalar: Scalar
|
||||
scalar: Scalar,
|
||||
):
|
||||
| {
|
||||
scalar: Scalar & {
|
||||
|
@ -141,7 +141,7 @@ function serializeScalarValueAsJson(
|
|||
program: Program,
|
||||
value: ScalarValue,
|
||||
type: Type,
|
||||
encodeAs: EncodeData | undefined
|
||||
encodeAs: EncodeData | undefined,
|
||||
): unknown {
|
||||
const result = resolveKnownScalar(program, value.scalar);
|
||||
if (result === undefined) {
|
||||
|
|
|
@ -21,7 +21,7 @@ export const $docFromComment = (
|
|||
context: DecoratorContext,
|
||||
target: Type,
|
||||
key: DocTarget,
|
||||
text: string
|
||||
text: string,
|
||||
) => {
|
||||
setDocData(context.program, target, key, { value: text, source: "comment" });
|
||||
};
|
||||
|
|
|
@ -58,7 +58,7 @@ export function isService(program: Program, namespace: Namespace): boolean {
|
|||
export function addService(
|
||||
program: Program,
|
||||
namespace: Namespace,
|
||||
details: ServiceDetails = {}
|
||||
details: ServiceDetails = {},
|
||||
): void {
|
||||
const serviceMap = getServiceMap(program);
|
||||
const existing = serviceMap.get(namespace) ?? {};
|
||||
|
@ -68,7 +68,7 @@ export function addService(
|
|||
export const $service: ServiceDecorator = (
|
||||
context: DecoratorContext,
|
||||
target: Namespace,
|
||||
options?: Type
|
||||
options?: Type,
|
||||
) => {
|
||||
validateDecoratorUniqueOnNode(context, target, $service);
|
||||
|
||||
|
@ -99,7 +99,7 @@ export const $service: ServiceDecorator = (
|
|||
reportDeprecated(
|
||||
context.program,
|
||||
"version: property is deprecated in @service. If wanting to describe a service versioning you can use the `@typespec/versioning` library. If wanting to describe the project version you can use the package.json version.",
|
||||
versionProp
|
||||
versionProp,
|
||||
);
|
||||
if (version.kind === "String") {
|
||||
// eslint-disable-next-line @typescript-eslint/no-deprecated
|
||||
|
|
|
@ -22,7 +22,7 @@ export async function runScript(relativePath: string, backupPath: string): Promi
|
|||
await import(scriptUrl);
|
||||
} else {
|
||||
throw new Error(
|
||||
"Couldn't resolve TypeSpec compiler root. This is unexpected. Please file an issue at https://github.com/microsoft/typespec."
|
||||
"Couldn't resolve TypeSpec compiler root. This is unexpected. Please file an issue at https://github.com/microsoft/typespec.",
|
||||
);
|
||||
}
|
||||
}
|
||||
|
@ -49,7 +49,7 @@ async function resolvePackageRoot(): Promise<string> {
|
|||
});
|
||||
if (resolved.type !== "module") {
|
||||
throw new Error(
|
||||
`Error resolving "@typespec/compiler", expected to find a node module but found a file: "${resolved.path}".`
|
||||
`Error resolving "@typespec/compiler", expected to find a node module but found a file: "${resolved.path}".`,
|
||||
);
|
||||
}
|
||||
return resolved.path;
|
||||
|
|
|
@ -93,7 +93,7 @@ export function getSemanticTokens(ast: TypeSpecScriptNode): SemanticToken[] {
|
|||
} else {
|
||||
compilerAssert(
|
||||
token === Token.StringTemplateMiddle,
|
||||
"Should have been a template middle."
|
||||
"Should have been a template middle.",
|
||||
);
|
||||
classifyStringTemplate(token, {
|
||||
pos: scanner.tokenPosition,
|
||||
|
@ -126,7 +126,7 @@ export function getSemanticTokens(ast: TypeSpecScriptNode): SemanticToken[] {
|
|||
|
||||
function classifyStringTemplate(
|
||||
token: Token.StringTemplateHead | Token.StringTemplateMiddle | Token.StringTemplateTail,
|
||||
range: TextRange
|
||||
range: TextRange,
|
||||
) {
|
||||
const stringStart = token === Token.StringTemplateHead ? range.pos : range.pos + 1;
|
||||
const stringEnd = token === Token.StringTemplateTail ? range.end : range.end - 2;
|
||||
|
|
|
@ -95,7 +95,7 @@ export function createCompileService({
|
|||
}
|
||||
|
||||
async function compile(
|
||||
document: TextDocument | TextDocumentIdentifier
|
||||
document: TextDocument | TextDocumentIdentifier,
|
||||
): Promise<CompileResult | undefined> {
|
||||
const path = await fileService.getPath(document);
|
||||
const mainFile = await getMainFileForDocument(path);
|
||||
|
@ -190,7 +190,7 @@ export function createCompileService({
|
|||
}
|
||||
|
||||
async function getScript(
|
||||
document: TextDocument | TextDocumentIdentifier
|
||||
document: TextDocument | TextDocumentIdentifier,
|
||||
): Promise<TypeSpecScriptNode> {
|
||||
const file = await compilerHost.readFile(await fileService.getPath(document));
|
||||
const cached = compilerHost.parseCache?.get(file);
|
||||
|
@ -241,7 +241,7 @@ export function createCompileService({
|
|||
pkgPath,
|
||||
JSON.parse,
|
||||
logMainFileSearchDiagnostic,
|
||||
options
|
||||
options,
|
||||
);
|
||||
await fileSystemCache.setData(pkgPath, pkg ?? {});
|
||||
}
|
||||
|
@ -260,7 +260,7 @@ export function createCompileService({
|
|||
() => compilerHost.stat(candidate),
|
||||
candidate,
|
||||
logMainFileSearchDiagnostic,
|
||||
options
|
||||
options,
|
||||
);
|
||||
|
||||
if (stat?.isFile()) {
|
||||
|
|
|
@ -44,7 +44,7 @@ export type CompletionContext = {
|
|||
|
||||
export async function resolveCompletion(
|
||||
context: CompletionContext,
|
||||
posDetail: PositionDetail
|
||||
posDetail: PositionDetail,
|
||||
): Promise<CompletionList> {
|
||||
let node: Node | undefined = posDetail.node;
|
||||
|
||||
|
@ -78,7 +78,7 @@ export async function resolveCompletion(
|
|||
|
||||
function addCompletionByLookingBackward(
|
||||
posDetail: PositionDetail,
|
||||
context: CompletionContext
|
||||
context: CompletionContext,
|
||||
): boolean {
|
||||
if (posDetail.triviaStartPosition === 0) {
|
||||
return false;
|
||||
|
@ -96,7 +96,7 @@ function addCompletionByLookingBackward(
|
|||
n.kind === SyntaxKind.OperationStatement ||
|
||||
n.kind === SyntaxKind.InterfaceStatement ||
|
||||
n.kind === SyntaxKind.TemplateParameterDeclaration,
|
||||
true /*includeSelf*/
|
||||
true /*includeSelf*/,
|
||||
);
|
||||
|
||||
return node !== undefined && addCompletionByLookingBackwardNode(node, posDetail, context);
|
||||
|
@ -105,7 +105,7 @@ function addCompletionByLookingBackward(
|
|||
function addCompletionByLookingBackwardNode(
|
||||
preNode: Node,
|
||||
posDetail: PositionDetail,
|
||||
context: CompletionContext
|
||||
context: CompletionContext,
|
||||
): boolean {
|
||||
const getIdentifierEndPos = (n: IdentifierNode) => {
|
||||
// n.pos === n.end, it means it's a missing identifier, just return -1;
|
||||
|
@ -151,7 +151,7 @@ async function AddCompletionNonTrivia(
|
|||
node: Node | undefined,
|
||||
context: CompletionContext,
|
||||
posDetail: PositionDetail,
|
||||
lookBackward: boolean = true
|
||||
lookBackward: boolean = true,
|
||||
) {
|
||||
if (
|
||||
node === undefined ||
|
||||
|
@ -263,14 +263,14 @@ async function isTspLibraryPackage(host: CompilerHost, dir: string) {
|
|||
|
||||
async function addLibraryImportCompletion(
|
||||
{ program, file, completions }: CompletionContext,
|
||||
node: StringLiteralNode
|
||||
node: StringLiteralNode,
|
||||
) {
|
||||
const documentPath = file.file.path;
|
||||
const projectRoot = await findProjectRoot(program.host.stat, documentPath);
|
||||
if (projectRoot !== undefined) {
|
||||
const packagejson = await loadPackageJson(
|
||||
program.host,
|
||||
resolvePath(projectRoot, "package.json")
|
||||
resolvePath(projectRoot, "package.json"),
|
||||
);
|
||||
let dependencies: string[] = [];
|
||||
if (packagejson.dependencies !== undefined) {
|
||||
|
@ -317,7 +317,7 @@ async function tryListItemInDir(host: CompilerHost, path: string): Promise<strin
|
|||
|
||||
async function addRelativePathCompletion(
|
||||
{ program, completions, file }: CompletionContext,
|
||||
node: StringLiteralNode
|
||||
node: StringLiteralNode,
|
||||
) {
|
||||
const documentPath = file.file.path;
|
||||
const documentFile = getBaseFileName(documentPath);
|
||||
|
@ -327,7 +327,7 @@ async function addRelativePathCompletion(
|
|||
: getDirectoryPath(node.value);
|
||||
const currentAbsolutePath = resolvePath(documentDir, currentRelativePath);
|
||||
const files = (await tryListItemInDir(program.host, currentAbsolutePath)).filter(
|
||||
(x) => x !== documentFile && x !== "node_modules"
|
||||
(x) => x !== documentFile && x !== "node_modules",
|
||||
);
|
||||
|
||||
const lastSlash = node.value.lastIndexOf("/");
|
||||
|
@ -401,7 +401,7 @@ function addModelCompletion(context: CompletionContext, posDetail: PositionDetai
|
|||
*/
|
||||
function addIdentifierCompletion(
|
||||
{ program, completions }: CompletionContext,
|
||||
node: IdentifierNode
|
||||
node: IdentifierNode,
|
||||
) {
|
||||
const result = program.checker.resolveCompletions(node);
|
||||
if (result.size === 0) {
|
||||
|
|
|
@ -73,7 +73,7 @@ function main() {
|
|||
break;
|
||||
default:
|
||||
connection.console.error(
|
||||
`Log Message with invalid LogLevel (${log.level}). Raw Message: ${fullMessage}`
|
||||
`Log Message with invalid LogLevel (${log.level}). Raw Message: ${fullMessage}`,
|
||||
);
|
||||
break;
|
||||
}
|
||||
|
|
|
@ -219,7 +219,7 @@ export function createServer(host: ServerHost): Server {
|
|||
uri: params.rootUri,
|
||||
path: ensureTrailingDirectorySeparator(
|
||||
// eslint-disable-next-line @typescript-eslint/no-deprecated
|
||||
await fileService.fileURLToRealPath(params.rootUri)
|
||||
await fileService.fileURLToRealPath(params.rootUri),
|
||||
),
|
||||
},
|
||||
];
|
||||
|
@ -232,7 +232,7 @@ export function createServer(host: ServerHost): Server {
|
|||
uri: compilerHost.pathToFileURL(params.rootPath),
|
||||
path: ensureTrailingDirectorySeparator(
|
||||
// eslint-disable-next-line @typescript-eslint/no-deprecated
|
||||
await getNormalizedRealPath(compilerHost, params.rootPath)
|
||||
await getNormalizedRealPath(compilerHost, params.rootPath),
|
||||
),
|
||||
},
|
||||
];
|
||||
|
@ -333,7 +333,7 @@ export function createServer(host: ServerHost): Server {
|
|||
}
|
||||
|
||||
async function findDocumentHighlight(
|
||||
params: DocumentHighlightParams
|
||||
params: DocumentHighlightParams,
|
||||
): Promise<DocumentHighlight[]> {
|
||||
const result = await compileService.compile(params.textDocument);
|
||||
if (result === undefined) {
|
||||
|
@ -344,7 +344,7 @@ export function createServer(host: ServerHost): Server {
|
|||
program,
|
||||
script,
|
||||
document.offsetAt(params.position),
|
||||
[script]
|
||||
[script],
|
||||
);
|
||||
return identifiers.map((identifier) => ({
|
||||
range: getRange(identifier, script.file),
|
||||
|
@ -410,7 +410,7 @@ export function createServer(host: ServerHost): Server {
|
|||
const diagnostics = diagnosticMap.get(diagDocument);
|
||||
compilerAssert(
|
||||
diagnostics,
|
||||
"Diagnostic reported against a source file that was not added to the program."
|
||||
"Diagnostic reported against a source file that was not added to the program.",
|
||||
);
|
||||
diagnostics.push(diagnostic);
|
||||
currentDiagnosticIndex.set(diagnostic.data.id, each);
|
||||
|
@ -467,10 +467,10 @@ export function createServer(host: ServerHost): Server {
|
|||
function getSignatureHelpForTemplate(
|
||||
program: Program,
|
||||
node: TypeReferenceNode,
|
||||
argumentIndex: number
|
||||
argumentIndex: number,
|
||||
): SignatureHelp | undefined {
|
||||
const sym = program.checker.resolveIdentifier(
|
||||
node.target.kind === SyntaxKind.MemberExpression ? node.target.id : node.target
|
||||
node.target.kind === SyntaxKind.MemberExpression ? node.target.id : node.target,
|
||||
);
|
||||
const templateDeclNode = sym?.declarations[0];
|
||||
if (
|
||||
|
@ -517,10 +517,10 @@ export function createServer(host: ServerHost): Server {
|
|||
function getSignatureHelpForDecorator(
|
||||
program: Program,
|
||||
node: DecoratorExpressionNode | AugmentDecoratorStatementNode,
|
||||
argumentIndex: number
|
||||
argumentIndex: number,
|
||||
): SignatureHelp | undefined {
|
||||
const sym = program.checker.resolveIdentifier(
|
||||
node.target.kind === SyntaxKind.MemberExpression ? node.target.id : node.target
|
||||
node.target.kind === SyntaxKind.MemberExpression ? node.target.id : node.target,
|
||||
);
|
||||
if (!sym) {
|
||||
return undefined;
|
||||
|
@ -528,7 +528,7 @@ export function createServer(host: ServerHost): Server {
|
|||
|
||||
const decoratorDeclNode: DecoratorDeclarationStatementNode | undefined = sym.declarations.find(
|
||||
(x): x is DecoratorDeclarationStatementNode =>
|
||||
x.kind === SyntaxKind.DecoratorDeclarationStatement
|
||||
x.kind === SyntaxKind.DecoratorDeclarationStatement,
|
||||
);
|
||||
if (decoratorDeclNode === undefined) {
|
||||
return undefined;
|
||||
|
@ -565,7 +565,7 @@ export function createServer(host: ServerHost): Server {
|
|||
info.documentation = { kind: MarkupKind.Markdown, value: doc };
|
||||
}
|
||||
return info;
|
||||
})
|
||||
}),
|
||||
);
|
||||
|
||||
const help: SignatureHelp = {
|
||||
|
@ -665,7 +665,7 @@ export function createServer(host: ServerHost): Server {
|
|||
|
||||
async function getImportLocation(
|
||||
importPath: string,
|
||||
currentFile: TypeSpecScriptNode
|
||||
currentFile: TypeSpecScriptNode,
|
||||
): Promise<Location> {
|
||||
const host: ResolveModuleHost = {
|
||||
realpath: compilerHost.realpath,
|
||||
|
@ -706,7 +706,7 @@ export function createServer(host: ServerHost): Server {
|
|||
completions,
|
||||
params,
|
||||
},
|
||||
posDetail
|
||||
posDetail,
|
||||
);
|
||||
}
|
||||
|
||||
|
@ -721,7 +721,7 @@ export function createServer(host: ServerHost): Server {
|
|||
const identifiers = findReferenceIdentifiers(
|
||||
result.program,
|
||||
result.script,
|
||||
result.document.offsetAt(params.position)
|
||||
result.document.offsetAt(params.position),
|
||||
);
|
||||
return getLocations(identifiers);
|
||||
}
|
||||
|
@ -742,7 +742,7 @@ export function createServer(host: ServerHost): Server {
|
|||
const identifiers = findReferenceIdentifiers(
|
||||
result.program,
|
||||
result.script,
|
||||
result.document.offsetAt(params.position)
|
||||
result.document.offsetAt(params.position),
|
||||
);
|
||||
for (const id of identifiers) {
|
||||
const location = getLocation(id);
|
||||
|
@ -764,7 +764,7 @@ export function createServer(host: ServerHost): Server {
|
|||
program: Program,
|
||||
file: TypeSpecScriptNode,
|
||||
pos: number,
|
||||
searchFiles: Iterable<TypeSpecScriptNode> = program.sourceFiles.values()
|
||||
searchFiles: Iterable<TypeSpecScriptNode> = program.sourceFiles.values(),
|
||||
): IdentifierNode[] {
|
||||
const id = getNodeAtPosition(file, pos);
|
||||
if (id?.kind !== SyntaxKind.Identifier) {
|
||||
|
@ -836,7 +836,7 @@ export function createServer(host: ServerHost): Server {
|
|||
command: Commands.APPLY_CODE_FIX,
|
||||
arguments: [params.textDocument.uri, vsDiag.data?.id, fix.id],
|
||||
},
|
||||
CodeActionKind.QuickFix
|
||||
CodeActionKind.QuickFix,
|
||||
),
|
||||
diagnostics: [vsDiag],
|
||||
};
|
||||
|
@ -1008,7 +1008,7 @@ type SignatureHelpNode =
|
|||
|
||||
function getSignatureHelpNodeAtPosition(
|
||||
script: TypeSpecScriptNode,
|
||||
position: number
|
||||
position: number,
|
||||
): { node: SignatureHelpNode; argumentIndex: number } | undefined {
|
||||
// Move back over any trailing trivia. Otherwise, if there is no
|
||||
// closing paren/angle bracket, we can find ourselves outside the desired
|
||||
|
@ -1045,7 +1045,7 @@ function getSignatureHelpNodeAtPosition(
|
|||
default:
|
||||
return false;
|
||||
}
|
||||
}
|
||||
},
|
||||
);
|
||||
|
||||
if (!node) {
|
||||
|
@ -1063,7 +1063,7 @@ function getSignatureHelpNodeAtPosition(
|
|||
function getSignatureHelpArgumentIndex(
|
||||
script: TypeSpecScriptNode,
|
||||
node: SignatureHelpNode,
|
||||
position: number
|
||||
position: number,
|
||||
) {
|
||||
// Normalize arguments into a single list to avoid special case for
|
||||
// augment decorators.
|
||||
|
@ -1097,7 +1097,7 @@ function getSignatureHelpArgumentIndex(
|
|||
export function getCompletionNodeAtPosition(
|
||||
script: TypeSpecScriptNode,
|
||||
position: number,
|
||||
filter: (node: Node) => boolean = (node: Node) => true
|
||||
filter: (node: Node) => boolean = (node: Node) => true,
|
||||
): PositionDetail {
|
||||
return getNodeAtPositionDetail(script, position, filter);
|
||||
}
|
||||
|
|
|
@ -23,13 +23,13 @@ export function getSymbolStructure(ast: TypeSpecScriptNode): DocumentSymbol[] {
|
|||
}
|
||||
const fileNamespaceSymbol = getForNamespace(fileNamespace);
|
||||
fileNamespaceSymbol.children = getForStatements(
|
||||
ast.statements.filter((x) => x !== fileNamespace)
|
||||
ast.statements.filter((x) => x !== fileNamespace),
|
||||
);
|
||||
return [fileNamespaceSymbol];
|
||||
|
||||
function findFileNamespace(ast: TypeSpecScriptNode): NamespaceStatementNode | undefined {
|
||||
const firstNamespace: NamespaceStatementNode | undefined = ast.statements.find(
|
||||
(x) => x.kind === SyntaxKind.NamespaceStatement
|
||||
(x) => x.kind === SyntaxKind.NamespaceStatement,
|
||||
) as any;
|
||||
if (firstNamespace === undefined) {
|
||||
return undefined;
|
||||
|
@ -104,7 +104,7 @@ export function getSymbolStructure(ast: TypeSpecScriptNode): DocumentSymbol[] {
|
|||
node: Node,
|
||||
name: string,
|
||||
kind: SymbolKind,
|
||||
symbols?: DocumentSymbol[]
|
||||
symbols?: DocumentSymbol[],
|
||||
) {
|
||||
const start = file.getLineAndCharacterOfPosition(node.pos);
|
||||
const end = file.getLineAndCharacterOfPosition(node.end);
|
||||
|
|
|
@ -23,7 +23,7 @@ export function getSymbolDetails(
|
|||
options = {
|
||||
includeSignature: true,
|
||||
includeParameterTags: true,
|
||||
}
|
||||
},
|
||||
): string {
|
||||
const lines = [];
|
||||
if (options.includeSignature) {
|
||||
|
@ -44,7 +44,7 @@ export function getSymbolDetails(
|
|||
}
|
||||
lines.push(
|
||||
//prettier-ignore
|
||||
`_@${tag.tagName.sv}_${"paramName" in tag ? ` \`${tag.paramName.sv}\`` : ""} —\n${getDocContent(tag.content)}`
|
||||
`_@${tag.tagName.sv}_${"paramName" in tag ? ` \`${tag.paramName.sv}\`` : ""} —\n${getDocContent(tag.content)}`,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
@ -96,7 +96,7 @@ export function getParameterDocumentation(program: Program, type: Type): Map<str
|
|||
|
||||
/** @internal */
|
||||
export function getTemplateParameterDocumentation(
|
||||
node: Node & TemplateDeclarationNode
|
||||
node: Node & TemplateDeclarationNode,
|
||||
): Map<string, string> {
|
||||
const map = new Map<string, string>();
|
||||
for (const d of node?.docs ?? []) {
|
||||
|
@ -114,7 +114,7 @@ function getDocContent(content: readonly DocContent[]) {
|
|||
for (const node of content) {
|
||||
compilerAssert(
|
||||
node.kind === SyntaxKind.DocText,
|
||||
"No other doc content node kinds exist yet. Update this code appropriately when more are added."
|
||||
"No other doc content node kinds exist yet. Update this code appropriately when more are added.",
|
||||
);
|
||||
docs.push(node.text);
|
||||
}
|
||||
|
|
|
@ -104,7 +104,7 @@ function getFunctionSignature(type: FunctionType) {
|
|||
const ns = getQualifier(type.namespace);
|
||||
const parameters = type.parameters.map((x) => getFunctionParameterSignature(x));
|
||||
return `fn ${ns}${printIdentifier(type.name)}(${parameters.join(", ")}): ${getPrintableTypeName(
|
||||
type.returnType
|
||||
type.returnType,
|
||||
)}`;
|
||||
}
|
||||
|
||||
|
|
|
@ -53,7 +53,7 @@ export interface ServerHost {
|
|||
readonly sendDiagnostics: (params: PublishDiagnosticsParams) => void;
|
||||
readonly log: (log: ServerLog) => void;
|
||||
readonly applyEdit: (
|
||||
paramOrEdit: ApplyWorkspaceEditParams | WorkspaceEdit
|
||||
paramOrEdit: ApplyWorkspaceEditParams | WorkspaceEdit,
|
||||
) => Promise<ApplyWorkspaceEditResult>;
|
||||
}
|
||||
|
||||
|
|
|
@ -63,7 +63,7 @@ export function expectCodeFixOnAst(code: string, callback: (node: Node) => CodeF
|
|||
updatedContent = value;
|
||||
},
|
||||
},
|
||||
codefix
|
||||
codefix,
|
||||
);
|
||||
ok(updatedContent);
|
||||
strictEqual(trimBlankLines(updatedContent), trimBlankLines(expectedCode));
|
||||
|
|
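Every hunk above makes the same mechanical change: wherever Prettier breaks a parameter list, argument list, array, or object literal across multiple lines, a comma is now also emitted after the last element. A minimal before/after sketch of that behavior in plain TypeScript (the formatScore functions below are made-up examples for illustration, not code from this repository):

// Before: the last item of a multi-line parameter or argument list ends without a comma.
function formatScore(
  label: string,
  value: number
): string {
  return `${label}: ${value}`;
}

// After: the last item of every multi-line list ends with a trailing comma, so appending
// a new parameter or argument later only adds a line instead of also editing the previous one.
function formatScoreTrailing(
  label: string,
  value: number,
): string {
  return `${label}: ${value}`;
}

console.log(formatScoreTrailing("score", 42)); // "score: 42"

The practical payoff is smaller future diffs: extending any multi-line list no longer touches its previous final line just to insert a comma.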
Some files were not shown in this diff because too many files changed.