Implement semantic coloring (#483)
This commit is contained in:
Parent
e4c4edba9b
Commit
192816fadc
@@ -36,7 +36,7 @@
 "label": "watch-tmlanguage",
 "type": "process",
 "command": "node",
-"args": ["${workspaceFolder}/packages/cadl-vscode/scripts/watch-tmlanguage.js"],
+"args": ["${workspaceFolder}/packages/compiler/scripts/watch-tmlanguage.js"],
 "problemMatcher": {
 "base": "$msCompile",
 "background": {
@@ -47,7 +47,7 @@
 },
 "isBackground": true,
 "options": {
-"cwd": "${workspaceFolder}/packages/cadl-vscode"
+"cwd": "${workspaceFolder}/packages/compiler"
 },
 "presentation": {
 "echo": true,
@@ -0,0 +1,10 @@
+{
+"changes": [
+{
+"packageName": "@cadl-lang/compiler",
+"comment": "Add semantic colorization",
+"type": "minor"
+}
+],
+"packageName": "@cadl-lang/compiler"
+}
@@ -0,0 +1,10 @@
+{
+"changes": [
+{
+"packageName": "cadl-vscode",
+"comment": "Add semantic colorization",
+"type": "minor"
+}
+],
+"packageName": "cadl-vscode"
+}
@@ -3843,7 +3843,7 @@ packages:
 dev: false

 /strip-json-comments/2.0.1:
-resolution: {integrity: sha1-PFMZQukIwml8DsNEhYwobHygpgo=}
+resolution: {integrity: sha512-4gB8na07fecVVkOI6Rs4e7T6NOTki5EmL7TUduTs6bu3EdnSycntVJ4re8kgZA+wx9IueI2Y11bfbgwtzuE0KQ==}
 engines: {node: '>=0.10.0'}
 dev: false

@@ -4489,7 +4489,7 @@ packages:
 dev: false

 file:projects/cadl-vscode.tgz:
-resolution: {integrity: sha512-fbWb3ZPbNJk+GZr+2ianvI+VYT8TKPNfKLG34o2l6Lu6L1HG5Rfx7VCvzDJD9avXw47/6/AIc6l6rHrWcipshw==, tarball: file:projects/cadl-vscode.tgz}
+resolution: {integrity: sha512-68kXFqY44huSWrukCumhFMpzpocmdMZ8wIbQ/zbxuf1LdP8+eFRK1oRFeWRvv7YlJTqE6N26H+R68w6Oa/i2Qw==, tarball: file:projects/cadl-vscode.tgz}
 name: '@rush-temp/cadl-vscode'
 version: 0.0.0
 dependencies:
@@ -4515,7 +4515,7 @@ packages:
 dev: false

 file:projects/compiler.tgz:
-resolution: {integrity: sha512-nWe7pOFLS23j2yB9ff6Sy8jXiU9Kit7D/GuFlXY3aTfgOESPRgOQ0iYHUXjJGVqVAXShIbuCK4Le1QHeUBON5g==, tarball: file:projects/compiler.tgz}
+resolution: {integrity: sha512-2qKHtYjKAjODqLE5rKLe+2vQdglIzHLuLUTjUyYRmaLaAi/rt1GGe2R7S8LxtWtNdHaNnOrVwcuh/lj3+obs2A==, tarball: file:projects/compiler.tgz}
 name: '@rush-temp/compiler'
 version: 0.0.0
 dependencies:
@@ -4550,6 +4550,8 @@
 typescript: 4.7.2
 vscode-languageserver: 7.0.0
 vscode-languageserver-textdocument: 1.0.5
+vscode-oniguruma: 1.6.2
+vscode-textmate: 6.0.0
 yargs: 17.3.1
 transitivePeerDependencies:
 - supports-color
@@ -87,22 +87,31 @@
 "title": "Restart Cadl server",
 "category": "Cadl"
 }
 ],
+"semanticTokenScopes": [
+{
+"scopes": {
+"keyword": [
+"keyword.other.cadl"
+],
+"macro": [
+"entity.name.tag.cadl"
+]
+}
+}
+]
 },
 "scripts": {
 "clean": "rimraf ./dist ./dist-dev ./temp",
-"build": "npm run compile && npm run rollup && npm run generate-tmlanguage && npm run generate-language-configuration && npm run generate-third-party-notices && npm run package-vsix",
+"build": "npm run compile && npm run rollup && npm run copy-tmlanguage && npm run generate-language-configuration && npm run generate-third-party-notices && npm run package-vsix",
 "compile": "tsc -p .",
 "watch": "tsc -p . --watch",
 "watch-tmlanguage": "node scripts/watch-tmlanguage.js",
 "dogfood": "node scripts/dogfood.js",
-"generate-tmlanguage": "node scripts/generate-tmlanguage.js",
+"copy-tmlanguage": "node scripts/copy-tmlanguage.js",
 "generate-language-configuration": "node scripts/generate-language-configuration.js",
 "generate-third-party-notices": "cadl-build-tool generate-third-party-notices",
 "rollup": "rollup --config --failAfterWarnings 2>&1",
-"package-vsix": "vsce package --yarn",
-"test": "mocha",
-"test-official": "mocha --forbid-only"
+"package-vsix": "vsce package --yarn"
 },
 "dependencies": {},
 "devDependencies": {
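A note on the semanticTokenScopes contribution added above: it gives themes a TextMate fallback for the semantic token types that have no well-known default color, mapping semantic keyword tokens to the keyword.other.cadl scope and macro tokens (which the server emits for decorators, per the serverlib changes below) to entity.name.tag.cadl, so decorators keep their tag color even under themes that only style TextMate scopes.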
@@ -121,11 +130,8 @@
 "mocha": "~9.2.0",
 "rimraf": "~3.0.2",
 "rollup": "~2.70.1",
-"tmlanguage-generator": "~0.3.0",
 "typescript": "~4.7.2",
 "vsce": "~2.6.7",
-"vscode-languageclient": "~8.0.0",
-"vscode-oniguruma": "~1.6.1",
-"vscode-textmate": "~6.0.0"
+"vscode-languageclient": "~8.0.0"
 }
 }
@@ -0,0 +1,6 @@
+import { copyFile } from "fs/promises";
+
+await copyFile(
+"node_modules/@cadl-lang/compiler/dist/cadl.tmLanguage",
+"../cadl-vscode/dist/cadl.tmLanguage"
+);
@@ -1,12 +0,0 @@
-import { createRequire } from "module";
-import { resolve } from "path";
-
-const require = createRequire(import.meta.url);
-const script = resolve("dist-dev/src/tmlanguage.js");
-
-require(script)
-.main()
-.catch((err) => {
-console.error(err.stack);
-process.exit(1);
-});
@@ -1,26 +0,0 @@
-import { deepStrictEqual } from "assert";
-import { Token, tokenize } from "./utils";
-
-describe("vscode: tmlanguage: alias", () => {
-it("simple alias", async () => {
-const tokens = await tokenize("alias Foo = string");
-deepStrictEqual(tokens, [
-Token.keywords.alias,
-Token.identifiers.type("Foo"),
-Token.operators.assignement,
-Token.identifiers.type("string"),
-]);
-});
-it("templated alias", async () => {
-const tokens = await tokenize("alias Foo<T> = T");
-deepStrictEqual(tokens, [
-Token.keywords.alias,
-Token.identifiers.type("Foo"),
-Token.punctuation.typeParameters.begin,
-Token.identifiers.type("T"),
-Token.punctuation.typeParameters.end,
-Token.operators.assignement,
-Token.identifiers.type("T"),
-]);
-});
-});
@@ -1,28 +0,0 @@
-import { deepStrictEqual } from "assert";
-import { Token, tokenize } from "./utils";
-
-describe("vscode: tmlanguage: decorators", () => {
-it("simple parameterless decorator", async () => {
-const tokens = await tokenize("@foo");
-deepStrictEqual(tokens, [Token.identifiers.tag("@foo")]);
-});
-
-it("fully qualified decorator name", async () => {
-const tokens = await tokenize("@Foo.bar");
-deepStrictEqual(tokens, [Token.identifiers.tag("@Foo.bar")]);
-});
-
-it("decorator with parameters", async () => {
-const tokens = await tokenize(`@foo("param1", 123)`);
-deepStrictEqual(tokens, [
-Token.identifiers.tag("@foo"),
-Token.punctuation.openParen,
-Token.punctuation.string.doubleQuote,
-Token.literals.string("param1"),
-Token.punctuation.string.doubleQuote,
-Token.punctuation.comma,
-Token.literals.numeric("123"),
-Token.punctuation.closeParen,
-]);
-});
-});
@@ -1,109 +0,0 @@
-import { deepStrictEqual } from "assert";
-import { Token, tokenize } from "./utils";
-
-describe("vscode: tmlanguage: interfaces", () => {
-it("empty interface", async () => {
-const tokens = await tokenize("interface Foo {}");
-deepStrictEqual(tokens, [
-Token.keywords.interface,
-Token.identifiers.type("Foo"),
-Token.punctuation.openBrace,
-Token.punctuation.closeBrace,
-]);
-});
-
-it("interface with single extends", async () => {
-const tokens = await tokenize("interface Foo extends Bar {}");
-deepStrictEqual(tokens, [
-Token.keywords.interface,
-Token.identifiers.type("Foo"),
-Token.keywords.extends,
-Token.identifiers.type("Bar"),
-Token.punctuation.openBrace,
-Token.punctuation.closeBrace,
-]);
-});
-
-it("interface with multiple extends", async () => {
-const tokens = await tokenize("interface Foo extends Bar1, Bar2 {}");
-deepStrictEqual(tokens, [
-Token.keywords.interface,
-Token.identifiers.type("Foo"),
-Token.keywords.extends,
-Token.identifiers.type("Bar1"),
-Token.punctuation.comma,
-Token.identifiers.type("Bar2"),
-Token.punctuation.openBrace,
-Token.punctuation.closeBrace,
-]);
-});
-
-it("template interface", async () => {
-const tokens = await tokenize("interface Foo<T> {}");
-deepStrictEqual(tokens, [
-Token.keywords.interface,
-Token.identifiers.type("Foo"),
-Token.punctuation.typeParameters.begin,
-Token.identifiers.type("T"),
-Token.punctuation.typeParameters.end,
-Token.punctuation.openBrace,
-Token.punctuation.closeBrace,
-]);
-});
-
-it("template interface with extends", async () => {
-const tokens = await tokenize("interface Foo<T> extends Bar<T> {}");
-deepStrictEqual(tokens, [
-Token.keywords.interface,
-Token.identifiers.type("Foo"),
-Token.punctuation.typeParameters.begin,
-Token.identifiers.type("T"),
-Token.punctuation.typeParameters.end,
-Token.keywords.extends,
-Token.identifiers.type("Bar"),
-Token.punctuation.typeParameters.begin,
-Token.identifiers.type("T"),
-Token.punctuation.typeParameters.end,
-Token.punctuation.openBrace,
-Token.punctuation.closeBrace,
-]);
-});
-
-it("interface with operations", async () => {
-const tokens = await tokenize(`
-interface Foo {
-bar(): string;
-}`);
-deepStrictEqual(tokens, [
-Token.keywords.interface,
-Token.identifiers.type("Foo"),
-Token.punctuation.openBrace,
-Token.identifiers.functionName("bar"),
-Token.punctuation.openParen,
-Token.punctuation.closeParen,
-Token.operators.typeAnnotation,
-Token.identifiers.type("string"),
-Token.punctuation.closeBrace,
-]);
-});
-
-it("interface operation that copies the signature of another operation", async () => {
-const tokens = await tokenize(`
-interface Foo {
-bar is ResourceRead<Widget>
-}`);
-
-deepStrictEqual(tokens, [
-Token.keywords.interface,
-Token.identifiers.type("Foo"),
-Token.punctuation.openBrace,
-Token.identifiers.functionName("bar"),
-Token.keywords.is,
-Token.identifiers.type("ResourceRead"),
-Token.punctuation.typeParameters.begin,
-Token.identifiers.type("Widget"),
-Token.punctuation.typeParameters.end,
-Token.punctuation.closeBrace,
-]);
-});
-});
@@ -1,170 +0,0 @@
-import { deepStrictEqual } from "assert";
-import { Token, tokenize } from "./utils";
-
-describe("vscode: tmlanguage: Models", () => {
-it("simple model", async () => {
-const tokens = await tokenize("model Foo {}");
-deepStrictEqual(tokens, [
-Token.keywords.model,
-Token.identifiers.type("Foo"),
-Token.punctuation.openBrace,
-Token.punctuation.closeBrace,
-]);
-});
-
-it("model with extends", async () => {
-const tokens = await tokenize("model Foo extends Bar {}");
-deepStrictEqual(tokens, [
-Token.keywords.model,
-Token.identifiers.type("Foo"),
-Token.keywords.extends,
-Token.identifiers.type("Bar"),
-Token.punctuation.openBrace,
-Token.punctuation.closeBrace,
-]);
-});
-
-it("model with is", async () => {
-const tokens = await tokenize("model Foo is Bar {}");
-deepStrictEqual(tokens, [
-Token.keywords.model,
-Token.identifiers.type("Foo"),
-Token.keywords.is,
-Token.identifiers.type("Bar"),
-Token.punctuation.openBrace,
-Token.punctuation.closeBrace,
-]);
-});
-
-it("single template argument model", async () => {
-const tokens = await tokenize("model Foo<T> {}");
-deepStrictEqual(tokens, [
-Token.keywords.model,
-Token.identifiers.type("Foo"),
-Token.punctuation.typeParameters.begin,
-Token.identifiers.type("T"),
-Token.punctuation.typeParameters.end,
-Token.punctuation.openBrace,
-Token.punctuation.closeBrace,
-]);
-});
-
-it("multiple template argument model", async () => {
-const tokens = await tokenize("model Foo<A, B, C> {}");
-deepStrictEqual(tokens, [
-Token.keywords.model,
-Token.identifiers.type("Foo"),
-Token.punctuation.typeParameters.begin,
-Token.identifiers.type("A"),
-Token.punctuation.comma,
-Token.identifiers.type("B"),
-Token.punctuation.comma,
-Token.identifiers.type("C"),
-Token.punctuation.typeParameters.end,
-Token.punctuation.openBrace,
-Token.punctuation.closeBrace,
-]);
-});
-
-it("model with basic properties", async () => {
-const tokens = await tokenize(`
-model Foo {
-prop1: string;
-prop2: int32;
-}`);
-deepStrictEqual(tokens, [
-Token.keywords.model,
-Token.identifiers.type("Foo"),
-Token.punctuation.openBrace,
-Token.identifiers.variable("prop1"),
-Token.operators.typeAnnotation,
-Token.identifiers.type("string"),
-Token.punctuation.semicolon,
-Token.identifiers.variable("prop2"),
-Token.operators.typeAnnotation,
-Token.identifiers.type("int32"),
-Token.punctuation.semicolon,
-Token.punctuation.closeBrace,
-]);
-});
-
-it("model with optional properties", async () => {
-const tokens = await tokenize(`
-model Foo {
-prop1?: string;
-}`);
-deepStrictEqual(tokens, [
-Token.keywords.model,
-Token.identifiers.type("Foo"),
-Token.punctuation.openBrace,
-Token.identifiers.variable("prop1"),
-Token.operators.optional,
-Token.operators.typeAnnotation,
-Token.identifiers.type("string"),
-Token.punctuation.semicolon,
-Token.punctuation.closeBrace,
-]);
-});
-
-it("model with properties with default value", async () => {
-const tokens = await tokenize(`
-model Foo {
-prop1?: string = "my-default";
-}`);
-deepStrictEqual(tokens, [
-Token.keywords.model,
-Token.identifiers.type("Foo"),
-Token.punctuation.openBrace,
-Token.identifiers.variable("prop1"),
-Token.operators.optional,
-Token.operators.typeAnnotation,
-Token.identifiers.type("string"),
-Token.operators.assignement,
-Token.punctuation.string.doubleQuote,
-Token.literals.string("my-default"),
-Token.punctuation.string.doubleQuote,
-Token.punctuation.semicolon,
-Token.punctuation.closeBrace,
-]);
-});
-
-it("model nested model ", async () => {
-const tokens = await tokenize(`
-model Foo {
-nested: {
-prop1: string;
-};
-}`);
-deepStrictEqual(tokens, [
-Token.keywords.model,
-Token.identifiers.type("Foo"),
-Token.punctuation.openBrace,
-Token.identifiers.variable("nested"),
-Token.operators.typeAnnotation,
-Token.punctuation.openBrace,
-Token.identifiers.variable("prop1"),
-Token.operators.typeAnnotation,
-Token.identifiers.type("string"),
-Token.punctuation.semicolon,
-Token.punctuation.closeBrace,
-Token.punctuation.semicolon,
-Token.punctuation.closeBrace,
-]);
-});
-
-it("model with spread property", async () => {
-const tokens = await tokenize(`
-model Foo {
-...Bar;
-}`);
-deepStrictEqual(tokens, [
-Token.keywords.model,
-Token.identifiers.type("Foo"),
-Token.punctuation.openBrace,
-Token.operators.spread,
-Token.identifiers.type("Bar"),
-Token.punctuation.semicolon,
-Token.punctuation.closeBrace,
-]);
-});
-});
@@ -1,56 +0,0 @@
-import { deepStrictEqual } from "assert";
-import { Token, tokenize } from "./utils";
-
-describe("vscode: tmlanguage: Namespace", () => {
-it("simple global namespace", async () => {
-const tokens = await tokenize("namespace Foo;");
-deepStrictEqual(tokens, [
-Token.keywords.namespace,
-Token.identifiers.type("Foo"),
-Token.punctuation.semicolon,
-]);
-});
-
-it("subnamespace global namespace", async () => {
-const tokens = await tokenize("namespace Foo.Bar;");
-deepStrictEqual(tokens, [
-Token.keywords.namespace,
-Token.identifiers.type("Foo"),
-Token.punctuation.accessor,
-Token.identifiers.type("Bar"),
-Token.punctuation.semicolon,
-]);
-});
-
-it("simple namespace", async () => {
-const tokens = await tokenize(`
-namespace Foo {
-
-}`);
-deepStrictEqual(tokens, [
-Token.keywords.namespace,
-Token.identifiers.type("Foo"),
-Token.punctuation.openBrace,
-Token.punctuation.closeBrace,
-]);
-});
-
-it("nested namespace", async () => {
-const tokens = await tokenize(`
-namespace Foo {
-namespace Bar {
-
-}
-}`);
-deepStrictEqual(tokens, [
-Token.keywords.namespace,
-Token.identifiers.type("Foo"),
-Token.punctuation.openBrace,
-Token.keywords.namespace,
-Token.identifiers.type("Bar"),
-Token.punctuation.openBrace,
-Token.punctuation.closeBrace,
-Token.punctuation.closeBrace,
-]);
-});
-});
@@ -1,118 +0,0 @@
-import { deepStrictEqual } from "assert";
-import { Token, tokenize } from "./utils";
-
-describe("vscode: tmlanguage: Operations", () => {
-it("simple operation", async () => {
-const tokens = await tokenize("op foo(): string");
-deepStrictEqual(tokens, [
-Token.keywords.operation,
-Token.identifiers.functionName("foo"),
-Token.punctuation.openParen,
-Token.punctuation.closeParen,
-Token.operators.typeAnnotation,
-Token.identifiers.type("string"),
-]);
-});
-
-it("operation with parameters", async () => {
-const tokens = await tokenize("op foo(param1: string, param2: int32): string");
-deepStrictEqual(tokens, [
-Token.keywords.operation,
-Token.identifiers.functionName("foo"),
-Token.punctuation.openParen,
-
-Token.identifiers.variable("param1"),
-Token.operators.typeAnnotation,
-Token.identifiers.type("string"),
-Token.punctuation.comma,
-
-Token.identifiers.variable("param2"),
-Token.operators.typeAnnotation,
-Token.identifiers.type("int32"),
-
-Token.punctuation.closeParen,
-Token.operators.typeAnnotation,
-Token.identifiers.type("string"),
-]);
-});
-
-it("model with properties with default value", async () => {
-const tokens = await tokenize(`op foo(param1?: string = "my-default"): string`);
-deepStrictEqual(tokens, [
-Token.keywords.operation,
-Token.identifiers.functionName("foo"),
-Token.punctuation.openParen,
-
-Token.identifiers.variable("param1"),
-Token.operators.optional,
-Token.operators.typeAnnotation,
-Token.identifiers.type("string"),
-Token.operators.assignement,
-Token.punctuation.string.doubleQuote,
-Token.literals.string("my-default"),
-Token.punctuation.string.doubleQuote,
-
-Token.punctuation.closeParen,
-Token.operators.typeAnnotation,
-Token.identifiers.type("string"),
-]);
-});
-
-it("operation with decorated parameters", async () => {
-const tokens = await tokenize("op foo(@path param1: string, @query param2?: int32): string");
-deepStrictEqual(tokens, [
-Token.keywords.operation,
-Token.identifiers.functionName("foo"),
-Token.punctuation.openParen,
-
-Token.identifiers.tag("@path"),
-Token.identifiers.variable("param1"),
-Token.operators.typeAnnotation,
-Token.identifiers.type("string"),
-Token.punctuation.comma,
-
-Token.identifiers.tag("@query"),
-Token.identifiers.variable("param2"),
-Token.operators.optional,
-Token.operators.typeAnnotation,
-Token.identifiers.type("int32"),
-
-Token.punctuation.closeParen,
-Token.operators.typeAnnotation,
-Token.identifiers.type("string"),
-]);
-});
-
-it("operation that copies the signature of another operation", async () => {
-const tokens = await tokenize("op foo is ResourceRead<Widget>");
-deepStrictEqual(tokens, [
-Token.keywords.operation,
-Token.identifiers.functionName("foo"),
-Token.keywords.is,
-Token.identifiers.type("ResourceRead"),
-Token.punctuation.typeParameters.begin,
-Token.identifiers.type("Widget"),
-Token.punctuation.typeParameters.end,
-]);
-});
-
-it("defining a templated operation signature", async () => {
-const tokens = await tokenize(
-"op ResourceRead<TResource> is ResourceReadBase<TResource, DefaultOptions>"
-);
-deepStrictEqual(tokens, [
-Token.keywords.operation,
-Token.identifiers.functionName("ResourceRead"),
-Token.punctuation.typeParameters.begin,
-Token.identifiers.type("TResource"),
-Token.punctuation.typeParameters.end,
-Token.keywords.is,
-Token.identifiers.type("ResourceReadBase"),
-Token.punctuation.typeParameters.begin,
-Token.identifiers.type("TResource"),
-Token.punctuation.comma,
-Token.identifiers.type("DefaultOptions"),
-Token.punctuation.typeParameters.end,
-]);
-});
-});
@@ -1,163 +0,0 @@
-import { readFile } from "fs/promises";
-import * as path from "path";
-import { createOnigScanner, createOnigString, loadWASM } from "vscode-oniguruma";
-import { IOnigLib, parseRawGrammar, Registry, StackElement } from "vscode-textmate";
-import { CadlScope } from "../src/tmlanguage.js";
-
-async function createOnigLib(): Promise<IOnigLib> {
-const onigWasm = await readFile(`${path.dirname(require.resolve("vscode-oniguruma"))}/onig.wasm`);
-
-await loadWASM(onigWasm.buffer);
-
-return {
-createOnigScanner: (sources) => createOnigScanner(sources),
-createOnigString,
-};
-}
-
-const registry = new Registry({
-onigLib: createOnigLib(),
-loadGrammar: async (scopeName) => {
-const data = await readFile(path.resolve(__dirname, "../../dist/cadl.tmLanguage"), "utf-8");
-return parseRawGrammar(data);
-},
-});
-
-export type MetaScope = `meta.${string}.cadl`;
-export type TokenScope = CadlScope | MetaScope;
-export interface Token {
-text: string;
-type: TokenScope;
-}
-
-const excludedTypes = ["source.cadl"];
-
-export async function tokenize(
-input: string | Input,
-excludeTypes: boolean = true
-): Promise<Token[]> {
-if (typeof input === "string") {
-input = Input.FromText(input);
-}
-
-let tokens: Token[] = [];
-let previousStack: StackElement | null = null;
-const grammar = await registry.loadGrammar("source.cadl");
-
-if (grammar === null) {
-throw new Error("Unexpected null grammar");
-}
-
-for (let lineIndex = 0; lineIndex < input.lines.length; lineIndex++) {
-const line = input.lines[lineIndex];
-
-let lineResult = grammar.tokenizeLine(line, previousStack);
-previousStack = lineResult.ruleStack;
-
-if (lineIndex < input.span.startLine || lineIndex > input.span.endLine) {
-continue;
-}
-
-for (const token of lineResult.tokens) {
-if (
-(lineIndex === input.span.startLine && token.startIndex < input.span.startIndex) ||
-(lineIndex === input.span.endLine && token.endIndex > input.span.endIndex)
-) {
-continue;
-}
-
-const text = line.substring(token.startIndex, token.endIndex);
-const type = token.scopes[token.scopes.length - 1] as TokenScope;
-
-if (excludeTypes === false || !excludeType(type)) {
-tokens.push(createToken(text, type));
-}
-}
-}
-
-return tokens;
-}
-
-function excludeType(type: TokenScope): type is CadlScope {
-return excludedTypes.includes(type) || type.startsWith("meta.");
-}
-
-interface Span {
-startLine: number;
-startIndex: number;
-endLine: number;
-endIndex: number;
-}
-
-export class Input {
-private constructor(public lines: string[], public span: Span) {}
-
-public static FromText(text: string) {
-// ensure consistent line-endings irrelevant of OS
-text = text.replace("\r\n", "\n");
-let lines = text.split("\n");
-
-return new Input(lines, {
-startLine: 0,
-startIndex: 0,
-endLine: lines.length - 1,
-endIndex: lines[lines.length - 1].length,
-});
-}
-}
-
-function createToken(text: string, type: TokenScope) {
-return { text, type };
-}
-
-export const Token = {
-keywords: {
-model: createToken("model", "keyword.other.cadl"),
-operation: createToken("op", "keyword.other.cadl"),
-namespace: createToken("namespace", "keyword.other.cadl"),
-interface: createToken("interface", "keyword.other.cadl"),
-alias: createToken("alias", "keyword.other.cadl"),
-extends: createToken("extends", "keyword.other.cadl"),
-is: createToken("is", "keyword.other.cadl"),
-other: (text: string) => createToken(text, "keyword.other.cadl"),
-},
-meta: (text: string, meta: string) => createToken(text, `meta.${meta}.cadl`),
-identifiers: {
-variable: (name: string) => createToken(name, "variable.name.cadl"),
-functionName: (name: string) => createToken(name, "entity.name.function.cadl"),
-tag: (name: string) => createToken(name, "entity.name.tag.cadl"),
-type: (name: string) => createToken(name, "entity.name.type.cadl"),
-},
-
-operators: {
-assignement: createToken("=", "keyword.operator.assignment.cadl"),
-optional: createToken("?", "keyword.operator.optional.cadl"),
-typeAnnotation: createToken(":", "keyword.operator.type.annotation.cadl"),
-spread: createToken("...", "keyword.operator.spread.cadl"),
-},
-
-punctuation: {
-comma: createToken(",", "punctuation.comma.cadl"),
-accessor: createToken(".", "punctuation.accessor.cadl"),
-openBracket: createToken("[", "punctuation.squarebracket.open.cadl"),
-closeBracket: createToken("]", "punctuation.squarebracket.close.cadl"),
-openBrace: createToken("{", "punctuation.curlybrace.open.cadl"),
-closeBrace: createToken("}", "punctuation.curlybrace.close.cadl"),
-openParen: createToken("(", "punctuation.parenthesis.open.cadl"),
-closeParen: createToken(")", "punctuation.parenthesis.close.cadl"),
-semicolon: createToken(";", "punctuation.terminator.statement.cadl"),
-
-string: {
-doubleQuote: createToken('"', "string.quoted.double.cadl"),
-},
-typeParameters: {
-begin: createToken("<", "punctuation.definition.typeparameters.begin.cadl"),
-end: createToken(">", "punctuation.definition.typeparameters.end.cadl"),
-},
-},
-
-literals: {
-numeric: (text: string) => createToken(text, "constant.numeric.cadl"),
-string: (text: string) => createToken(text, "string.quoted.double.cadl"),
-},
-} as const;
@@ -43,9 +43,11 @@
 ],
 "scripts": {
 "clean": "rimraf ./dist ./temp",
-"build": "npm run gen-manifest && npm run compile",
+"build": "npm run gen-manifest && npm run compile && npm run generate-tmlanguage",
 "compile": "tsc -p .",
 "watch": "tsc -p . --watch",
+"watch-tmlanguage": "node scripts/watch-tmlanguage.js",
+"generate-tmlanguage": "node scripts/generate-tmlanguage.js",
 "dogfood": "node scripts/dogfood.js",
 "test": "mocha",
 "test-official": "c8 mocha --forbid-only",
@@ -91,6 +93,9 @@
 "prettier-plugin-organize-imports": "~2.3.4",
 "source-map-support": "~0.5.19",
 "rimraf": "~3.0.2",
-"typescript": "~4.7.2"
+"tmlanguage-generator": "~0.3.0",
+"typescript": "~4.7.2",
+"vscode-oniguruma": "~1.6.1",
+"vscode-textmate": "~6.0.0"
 }
 }
@@ -0,0 +1,2 @@
+import { main } from "../dist/server/tmlanguage.js";
+await main();
@@ -1,25 +1,25 @@
 import { runWatch } from "@cadl-lang/internal-build-utils";
-import { createRequire } from "module";
+import { copyFile } from "fs/promises";
+import mkdirp from "mkdirp";
 import { resolve } from "path";
+import { pathToFileURL } from "url";
 
-const require = createRequire(import.meta.url);
-const script = resolve("dist-dev/src/tmlanguage.js");
+let count = 0;
+const scriptPath = resolve("dist/server/tmlanguage.js");
 
 async function regenerate() {
 // For perf, we don't want to shell out to a new process every build and we
 // particularly want to avoid reinitialzing onigasm, which is relatively slow.
 // So we purge the script from the require cache and re-run it with changes
 // in-proc.
-delete require.cache[script];
-await require(script).main();
+const script = await import(`${pathToFileURL(scriptPath)}?q=${count++}`);
+await script.main();
+await mkdirp("../cadl-vscode/dist");
+await copyFile("dist/cadl.tmLanguage", "../cadl-vscode/dist/cadl.tmLanguage");
 }
 
-runWatch("dist-dev/src", regenerate, {
+runWatch("dist/server", regenerate, {
 // This filter doesn't do as much as one might hope because tsc writes out all
 // the files on recompilation. So tmlanguage.js changes when other .ts files
 // in cadl-vscode change but tmlanguage.ts has not changed. We could check the
 // tmlanguage.ts timestamp to fix it, but it didn't seem worth the complexity.
 // We can't just watch tmlanguage.ts because we need to wait for tsc to
 // compile it.
-filter: (file) => file === script,
+filter: (file) => file === scriptPath,
 });
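The rewritten watcher above re-imports the freshly compiled module on each change. ESM has no counterpart to delete require.cache, so it busts Node's module cache by appending an ever-changing query string to the file URL; each distinct URL is evaluated as a separate module. A minimal sketch of that pattern (importFresh is an illustrative name, not part of the codebase):

import { pathToFileURL } from "url";

let generation = 0;

async function importFresh(scriptPath: string) {
  // Each distinct URL gets its own entry in the ESM module cache, so bumping
  // the query forces Node to re-evaluate the module. Old copies are never
  // freed, which is an acceptable trade-off for a long-running dev watcher.
  return await import(`${pathToFileURL(scriptPath)}?q=${generation++}`);
}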
@@ -70,6 +70,8 @@ function main() {
 connection.onReferences(s.findReferences);
 connection.onRenameRequest(s.rename);
 connection.onPrepareRename(s.prepareRename);
+connection.languages.semanticTokens.on(s.buildSemanticTokens);
 
 documents.onDidChangeContent(s.checkChange);
 documents.onDidClose(s.documentClosed);
 
@@ -19,6 +19,10 @@ import {
 Range,
 ReferenceParams,
 RenameParams,
+SemanticTokens,
+SemanticTokensBuilder,
+SemanticTokensLegend,
+SemanticTokensParams,
 ServerCapabilities,
 TextDocumentChangeEvent,
 TextDocumentIdentifier,
@@ -36,7 +40,7 @@ import {
 getSourceLocation,
 } from "../core/diagnostics.js";
 import { CompilerOptions } from "../core/options.js";
-import { getNodeAtPosition, visitChildren } from "../core/parser.js";
+import { getNodeAtPosition, parse, visitChildren } from "../core/parser.js";
 import {
 ensureTrailingDirectorySeparator,
 getDirectoryPath,
@@ -44,12 +48,14 @@
 resolvePath,
 } from "../core/path-utils.js";
 import { createProgram, Program } from "../core/program.js";
+import { createScanner, isKeyword, isPunctuation, Token } from "../core/scanner.js";
 import {
 CadlScriptNode,
 CompilerHost,
 Diagnostic as CadlDiagnostic,
 DiagnosticTarget,
+IdentifierNode,
 Node,
 SourceFile,
 SymbolFlags,
 SyntaxKind,
@@ -77,6 +83,8 @@ export interface Server {
 findReferences(params: ReferenceParams): Promise<Location[]>;
 prepareRename(params: PrepareRenameParams): Promise<Range | undefined>;
 rename(params: RenameParams): Promise<WorkspaceEdit>;
+getSemanticTokens(params: SemanticTokensParams): Promise<SemanticToken[]>;
+buildSemanticTokens(params: SemanticTokensParams): Promise<SemanticTokens>;
 checkChange(change: TextDocumentChangeEvent<TextDocument>): Promise<void>;
 documentClosed(change: TextDocumentChangeEvent<TextDocument>): void;
 log(message: string, details?: any): void;
@@ -95,6 +103,37 @@ export interface ServerWorkspaceFolder extends WorkspaceFolder {
 path: string;
 }
 
+export enum SemanticTokenKind {
+Namespace,
+Type,
+Class,
+Enum,
+Interface,
+Struct,
+TypeParameter,
+Parameter,
+Variable,
+Property,
+EnumMember,
+Event,
+Function,
+Method,
+Macro,
+Keyword,
+Modifier,
+Comment,
+String,
+Number,
+Regexp,
+Operator,
+}
+
+export interface SemanticToken {
+kind: SemanticTokenKind;
+pos: number;
+end: number;
+}
+
 interface CachedFile {
 type: "file";
 file: SourceFile;
@@ -187,11 +226,20 @@ export function createServer(host: ServerHost): Server {
 findReferences,
 prepareRename,
 rename,
+getSemanticTokens,
+buildSemanticTokens,
 checkChange,
 log,
 };
 
 function initialize(params: InitializeParams): InitializeResult {
+const tokenLegend: SemanticTokensLegend = {
+tokenTypes: Object.keys(SemanticTokenKind)
+.filter((x) => Number.isNaN(Number(x)))
+.map((x) => x.slice(0, 1).toLocaleLowerCase() + x.slice(1)),
+tokenModifiers: [],
+};
+
 const capabilities: ServerCapabilities = {
 textDocumentSync: TextDocumentSyncKind.Incremental,
 definitionProvider: true,
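The tokenLegend computation above leans on how numeric TypeScript enums are represented: Object.keys returns the reverse numeric mappings alongside the member names, which the Number.isNaN filter strips, and the surviving names keep the enum's declaration order, so a SemanticTokenKind value can be used directly as an index into tokenTypes. A small illustration (Kind is a stand-in two-member enum):

// A numeric enum generates a reverse mapping, so its keys include the values:
enum Kind { Namespace, Type }
Object.keys(Kind);                                   // ["0", "1", "Namespace", "Type"]
const tokenTypes = Object.keys(Kind)
  .filter((x) => Number.isNaN(Number(x)))            // drop "0", "1"
  .map((x) => x.slice(0, 1).toLocaleLowerCase() + x.slice(1)); // ["namespace", "type"]
// tokenTypes[Kind.Namespace] === "namespace" — index and enum value line up.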
@@ -200,6 +248,10 @@
 triggerCharacters: [".", "@"],
 allCommitCharacters: [".", ",", ";", "("],
 },
+semanticTokensProvider: {
+full: true,
+legend: tokenLegend,
+},
 referencesProvider: true,
 renameProvider: {
 prepareProvider: true,
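Declaring semanticTokensProvider with full: true advertises that the server answers whole-document textDocument/semanticTokens/full requests; the legend sent here is what lets the client decode the numeric token-type indices that buildSemanticTokens (wired up via connection.languages.semanticTokens.on in server.ts above) later emits.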
@@ -589,6 +641,151 @@ export function createServer(host: ServerHost): Server {
 }
 }
 
+async function getSemanticTokens(params: SemanticTokensParams): Promise<SemanticToken[]> {
+const ignore = -1;
+const defer = -2;
+const file = await compilerHost.readFile(getPath(params.textDocument));
+const tokens = mapTokens();
+const ast = parse(file);
+classifyNode(ast);
+return Array.from(tokens.values()).filter((t) => t.kind !== undefined);
+
+function mapTokens() {
+const tokens = new Map<number, SemanticToken>();
+const scanner = createScanner(file, () => {});
+
+while (scanner.scan() !== Token.EndOfFile) {
+const kind = classifyToken(scanner.token);
+if (kind === ignore) {
+continue;
+}
+tokens.set(scanner.tokenPosition, {
+kind: kind === defer ? undefined! : kind,
+pos: scanner.tokenPosition,
+end: scanner.position,
+});
+}
+return tokens;
+}
+
+function classifyToken(token: Token): SemanticTokenKind | typeof defer | typeof ignore {
+switch (token) {
+case Token.Identifier:
+return defer;
+case Token.StringLiteral:
+return SemanticTokenKind.String;
+case Token.NumericLiteral:
+return SemanticTokenKind.Number;
+case Token.MultiLineComment:
+case Token.SingleLineComment:
+return SemanticTokenKind.Comment;
+default:
+if (isKeyword(token)) {
+return SemanticTokenKind.Keyword;
+}
+if (isPunctuation(token)) {
+return SemanticTokenKind.Operator;
+}
+return ignore;
+}
+}
+
+function classifyNode(node: Node) {
+switch (node.kind) {
+case SyntaxKind.DirectiveExpression:
+classify(node.target, SemanticTokenKind.Keyword);
+break;
+case SyntaxKind.TemplateParameterDeclaration:
+classify(node.id, SemanticTokenKind.TypeParameter);
+break;
+case SyntaxKind.ModelProperty:
+case SyntaxKind.UnionVariant:
+classify(node.id, SemanticTokenKind.Property);
+break;
+case SyntaxKind.AliasStatement:
+classify(node.id, SemanticTokenKind.Struct);
+break;
+case SyntaxKind.ModelStatement:
+classify(node.id, SemanticTokenKind.Struct);
+break;
+case SyntaxKind.EnumStatement:
+classify(node.id, SemanticTokenKind.Enum);
+break;
+case SyntaxKind.EnumMember:
+classify(node.id, SemanticTokenKind.EnumMember);
+break;
+case SyntaxKind.NamespaceStatement:
+classify(node.id, SemanticTokenKind.Namespace);
+break;
+case SyntaxKind.InterfaceStatement:
+classify(node.id, SemanticTokenKind.Interface);
+break;
+case SyntaxKind.OperationStatement:
+classify(node.id, SemanticTokenKind.Function);
+break;
+case SyntaxKind.DecoratorExpression:
+classifyReference(node.target, SemanticTokenKind.Macro);
+break;
+case SyntaxKind.TypeReference:
+classifyReference(node.target);
+break;
+case SyntaxKind.MemberExpression:
+classifyReference(node);
+break;
+}
+visitChildren(node, classifyNode);
+}
+
+function classify(node: Node, kind: SemanticTokenKind) {
+const token = tokens.get(node.pos);
+if (token && token.kind === undefined) {
+token.kind = kind;
+}
+}
+
+function classifyReference(node: Node, kind = SemanticTokenKind.Type) {
+switch (node.kind) {
+case SyntaxKind.MemberExpression:
+classifyIdentifier(node.base, SemanticTokenKind.Namespace);
+classifyIdentifier(node.id, kind);
+break;
+case SyntaxKind.TypeReference:
+classifyIdentifier(node.target, kind);
+break;
+case SyntaxKind.Identifier:
+classify(node, kind);
+break;
+}
+}
+
+function classifyIdentifier(node: Node, kind: SemanticTokenKind) {
+if (node.kind === SyntaxKind.Identifier) {
+classify(node, kind);
+}
+}
+}
+
+async function buildSemanticTokens(params: SemanticTokensParams): Promise<SemanticTokens> {
+const builder = new SemanticTokensBuilder();
+const tokens = await getSemanticTokens(params);
+const file = await compilerHost.readFile(getPath(params.textDocument));
+const starts = file.getLineStarts();
+
+for (const token of tokens) {
+const start = file.getLineAndCharacterOfPosition(token.pos);
+const end = file.getLineAndCharacterOfPosition(token.end);
+
+for (let pos = token.pos, line = start.line; line <= end.line; line++) {
+const endPos = line === end.line ? token.end : starts[line + 1];
+const character = line === start.line ? start.character : 0;
+builder.push(line, character, endPos - pos, token.kind, 0);
+pos = endPos;
+}
+}
+
+return builder.build();
+}
 
 function documentClosed(change: TextDocumentChangeEvent<TextDocument>) {
 // clear diagnostics on file close
 sendDiagnostics(change.document, []);
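The getSemanticTokens addition classifies in two passes over the same token positions: the scanner pass assigns kinds to keywords, literals, comments, and punctuation immediately and records identifiers with an undefined kind (the defer sentinel), then the AST pass fills in each identifier's kind from its syntactic role; positions still undefined at the end are dropped by the final filter. A condensed sketch of the deferred-classification idea (names are illustrative, not the server's API):

interface Tok {
  pos: number;
  end: number;
  kind?: number; // undefined = deferred to the AST pass
}

const byPos = new Map<number, Tok>();

// Pass 1 (lexical): record every interesting token; identifiers stay undefined.
// Pass 2 (syntactic): walk the tree; the first classification of a position
// wins, mirroring classify() in the change above.
function classify(pos: number, kind: number) {
  const t = byPos.get(pos);
  if (t && t.kind === undefined) {
    t.kind = kind;
  }
}

// Emit only what ended up classified:
// [...byPos.values()].filter((t) => t.kind !== undefined)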
@@ -1,7 +1,7 @@
 // TextMate-based syntax highlighting is implemented in this file.
 // cadl.tmLanguage is generated by running this script.
 
-import fs from "fs/promises";
+import { writeFile } from "fs/promises";
 import mkdirp from "mkdirp";
 import { resolve } from "path";
 import * as tm from "tmlanguage-generator";
@@ -469,7 +469,7 @@ const interfaceBody: BeginEndRule = {
 endCaptures: {
 "0": { scope: "punctuation.curlybrace.close.cadl" },
 },
-patterns: [token, directive, decorator, interfaceMember],
+patterns: [token, directive, decorator, interfaceMember, punctuationSemicolon],
 };
 
 const interfaceStatement: BeginEndRule = {
@@ -555,5 +555,5 @@ export async function main() {
 errorSourceFilePath: resolve("./src/tmlanguage.ts"),
 });
 await mkdirp("./dist");
-await fs.writeFile("./dist/cadl.tmLanguage", plist);
+await writeFile("./dist/cadl.tmLanguage", plist);
 }
@@ -5,7 +5,7 @@ import {
 CompletionItemTag,
 CompletionList,
 } from "vscode-languageserver/node.js";
-import { createTestServerHost, extractCursor } from "./test-server-host.js";
+import { createTestServerHost, extractCursor } from "../../testing/test-server-host.js";
 
 describe("compiler: server: completion", () => {
 it("completes globals", async () => {
@@ -1,6 +1,10 @@
 import { deepStrictEqual, strictEqual } from "assert";
 import { Range } from "vscode-languageserver/node.js";
-import { createTestServerHost, extractCursor, getTestIdentifiers } from "./test-server-host.js";
+import {
+createTestServerHost,
+extractCursor,
+getTestIdentifiers,
+} from "../../testing/test-server-host.js";
 
 describe("compiler: server: rename and find references", () => {
 // `┆` marks where the cursor is positioned
@@ -1,5 +1,5 @@
 import { deepStrictEqual } from "assert";
-import { createTestServerHost } from "./test-server-host.js";
+import { createTestServerHost } from "../../testing/test-server-host.js";
 
 describe("compiler: server: main file", () => {
 it("finds the main file", async () => {
@ -0,0 +1,776 @@
|
|||
import { deepStrictEqual, ok } from "assert";
|
||||
import { readFile } from "fs/promises";
|
||||
import { createRequire } from "module";
|
||||
import path, { dirname, resolve } from "path";
|
||||
import { fileURLToPath } from "url";
|
||||
import vscode_oniguruma from "vscode-oniguruma";
|
||||
import vscode_textmate, { IOnigLib, StackElement } from "vscode-textmate";
|
||||
import { createSourceFile } from "../../core/diagnostics.js";
|
||||
import { SemanticToken, SemanticTokenKind } from "../../server/serverlib.js";
|
||||
import { CadlScope } from "../../server/tmlanguage.js";
|
||||
import { createTestServerHost } from "../../testing/test-server-host.js";
|
||||
|
||||
const { parseRawGrammar, Registry } = vscode_textmate;
|
||||
const { createOnigScanner, createOnigString, loadWASM } = vscode_oniguruma;
|
||||
|
||||
export type MetaScope = `meta.${string}.cadl`;
|
||||
export type TokenScope = CadlScope | MetaScope;
|
||||
|
||||
interface Token {
|
||||
text: string;
|
||||
scope: TokenScope;
|
||||
}
|
||||
|
||||
function createToken(text: string, scope: TokenScope): Token {
|
||||
return { text, scope };
|
||||
}
|
||||
|
||||
type Tokenize = (input: string) => Promise<Token[]>;
|
||||
|
||||
const Token = {
|
||||
keywords: {
|
||||
model: createToken("model", "keyword.other.cadl"),
|
||||
operation: createToken("op", "keyword.other.cadl"),
|
||||
namespace: createToken("namespace", "keyword.other.cadl"),
|
||||
interface: createToken("interface", "keyword.other.cadl"),
|
||||
alias: createToken("alias", "keyword.other.cadl"),
|
||||
extends: createToken("extends", "keyword.other.cadl"),
|
||||
is: createToken("is", "keyword.other.cadl"),
|
||||
other: (text: string) => createToken(text, "keyword.other.cadl"),
|
||||
},
|
||||
|
||||
meta: (text: string, meta: string) => createToken(text, `meta.${meta}.cadl`),
|
||||
|
||||
identifiers: {
|
||||
variable: (name: string) => createToken(name, "variable.name.cadl"),
|
||||
functionName: (name: string) => createToken(name, "entity.name.function.cadl"),
|
||||
tag: (name: string) => createToken(name, "entity.name.tag.cadl"),
|
||||
type: (name: string) => createToken(name, "entity.name.type.cadl"),
|
||||
},
|
||||
|
||||
operators: {
|
||||
assignment: createToken("=", "keyword.operator.assignment.cadl"),
|
||||
optional: createToken("?", "keyword.operator.optional.cadl"),
|
||||
typeAnnotation: createToken(":", "keyword.operator.type.annotation.cadl"),
|
||||
spread: createToken("...", "keyword.operator.spread.cadl"),
|
||||
},
|
||||
|
||||
punctuation: {
|
||||
comma: createToken(",", "punctuation.comma.cadl"),
|
||||
accessor: createToken(".", "punctuation.accessor.cadl"),
|
||||
openBracket: createToken("[", "punctuation.squarebracket.open.cadl"),
|
||||
closeBracket: createToken("]", "punctuation.squarebracket.close.cadl"),
|
||||
openBrace: createToken("{", "punctuation.curlybrace.open.cadl"),
|
||||
closeBrace: createToken("}", "punctuation.curlybrace.close.cadl"),
|
||||
openParen: createToken("(", "punctuation.parenthesis.open.cadl"),
|
||||
closeParen: createToken(")", "punctuation.parenthesis.close.cadl"),
|
||||
semicolon: createToken(";", "punctuation.terminator.statement.cadl"),
|
||||
|
||||
typeParameters: {
|
||||
begin: createToken("<", "punctuation.definition.typeparameters.begin.cadl"),
|
||||
end: createToken(">", "punctuation.definition.typeparameters.end.cadl"),
|
||||
},
|
||||
},
|
||||
|
||||
literals: {
|
||||
numeric: (text: string) => createToken(text, "constant.numeric.cadl"),
|
||||
string: (text: string) =>
|
||||
createToken(text.startsWith('"') ? text : '"' + text + '"', "string.quoted.double.cadl"),
|
||||
},
|
||||
} as const;
|
||||
|
||||
testColorization("semantic colorization", tokenizeSemantic);
|
||||
testColorization("tmlanguage", tokenizeTMLanguage);
|
||||
|
||||
function testColorization(description: string, tokenize: Tokenize) {
|
||||
describe(`compiler: server: ${description}`, () => {
|
||||
describe("aliases", () => {
|
||||
it("simple alias", async () => {
|
||||
const tokens = await tokenize("alias Foo = string");
|
||||
deepStrictEqual(tokens, [
|
||||
Token.keywords.alias,
|
||||
Token.identifiers.type("Foo"),
|
||||
Token.operators.assignment,
|
||||
Token.identifiers.type("string"),
|
||||
]);
|
||||
});
|
||||
it("templated alias", async () => {
|
||||
const tokens = await tokenize("alias Foo<T> = T");
|
||||
deepStrictEqual(tokens, [
|
||||
Token.keywords.alias,
|
||||
Token.identifiers.type("Foo"),
|
||||
Token.punctuation.typeParameters.begin,
|
||||
Token.identifiers.type("T"),
|
||||
Token.punctuation.typeParameters.end,
|
||||
Token.operators.assignment,
|
||||
Token.identifiers.type("T"),
|
||||
]);
|
||||
});
|
||||
});
|
||||
|
||||
describe("decorators", () => {
|
||||
it("simple parameterless decorator", async () => {
|
||||
const tokens = await tokenize("@foo");
|
||||
deepStrictEqual(tokens, [Token.identifiers.tag("@foo")]);
|
||||
});
|
||||
|
||||
it("fully qualified decorator name", async () => {
|
||||
const tokens = await tokenize("@Foo.bar");
|
||||
if (tokenize === tokenizeTMLanguage) {
|
||||
deepStrictEqual(tokens, [Token.identifiers.tag("@Foo.bar")]);
|
||||
} else {
|
||||
deepStrictEqual(tokens, [
|
||||
Token.identifiers.tag("@"),
|
||||
Token.identifiers.type("Foo"),
|
||||
Token.punctuation.accessor,
|
||||
Token.identifiers.tag("bar"),
|
||||
]);
|
||||
}
|
||||
});
|
||||
|
||||
it("decorator with parameters", async () => {
|
||||
const tokens = await tokenize(`@foo("param1", 123)`);
|
||||
deepStrictEqual(tokens, [
|
||||
Token.identifiers.tag("@foo"),
|
||||
Token.punctuation.openParen,
|
||||
Token.literals.string("param1"),
|
||||
Token.punctuation.comma,
|
||||
Token.literals.numeric("123"),
|
||||
Token.punctuation.closeParen,
|
||||
]);
|
||||
});
|
||||
});
|
||||
|
||||
describe("interfaces", () => {
|
||||
it("empty interface", async () => {
|
||||
const tokens = await tokenize("interface Foo {}");
|
||||
deepStrictEqual(tokens, [
|
||||
Token.keywords.interface,
|
||||
Token.identifiers.type("Foo"),
|
||||
Token.punctuation.openBrace,
|
||||
Token.punctuation.closeBrace,
|
||||
]);
|
||||
});
|
||||
|
||||
it("interface with single extends", async () => {
|
||||
const tokens = await tokenize("interface Foo extends Bar {}");
|
||||
deepStrictEqual(tokens, [
|
||||
Token.keywords.interface,
|
||||
Token.identifiers.type("Foo"),
|
||||
Token.keywords.extends,
|
||||
Token.identifiers.type("Bar"),
|
||||
Token.punctuation.openBrace,
|
||||
Token.punctuation.closeBrace,
|
||||
]);
|
||||
});
|
||||
|
||||
it("interface with multiple extends", async () => {
|
||||
const tokens = await tokenize("interface Foo extends Bar1, Bar2 {}");
|
||||
deepStrictEqual(tokens, [
|
||||
Token.keywords.interface,
|
||||
Token.identifiers.type("Foo"),
|
||||
Token.keywords.extends,
|
||||
Token.identifiers.type("Bar1"),
|
||||
Token.punctuation.comma,
|
||||
Token.identifiers.type("Bar2"),
|
||||
Token.punctuation.openBrace,
|
||||
Token.punctuation.closeBrace,
|
||||
]);
|
||||
});
|
||||
|
||||
it("template interface", async () => {
|
||||
const tokens = await tokenize("interface Foo<T> {}");
|
||||
deepStrictEqual(tokens, [
|
||||
Token.keywords.interface,
|
||||
Token.identifiers.type("Foo"),
|
||||
Token.punctuation.typeParameters.begin,
|
||||
Token.identifiers.type("T"),
|
||||
Token.punctuation.typeParameters.end,
|
||||
Token.punctuation.openBrace,
|
||||
Token.punctuation.closeBrace,
|
||||
]);
|
||||
});
|
||||
|
||||
it("template interface with extends", async () => {
|
||||
const tokens = await tokenize("interface Foo<T> extends Bar<T> {}");
|
||||
deepStrictEqual(tokens, [
|
||||
Token.keywords.interface,
|
||||
Token.identifiers.type("Foo"),
|
||||
Token.punctuation.typeParameters.begin,
|
||||
Token.identifiers.type("T"),
|
||||
Token.punctuation.typeParameters.end,
|
||||
Token.keywords.extends,
|
||||
Token.identifiers.type("Bar"),
|
||||
Token.punctuation.typeParameters.begin,
|
||||
Token.identifiers.type("T"),
|
||||
Token.punctuation.typeParameters.end,
|
||||
Token.punctuation.openBrace,
|
||||
Token.punctuation.closeBrace,
|
||||
]);
|
||||
});
|
||||
|
||||
it("interface with operations", async () => {
|
||||
const tokens = await tokenize(`
|
||||
interface Foo {
|
||||
bar(): string;
|
||||
}`);
|
||||
deepStrictEqual(tokens, [
|
||||
Token.keywords.interface,
|
||||
Token.identifiers.type("Foo"),
|
||||
Token.punctuation.openBrace,
|
||||
Token.identifiers.functionName("bar"),
|
||||
Token.punctuation.openParen,
|
||||
Token.punctuation.closeParen,
|
||||
Token.operators.typeAnnotation,
|
||||
Token.identifiers.type("string"),
|
||||
Token.punctuation.semicolon,
|
||||
Token.punctuation.closeBrace,
|
||||
]);
|
||||
});
|
||||
|
||||
it("interface operation that copies the signature of another operation", async () => {
|
||||
const tokens = await tokenize(`
|
||||
interface Foo {
|
||||
bar is ResourceRead<Widget>
|
||||
}`);
|
||||
|
||||
deepStrictEqual(tokens, [
|
||||
Token.keywords.interface,
|
||||
Token.identifiers.type("Foo"),
|
||||
Token.punctuation.openBrace,
|
||||
Token.identifiers.functionName("bar"),
|
||||
Token.keywords.is,
|
||||
Token.identifiers.type("ResourceRead"),
|
||||
Token.punctuation.typeParameters.begin,
|
||||
Token.identifiers.type("Widget"),
|
||||
Token.punctuation.typeParameters.end,
|
||||
Token.punctuation.closeBrace,
|
||||
]);
|
||||
});
|
||||
});
|
||||
|
||||
describe("models", () => {
|
||||
it("simple model", async () => {
|
||||
const tokens = await tokenize("model Foo {}");
|
||||
deepStrictEqual(tokens, [
|
||||
Token.keywords.model,
|
||||
Token.identifiers.type("Foo"),
|
||||
Token.punctuation.openBrace,
|
||||
Token.punctuation.closeBrace,
|
||||
]);
|
||||
});
|
||||
|
||||
it("model with extends", async () => {
|
||||
const tokens = await tokenize("model Foo extends Bar {}");
|
||||
deepStrictEqual(tokens, [
|
||||
Token.keywords.model,
|
||||
Token.identifiers.type("Foo"),
|
||||
Token.keywords.extends,
|
||||
Token.identifiers.type("Bar"),
|
||||
Token.punctuation.openBrace,
|
||||
Token.punctuation.closeBrace,
|
||||
]);
|
||||
});
|
||||
|
||||
it("model with is", async () => {
|
||||
const tokens = await tokenize("model Foo is Bar {}");
|
||||
deepStrictEqual(tokens, [
|
||||
Token.keywords.model,
|
||||
Token.identifiers.type("Foo"),
|
||||
Token.keywords.is,
|
||||
Token.identifiers.type("Bar"),
|
||||
Token.punctuation.openBrace,
|
||||
Token.punctuation.closeBrace,
|
||||
]);
|
||||
});
|
||||
|
||||
it("single template argument model", async () => {
|
||||
const tokens = await tokenize("model Foo<T> {}");
|
||||
deepStrictEqual(tokens, [
|
||||
Token.keywords.model,
|
||||
Token.identifiers.type("Foo"),
|
||||
Token.punctuation.typeParameters.begin,
|
||||
Token.identifiers.type("T"),
|
||||
Token.punctuation.typeParameters.end,
|
||||
Token.punctuation.openBrace,
|
||||
Token.punctuation.closeBrace,
|
||||
]);
|
||||
});
|
||||
|
||||
it("multiple template argument model", async () => {
|
||||
const tokens = await tokenize("model Foo<A, B, C> {}");
|
||||
deepStrictEqual(tokens, [
|
||||
Token.keywords.model,
|
||||
Token.identifiers.type("Foo"),
|
||||
Token.punctuation.typeParameters.begin,
|
||||
Token.identifiers.type("A"),
|
||||
Token.punctuation.comma,
|
||||
Token.identifiers.type("B"),
|
||||
Token.punctuation.comma,
|
||||
Token.identifiers.type("C"),
|
||||
Token.punctuation.typeParameters.end,
|
||||
Token.punctuation.openBrace,
|
||||
Token.punctuation.closeBrace,
|
||||
]);
|
||||
});
|
||||
|
||||
it("model with basic properties", async () => {
|
||||
const tokens = await tokenize(`
|
||||
model Foo {
|
||||
prop1: string;
|
||||
prop2: int32;
|
||||
}`);
|
||||
deepStrictEqual(tokens, [
|
||||
Token.keywords.model,
|
||||
Token.identifiers.type("Foo"),
|
||||
Token.punctuation.openBrace,
|
||||
Token.identifiers.variable("prop1"),
|
||||
Token.operators.typeAnnotation,
|
||||
Token.identifiers.type("string"),
|
||||
Token.punctuation.semicolon,
|
||||
Token.identifiers.variable("prop2"),
|
||||
Token.operators.typeAnnotation,
|
||||
Token.identifiers.type("int32"),
|
||||
Token.punctuation.semicolon,
|
||||
Token.punctuation.closeBrace,
|
||||
]);
|
||||
});
|
||||
|
||||
it("model with optional properties", async () => {
|
||||
const tokens = await tokenize(`
|
||||
model Foo {
|
||||
prop1?: string;
|
||||
}`);
|
||||
deepStrictEqual(tokens, [
|
||||
Token.keywords.model,
|
||||
Token.identifiers.type("Foo"),
|
||||
Token.punctuation.openBrace,
|
||||
Token.identifiers.variable("prop1"),
|
||||
Token.operators.optional,
|
||||
Token.operators.typeAnnotation,
|
||||
Token.identifiers.type("string"),
|
||||
Token.punctuation.semicolon,
|
||||
Token.punctuation.closeBrace,
|
||||
]);
|
||||
});
|
||||
|
||||
it("model with properties with default value", async () => {
|
||||
const tokens = await tokenize(`
|
||||
model Foo {
|
||||
prop1?: string = "my-default";
|
||||
}`);
|
||||
deepStrictEqual(tokens, [
|
||||
Token.keywords.model,
|
||||
Token.identifiers.type("Foo"),
|
||||
Token.punctuation.openBrace,
|
||||
Token.identifiers.variable("prop1"),
|
||||
Token.operators.optional,
|
||||
Token.operators.typeAnnotation,
|
||||
Token.identifiers.type("string"),
|
||||
Token.operators.assignment,
|
||||
Token.literals.string("my-default"),
|
||||
Token.punctuation.semicolon,
|
||||
Token.punctuation.closeBrace,
|
||||
]);
|
||||
});
|
||||
|
||||
it("nested model ", async () => {
|
||||
const tokens = await tokenize(`
|
||||
model Foo {
|
||||
nested: {
|
||||
prop1: string;
|
||||
};
|
||||
}`);
|
||||
deepStrictEqual(tokens, [
|
||||
Token.keywords.model,
|
||||
Token.identifiers.type("Foo"),
|
||||
Token.punctuation.openBrace,
|
||||
Token.identifiers.variable("nested"),
|
||||
Token.operators.typeAnnotation,
|
||||
Token.punctuation.openBrace,
|
||||
Token.identifiers.variable("prop1"),
|
||||
Token.operators.typeAnnotation,
|
||||
Token.identifiers.type("string"),
|
||||
Token.punctuation.semicolon,
|
||||
Token.punctuation.closeBrace,
|
||||
Token.punctuation.semicolon,
|
||||
Token.punctuation.closeBrace,
|
||||
]);
|
||||
});
|
||||
|
||||
it("model with spread property", async () => {
|
||||
const tokens = await tokenize(`
|
||||
model Foo {
|
||||
...Bar;
|
||||
}`);
|
||||
deepStrictEqual(tokens, [
|
||||
Token.keywords.model,
|
||||
Token.identifiers.type("Foo"),
|
||||
Token.punctuation.openBrace,
|
||||
Token.operators.spread,
|
||||
Token.identifiers.type("Bar"),
|
||||
Token.punctuation.semicolon,
|
||||
Token.punctuation.closeBrace,
|
||||
]);
|
||||
});
|
||||
});
|
||||
|
||||
describe("namespaces", () => {
|
||||
it("simple global namespace", async () => {
|
||||
const tokens = await tokenize("namespace Foo;");
|
||||
deepStrictEqual(tokens, [
|
||||
Token.keywords.namespace,
|
||||
Token.identifiers.type("Foo"),
|
||||
Token.punctuation.semicolon,
|
||||
]);
|
||||
});
|
||||
|
||||
it("subnamespace global namespace", async () => {
|
||||
const tokens = await tokenize("namespace Foo.Bar;");
|
||||
deepStrictEqual(tokens, [
|
||||
Token.keywords.namespace,
|
||||
Token.identifiers.type("Foo"),
|
||||
Token.punctuation.accessor,
|
||||
Token.identifiers.type("Bar"),
|
||||
Token.punctuation.semicolon,
|
||||
]);
|
||||
});
|
||||
|
||||
it("simple namespace", async () => {
|
||||
const tokens = await tokenize(`
|
||||
namespace Foo {
|
||||
|
||||
}`);
|
||||
deepStrictEqual(tokens, [
|
||||
Token.keywords.namespace,
|
||||
Token.identifiers.type("Foo"),
|
||||
Token.punctuation.openBrace,
|
||||
Token.punctuation.closeBrace,
|
||||
]);
|
||||
});
|
||||
|
||||
it("nested namespace", async () => {
|
||||
const tokens = await tokenize(`
|
||||
namespace Foo {
|
||||
namespace Bar {
|
||||
|
||||
}
|
||||
}`);
|
||||
deepStrictEqual(tokens, [
|
||||
Token.keywords.namespace,
|
||||
Token.identifiers.type("Foo"),
|
||||
Token.punctuation.openBrace,
|
||||
Token.keywords.namespace,
|
||||
Token.identifiers.type("Bar"),
|
||||
Token.punctuation.openBrace,
|
||||
Token.punctuation.closeBrace,
|
||||
Token.punctuation.closeBrace,
|
||||
]);
|
||||
});
|
||||
});
|
||||
|
||||
describe("operations", () => {
|
||||
it("simple operation", async () => {
|
||||
const tokens = await tokenize("op foo(): string");
|
||||
deepStrictEqual(tokens, [
|
||||
Token.keywords.operation,
|
||||
Token.identifiers.functionName("foo"),
|
||||
Token.punctuation.openParen,
|
||||
Token.punctuation.closeParen,
|
||||
Token.operators.typeAnnotation,
|
||||
Token.identifiers.type("string"),
|
||||
]);
|
||||
});
|
||||
|
||||
it("operation with parameters", async () => {
|
||||
const tokens = await tokenize("op foo(param1: string, param2: int32): string");
|
||||
deepStrictEqual(tokens, [
|
||||
Token.keywords.operation,
|
||||
Token.identifiers.functionName("foo"),
|
||||
Token.punctuation.openParen,
|
||||
|
||||
Token.identifiers.variable("param1"),
|
||||
Token.operators.typeAnnotation,
|
||||
Token.identifiers.type("string"),
|
||||
Token.punctuation.comma,
|
||||
|
||||
Token.identifiers.variable("param2"),
|
||||
Token.operators.typeAnnotation,
|
||||
Token.identifiers.type("int32"),
|
||||
|
||||
Token.punctuation.closeParen,
|
||||
Token.operators.typeAnnotation,
|
||||
Token.identifiers.type("string"),
|
||||
]);
|
||||
});
|
||||
|
||||
it("model with properties with default value", async () => {
|
||||
const tokens = await tokenize(`op foo(param1?: string = "my-default"): string`);
|
||||
deepStrictEqual(tokens, [
|
||||
Token.keywords.operation,
|
||||
Token.identifiers.functionName("foo"),
|
||||
Token.punctuation.openParen,
|
||||
|
||||
Token.identifiers.variable("param1"),
|
||||
Token.operators.optional,
|
||||
Token.operators.typeAnnotation,
|
||||
Token.identifiers.type("string"),
|
||||
Token.operators.assignment,
|
||||
Token.literals.string("my-default"),
|
||||
|
||||
Token.punctuation.closeParen,
|
||||
Token.operators.typeAnnotation,
|
||||
Token.identifiers.type("string"),
|
||||
]);
|
||||
});
|
||||
|
||||
it("operation with decorated parameters", async () => {
|
||||
const tokens = await tokenize(
|
||||
"op foo(@path param1: string, @query param2?: int32): string"
|
||||
);
|
||||
deepStrictEqual(tokens, [
|
||||
Token.keywords.operation,
|
||||
Token.identifiers.functionName("foo"),
|
||||
Token.punctuation.openParen,
|
||||
|
||||
Token.identifiers.tag("@path"),
|
||||
Token.identifiers.variable("param1"),
|
||||
Token.operators.typeAnnotation,
|
||||
Token.identifiers.type("string"),
|
||||
Token.punctuation.comma,
|
||||
|
||||
Token.identifiers.tag("@query"),
|
||||
Token.identifiers.variable("param2"),
|
||||
Token.operators.optional,
|
||||
Token.operators.typeAnnotation,
|
||||
Token.identifiers.type("int32"),
|
||||
|
||||
Token.punctuation.closeParen,
|
||||
Token.operators.typeAnnotation,
|
||||
Token.identifiers.type("string"),
|
||||
]);
|
||||
});
|
||||
|
||||
it("operation that copies the signature of another operation", async () => {
|
||||
const tokens = await tokenize("op foo is ResourceRead<Widget>");
|
||||
deepStrictEqual(tokens, [
|
||||
Token.keywords.operation,
|
||||
Token.identifiers.functionName("foo"),
|
||||
Token.keywords.is,
|
||||
Token.identifiers.type("ResourceRead"),
|
||||
Token.punctuation.typeParameters.begin,
|
||||
Token.identifiers.type("Widget"),
|
||||
Token.punctuation.typeParameters.end,
|
||||
]);
|
||||
});
|
||||
|
||||
it("defining a templated operation signature", async () => {
|
||||
const tokens = await tokenize(
|
||||
"op ResourceRead<TResource> is ResourceReadBase<TResource, DefaultOptions>"
|
||||
);
|
||||
deepStrictEqual(tokens, [
|
||||
Token.keywords.operation,
|
||||
Token.identifiers.functionName("ResourceRead"),
|
||||
Token.punctuation.typeParameters.begin,
|
||||
Token.identifiers.type("TResource"),
|
||||
Token.punctuation.typeParameters.end,
|
||||
Token.keywords.is,
|
||||
Token.identifiers.type("ResourceReadBase"),
|
||||
Token.punctuation.typeParameters.begin,
|
||||
Token.identifiers.type("TResource"),
|
||||
Token.punctuation.comma,
|
||||
Token.identifiers.type("DefaultOptions"),
|
||||
Token.punctuation.typeParameters.end,
|
||||
]);
|
||||
});
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
const punctuationMap = getPunctuationMap();

export async function tokenizeSemantic(input: string): Promise<Token[]> {
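  // Runs the real language server against an in-memory test host and converts its
  // semantic tokens into the same Token shape the tmlanguage tests use, so both
  // tokenizers can share one set of expectations.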
  const host = await createTestServerHost();
  const file = createSourceFile(input, "untitled:test");
  const document = host.addOrUpdateDocument("untitled:test", input);
  const semanticTokens = await host.server.getSemanticTokens({ textDocument: document });
  const tokens = [];

  for (const semanticToken of semanticTokens) {
    const text = file.text.substring(semanticToken.pos, semanticToken.end);
    const token = convertSemanticToken(semanticToken, text);
    if (token) {
      tokens.push(token);
    }
  }

  // Make @dec one token to match tmlanguage
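  // (e.g. the separate "@" and "doc" tokens become a single "@doc" token).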
  for (let i = 0; i < tokens.length - 1; i++) {
    if (
      tokens[i].scope === "entity.name.tag.cadl" &&
      tokens[i].text === "@" &&
      tokens[i + 1].scope === "entity.name.tag.cadl"
    ) {
      tokens[i].text = "@" + tokens[i + 1].text;
      tokens.splice(i + 1, 1);
    }
  }
  return tokens;

  function convertSemanticToken(token: SemanticToken, text: string): Token | undefined {
    switch (token.kind) {
      case SemanticTokenKind.Namespace:
      case SemanticTokenKind.Type:
      case SemanticTokenKind.Class:
      case SemanticTokenKind.Enum:
      case SemanticTokenKind.Interface:
      case SemanticTokenKind.Struct:
      case SemanticTokenKind.TypeParameter:
        return Token.identifiers.type(text);
      case SemanticTokenKind.Parameter:
      case SemanticTokenKind.Property:
      case SemanticTokenKind.Variable:
      case SemanticTokenKind.EnumMember:
        return Token.identifiers.variable(text);
      case SemanticTokenKind.Function:
        return Token.identifiers.functionName(text);
      case SemanticTokenKind.Macro:
        return Token.identifiers.tag(text);
      case SemanticTokenKind.Keyword:
        return Token.keywords.other(text);
      case SemanticTokenKind.String:
        return Token.literals.string(text);
      case SemanticTokenKind.Number:
        return Token.literals.numeric(text);
      case SemanticTokenKind.Operator:
        if (text === "@") return Token.identifiers.tag("@");
        const punctuation = punctuationMap.get(text);
        ok(punctuation, "No tmlanguage equivalent for punctuation: " + text);
        return punctuation;
      default:
        ok(false, "Unexpected SemanticTokenKind: " + SemanticTokenKind[token.kind]);
    }
  }
}

async function createOnigLib(): Promise<IOnigLib> {
  const require = createRequire(import.meta.url);
  const onigWasm = await readFile(`${path.dirname(require.resolve("vscode-oniguruma"))}/onig.wasm`);

  await loadWASM(onigWasm.buffer);

  return {
    createOnigScanner: (sources) => createOnigScanner(sources),
    createOnigString,
  };
}
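
// vscode-textmate needs an Oniguruma regex engine to run the grammar; the wasm
// binary ships with the vscode-oniguruma package resolved above.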
const registry = new Registry({
  onigLib: createOnigLib(),
  loadGrammar: async () => {
    const data = await readFile(
      resolve(dirname(fileURLToPath(import.meta.url)), "../../cadl.tmLanguage"),
      "utf-8"
    );
    return parseRawGrammar(data);
  },
});

const excludedScopes = ["source.cadl"];
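// source.cadl and meta.* are container scopes with no colorization of their own,
// so they are dropped from the expected-token stream.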

export async function tokenizeTMLanguage(input: string | Input): Promise<Token[]> {
  if (typeof input === "string") {
    input = Input.fromText(input);
  }

  const tokens: Token[] = [];
  let previousStack: StackElement | null = null;
  const grammar = await registry.loadGrammar("source.cadl");

  if (grammar === null) {
    throw new Error("Unexpected null grammar");
  }

  for (let lineIndex = 0; lineIndex < input.lines.length; lineIndex++) {
    const line = input.lines[lineIndex];

    const lineResult = grammar.tokenizeLine(line, previousStack);
    previousStack = lineResult.ruleStack;
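
    // Every line is tokenized so the grammar's rule stack stays accurate, but only
    // tokens inside the requested span are collected.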
    if (lineIndex < input.span.startLine || lineIndex > input.span.endLine) {
      continue;
    }

    for (const token of lineResult.tokens) {
      if (
        (lineIndex === input.span.startLine && token.startIndex < input.span.startIndex) ||
        (lineIndex === input.span.endLine && token.endIndex > input.span.endIndex)
      ) {
        continue;
      }

      const text = line.substring(token.startIndex, token.endIndex);
      const scope = token.scopes[token.scopes.length - 1];

      if (!excludeScope(scope)) {
        tokens.push(createToken(text, scope as TokenScope));
      }
    }

    for (let i = 0; i < tokens.length - 2; i++) {
      // For some reason the API returns strings as three tokens (open quote, content,
      // close quote); combine them, since the "Inspect tokens" view in VS Code shows a
      // single token, as combined here.
      if (tokens[i].text === '"' && tokens[i + 2].text === '"') {
        tokens[i].text = '"' + tokens[i + 1].text + '"';
        tokens.splice(i + 1, 2);
      }
    }
  }

  return tokens;
}

function excludeScope(scope: string) {
  return excludedScopes.includes(scope) || scope.startsWith("meta.");
}

interface Span {
  startLine: number;
  startIndex: number;
  endLine: number;
  endIndex: number;
}

class Input {
  private constructor(public lines: string[], public span: Span) {}

  public static fromText(text: string) {
    // ensure consistent line endings regardless of OS (global replace so every
    // CRLF is normalized, not just the first)
    text = text.replace(/\r\n/g, "\n");
    const lines = text.split("\n");

    return new Input(lines, {
      startLine: 0,
      startIndex: 0,
      endLine: lines.length - 1,
      endIndex: lines[lines.length - 1].length,
    });
  }
}

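// Maps punctuation/operator text (e.g. "{" or "...") to the corresponding Token
// constants, used when converting semantic operator tokens above.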
function getPunctuationMap(): ReadonlyMap<string, Token> {
  const map = new Map();
  visit(Token.punctuation);
  visit(Token.operators);
  return map;

  function visit(obj: Record<string, any>) {
    for (const value of Object.values(obj)) {
      if ("text" in value) {
        map.set(value.text, value);
      } else {
        visit(value);
      }
    }
  }
}

@ -2,7 +2,7 @@ import { ok, strictEqual } from "assert";
import { CadlScriptNode, SyntaxKind } from "../core/index.js";
import { getNodeAtPosition, parse } from "../core/parser.js";
import { Node } from "../core/types.js";
import { extractCursor } from "./server/test-server-host.js";
import { extractCursor } from "../testing/test-server-host.js";
import { dumpAST } from "./test-parser.js";

describe("compiler: parser utils", () => {

@ -1,4 +1,6 @@
export * from "../test/server/test-colorization.js";
export * from "./expect.js";
export * from "./test-host.js";
export * from "./test-server-host.js";
export * from "./test-utils.js";
export * from "./types.js";

@ -2,15 +2,11 @@ import { ok } from "assert";
import { pathToFileURL } from "url";
import { TextDocument } from "vscode-languageserver-textdocument";
import { Diagnostic } from "vscode-languageserver/node.js";
import { parse, visitChildren } from "../../core/parser.js";
import { IdentifierNode, SyntaxKind } from "../../core/types.js";
import { createServer, Server, ServerHost } from "../../server/index.js";
import {
  createTestFileSystem,
  resolveVirtualPath,
  StandardTestLibrary,
  TestFileSystem,
} from "../../testing/index.js";
import { parse, visitChildren } from "../core/parser.js";
import { IdentifierNode, SyntaxKind } from "../core/types.js";
import { createServer, Server, ServerHost } from "../server/index.js";
import { createTestFileSystem, resolveVirtualPath, StandardTestLibrary } from "./test-host.js";
import { TestFileSystem } from "./types.js";

export interface TestServerHost extends ServerHost, TestFileSystem {
  server: Server;

@ -1,10 +1,4 @@
import {
  CadlLanguageConfiguration,
  createScanner,
  createSourceFile,
  ServerHost,
  Token,
} from "@cadl-lang/compiler";
import { CadlLanguageConfiguration, ServerHost } from "@cadl-lang/compiler";
import * as monaco from "monaco-editor";
import * as lsp from "vscode-languageserver";
import { TextDocument } from "vscode-languageserver-textdocument";

@ -46,9 +40,15 @@ export async function attachServices(host: BrowserHost) {
  );
}

function lspArgs(model: monaco.editor.ITextModel, pos: monaco.Position) {
function lspDocumentArgs(model: monaco.editor.ITextModel) {
  return {
    textDocument: textDocumentForModel(model),
  };
}

function lspArgs(model: monaco.editor.ITextModel, pos: monaco.Position) {
  return {
    ...lspDocumentArgs(model),
    position: lspPosition(pos),
  };
}

@ -157,86 +157,45 @@ export async function attachServices(host: BrowserHost) {
    },
  });

  const tokenTypes = [
    "comment",
    "string",
    "number",
    "keyword",
    "namespace",
    "variable",
    "type",
    "function",
    "operator",
    "source",
  ];
  monaco.editor.defineTheme("cadl", {
    base: "vs",
    inherit: true,
    colors: {},
    rules: [
      { token: "macro", foreground: "#800000" },
      { token: "function", foreground: "#795E26" },
    ],
  });
  monaco.editor.setTheme("cadl");
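  // Presumably the base "vs" theme has no rules for the "macro" and "function"
  // token types, so the custom theme supplies colors for them.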

  function mapToken(tok: Token) {
    switch (tok) {
      case Token.SingleLineComment:
      case Token.MultiLineComment:
        return 0;
      case Token.StringLiteral:
        return 1;
      case Token.NumericLiteral:
        return 2;
      case Token.TrueKeyword:
      case Token.FalseKeyword:
      case Token.IfKeyword:
      case Token.IsKeyword:
      case Token.AliasKeyword:
      case Token.OpKeyword:
      case Token.ElseKeyword:
      case Token.EnumKeyword:
      case Token.VoidKeyword:
      case Token.ModelKeyword:
      case Token.NeverKeyword:
      case Token.UnionKeyword:
      case Token.UsingKeyword:
      case Token.ImportKeyword:
      case Token.ReturnKeyword:
      case Token.ExtendsKeyword:
      case Token.InterfaceKeyword:
      case Token.NamespaceKeyword:
      case Token.ProjectionKeyword:
        return 3;
      default:
        return 9;
    }
  }
  monaco.languages.registerDocumentSemanticTokensProvider("cadl", {
    getLegend() {
      const legend = lsConfig.capabilities.semanticTokensProvider!.legend;
      return {
        tokenTypes,
        tokenModifiers: [],
        tokenModifiers: legend.tokenModifiers,
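        // Fold LSP token types that Monaco's built-in themes don't style into the
        // closest equivalents ("type" / "variable").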
        tokenTypes: legend.tokenTypes.map((entry) => {
          switch (entry) {
            case "namespace":
            case "class":
            case "enum":
            case "typeParameter":
            case "struct":
            case "interface":
              return "type";
            case "property":
            case "enumMember":
              return "variable";
            default:
              return entry;
          }
        }),
      };
    },
    provideDocumentSemanticTokens(model) {
      const content = model.getValue();
      const file = createSourceFile(content, "");
      const scanner = createScanner(file, () => {});
      const tokens = [];
      let prevLine = 0;
      let prevChar = 0;

      let tok = scanner.scan();
      while (tok !== Token.EndOfFile) {
        const pos = file.getLineAndCharacterOfPosition(scanner.tokenPosition);
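
        // LSP semantic tokens are packed as 5 integers per token: deltaLine,
        // deltaStartChar (relative to the previous token), token length,
        // tokenType index into the legend, and a tokenModifiers bitmask.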
|
||||
|
||||
tokens.push(
|
||||
pos.line - prevLine,
|
||||
prevLine === pos.line ? pos.character - prevChar : pos.character,
|
||||
scanner.position - scanner.tokenPosition,
|
||||
mapToken(tok),
|
||||
0
|
||||
);
|
||||
prevLine = pos.line;
|
||||
prevChar = pos.character;
|
||||
|
||||
tok = scanner.scan();
|
||||
}
|
||||
|
||||
async provideDocumentSemanticTokens(model) {
|
||||
const result = await serverLib.buildSemanticTokens(lspDocumentArgs(model));
|
||||
return {
|
||||
data: new Uint32Array(tokens),
|
||||
resultId: result.resultId,
|
||||
data: new Uint32Array(result.data),
|
||||
};
|
||||
},
|
||||
releaseDocumentSemanticTokens() {},
|
||||
|
|