Merge pull request #5 from ashalkhakov/postiats

Adding support for ATS/Postiats.
This commit is contained in:
Alexandru Dima 2016-08-15 15:55:54 +02:00 коммит произвёл GitHub
Родитель 14358c5207 ba5e90065a
Коммит 0d4fd0f14c
7 изменённых файлов: 1583 добавлений и 1 удалений

Просмотреть файл

@ -14,6 +14,7 @@ Colorization and configuration supports for multiple languages for the Monaco Ed
* jade
* lua
* objective-c
* postiats
* powershell
* python
* r

Просмотреть файл

@ -61,6 +61,7 @@ gulp.task('release', ['clean-release','compile'], function() {
bundleOne('src/markdown'),
bundleOne('src/objective-c'),
bundleOne('src/powershell'),
bundleOne('src/postiats'),
bundleOne('src/python'),
bundleOne('src/r'),
bundleOne('src/ruby'),

Просмотреть файл

@ -129,6 +129,12 @@ registerLanguage({
extensions: [ '.m' ],
aliases: [ 'Objective-C'],
module: './objective-c'
});
registerLanguage({
id: 'postiats',
extensions: [ '.dats', '.sats', '.hats' ],
aliases: [ 'ATS', 'ATS/Postiats' ],
module: './postiats'
});
registerLanguage({
id: 'powershell',

656
src/postiats.ts Normal file
Просмотреть файл

@ -0,0 +1,656 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Artyom Shalkhakov. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*
* Based on the ATS/Postiats lexer by Hongwei Xi.
*--------------------------------------------------------------------------------------------*/
'use strict';
import IRichLanguageConfiguration = monaco.languages.LanguageConfiguration;
import ILanguage = monaco.languages.IMonarchLanguage;
// Editor-side language configuration for ATS/Postiats:
// comment toggling, bracket matching, and auto-closing pairs.
export var conf: IRichLanguageConfiguration = {
    // ATS uses C-style line comments and ML-style block comments.
    comments: {
        lineComment: '//',
        blockComment: ['(*', '*)'],
    },
    // Angle brackets participate in matching because of template arguments.
    brackets: [
        ['{', '}'],
        ['[', ']'],
        ['(', ')'],
        ['<', '>']
    ],
    // Quotes and brackets auto-close, except inside strings and comments.
    autoClosingPairs: [
        { open: '"', close: '"', notIn: ['string', 'comment'] },
        { open: '{', close: '}', notIn: ['string', 'comment'] },
        { open: '[', close: ']', notIn: ['string', 'comment'] },
        { open: '(', close: ')', notIn: ['string', 'comment'] },
    ]
};
// Monarch tokenizer definition for ATS/Postiats.
// State names mirror the lexing functions of the reference lexer
// (pats_lexing.dats); keyword groups mirror pats_lexing_token.dats.
export var language = <ILanguage>{
    tokenPostfix: '.pats',

    // TODO: staload and dynload are followed by a special kind of string literals
    // with {$IDENTIFER} variables, and it also may make sense to highlight
    // the punctuation (. and / and \) differently.

    // Set defaultToken to invalid to see what you do not tokenize yet
    defaultToken: 'invalid',

    // keyword reference: https://github.com/githwxi/ATS-Postiats/blob/master/src/pats_lexing_token.dats
    keywords: [
        //
        "abstype", // ABSTYPE
        "abst0ype", // ABST0YPE
        "absprop", // ABSPROP
        "absview", // ABSVIEW
        "absvtype", // ABSVIEWTYPE
        "absviewtype", // ABSVIEWTYPE
        "absvt0ype", // ABSVIEWT0YPE
        "absviewt0ype", // ABSVIEWT0YPE
        //
        "as", // T_AS
        //
        "and", // T_AND
        //
        "assume", // T_ASSUME
        //
        "begin", // T_BEGIN
        //
        /*
        "case", // CASE
        */
        //
        "classdec", // T_CLASSDEC
        //
        "datasort", // T_DATASORT
        //
        "datatype", // DATATYPE
        "dataprop", // DATAPROP
        "dataview", // DATAVIEW
        "datavtype", // DATAVIEWTYPE
        "dataviewtype", // DATAVIEWTYPE
        //
        "do", // T_DO
        //
        "end", // T_END
        //
        "extern", // T_EXTERN
        "extype", // T_EXTYPE
        "extvar", // T_EXTVAR
        //
        "exception", // T_EXCEPTION
        //
        "fn", // FN // non-recursive
        "fnx", // FNX // mutual tail-rec.
        "fun", // FUN // general-recursive
        //
        "prfn", // PRFN
        "prfun", // PRFUN
        //
        "praxi", // PRAXI
        "castfn", // CASTFN
        //
        "if", // T_IF
        "then", // T_THEN
        "else", // T_ELSE
        //
        "ifcase", // T_IFCASE
        //
        "in", // T_IN
        //
        "infix", // INFIX
        "infixl", // INFIXL
        "infixr", // INFIXR
        "prefix", // PREFIX
        "postfix", // POSTFIX
        //
        "implmnt", // IMPLMNT // 0
        "implement", // IMPLEMENT // 1
        //
        "primplmnt", // PRIMPLMNT // ~1
        "primplement", // PRIMPLMNT // ~1
        //
        "import", // T_IMPORT // for importing packages
        //
        /*
        "lam", // LAM
        "llam", // LLAM
        "fix", // FIX
        */
        //
        "let", // T_LET
        //
        "local", // T_LOCAL
        //
        "macdef", // MACDEF
        "macrodef", // MACRODEF
        //
        "nonfix", // T_NONFIX
        //
        "symelim", // T_SYMELIM
        "symintr", // T_SYMINTR
        "overload", // T_OVERLOAD
        //
        "of", // T_OF
        "op", // T_OP
        //
        "rec", // T_REC
        //
        "sif", // T_SIF
        "scase", // T_SCASE
        //
        "sortdef", // T_SORTDEF
        /*
        // HX: [sta] is now deprecated
        */
        "sta", // T_STACST
        "stacst", // T_STACST
        "stadef", // T_STADEF
        "static", // T_STATIC
        /*
        "stavar", // T_STAVAR
        */
        //
        "staload", // T_STALOAD
        "dynload", // T_DYNLOAD
        //
        "try", // T_TRY
        //
        "tkindef", // T_TKINDEF // HX-2012-05-23
        //
        /*
        "type", // TYPE
        */
        "typedef", // TYPEDEF
        "propdef", // PROPDEF
        "viewdef", // VIEWDEF
        "vtypedef", // VIEWTYPEDEF
        "viewtypedef", // VIEWTYPEDEF
        //
        /*
        "val", // VAL
        */
        "prval", // PRVAL
        //
        "var", // VAR
        "prvar", // PRVAR
        //
        "when", // T_WHEN
        "where", // T_WHERE
        //
        /*
        "for", // T_FOR
        "while", // T_WHILE
        */
        //
        "with", // T_WITH
        //
        "withtype", // WITHTYPE
        "withprop", // WITHPROP
        "withview", // WITHVIEW
        "withvtype", // WITHVIEWTYPE
        "withviewtype", // WITHVIEWTYPE
        //
    ],
    // '$'-prefixed keywords (dynamic-level special forms).
    keywords_dlr: [
        "$delay", // DLRDELAY
        "$ldelay", // DLRLDELAY
        //
        "$arrpsz", // T_DLRARRPSZ
        "$arrptrsize", // T_DLRARRPSZ
        //
        "$d2ctype", // T_DLRD2CTYPE
        //
        "$effmask", // DLREFFMASK
        "$effmask_ntm", // DLREFFMASK_NTM
        "$effmask_exn", // DLREFFMASK_EXN
        "$effmask_ref", // DLREFFMASK_REF
        "$effmask_wrt", // DLREFFMASK_WRT
        "$effmask_all", // DLREFFMASK_ALL
        //
        "$extern", // T_DLREXTERN
        "$extkind", // T_DLREXTKIND
        "$extype", // T_DLREXTYPE
        "$extype_struct", // T_DLREXTYPE_STRUCT
        //
        "$extval", // T_DLREXTVAL
        "$extfcall", // T_DLREXTFCALL
        "$extmcall", // T_DLREXTMCALL
        //
        "$literal", // T_DLRLITERAL
        //
        "$myfilename", // T_DLRMYFILENAME
        "$mylocation", // T_DLRMYLOCATION
        "$myfunction", // T_DLRMYFUNCTION
        //
        "$lst", // DLRLST
        "$lst_t", // DLRLST_T
        "$lst_vt", // DLRLST_VT
        "$list", // DLRLST
        "$list_t", // DLRLST_T
        "$list_vt", // DLRLST_VT
        //
        "$rec", // DLRREC
        "$rec_t", // DLRREC_T
        "$rec_vt", // DLRREC_VT
        "$record", // DLRREC
        "$record_t", // DLRREC_T
        "$record_vt", // DLRREC_VT
        //
        "$tup", // DLRTUP
        "$tup_t", // DLRTUP_T
        "$tup_vt", // DLRTUP_VT
        "$tuple", // DLRTUP
        "$tuple_t", // DLRTUP_T
        "$tuple_vt", // DLRTUP_VT
        //
        "$break", // T_DLRBREAK
        "$continue", // T_DLRCONTINUE
        //
        "$raise", // T_DLRRAISE
        //
        "$showtype", // T_DLRSHOWTYPE
        //
        "$vcopyenv_v", // DLRVCOPYENV_V
        "$vcopyenv_vt", // DLRVCOPYENV_VT
        //
        "$tempenver", // T_DLRTEMPENVER
        //
        "$solver_assert", // T_DLRSOLASSERT
        "$solver_verify", // T_DLRSOLVERIFY
    ],
    // '#'-prefixed keywords (static/preprocessor-like directives).
    keywords_srp: [
        //
        "#if", // T_SRPIF
        "#ifdef", // T_SRPIFDEF
        "#ifndef", // T_SRPIFNDEF
        //
        "#then", // T_SRPTHEN
        //
        "#elif", // T_SRPELIF
        "#elifdef", // T_SRPELIFDEF
        "#elifndef", // T_SRPELIFNDEF
        //
        "#else", // T_SRPELSE
        "#endif", // T_SRPENDIF
        //
        "#error", // T_SRPERROR
        //
        "#prerr", // T_SRPPRERR // output to stderr
        "#print", // T_SRPPRINT // output to stdout
        //
        "#assert", // T_SRPASSERT
        //
        "#undef", // T_SRPUNDEF
        "#define", // T_SRPDEFINE
        //
        "#include", // T_SRPINCLUDE
        "#require", // T_SRPREQUIRE
        //
        "#pragma", // T_SRPPRAGMA // HX: general pragma
        "#codegen2", // T_SRPCODEGEN2 // for level-2 codegen
        "#codegen3", // T_SRPCODEGEN3 // for level-3 codegen
        //
        // HX: end of special tokens
        //
    ],
    // NOTE: this list is informational only — the tokenizer matches these
    // via the @irregular_keywords regular expression below. Duplicated
    // entries that appeared twice in the original list have been removed.
    irregular_keyword_list: [
        "val+",
        "val-",
        "val",
        "case+",
        "case-",
        "case",
        "addr@",
        "addr",
        "fold@",
        "free@",
        "fix@",
        "fix",
        "lam@",
        "lam",
        "llam@",
        "llam",
        "viewt@ype+",
        "viewt@ype-",
        "viewt@ype",
        "viewtype+",
        "viewtype-",
        "viewtype",
        "view+",
        "view-",
        "view@",
        "view",
        "type+",
        "type-",
        "type",
        "vtype+",
        "vtype-",
        "vtype",
        "vt@ype+",
        "vt@ype-",
        "vt@ype",
        "prop+",
        "prop-",
        "prop",
        "t@ype",
        "t@ype+",
        "t@ype-",
        "abst@ype",
        "abstype",
        "absviewt@ype",
        "absvt@ype",
        "for*",
        "for",
        "while*",
        "while"
    ],
    keywords_types: [
        'bool',
        'double',
        'byte',
        'int',
        'short',
        'char',
        'void',
        'unit',
        'long',
        'float',
        'string',
        'strptr'
    ],
    // TODO: reference for this?
    keywords_effects: [
        "0", // no effects
        "fun",
        "clo",
        "prf",
        "funclo",
        "cloptr",
        "cloref",
        "ref",
        "ntm",
        "1" // all effects
    ],
    operators: [
        "@", // T_AT
        "!", // T_BANG
        "|", // T_BAR
        "`", // T_BQUOTE
        ":", // T_COLON
        "$", // T_DOLLAR
        ".", // T_DOT
        "=", // T_EQ
        "#", // T_HASH
        "~", // T_TILDE
        //
        "..", // T_DOTDOT
        "...", // T_DOTDOTDOT
        //
        "=>", // T_EQGT
        // "=<", // T_EQLT
        "=<>", // T_EQLTGT
        "=/=>", // T_EQSLASHEQGT
        "=>>", // T_EQGTGT
        "=/=>>", // T_EQSLASHEQGTGT
        //
        "<", // T_LT // opening a tmparg
        ">", // T_GT // closing a tmparg
        //
        "><", // T_GTLT
        //
        ".<", // T_DOTLT
        ">.", // T_GTDOT
        //
        ".<>.", // T_DOTLTGTDOT
        //
        "->", // T_MINUSGT
        //"-<", // T_MINUSLT
        "-<>", // T_MINUSLTGT
        //
        /*
        ":<", // T_COLONLT
        */
    ],
    brackets: [
        {open: ',(', close: ')', token: 'delimiter.parenthesis'}, // meta-programming syntax
        {open: '`(', close: ')', token: 'delimiter.parenthesis'},
        {open: '%(', close: ')', token: 'delimiter.parenthesis'},
        {open: '\'(', close: ')', token: 'delimiter.parenthesis'},
        {open: '\'{', close: '}', token: 'delimiter.parenthesis'},
        {open: '@(', close: ')', token: 'delimiter.parenthesis'},
        {open: '@{', close: '}', token: 'delimiter.brace'},
        {open: '@[', close: ']', token: 'delimiter.square'},
        {open: '#[', close: ']', token: 'delimiter.square'},
        {open: '{', close: '}', token: 'delimiter.curly'},
        {open: '[', close: ']', token: 'delimiter.square'},
        {open: '(', close: ')', token: 'delimiter.parenthesis'},
        {open: '<', close: '>', token: 'delimiter.angle'}
    ],

    // we include these common regular expressions
    symbols: /[=><!~?:&|+\-*\/\^%]+/,
    IDENTFST: /[a-zA-Z_]/,
    IDENTRST: /[a-zA-Z0-9_'$]/,
    symbolic: /[%&+-./:=@~`^|*!$#?<>]/,
    digit: /[0-9]/,
    digitseq0: /@digit*/,
    // NOTE(review): these accept all alphanumerics, not just hex digits,
    // mirroring the reference lexer's permissive scan — confirm before tightening.
    xdigit: /[0-9A-Za-z]/,
    xdigitseq0: /@xdigit*/,
    INTSP: /[lLuU]/,
    FLOATSP: /[fFlL]/,
    fexponent: /[eE][+-]?[0-9]+/,
    fexponent_bin: /[pP][+-]?[0-9]+/,
    deciexp: /\.[0-9]*@fexponent?/,
    hexiexp: /\.[0-9a-zA-Z]*@fexponent_bin?/,
    irregular_keywords: /val[+-]?|case[+-]?|addr\@?|fold\@|free\@|fix\@?|lam\@?|llam\@?|prop[+-]?|type[+-]?|view[+-@]?|viewt@?ype[+-]?|t@?ype[+-]?|v(iew)?t@?ype[+-]?|abst@?ype|absv(iew)?t@?ype|for\*?|while\*?/,
    ESCHAR: /[ntvbrfa\\\?'"\(\[\{]/,

    start: 'root',

    // The main tokenizer for ATS/Postiats
    // reference: https://github.com/githwxi/ATS-Postiats/blob/master/src/pats_lexing.dats
    tokenizer: {
        root: [
            // lexing_blankseq0
            {regex: /[ \t\r\n]+/, action: {token: ''}},
            // NOTE: (*) is an invalid ML-like comment!
            {regex: /\(\*\)/, action: { token: 'invalid' }},
            {regex: /\(\*/, action: { token: 'comment', next: 'lexing_COMMENT_block_ml' } },
            {regex: /\(/, action: '@brackets'/*{ token: 'delimiter.parenthesis', bracket: '@open' }*/ },
            {regex: /\)/, action: '@brackets'/*{ token: 'delimiter.parenthesis', bracket: '@close' }*/ },
            {regex: /\[/, action: '@brackets'/*{ token: 'delimiter.bracket', bracket: '@open' }*/ },
            {regex: /\]/, action: '@brackets'/*{ token: 'delimiter.bracket', bracket: '@close' }*/ },
            {regex: /\{/, action: '@brackets'/*{ token: 'delimiter.brace', bracket: '@open' }*/ },
            {regex: /\}/, action: '@brackets'/*{ token: 'delimiter.brace', bracket: '@close' }*/ },
            // lexing_COMMA
            {regex: /,\(/, action: '@brackets'/*{ token: 'delimiter.parenthesis', bracket: '@open' }*/ }, // meta-programming syntax
            {regex: /,/, action: { token: 'delimiter.comma' } },
            {regex: /;/, action: { token: 'delimiter.semicolon' } },
            // lexing_AT
            {regex: /@\(/, action: '@brackets'/* { token: 'delimiter.parenthesis', bracket: '@open' }*/ },
            {regex: /@\[/, action: '@brackets'/* { token: 'delimiter.bracket', bracket: '@open' }*/ },
            {regex: /@\{/, action: '@brackets'/*{ token: 'delimiter.brace', bracket: '@open' }*/ },
            // lexing_COLON
            {regex: /:</, action: { token: 'keyword', next: '@lexing_EFFECT_commaseq0'} }, // T_COLONLT
            /*
            lexing_DOT:
            . // SYMBOLIC => lexing_IDENT_sym
            . FLOATDOT => lexing_FLOAT_deciexp
            . DIGIT => T_DOTINT
            */
            {regex: /\.@symbolic+/, action: { token: 'identifier.sym' } },
            // FLOATDOT case
            {regex: /\.@digit*@fexponent@FLOATSP*/, action: { token: 'number.float' } },
            {regex: /\.@digit+/, action: { token: 'number.float' } }, // T_DOTINT
            // lexing_DOLLAR:
            // '$' IDENTFST IDENTRST* => lexing_IDENT_dlr, _ => lexing_IDENT_sym
            {regex: /\$@IDENTFST@IDENTRST*/,
                action: {
                    cases: {
                        '@keywords_dlr': { token: 'keyword.dlr' },
                        '@default': { token: 'namespace' }, // most likely a module qualifier
                    }
                }
            },
            // lexing_SHARP:
            // '#' IDENTFST IDENTRST* => lexing_ident_srp, _ => lexing_IDENT_sym
            {regex: /\#@IDENTFST@IDENTRST*/,
                action: {
                    cases: {
                        '@keywords_srp': { token: 'keyword.srp' },
                        '@default': { token: 'identifier' },
                    }
                }
            },
            // lexing_PERCENT:
            {regex: /%\(/, action: { token: 'delimiter.parenthesis', bracket: '@open' } },
            {regex: /^%{(#|\^|\$)?/, action: { token: 'keyword', bracket: '@open', next: '@lexing_EXTCODE', nextEmbedded: 'text/javascript' } },
            {regex: /^%}/, action: { token: 'keyword', bracket: '@close' } },
            // lexing_QUOTE
            {regex: /'\(/, action: { token: 'delimiter.parenthesis', bracket: '@open' } },
            {regex: /'\[/, action: { token: 'delimiter.bracket', bracket: '@open' } },
            {regex: /'\{/, action: { token: 'delimiter.brace', bracket: '@open' } },
            [/(')(\\@ESCHAR|\\[xX]@xdigit+|\\@digit+)(')/, ['string', 'string.escape', 'string']],
            [/'[^\\']'/, 'string'],
            // lexing_DQUOTE
            [/"/, 'string.quote', '@lexing_DQUOTE'],
            // lexing_BQUOTE
            {regex: /`\(/, action: '@brackets'/* { token: 'delimiter.parenthesis', bracket: '@open' }*/},
            // TODO: otherwise, try lexing_IDENT_sym
            {regex: /\\/, action: { token: 'punctuation' } }, // just T_BACKSLASH
            // lexing_IDENT_alp:
            // NOTE: (?!regex) is syntax for "not-followed-by" regex
            // to resolve ambiguity such as foreach$fwork being incorrectly lexed as [for] [each$fwork]!
            {regex: /@irregular_keywords(?!@IDENTRST)/, action: { token: 'keyword' } },
            {regex: /@IDENTFST@IDENTRST*[<!\[]?/,
                action: {
                    cases: {
                        // TODO: dynload and staload should be specially parsed
                        // dynload whitespace+ "special_string"
                        // this special string is really:
                        // '/' '\\' '.' => punctuation
                        // ({\$)([a-zA-Z_][a-zA-Z_0-9]*)(}) => punctuation,keyword,punctuation
                        // [^"] => identifier/literal
                        '@keywords': { token: 'keyword' },
                        '@keywords_types': { token: 'type' },
                        '@default': { token: 'identifier' }
                    }
                }
            },
            // lexing_IDENT_sym:
            {regex: /\/\/\/\//, action: { token: 'comment', next: '@lexing_COMMENT_rest' }},
            {regex: /\/\/.*$/, action: { token: 'comment' }},
            {regex: /\/\*/, action: { token: 'comment', next: '@lexing_COMMENT_block_c' }},
            // AS-20160627: specifically for effect annotations
            {regex: /-<|=</, action: { token: 'keyword', next: '@lexing_EFFECT_commaseq0'}},
            {regex: /@symbolic+/,
                action: {
                    cases: {
                        '@operators': 'keyword',
                        '@default': 'operator'
                    }
                }
            },
            // lexing_ZERO:
            // FIXME: this one is quite messy/unfinished yet
            // TODO: lexing_INT_hex
            // - testing_hexiexp => lexing_FLOAT_hexiexp
            // - testing_fexponent_bin => lexing_FLOAT_hexiexp
            // - testing_intspseq0 => T_INT_hex
            // lexing_INT_hex:
            {regex: /0[xX]@xdigit+(@hexiexp|@fexponent_bin)@FLOATSP*/, action: { token: 'number.float' } },
            {regex: /0[xX]@xdigit+@INTSP*/, action: {token: 'number.hex' } },
            {regex: /0[0-7]+(?![0-9])@INTSP*/, action: { token: 'number.octal' } }, // lexing_INT_oct
            //{regex: /0/, action: { token: 'number' } }, // INTZERO
            // lexing_INT_dec:
            // - testing_deciexp => lexing_FLOAT_deciexp
            // - testing_fexponent => lexing_FLOAT_deciexp
            // - otherwise => intspseq0 ([0-9]*[lLuU]?)
            {regex: /@digit+(@fexponent|@deciexp)@FLOATSP*/, action: { token: 'number.float' } },
            {regex: /@digit@digitseq0@INTSP*/, action: { token: 'number.decimal' } },
            // DIGIT, if followed by digitseq0, is lexing_INT_dec
            {regex: /@digit+@INTSP*/, action: { token: 'number' } },
        ],
        // ML-style block comments, with nesting (each '(*' pushes this state).
        lexing_COMMENT_block_ml: [
            [/[^\(\*]+/, 'comment'],
            [/\(\*/, 'comment', '@push' ],
            // NOTE(review): a second /\(\*/ rule tagged 'comment.invalid' used to
            // follow here; it was unreachable because the '@push' rule above
            // always matches the same pattern first, so it has been removed.
            [/\*\)/, 'comment', '@pop' ],
            [/\*/, 'comment']
        ],
        // C-style block comments — no nesting.
        lexing_COMMENT_block_c: [
            [/[^\/*]+/, 'comment' ],
            // [/\/\*/, 'comment', '@push' ], // nested C-style block comments not allowed
            // [/\/\*/, 'comment.invalid' ], // NOTE: this breaks block comments in the shape of /* //*/
            [/\*\//, 'comment', '@pop' ],
            [/[\/*]/, 'comment' ]
        ],
        // '////' comments out the rest of the file.
        lexing_COMMENT_rest: [
            [/$/, 'comment', '@pop'], // FIXME: does it match? docs say 'no'
            [/.*/, 'comment']
        ],
        // NOTE: added by AS, specifically for highlighting
        lexing_EFFECT_commaseq0: [
            {regex: /@IDENTFST@IDENTRST+|@digit+/,
                action: {
                    cases: {
                        '@keywords_effects': {token: 'type.effect'},
                        '@default': {token: 'identifier'}
                    }
                }
            },
            {regex: /,/, action: {token: 'punctuation'}},
            {regex: />/, action: { token: '@rematch', next: '@pop' }},
        ],
        // Embedded foreign code between %{ ... %} (highlighted as JavaScript).
        lexing_EXTCODE: [
            {regex: /^%}/, action: { token: '@rematch', next: '@pop', nextEmbedded: '@pop' }},
            {regex: /[^%]+/, action: ''},
        ],
        // Double-quoted string literals.
        lexing_DQUOTE: [
            {regex: /"/, action: { token: 'string.quote', bracket: '@close', next: '@pop' }},
            // AS-20160628: additional hi-lighting for variables in staload/dynload strings
            {regex: /(\{\$)(@IDENTFST@IDENTRST*)(\})/, action: [{token: 'string.escape'},{ token: 'identifier' },{token: 'string.escape'}]},
            {regex: /\\$/, action: { token: 'string.escape' }},
            {regex: /\\(@ESCHAR|[xX]@xdigit+|@digit+)/, action: { token: 'string.escape' }},
            {regex: /[^\\"]+/, action: {token: 'string' }}
        ],
    },
};

Просмотреть файл

@ -35,6 +35,7 @@ requirejs([
'out/test/lua.test',
'out/test/markdown.test',
'out/test/objective-c.test',
'out/test/postiats.test',
'out/test/powershell.test',
'out/test/python.test',
'out/test/r.test',

915
test/postiats.test.ts Normal file
Просмотреть файл

@ -0,0 +1,915 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Artyom Shalkhakov. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
import {testTokenization} from './testRunner';
testTokenization('postiats', [
// Keywords
[{
line: 'implement main(argc, argv) =',
tokens: [
{ startIndex: 0, type: 'keyword.pats' },
{ startIndex: 9, type: '' },
{ startIndex: 10, type: 'identifier.pats' },
{ startIndex: 14, type: 'delimiter.parenthesis.pats' },
{ startIndex: 15, type: 'identifier.pats' },
{ startIndex: 19, type: 'delimiter.comma.pats' },
{ startIndex: 20, type: '' },
{ startIndex: 21, type: 'identifier.pats' },
{ startIndex: 25, type: 'delimiter.parenthesis.pats' },
{ startIndex: 26, type: '' },
{ startIndex: 27, type: 'keyword.pats'}
]}],
// Comments - single line
[{
line: '//',
tokens: [
{ startIndex: 0, type: 'comment.pats' }
]}],
[{
line: ' // a comment',
tokens: [
{ startIndex: 0, type: '' },
{ startIndex: 4, type: 'comment.pats' }
]}],
[{
line: '// a comment',
tokens: [
{ startIndex: 0, type: 'comment.pats' }
]}],
[{
line: '//sticky comment',
tokens: [
{ startIndex: 0, type: 'comment.pats' }
]}],
[{
line: '/almost a comment',
tokens: [
{ startIndex: 0, type: 'operator.pats' },
{ startIndex: 1, type: 'identifier.pats' },
{ startIndex: 7, type: '' },
{ startIndex: 8, type: 'identifier.pats' },
{ startIndex: 9, type: '' },
{ startIndex: 10, type: 'identifier.pats' }
]}],
[{
line: '/* //*/ a',
tokens: [
{ startIndex: 0, type: 'comment.pats' },
{ startIndex: 7, type: '' },
{ startIndex: 8, type: 'identifier.pats' }
]}],
[{
line: '1 / 2; /* comment',
tokens: [
{ startIndex: 0, type: 'number.decimal.pats' },
{ startIndex: 1, type: '' },
{ startIndex: 2, type: 'operator.pats' },
{ startIndex: 3, type: '' },
{ startIndex: 4, type: 'number.decimal.pats' },
{ startIndex: 5, type: 'delimiter.semicolon.pats' },
{ startIndex: 6, type: '' },
{ startIndex: 7, type: 'comment.pats' }
]}],
[{
line: 'val x:int = 1; // my comment // is a nice one',
tokens: [
{ startIndex: 0, type: 'keyword.pats' },
{ startIndex: 3, type: '' },
{ startIndex: 4, type: 'identifier.pats' },
{ startIndex: 5, type: 'keyword.pats'},
{ startIndex: 6, type: 'type.pats' },
{ startIndex: 9, type: '' },
{ startIndex: 10, type: 'keyword.pats' },
{ startIndex: 11, type: '' },
{ startIndex: 12, type: 'number.decimal.pats' },
{ startIndex: 13, type: 'delimiter.semicolon.pats'},
{ startIndex: 14, type: ''},
{ startIndex: 15, type: 'comment.pats' }
]}],
// Comments - range comment, single line
[{
line: '/* a simple comment */',
tokens: [
{ startIndex: 0, type: 'comment.pats' }
]}],
[{
line: 'var x : int = /* a simple comment */ 1;',
tokens: [
{ startIndex: 0, type: 'keyword.pats' },
{ startIndex: 3, type: '' },
{ startIndex: 4, type: 'identifier.pats' },
{ startIndex: 5, type: '' },
{ startIndex: 6, type: 'keyword.pats' },
{ startIndex: 7, type: '' },
{ startIndex: 8, type: 'type.pats' },
{ startIndex: 11, type: '' },
{ startIndex: 12, type: 'keyword.pats' },
{ startIndex: 13, type: '' },
{ startIndex: 14, type: 'comment.pats' },
{ startIndex: 36, type: '' },
{ startIndex: 37, type: 'number.decimal.pats' },
{ startIndex: 38, type: 'delimiter.semicolon.pats' }
]}],
[{
line: 'val x = /* comment */ 1; */',
tokens: [
{ startIndex: 0, type: 'keyword.pats' },
{ startIndex: 3, type: '' },
{ startIndex: 4, type: 'identifier.pats' },
{ startIndex: 5, type: '' },
{ startIndex: 6, type: 'keyword.pats' },
{ startIndex: 7, type: '' },
{ startIndex: 8, type: 'comment.pats' },
{ startIndex: 21, type: '' },
{ startIndex: 22, type: 'number.decimal.pats' },
{ startIndex: 23, type: 'delimiter.semicolon.pats' },
{ startIndex: 24, type: '' },
{ startIndex: 25, type: 'operator.pats' }
]}],
[{
line: 'x = /**/;',
tokens: [
{ startIndex: 0, type: 'identifier.pats' },
{ startIndex: 1, type: '' },
{ startIndex: 2, type: 'keyword.pats' },
{ startIndex: 3, type: '' },
{ startIndex: 4, type: 'comment.pats' },
{ startIndex: 8, type: 'delimiter.semicolon.pats' }
]}],
[{
line: 'x = /*/;',
tokens: [
{ startIndex: 0, type: 'identifier.pats' },
{ startIndex: 1, type: '' },
{ startIndex: 2, type: 'keyword.pats' },
{ startIndex: 3, type: '' },
{ startIndex: 4, type: 'comment.pats' }
]}],
// block comments, single line
[{
line: '(* a simple comment *)',
tokens: [
{ startIndex: 0, type: 'comment.pats' }
]}],
[{
line: '(* a simple (* nested *) comment *)',
tokens: [
{ startIndex: 0, type: 'comment.pats' }
]}],
[{
line: '(* ****** ****** *)',
tokens: [
{ startIndex: 0, type: 'comment.pats' }
]}],
[{
line: 'var x : int = (* a simple comment *) 1;',
tokens: [
{ startIndex: 0, type: 'keyword.pats' },
{ startIndex: 3, type: '' },
{ startIndex: 4, type: 'identifier.pats' },
{ startIndex: 5, type: '' },
{ startIndex: 6, type: 'keyword.pats' },
{ startIndex: 7, type: '' },
{ startIndex: 8, type: 'type.pats' },
{ startIndex: 11, type: '' },
{ startIndex: 12, type: 'keyword.pats' },
{ startIndex: 13, type: '' },
{ startIndex: 14, type: 'comment.pats' },
{ startIndex: 36, type: '' },
{ startIndex: 37, type: 'number.decimal.pats' },
{ startIndex: 38, type: 'delimiter.semicolon.pats' }
]}],
[{
line: 'val x = (* comment *) 1; *)',
tokens: [
{ startIndex: 0, type: 'keyword.pats' },
{ startIndex: 3, type: '' },
{ startIndex: 4, type: 'identifier.pats' },
{ startIndex: 5, type: '' },
{ startIndex: 6, type: 'keyword.pats' },
{ startIndex: 7, type: '' },
{ startIndex: 8, type: 'comment.pats' },
{ startIndex: 21, type: '' },
{ startIndex: 22, type: 'number.decimal.pats' },
{ startIndex: 23, type: 'delimiter.semicolon.pats' },
{ startIndex: 24, type: '' },
{ startIndex: 25, type: 'operator.pats' },
{ startIndex: 26, type: 'delimiter.parenthesis.pats' },
]}],
[{
line: 'x = (**);',
tokens: [
{ startIndex: 0, type: 'identifier.pats' },
{ startIndex: 1, type: '' },
{ startIndex: 2, type: 'keyword.pats' },
{ startIndex: 3, type: '' },
{ startIndex: 4, type: 'comment.pats' },
{ startIndex: 8, type: 'delimiter.semicolon.pats' }
]}],
[{
line: '(*)',
tokens: [
{ startIndex: 0, type: 'invalid.pats' }, // not a comment!
]}],
// Numbers
[{
line: '0',
tokens: [
{ startIndex: 0, type: 'number.decimal.pats' }
]}],
[{
line: '12l',
tokens: [
{ startIndex: 0, type: 'number.decimal.pats' }
]}],
[{
line: '34U',
tokens: [
{ startIndex: 0, type: 'number.decimal.pats' }
]}],
[{
line: '55LL',
tokens: [
{ startIndex: 0, type: 'number.decimal.pats' }
]}],
[{
line: '34ul',
tokens: [
{ startIndex: 0, type: 'number.decimal.pats' }
]}],
[{
line: '55llU',
tokens: [
{ startIndex: 0, type: 'number.decimal.pats' }
]}],
/*
[{
line: '5\'5llU',
tokens: [
{ startIndex: 0, type: 'number.pats' }
]}],
[{
line: '100\'000\'000',
tokens: [
{ startIndex: 0, type: 'number.pats' }
]}],
*/
[{
line: '0x100aafllU',
tokens: [
{ startIndex: 0, type: 'number.hex.pats' }
]}],
[{
line: '0342325',
tokens: [
{ startIndex: 0, type: 'number.octal.pats' }
]}],
[{
line: '0x123',
tokens: [
{ startIndex: 0, type: 'number.hex.pats' }
]}],
[{
line: '23.5',
tokens: [
{ startIndex: 0, type: 'number.float.pats' }
]}],
[{
line: '23.5e3',
tokens: [
{ startIndex: 0, type: 'number.float.pats' }
]}],
[{
line: '23.5E3',
tokens: [
{ startIndex: 0, type: 'number.float.pats' }
]}],
[{
line: '23.5F',
tokens: [
{ startIndex: 0, type: 'number.float.pats' }
]}],
[{
line: '23.5f',
tokens: [
{ startIndex: 0, type: 'number.float.pats' }
]}],
[{
line: '1.72E3F',
tokens: [
{ startIndex: 0, type: 'number.float.pats' }
]}],
[{
line: '1.72E3f',
tokens: [
{ startIndex: 0, type: 'number.float.pats' }
]}],
[{
line: '1.72e3F',
tokens: [
{ startIndex: 0, type: 'number.float.pats' }
]}],
[{
line: '1.72e3f',
tokens: [
{ startIndex: 0, type: 'number.float.pats' }
]}],
[{
line: '23.5L',
tokens: [
{ startIndex: 0, type: 'number.float.pats' }
]}],
[{
line: '23.5l',
tokens: [
{ startIndex: 0, type: 'number.float.pats' }
]}],
[{
line: '1.72E3L',
tokens: [
{ startIndex: 0, type: 'number.float.pats' }
]}],
[{
line: '1.72E3l',
tokens: [
{ startIndex: 0, type: 'number.float.pats' }
]}],
[{
line: '1.72e3L',
tokens: [
{ startIndex: 0, type: 'number.float.pats' }
]}],
[{
line: '1.72e3l',
tokens: [
{ startIndex: 0, type: 'number.float.pats' }
]}],
[{
line: '0+0',
tokens: [
{ startIndex: 0, type: 'number.decimal.pats' },
{ startIndex: 1, type: 'operator.pats' },
{ startIndex: 2, type: 'number.decimal.pats' }
]}],
[{
line: '100+10',
tokens: [
{ startIndex: 0, type: 'number.decimal.pats' },
{ startIndex: 3, type: 'operator.pats' },
{ startIndex: 4, type: 'number.decimal.pats' }
]}],
[{
line: '0 + 0',
tokens: [
{ startIndex: 0, type: 'number.decimal.pats' },
{ startIndex: 1, type: '' },
{ startIndex: 2, type: 'operator.pats' },
{ startIndex: 3, type: '' },
{ startIndex: 4, type: 'number.decimal.pats' }
]}],
// hi-lighting of variables in staload/dynload
[{
line: '"{$LIBATSCC2JS}/staloadall.hats"',
tokens: [
{ startIndex: 0, type: 'string.quote.pats' },
{ startIndex: 1, type: 'string.escape.pats' },
{ startIndex: 3, type: 'identifier.pats' },
{ startIndex: 14, type: 'string.escape.pats' },
{ startIndex: 15, type: 'string.pats' },
{ startIndex: 31, type: 'string.quote.pats' }
]}],
// Monarch Generated
[{
line: '#include "/path/to/my/file.h"',
tokens: [
{ startIndex: 0, type: 'keyword.srp.pats' },
{ startIndex: 8, type: '' },
{ startIndex: 9, type: 'string.quote.pats' },
{ startIndex: 10, type: 'string.pats' },
{ startIndex: 28, type: 'string.quote.pats' }
]}, {
line: '',
tokens: [
]}, {
line: '#ifdef VAR #then',
tokens: [
{ startIndex: 0, type: 'keyword.srp.pats' },
{ startIndex: 6, type: '' },
{ startIndex: 7, type: 'identifier.pats' },
{ startIndex: 10, type: '' },
{ startIndex: 11, type: 'keyword.srp.pats' }
]}, {
line: '#define SUM(A,B) (A) + (B)',
tokens: [
{ startIndex: 0, type: 'keyword.srp.pats' },
{ startIndex: 7, type: '' },
{ startIndex: 8, type: 'identifier.pats' },
{ startIndex: 11, type: 'delimiter.parenthesis.pats' },
{ startIndex: 12, type: 'identifier.pats' },
{ startIndex: 13, type: 'delimiter.comma.pats' },
{ startIndex: 14, type: 'identifier.pats' },
{ startIndex: 15, type: 'delimiter.parenthesis.pats' },
{ startIndex: 16, type: '' },
{ startIndex: 17, type: 'delimiter.parenthesis.pats' },
{ startIndex: 18, type: 'identifier.pats' },
{ startIndex: 19, type: 'delimiter.parenthesis.pats' },
{ startIndex: 20, type: '' },
{ startIndex: 21, type: 'operator.pats' },
{ startIndex: 22, type: '' },
{ startIndex: 23, type: 'delimiter.parenthesis.pats' },
{ startIndex: 24, type: 'identifier.pats' },
{ startIndex: 25, type: 'delimiter.parenthesis.pats' }
]}, {
line: 'staload Asdf_CDE = "./myfile.sats"',
tokens: [
{ startIndex: 0, type: 'keyword.pats' },
{ startIndex: 7, type: '' },
{ startIndex: 8, type: 'identifier.pats' },
{ startIndex: 16, type: '' },
{ startIndex: 17, type: 'keyword.pats' },
{ startIndex: 18, type: '' },
{ startIndex: 19, type: 'string.quote.pats' },
{ startIndex: 20, type: 'string.pats' },
{ startIndex: 33, type: 'string.quote.pats' },
]}, {
line: '',
tokens: [
]}, {
line: 'implement main(argc, argv)',
tokens: [
{ startIndex: 0, type: 'keyword.pats' },
{ startIndex: 9, type: '' },
{ startIndex: 10, type: 'identifier.pats' },
{ startIndex: 14, type: 'delimiter.parenthesis.pats' },
{ startIndex: 15, type: 'identifier.pats' },
{ startIndex: 19, type: 'delimiter.comma.pats' },
{ startIndex: 20, type: '' },
{ startIndex: 21, type: 'identifier.pats' },
{ startIndex: 25, type: 'delimiter.parenthesis.pats' }
]}, {
line: ' = begin',
tokens: [
{ startIndex: 0, type: '' },
{ startIndex: 1, type: 'keyword.pats' },
{ startIndex: 2, type: '' },
{ startIndex: 3, type: 'keyword.pats' }
]}, {
line: '0',
tokens: [
{ startIndex: 0, type: 'number.decimal.pats' },
]}, {
line: 'end',
tokens: [
{ startIndex: 0, type: 'keyword.pats' }
]}, {
line: '',
tokens: [
]}, {
line: '',
tokens: [
]}, {
line: 'dataprop FACT (int, int) =',
tokens: [
{ startIndex: 0, type: 'keyword.pats' },
{ startIndex: 8, type: '' },
{ startIndex: 9, type: 'identifier.pats' },
{ startIndex: 13, type: '' },
{ startIndex: 14, type: 'delimiter.parenthesis.pats' },
{ startIndex: 15, type: 'type.pats' },
{ startIndex: 18, type: 'delimiter.comma.pats' },
{ startIndex: 19, type: '' },
{ startIndex: 20, type: 'type.pats' },
{ startIndex: 23, type: 'delimiter.parenthesis.pats' },
{ startIndex: 24, type: '' },
{ startIndex: 25, type: 'keyword.pats' }
]}, {
line: ' | FACTbas (0, 1) of ()',
tokens: [
{ startIndex: 0, type: '' },
{ startIndex: 1, type: 'keyword.pats' },
{ startIndex: 2, type: '' },
{ startIndex: 3, type: 'identifier.pats' },
{ startIndex: 10, type: '' },
{ startIndex: 11, type: 'delimiter.parenthesis.pats' },
{ startIndex: 12, type: 'number.decimal.pats' },
{ startIndex: 13, type: 'delimiter.comma.pats' },
{ startIndex: 14, type: '' },
{ startIndex: 15, type: 'number.decimal.pats' },
{ startIndex: 16, type: 'delimiter.parenthesis.pats' },
{ startIndex: 17, type: '' },
{ startIndex: 18, type: 'keyword.pats' },
{ startIndex: 20, type: '' },
{ startIndex: 21, type: 'delimiter.parenthesis.pats' }
]}, {
line: ' | {n:pos}{r:int} FACTind (n, n*r) of FACT (n-1, r)',
tokens: [
{ startIndex: 0, type: '' },
{ startIndex: 1, type: 'keyword.pats' },
{ startIndex: 2, type: '' },
{ startIndex: 3, type: 'delimiter.curly.pats' },
{ startIndex: 4, type: 'identifier.pats' },
{ startIndex: 5, type: 'keyword.pats' },
{ startIndex: 6, type: 'identifier.pats' },
{ startIndex: 9, type: 'delimiter.parenthesis.pats' },
{ startIndex: 10, type: 'delimiter.curly.pats' },
{ startIndex: 11, type: 'identifier.pats' },
{ startIndex: 12, type: 'keyword.pats' },
{ startIndex: 13, type: 'type.pats' },
{ startIndex: 16, type: 'delimiter.parenthesis.pats' },
{ startIndex: 17, type: '' },
{ startIndex: 18, type: 'identifier.pats' },
{ startIndex: 25, type: '' },
{ startIndex: 26, type: 'delimiter.parenthesis.pats' },
{ startIndex: 27, type: 'identifier.pats' },
{ startIndex: 28, type: 'delimiter.comma.pats' },
{ startIndex: 29, type: '' },
{ startIndex: 30, type: 'identifier.pats' },
{ startIndex: 31, type: 'operator.pats' },
{ startIndex: 32, type: 'identifier.pats' },
{ startIndex: 33, type: 'delimiter.parenthesis.pats' },
{ startIndex: 34, type: '' },
{ startIndex: 35, type: 'keyword.pats' },
{ startIndex: 37, type: '' },
{ startIndex: 38, type: 'identifier.pats' },
{ startIndex: 42, type: '' },
{ startIndex: 43, type: 'delimiter.parenthesis.pats' },
{ startIndex: 44, type: 'identifier.pats' },
{ startIndex: 45, type: 'operator.pats' },
{ startIndex: 46, type: 'number.decimal.pats' },
{ startIndex: 47, type: 'delimiter.comma.pats' },
{ startIndex: 48, type: '' },
{ startIndex: 49, type: 'identifier.pats' },
{ startIndex: 50, type: 'delimiter.parenthesis.pats' },
]}, {
line: '',
tokens: [
]}, {
line: 'fun fact {n:nat} .<n>. (x: int n) : [r:int] (FACT(n, r) | int(r)) = (',
tokens: [
{ startIndex: 0, type: 'keyword.pats' },
{ startIndex: 3, type: '' },
{ startIndex: 4, type: 'identifier.pats' },
{ startIndex: 8, type: '' },
{ startIndex: 9, type: 'delimiter.curly.pats' },
{ startIndex: 10, type: 'identifier.pats' },
{ startIndex: 11, type: 'keyword.pats' },
{ startIndex: 12, type: 'identifier.pats' },
{ startIndex: 15, type: 'delimiter.parenthesis.pats' },
{ startIndex: 16, type: '' },
{ startIndex: 17, type: 'identifier.sym.pats' },
{ startIndex: 19, type: 'identifier.pats' },
{ startIndex: 20, type: 'keyword.pats' },
{ startIndex: 22, type: '' },
{ startIndex: 23, type: 'delimiter.parenthesis.pats' },
{ startIndex: 24, type: 'identifier.pats' },
{ startIndex: 25, type: 'keyword.pats' },
{ startIndex: 26, type: '' },
{ startIndex: 27, type: 'type.pats' },
{ startIndex: 30, type: '' },
{ startIndex: 31, type: 'identifier.pats' },
{ startIndex: 32, type: 'delimiter.parenthesis.pats' },
{ startIndex: 33, type: '' },
{ startIndex: 34, type: 'keyword.pats' },
{ startIndex: 35, type: '' },
{ startIndex: 36, type: 'delimiter.square.pats' },
{ startIndex: 37, type: 'identifier.pats' },
{ startIndex: 38, type: 'keyword.pats' },
{ startIndex: 39, type: 'type.pats' },
{ startIndex: 42, type: 'delimiter.square.pats' },
{ startIndex: 43, type: '' },
{ startIndex: 44, type: 'delimiter.parenthesis.pats' },
{ startIndex: 45, type: 'identifier.pats' },
{ startIndex: 49, type: 'delimiter.parenthesis.pats' },
{ startIndex: 50, type: 'identifier.pats' },
{ startIndex: 51, type: 'delimiter.comma.pats' },
{ startIndex: 52, type: '' },
{ startIndex: 53, type: 'identifier.pats' },
{ startIndex: 54, type: 'delimiter.parenthesis.pats' },
{ startIndex: 55, type: '' },
{ startIndex: 56, type: 'keyword.pats' },
{ startIndex: 57, type: '' },
{ startIndex: 58, type: 'type.pats' },
{ startIndex: 61, type: 'delimiter.parenthesis.pats' },
{ startIndex: 62, type: 'identifier.pats' },
{ startIndex: 63, type: 'delimiter.parenthesis.pats' },
{ startIndex: 65, type: '' },
{ startIndex: 66, type: 'keyword.pats' },
{ startIndex: 67, type: '' },
{ startIndex: 68, type: 'delimiter.parenthesis.pats' },
]}, {
line: 'if x > 0 then let',
tokens: [
{ startIndex: 0, type: 'keyword.pats' },
{ startIndex: 2, type: '' },
{ startIndex: 3, type: 'identifier.pats' },
{ startIndex: 4, type: '' },
{ startIndex: 5, type: 'keyword.pats' },
{ startIndex: 6, type: '' },
{ startIndex: 7, type: 'number.decimal.pats' },
{ startIndex: 8, type: '' },
{ startIndex: 9, type: 'keyword.pats' },
{ startIndex: 13, type: '' },
{ startIndex: 14, type: 'keyword.pats' },
]}, {
line: ' val [r1:int] (pf1 | r1) = fact (x-1)',
tokens: [
{ startIndex: 0, type: '' },
{ startIndex: 2, type: 'keyword.pats' },
{ startIndex: 5, type: '' },
{ startIndex: 6, type: 'delimiter.square.pats' },
{ startIndex: 7, type: 'identifier.pats' },
{ startIndex: 9, type: 'keyword.pats' },
{ startIndex: 10, type: 'type.pats' },
{ startIndex: 13, type: 'delimiter.square.pats' },
{ startIndex: 14, type: '' },
{ startIndex: 15, type: 'delimiter.parenthesis.pats' },
{ startIndex: 16, type: 'identifier.pats' },
{ startIndex: 19, type: '' },
{ startIndex: 20, type: 'keyword.pats' },
{ startIndex: 21, type: '' },
{ startIndex: 22, type: 'identifier.pats' },
{ startIndex: 24, type: 'delimiter.parenthesis.pats' },
{ startIndex: 25, type: '' },
{ startIndex: 26, type: 'keyword.pats' },
{ startIndex: 27, type: '' },
{ startIndex: 28, type: 'identifier.pats' },
{ startIndex: 32, type: '' },
{ startIndex: 33, type: 'delimiter.parenthesis.pats' },
{ startIndex: 34, type: 'identifier.pats' },
{ startIndex: 35, type: 'operator.pats' },
{ startIndex: 36, type: 'number.decimal.pats' },
{ startIndex: 37, type: 'delimiter.parenthesis.pats' },
]}, {
line: ' prval pf = FACTind {n}{r1} (pf1)',
tokens: [
{ startIndex: 0, type: '' },
{ startIndex: 2, type: 'keyword.pats' },
{ startIndex: 7, type: '' },
{ startIndex: 8, type: 'identifier.pats' },
{ startIndex: 10, type: '' },
{ startIndex: 11, type: 'keyword.pats' },
{ startIndex: 12, type: '' },
{ startIndex: 13, type: 'identifier.pats' },
{ startIndex: 20, type: '' },
{ startIndex: 21, type: 'delimiter.curly.pats' },
{ startIndex: 22, type: 'identifier.pats' },
{ startIndex: 23, type: 'delimiter.parenthesis.pats' },
{ startIndex: 24, type: 'delimiter.curly.pats' },
{ startIndex: 25, type: 'identifier.pats' },
{ startIndex: 27, type: 'delimiter.parenthesis.pats' },
{ startIndex: 28, type: '' },
{ startIndex: 29, type: 'delimiter.parenthesis.pats' },
{ startIndex: 30, type: 'identifier.pats' },
{ startIndex: 33, type: 'delimiter.parenthesis.pats' }
]}, {
line: ' val r = x * r1',
tokens: [
{startIndex: 0, type: ''},
{startIndex: 2, type: 'keyword.pats'},
{startIndex: 5, type: ''},
{startIndex: 6, type: 'identifier.pats'},
{startIndex: 7, type: ''},
{startIndex: 8, type: 'keyword.pats'},
{startIndex: 9, type: ''},
{startIndex: 10, type: 'identifier.pats'},
{startIndex: 11, type: ''},
{startIndex: 12, type: 'operator.pats'},
{startIndex: 13, type: ''},
{startIndex: 14, type: 'identifier.pats'}
]}, {
line: 'in',
tokens: [
{startIndex: 0, type: 'keyword.pats'}
]}, {
line: ' (pf | r)',
tokens: [
{ startIndex: 0, type: '' },
{ startIndex: 2, type: 'delimiter.parenthesis.pats' },
{ startIndex: 3, type: 'identifier.pats' },
{ startIndex: 5, type: '' },
{ startIndex: 6, type: 'keyword.pats' },
{ startIndex: 7, type: '' },
{ startIndex: 8, type: 'identifier.pats' },
{ startIndex: 9, type: 'delimiter.parenthesis.pats' }
]}, {
line: 'end // end of [then]',
tokens: [
{startIndex: 0, type: 'keyword.pats'},
{startIndex: 3, type: ''},
{startIndex: 4, type: 'comment.pats'}
]}, {
line: 'else (FACTbas () | 1)',
tokens: [
{ startIndex: 0, type: 'keyword.pats' },
{ startIndex: 4, type: '' },
{ startIndex: 5, type: 'delimiter.parenthesis.pats' },
{ startIndex: 6, type: 'identifier.pats' },
{ startIndex: 13, type: '' },
{ startIndex: 14, type: 'delimiter.parenthesis.pats' },
{ startIndex: 16, type: '' },
{ startIndex: 17, type: 'keyword.pats' },
{ startIndex: 18, type: '' },
{ startIndex: 19, type: 'number.decimal.pats' },
{ startIndex: 20, type: 'delimiter.parenthesis.pats' }
]}, {
line: ') (* end of [fact] *)',
tokens: [
{startIndex: 0, type: 'delimiter.parenthesis.pats'},
{startIndex: 1, type: ''},
{startIndex: 2, type: 'comment.pats'}
]}, {
line: '',
tokens: [
]}, {
line: 'local',
tokens: [
{ startIndex: 0, type: 'keyword.pats' }
]}, {
line: 'var __count: int = 0 // it is statically allocated',
tokens: [
{ startIndex: 0, type: 'keyword.pats' },
{ startIndex: 3, type: '' },
{ startIndex: 4, type: 'identifier.pats' },
{ startIndex: 11, type: 'keyword.pats' },
{ startIndex: 12, type: '' },
{ startIndex: 13, type: 'type.pats' },
{ startIndex: 16, type: '' },
{ startIndex: 17, type: 'keyword.pats' },
{ startIndex: 18, type: '' },
{ startIndex: 19, type: 'number.decimal.pats' },
{ startIndex: 20, type: '' },
{ startIndex: 21, type: 'comment.pats' }
]}, {
line: '',
tokens: [
]}, {
line: 'val theCount =',
tokens: [
{ startIndex: 0, type: 'keyword.pats' },
{ startIndex: 3, type: '' },
{ startIndex: 4, type: 'identifier.pats' },
{ startIndex: 12, type: '' },
{ startIndex: 13, type: 'keyword.pats' }
]}, {
line: ' ref_make_viewptr{int}(view@(__count) | addr@(__count))',
tokens: [
{ startIndex: 0, type: '' },
{ startIndex: 2, type: 'identifier.pats' },
{ startIndex: 18, type: 'delimiter.curly.pats' },
{ startIndex: 19, type: 'type.pats' },
{ startIndex: 22, type: 'delimiter.parenthesis.pats' },
{ startIndex: 24, type: 'keyword.pats' },
{ startIndex: 29, type: 'delimiter.parenthesis.pats' },
{ startIndex: 30, type: 'identifier.pats' },
{ startIndex: 37, type: 'delimiter.parenthesis.pats' },
{ startIndex: 38, type: '' },
{ startIndex: 39, type: 'keyword.pats' },
{ startIndex: 40, type: '' },
{ startIndex: 41, type: 'keyword.pats' },
{ startIndex: 46, type: 'delimiter.parenthesis.pats' },
{ startIndex: 47, type: 'identifier.pats' },
{ startIndex: 54, type: 'delimiter.parenthesis.pats' },
]}, {
line: '// end of [val]',
tokens: [
{ startIndex: 0, type: 'comment.pats' }
]}, {
line: '',
tokens: [
]}, {
line: 'in (* in of [local] *)',
tokens: [
{ startIndex: 0, type: 'keyword.pats' },
{ startIndex: 2, type: '' },
{ startIndex: 3, type: 'comment.pats' }
]}, {
line: '',
tokens: [
]}, {
line: 'fun theCount_get (): int = !theCount',
tokens: [
{ startIndex: 0, type: 'keyword.pats' },
{ startIndex: 3, type: '' },
{ startIndex: 4, type: 'identifier.pats' },
{ startIndex: 16, type: '' },
{ startIndex: 17, type: 'delimiter.parenthesis.pats' },
{ startIndex: 19, type: 'keyword.pats' },
{ startIndex: 20, type: '' },
{ startIndex: 21, type: 'type.pats' },
{ startIndex: 24, type: '' },
{ startIndex: 25, type: 'keyword.pats' },
{ startIndex: 26, type: '' },
{ startIndex: 27, type: 'keyword.pats' },
{ startIndex: 28, type: 'identifier.pats' }
]}, {
line: '',
tokens: [
]}, {
line: 'fun theCount_inc (): void = !theCount := !theCount + 1',
tokens: [
{ startIndex: 0, type: 'keyword.pats' },
{ startIndex: 3, type: '' },
{ startIndex: 4, type: 'identifier.pats' },
{ startIndex: 16, type: '' },
{ startIndex: 17, type: 'delimiter.parenthesis.pats' },
{ startIndex: 19, type: 'keyword.pats' },
{ startIndex: 20, type: '' },
{ startIndex: 21, type: 'type.pats' },
{ startIndex: 25, type: '' },
{ startIndex: 26, type: 'keyword.pats' },
{ startIndex: 27, type: '' },
{ startIndex: 28, type: 'keyword.pats' },
{ startIndex: 29, type: 'identifier.pats' },
{ startIndex: 37, type: '' },
{ startIndex: 38, type: 'operator.pats' },
{ startIndex: 40, type: '' },
{ startIndex: 41, type: 'keyword.pats' },
{ startIndex: 42, type: 'identifier.pats' },
{ startIndex: 50, type: '' },
{ startIndex: 51, type: 'operator.pats' },
{ startIndex: 52, type: '' },
{ startIndex: 53, type: 'number.decimal.pats' }
]}, {
line: '',
tokens: [
]}, {
line: 'end // end of [local]',
tokens: [
{ startIndex: 0, type: 'keyword.pats' },
{ startIndex: 3, type: '' },
{ startIndex: 4, type: 'comment.pats' }
]}, {
line: '',
tokens: [
]}, {
line: '#endif',
tokens: [
{ startIndex: 0, type: 'keyword.srp.pats' }
]}]
]);

Просмотреть файл

@ -26,6 +26,7 @@
"src/markdown.ts",
"src/monaco.contribution.ts",
"src/objective-c.ts",
"src/postiats.ts",
"src/powershell.ts",
"src/python.ts",
"src/r.ts",
@ -49,6 +50,7 @@
"test/markdown.test.ts",
"test/mocha.d.ts",
"test/objective-c.test.ts",
"test/postiats.test.ts",
"test/powershell.test.ts",
"test/python.test.ts",
"test/r.test.ts",