diff --git a/src/postiats.ts b/src/postiats.ts index 7461ef1b..5dc09a7a 100644 --- a/src/postiats.ts +++ b/src/postiats.ts @@ -1,8 +1,9 @@ -// see: https://microsoft.github.io/monaco-editor/monarch.html - -// ATS/Postiats lexical syntax definition -// Written by Artyom Shalkhakov over the week of 20th - 27th of June, 2016. -// License: BSD v3 (but probably GPL, since ATS/Postiats lexer is GPL? not a lawyer!) +/*--------------------------------------------------------------------------------------------- + * Copyright (c) Artyom Shalkhakov. All rights reserved. + * Licensed under the MIT License. See License.txt in the project root for license information. + * + * Based on the ATS/Postiats lexer by Hongwei Xi. + *---------------------------------------------------------------------------------------------*/ 'use strict'; @@ -26,14 +27,14 @@ export var conf:IRichLanguageConfiguration = { export var language = { tokenPostfix: '.pats', -// TODO: staload and dynload are followed by a special kind of string literals -// with {$IDENTIFER} variables, and it also may make sense to highlight -// the punctuation (. and / and \) differently. + // TODO: staload and dynload are followed by a special kind of string literals + // with {$IDENTIFIER} variables, and it also may make sense to highlight + // the punctuation (. and / and \) differently. 
// Set defaultToken to invalid to see what you do not tokenize yet defaultToken: 'invalid', - // reference: https://github.com/githwxi/ATS-Postiats/blob/master/src/pats_lexing_token.dats + // keyword reference: https://github.com/githwxi/ATS-Postiats/blob/master/src/pats_lexing_token.dats keywords: [ // @@ -462,6 +463,7 @@ export var language = { start: 'root', // The main tokenizer for ATS/Postiats + // reference: https://github.com/githwxi/ATS-Postiats/blob/master/src/pats_lexing.dats tokenizer: { root: [ // lexing_blankseq0 @@ -604,7 +606,6 @@ export var language = { ], lexing_COMMENT_block_ml: [ - //{regex: /\(\*(?!\))|\*\)/, action: { cases: { '\\(\\*': {token: 'comment', next: '@push'}, '@default': {token: 'comment', next: '@pop'} } }}, [/[^\(\*]+/, 'comment'], [/\(\*/, 'comment', '@push' ], [/\(\*/, 'comment.invalid' ], @@ -613,8 +614,8 @@ export var language = { ], lexing_COMMENT_block_c: [ [/[^\/*]+/, 'comment' ], - // [/\/\*/, 'comment', '@push' ], // nested comment not allowed :-( - // [/\/\*/, 'comment.invalid' ], // this breaks block comments in the shape of /* //*/ + // [/\/\*/, 'comment', '@push' ], // nested C-style block comments not allowed + // [/\/\*/, 'comment.invalid' ], // NOTE: this breaks block comments in the shape of /* //*/ [/\*\//, 'comment', '@pop' ], [/[\/*]/, 'comment' ] ], diff --git a/test/postiats.test.ts b/test/postiats.test.ts index db3c8c69..fd7b0657 100644 --- a/test/postiats.test.ts +++ b/test/postiats.test.ts @@ -89,7 +89,7 @@ testTokenization('postiats', [ { startIndex: 0, type: 'keyword.pats' }, { startIndex: 3, type: '' }, { startIndex: 4, type: 'identifier.pats' }, - { startIndex: 5, type: 'keyword.pats'} + { startIndex: 5, type: 'keyword.pats'}, { startIndex: 6, type: 'type.pats' }, { startIndex: 9, type: '' }, { startIndex: 10, type: 'keyword.pats' }, @@ -415,6 +415,18 @@ testTokenization('postiats', [ { startIndex: 4, type: 'number.decimal.pats' } ]}], + // highlighting of variables in staload/dynload + [{ + line: 
'"{$LIBATSCC2JS}/staloadall.hats"', + tokens: [ + { startIndex: 0, type: 'string.quote.pats' }, + { startIndex: 1, type: 'string.escape.pats' }, + { startIndex: 3, type: 'identifier.pats' }, + { startIndex: 14, type: 'string.escape.pats' }, + { startIndex: 15, type: 'string.pats' }, + { startIndex: 31, type: 'string.quote.pats' } + ]}], + // Monarch Generated [{ line: '#include "/path/to/my/file.h"', @@ -536,7 +548,7 @@ testTokenization('postiats', [ { startIndex: 13, type: 'delimiter.comma.pats' }, { startIndex: 14, type: '' }, { startIndex: 15, type: 'number.decimal.pats' }, - { startIndex: 16, type: 'delimiter.parenthesis.pats' } + { startIndex: 16, type: 'delimiter.parenthesis.pats' }, { startIndex: 17, type: '' }, { startIndex: 18, type: 'keyword.pats' }, { startIndex: 20, type: '' }, @@ -611,9 +623,9 @@ testTokenization('postiats', [ { startIndex: 31, type: 'identifier.pats' }, { startIndex: 32, type: 'delimiter.parenthesis.pats' }, { startIndex: 33, type: '' }, - { startIndex: 34, type: 'keyword.pats' } + { startIndex: 34, type: 'keyword.pats' }, { startIndex: 35, type: '' }, - { startIndex: 36, type: 'delimiter.square.pats' } + { startIndex: 36, type: 'delimiter.square.pats' }, { startIndex: 37, type: 'identifier.pats' }, { startIndex: 38, type: 'keyword.pats' }, { startIndex: 39, type: 'type.pats' },