mirror of
https://github.com/microsoft/monaco-editor.git
synced 2025-12-22 23:13:02 +01:00
Implemented Lexon highlighting
This commit is contained in:
parent
209730c94f
commit
6655a9028e
5 changed files with 285 additions and 1 deletions
3
.vscode/settings.json
vendored
3
.vscode/settings.json
vendored
|
|
@ -10,5 +10,6 @@
|
|||
"editor.tabSize": 4,
|
||||
"editor.insertSpaces": false,
|
||||
"editor.detectIndentation": false,
|
||||
"typescript.tsdk": "./node_modules/typescript/lib"
|
||||
"typescript.tsdk": "./node_modules/typescript/lib",
|
||||
"vscode-corda.isCordaProject": false
|
||||
}
|
||||
|
|
|
|||
14
src/lexon/lexon.contribution.ts
Normal file
14
src/lexon/lexon.contribution.ts
Normal file
|
|
@ -0,0 +1,14 @@
|
|||
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the MIT License. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/
'use strict';

import { registerLanguage } from '../_.contribution';

// Register the Lexon language with the editor's language registry.
// The actual Monarch tokenizer and language configuration live in './lexon'
// and are lazy-loaded on first use via the dynamic import below.
registerLanguage({
	id: 'lexon',
	extensions: ['.lex'],
	aliases: ['Lexon'],
	loader: () => import('./lexon')
});
|
||||
131
src/lexon/lexon.test.ts
Normal file
131
src/lexon/lexon.test.ts
Normal file
|
|
@ -0,0 +1,131 @@
|
|||
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the MIT License. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/

'use strict';

import { testTokenization } from '../test/testRunner';

// Tokenization tests for the Lexon Monarch grammar (see src/lexon/lexon.ts).
// Each entry is one line of Lexon source plus the expected token boundaries;
// the '.lexon' postfix on every token type comes from the grammar's tokenPostfix.
testTokenization('lexon', [
	// Tests

	// 'LEX' keyword opens a contract title; the rest of the line is identifiers.
	[{
		line: 'LEX Paid Escrow',
		tokens: [
			{ startIndex: 0, type: 'keyword.lexon' },
			{ startIndex: 3, type: 'white.lexon' },
			{ startIndex: 4, type: 'identifier.lexon' },
			{ startIndex: 8, type: 'white.lexon' },
			{ startIndex: 9, type: 'identifier.lexon' },
		]
	}],

	// 'LEXON:' introduces a version number, tokenized as a semver literal.
	[{
		line: 'LEXON: 0.2.20',
		tokens: [
			{ startIndex: 0, type: 'keyword.lexon' },
			{ startIndex: 5, type: 'delimiter.lexon' },
			{ startIndex: 6, type: 'white.lexon' },
			{ startIndex: 7, type: 'number.semver.lexon' },
		]
	}],

	// A COMMENT line is swallowed whole as a single comment token.
	[{
		line: 'COMMENT: 3.f - an escrow that is controlled by a third party for a fee.',
		tokens: [
			{ startIndex: 0, type: 'comment.lexon' },
		]
	}],

	// Quoted identifier declaration; 'is' is an operator, 'person' a type keyword.
	[{
		line: '"Payer" is a person.',
		tokens: [
			{ startIndex: 0, type: 'identifier.quote.lexon' },
			{ startIndex: 1, type: 'identifier.lexon' },
			{ startIndex: 6, type: 'identifier.quote.lexon' },
			{ startIndex: 7, type: 'white.lexon' },
			{ startIndex: 8, type: 'operator.lexon' },
			{ startIndex: 10, type: 'white.lexon' },
			{ startIndex: 11, type: 'identifier.lexon' },
			{ startIndex: 12, type: 'white.lexon' },
			{ startIndex: 13, type: 'keyword.type.lexon' },
			{ startIndex: 19, type: 'delimiter.lexon' },
		]
	}],

	// Same shape as above with the 'amount' type keyword.
	[{
		line: '"Fee" is an amount.',
		tokens: [
			{ startIndex: 0, type: 'identifier.quote.lexon' },
			{ startIndex: 1, type: 'identifier.lexon' },
			{ startIndex: 4, type: 'identifier.quote.lexon' },
			{ startIndex: 5, type: 'white.lexon' },
			{ startIndex: 6, type: 'operator.lexon' },
			{ startIndex: 8, type: 'white.lexon' },
			{ startIndex: 9, type: 'identifier.lexon' },
			{ startIndex: 11, type: 'white.lexon' },
			{ startIndex: 12, type: 'keyword.type.lexon' },
			{ startIndex: 18, type: 'delimiter.lexon' },
		]
	}],

	// Statement mixing keywords ('pays', 'into'), a type keyword and identifiers.
	[{
		line: 'The Payer pays an Amount into escrow,',
		tokens: [
			{ startIndex: 0, type: 'identifier.lexon' }, // The
			{ startIndex: 3, type: 'white.lexon' },
			{ startIndex: 4, type: 'identifier.lexon' }, // Payer
			{ startIndex: 9, type: 'white.lexon' },
			{ startIndex: 10, type: 'keyword.lexon' }, // pays
			{ startIndex: 14, type: 'white.lexon' },
			{ startIndex: 15, type: 'identifier.lexon' }, // an
			{ startIndex: 17, type: 'white.lexon' },
			{ startIndex: 18, type: 'keyword.type.lexon' }, // Amount
			{ startIndex: 24, type: 'white.lexon' },
			{ startIndex: 25, type: 'keyword.lexon' }, // into
			{ startIndex: 29, type: 'white.lexon' },
			{ startIndex: 30, type: 'identifier.lexon' }, // escrow
			{ startIndex: 36, type: 'delimiter.lexon' }, // ,
		]
	}],

	// 'appoints' is a keyword; grammar is case-insensitive (ignoreCase: true).
	[{
		line: 'appoints the Payee,',
		tokens: [
			{ startIndex: 0, type: 'keyword.lexon' }, // Appoints
			{ startIndex: 8, type: 'white.lexon' },
			{ startIndex: 9, type: 'identifier.lexon' }, // the
			{ startIndex: 12, type: 'white.lexon' },
			{ startIndex: 13, type: 'identifier.lexon' }, // Payee
			{ startIndex: 18, type: 'delimiter.lexon' }, // ,
		]
	}],

	// 'and' is classified as an operator; 'fixes' is not in the keyword list.
	[{
		line: 'and also fixes the Fee.',
		tokens: [
			{ startIndex: 0, type: 'operator.lexon' }, // and
			{ startIndex: 3, type: 'white.lexon' },
			{ startIndex: 4, type: 'identifier.lexon' }, // also
			{ startIndex: 8, type: 'white.lexon' },
			{ startIndex: 9, type: 'identifier.lexon' }, // fixes
			{ startIndex: 14, type: 'white.lexon' },
			{ startIndex: 15, type: 'identifier.lexon' }, // the
			{ startIndex: 18, type: 'white.lexon' },
			{ startIndex: 19, type: 'identifier.lexon' }, // Fee
			{ startIndex: 22, type: 'delimiter.lexon' }, // .
		]
	}],

	// 'CLAUSE:' enters the identifier-until-period state, so the multi-word
	// clause name "Pay Out" is tokenized as one identifier span.
	[{
		line: 'CLAUSE: Pay Out.',
		tokens: [
			{ startIndex: 0, type: 'keyword.lexon' }, // CLAUSE
			{ startIndex: 6, type: 'delimiter.lexon' }, // :
			{ startIndex: 7, type: 'white.lexon' },
			{ startIndex: 8, type: 'identifier.lexon' }, // Pay out
			{ startIndex: 15, type: 'delimiter.lexon' }, // .
		]
	}],
]);
|
||||
137
src/lexon/lexon.ts
Normal file
137
src/lexon/lexon.ts
Normal file
|
|
@ -0,0 +1,137 @@
|
|||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the MIT License. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
|
||||
'use strict';
|
||||
|
||||
import IRichLanguageConfiguration = monaco.languages.LanguageConfiguration;
|
||||
import ILanguage = monaco.languages.IMonarchLanguage;
|
||||
|
||||
export const conf: IRichLanguageConfiguration = {
|
||||
comments: {
|
||||
lineComment: 'COMMENT',
|
||||
// blockComment: ['COMMENT', '.'],
|
||||
},
|
||||
brackets: [
|
||||
['(', ')']
|
||||
],
|
||||
autoClosingPairs: [
|
||||
{ open: '{', close: '}' },
|
||||
{ open: '[', close: ']' },
|
||||
{ open: '(', close: ')' },
|
||||
{ open: '"', close: '"', },
|
||||
{ open: ':', close: '.', },
|
||||
],
|
||||
surroundingPairs: [
|
||||
{ open: '{', close: '}' },
|
||||
{ open: '[', close: ']' },
|
||||
{ open: '(', close: ')' },
|
||||
{ open: '`', close: '`' },
|
||||
{ open: '"', close: '"' },
|
||||
{ open: '\'', close: '\'' },
|
||||
{ open: ':', close: '.', },
|
||||
],
|
||||
folding: {
|
||||
markers: {
|
||||
start: new RegExp("^\\s*(::\\s*|COMMENT\\s+)#region"),
|
||||
end: new RegExp("^\\s*(::\\s*|COMMENT\\s+)#endregion")
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
/**
 * Monarch tokenizer for Lexon. Token types carry the '.lexon' postfix.
 * The grammar is case-insensitive; keywords/typeKeywords/operators are
 * matched via the `cases` table in the identifier rule below.
 */
export const language = <ILanguage> {
	// Set defaultToken to invalid to see what you do not tokenize yet
	// defaultToken: 'invalid',
	tokenPostfix: '.lexon',
	ignoreCase: true,

	keywords: [
		'lexon', 'lex', 'clause', 'terms', 'contracts', 'may', 'pay',
		'pays', 'appoints', 'into', 'to'
	],

	typeKeywords: [
		'amount', 'person', 'key', 'time', 'date', 'asset', 'text'
	],

	operators: [
		'less', 'greater', 'equal', 'le', 'gt', 'or', 'and',
		'add', 'added', 'subtract', 'subtracted', 'multiply', 'multiplied', 'times', 'divide', 'divided',
		'is', 'be', 'certified'
	],

	// we include these common regular expressions
	symbols: /[=><!~?:&|+\-*\/\^%]+/,


	// The main tokenizer for our languages
	tokenizer: {
		root: [
			// comment
			// A whole line starting with "comment" (optionally ':') is one comment token.
			[/^(\s*)(comment:?(?:\s.*|))$/, ['', 'comment']],

			// special identifier cases
			// Opening quote starts a quoted identifier, closed in @quoted_identifier.
			[/"/, { token: 'identifier.quote', bracket: '@open', next: '@quoted_identifier' } ],
			// 'LEX' at end of match position: contract title follows, read until '.'.
			['LEX$', { token: 'keyword', bracket: '@open', next: '@identifier_until_period' }],
			// 'LEXON' keyword: a semver version number follows.
			['LEXON', { token: 'keyword', bracket: '@open', next: '@semver' }],
			// Bare ':' also introduces an identifier phrase terminated by '.'.
			[':', { token: 'delimiter', bracket: '@open', next: '@identifier_until_period' }],

			// identifiers and keywords
			// Words are classified via the lists above; anything else is an identifier.
			[/[a-z_$][\w$]*/, { cases: {
				'@operators': 'operator',
				'@typeKeywords': 'keyword.type',
				'@keywords': 'keyword',
				'@default': 'identifier' } }],

			// whitespace
			{ include: '@whitespace' },

			// delimiters and operators
			[/[{}()\[\]]/, '@brackets'],
			[/[<>](?!@symbols)/, '@brackets'],
			[/@symbols/, 'delimiter'],

			// numbers
			// semver must precede float so '0.2.20' is not split as two floats.
			[/\d*\.\d*\.\d*/, 'number.semver'],
			[/\d*\.\d+([eE][\-+]?\d+)?/, 'number.float'],
			[/0[xX][0-9a-fA-F]+/, 'number.hex'],
			[/\d+/, 'number'],

			// delimiter: after number because of .\d floats
			[/[;,.]/, 'delimiter'],
		],

		// Inside a quoted identifier: consume everything up to the closing quote.
		quoted_identifier: [
			[/[^\\"]+/, 'identifier'],
			[/"/, { token: 'identifier.quote', bracket: '@close', next: '@pop' } ]
		],

		// NOTE(review): this state appears to be unreferenced by any rule in this
		// grammar (nothing transitions to '@space_identifier_until_period') —
		// confirm and consider removing it.
		space_identifier_until_period: [
			[':', 'delimiter'],
			[' ', { token: 'white', next: '@identifier_rest' }],
		],

		// After 'LEX' or ':': read a (possibly multi-word) identifier phrase,
		// popping back to root at the terminating '.'.
		identifier_until_period: [
			{ include: '@whitespace' },
			[':', { token: 'delimiter', next: '@identifier_rest' }],
			[/[^\\.]+/, 'identifier'],
			[/\./, { token: 'delimiter', bracket: '@close', next: '@pop' } ]
		],

		// Tail of an identifier phrase once a ':' has been consumed.
		identifier_rest: [
			[/[^\\.]+/, 'identifier'],
			[/\./, { token: 'delimiter', bracket: '@close', next: '@pop' } ]
		],

		// After the 'LEXON' keyword: skip ':' and whitespace, then read the
		// dotted version number and return to root.
		semver: [
			{ include: '@whitespace' },
			[':', 'delimiter'],
			[/\d*\.\d*\.\d*/, { token: 'number.semver', bracket: '@close', next: '@pop' } ]
		],

		whitespace: [
			[/[ \t\r\n]+/, 'white'],
		],
	},
};
|
||||
|
|
@ -27,6 +27,7 @@ import './javascript/javascript.contribution';
|
|||
import './julia/julia.contribution';
|
||||
import './kotlin/kotlin.contribution';
|
||||
import './less/less.contribution';
|
||||
import './lexon/lexon.contribution';
|
||||
import './lua/lua.contribution';
|
||||
import './markdown/markdown.contribution';
|
||||
import './mips/mips.contribution';
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue